From d738efb7e2f6da7b3cae4df94fa75582ebb96161 Mon Sep 17 00:00:00 2001 From: Daniel Getu Date: Sat, 16 Mar 2024 23:59:30 -0700 Subject: [PATCH] [Search] Regenerate with 2024-03-01-Preview spec (#28576) --- .vscode/cspell.json | 499 +- .../perf-tests/search-documents/package.json | 2 +- sdk/search/search-documents/.eslintrc.json | 14 + .../search-documents/.vscode/settings.json | 3 - sdk/search/search-documents/CHANGELOG.md | 92 + sdk/search/search-documents/README.md | 6 +- .../search-documents/api-extractor.json | 14 +- sdk/search/search-documents/assets.json | 2 +- sdk/search/search-documents/openai-patch.diff | 4 - sdk/search/search-documents/package.json | 65 +- .../review/search-documents.api.md | 1037 ++-- sdk/search/search-documents/sample.env | 6 +- .../bufferedSenderAutoFlushSize.ts | 13 +- .../bufferedSenderAutoFlushTimer.ts | 13 +- .../samples-dev/bufferedSenderManualFlush.ts | 8 +- .../dataSourceConnectionOperations.ts | 22 +- .../samples-dev/indexOperations.ts | 26 +- .../samples-dev/indexerOperations.ts | 35 +- .../samples-dev/interfaces.ts | 8 +- .../samples-dev/searchClientOperations.ts | 6 +- .../search-documents/samples-dev/setup.ts | 16 +- .../samples-dev/skillSetOperations.ts | 31 +- .../samples-dev/stickySession.ts | 84 + .../samples-dev/synonymMapOperations.ts | 27 +- .../samples-dev/vectorSearch.ts | 56 +- .../samples/v12-beta/javascript/README.md | 26 +- .../javascript/bufferedSenderAutoFlushSize.js | 11 +- .../bufferedSenderAutoFlushTimer.js | 11 +- .../javascript/bufferedSenderManualFlush.js | 6 +- .../dataSourceConnectionOperations.js | 12 +- .../v12-beta/javascript/indexOperations.js | 16 +- .../v12-beta/javascript/indexerOperations.js | 18 +- .../samples/v12-beta/javascript/sample.env | 4 +- .../javascript/searchClientOperations.js | 4 +- .../samples/v12-beta/javascript/setup.js | 10 +- .../v12-beta/javascript/skillSetOperations.js | 18 +- .../v12-beta/javascript/stickySession.js | 77 + .../javascript/synonymMapOperations.js | 14 +- 
.../v12-beta/javascript/vectorSearch.js | 54 +- .../samples/v12-beta/typescript/README.md | 26 +- .../samples/v12-beta/typescript/sample.env | 4 +- .../src/bufferedSenderAutoFlushSize.ts | 17 +- .../src/bufferedSenderAutoFlushTimer.ts | 17 +- .../src/bufferedSenderManualFlush.ts | 12 +- .../src/dataSourceConnectionOperations.ts | 33 +- .../typescript/src/indexOperations.ts | 30 +- .../typescript/src/indexerOperations.ts | 37 +- .../v12-beta/typescript/src/interfaces.ts | 8 +- .../typescript/src/searchClientOperations.ts | 8 +- .../samples/v12-beta/typescript/src/setup.ts | 14 +- .../typescript/src/skillSetOperations.ts | 31 +- .../v12-beta/typescript/src/stickySession.ts | 84 + .../typescript/src/synonymMapOperations.ts | 27 +- .../v12-beta/typescript/src/vectorSearch.ts | 58 +- .../samples/v12/javascript/sample.env | 4 +- .../samples/v12/typescript/sample.env | 4 +- .../scripts/generateSampleEmbeddings.ts | 6 +- sdk/search/search-documents/src/constants.ts | 2 +- .../search-documents/src/errorModels.ts | 54 + .../src/generated/data/models/index.ts | 248 +- .../src/generated/data/models/mappers.ts | 735 +-- .../src/generated/data/models/parameters.ts | 392 +- .../generated/data/operations/documents.ts | 116 +- .../data/operationsInterfaces/documents.ts | 20 +- .../src/generated/data/searchClient.ts | 54 +- .../src/generated/service/models/index.ts | 905 ++-- .../src/generated/service/models/mappers.ts | 4254 +++++++++-------- .../generated/service/models/parameters.ts | 143 +- .../generated/service/operations/aliases.ts | 62 +- .../service/operations/dataSources.ts | 66 +- .../generated/service/operations/indexers.ts | 104 +- .../generated/service/operations/indexes.ts | 86 +- .../generated/service/operations/skillsets.ts | 74 +- .../service/operations/synonymMaps.ts | 64 +- .../service/operationsInterfaces/aliases.ts | 10 +- .../operationsInterfaces/dataSources.ts | 12 +- .../service/operationsInterfaces/indexers.ts | 16 +- 
.../service/operationsInterfaces/indexes.ts | 14 +- .../service/operationsInterfaces/skillsets.ts | 12 +- .../operationsInterfaces/synonymMaps.ts | 12 +- .../generated/service/searchServiceClient.ts | 68 +- .../src/generatedStringLiteralUnions.ts | 458 ++ sdk/search/search-documents/src/index.ts | 694 +-- .../src/indexDocumentsBatch.ts | 36 +- .../search-documents/src/indexModels.ts | 440 +- .../src/odataMetadataPolicy.ts | 2 +- .../search-documents/src/searchClient.ts | 157 +- .../search-documents/src/searchIndexClient.ts | 37 +- .../src/searchIndexerClient.ts | 32 +- .../src/searchIndexingBufferedSender.ts | 21 +- .../search-documents/src/serviceModels.ts | 357 +- .../search-documents/src/serviceUtils.ts | 295 +- .../search-documents/src/synonymMapHelper.ts | 4 +- sdk/search/search-documents/src/tracing.ts | 2 +- sdk/search/search-documents/swagger/Data.md | 40 +- .../search-documents/swagger/Service.md | 94 +- .../test/compressionDisabled.ts | 4 + .../test/internal/serialization.spec.ts | 2 +- .../test/internal/serviceUtils.spec.ts | 68 +- .../search-documents/test/narrowedTypes.ts | 10 +- .../test/public/generated/typeDefinitions.ts | 157 + .../test/public/node/searchClient.spec.ts | 151 +- .../public/node/searchIndexClient.spec.ts | 27 +- .../test/public/typeDefinitions.ts | 76 +- .../test/public/utils/interfaces.ts | 1 + .../test/public/utils/recordedClient.ts | 120 +- .../test/public/utils/setup.ts | 148 +- sdk/search/search-documents/tsconfig.json | 4 +- 108 files changed, 7835 insertions(+), 5915 deletions(-) create mode 100644 sdk/search/search-documents/.eslintrc.json delete mode 100644 sdk/search/search-documents/.vscode/settings.json delete mode 100644 sdk/search/search-documents/openai-patch.diff create mode 100644 sdk/search/search-documents/samples-dev/stickySession.ts create mode 100644 sdk/search/search-documents/samples/v12-beta/javascript/stickySession.js create mode 100644 
sdk/search/search-documents/samples/v12-beta/typescript/src/stickySession.ts create mode 100644 sdk/search/search-documents/src/errorModels.ts create mode 100644 sdk/search/search-documents/src/generatedStringLiteralUnions.ts create mode 100644 sdk/search/search-documents/test/compressionDisabled.ts create mode 100755 sdk/search/search-documents/test/public/generated/typeDefinitions.ts diff --git a/.vscode/cspell.json b/.vscode/cspell.json index f5062300844a..2b55b1a3ea11 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -2,61 +2,99 @@ "version": "0.2", "language": "en", "languageId": "typescript,javascript", - "dictionaries": [ - "powershell", - "typescript", - "node" - ], + "dictionaries": ["node", "powershell", "typescript"], "ignorePaths": [ + "**/*-lintReport.html", "**/node_modules/**", - "**/recordings/**", "**/pnpm-lock.yaml", - "common/temp/**", - "**/*-lintReport.html", + "**/recordings/**", "*.avro", - "*.tgz", - "*.png", + "*.crt", "*.jpg", + "*.key", "*.pdf", - "*.tiff", + "*.png", "*.svg", - "*.crt", - "*.key", - ".vscode/cspell.json", + "*.tgz", + "*.tiff", ".github/CODEOWNERS", + ".vscode/cspell.json", + "common/temp/**", "sdk/**/arm-*/**", - "sdk/test-utils/**", "sdk/agrifood/agrifood-farming-rest/review/*.md", "sdk/confidentialledger/confidential-ledger-rest/review/*.md", "sdk/core/core-client-lro-rest/review/*.md", "sdk/core/core-client-paging-rest/review/*.md", "sdk/core/core-client-rest/review/*.md", "sdk/documenttranslator/ai-document-translator-rest/review/*.md", + "sdk/openai/openai-rest/review/*.md", + "sdk/openai/openai/review/*.md", "sdk/purview/purview-account-rest/review/*.md", "sdk/purview/purview-administration-rest/review/*.md", "sdk/purview/purview-catalog-rest/review/*.md", "sdk/purview/purview-scanning-rest/review/*.md", "sdk/purview/purview-sharing-rest/review/*.md", "sdk/quantum/quantum-jobs/review/*.md", - "sdk/synapse/synapse-access-control/review/*.md", "sdk/synapse/synapse-access-control-rest/review/*.md", + 
"sdk/synapse/synapse-access-control/review/*.md", "sdk/synapse/synapse-artifacts/review/*.md", "sdk/synapse/synapse-managed-private-endpoints/review/*.md", "sdk/synapse/synapse-monitoring/review/*.md", "sdk/synapse/synapse-spark/review/*.md", - "sdk/translation/ai-translation-text-rest/review/*.md", - "sdk/openai/openai/review/*.md", - "sdk/openai/openai-rest/review/*.md" + "sdk/test-utils/**", + "sdk/translation/ai-translation-text-rest/review/*.md" ], "words": [ + "AMQP", + "BRCPF", + "Brcpf", + "CONTOSO", + "DTDL", + "ECONNRESET", + "ESDNI", + "EUGPS", + "Eloqua", + "Esdni", + "Eugps", + "Fhir", + "Fnhr", + "Guids", + "Hana", + "IDRG", + "IMDS", + "Idrg", + "Kubernetes", + "Localizable", + "Lucene", + "MPNS", + "MSRC", + "Mibps", + "ODATA", + "OTLP", + "Odbc", + "Onco", + "PLREGON", + "Personalizer", + "Petabit", + "Picometer", + "Plregon", + "Rasterize", + "Resourceid", + "Rollup", + "Rtsp", + "Sybase", + "Teradata", + "USUK", + "Uncapitalize", + "Unencrypted", + "Unprocessable", + "Usuk", + "Vertica", + "Xiaomi", "adfs", "agrifood", - "AMQP", "azsdk", - "Brcpf", - "BRCPF", "centralus", - "CONTOSO", "deps", "deserialization", "deserializers", @@ -65,184 +103,113 @@ "devdeps", "dicom", "dotenv", - "DTDL", "dtmi", "dtmis", "eastus", - "ECONNRESET", - "Eloqua", "entra", - "Esdni", - "ESDNI", "etags", - "Eugps", - "EUGPS", - "Fhir", - "Fnhr", - "Guids", - "Hana", "hnsw", - "Idrg", - "IDRG", - "IMDS", - "OTLP", - "Kubernetes", "kusto", "lcov", "lcovonly", - "Localizable", "loinc", - "Lucene", - "Mibps", "mkdir", "mkdirp", "mongodb", - "MPNS", "msal", - "MSRC", "nise", "northcentralus", "npmjs", - "ODATA", - "Odbc", - "Onco", "oncophenotype", "openai", "perfstress", "personalizer", - "Personalizer", - "Petabit", - "Picometer", - "Plregon", - "PLREGON", "pnpm", "prettierrc", "pstn", "pwsh", - "Rasterize", "reoffer", - "Resourceid", - "Rollup", "rrggbb", - "Rtsp", - "reoffer", "rushx", "soundex", "southcentralus", "struct", "structdef", - "Sybase", - "Teradata", 
"tmpdir", "tshy", "uaecentral", "uksouth", "ukwest", "unassignment", - "Uncapitalize", "undelete", - "Unencrypted", "unpartitioned", - "Unprocessable", "unref", "usdodcentral", "usdodeast", "usgovarizona", "usgovtexas", "usgovvirginia", - "Usuk", - "USUK", - "Vertica", - "westus", - "Xiaomi" + "vectorizer", + "westus" ], "allowCompoundWords": true, "overrides": [ { "filename": "eng/pipelines", - "words": [ - "azuresdkartifacts", - "policheck", - "gdnbaselines" - ] + "words": ["azuresdkartifacts", "gdnbaselines", "policheck"] }, { - "filename": "sdk/videoanalyzer/video-analyzer-edge/review/**/*.md", - "words": [ - "abgr", - "Abgr", - "argb", - "Argb", - "bgra", - "Bgra", - "Grpc", - "onvif", - "Onvif" - ] + "filename": "sdk/apimanagement/api-management-custom-widgets-scaffolder/review/api-management-custom-widgets-scaffolder.api.md", + "words": ["APIM", "scaffolder"] }, { - "filename": "sdk/storage/storage-blob/review/**/*.md", - "words": [ - "RAGRS" - ] + "filename": "sdk/apimanagement/api-management-custom-widgets-tools/review/api-management-custom-widgets-tools.api.md", + "words": ["APIM", "MSAPIM"] }, { - "filename": "sdk/search/search-documents/review/**/*.md", + "filename": "sdk/attestation/attestation/review/**/*.md", "words": [ - "Adls", - "adlsgen", - "bangla", - "beider", - "Bokmaal", - "Decompounder", - "haase", - "koelner", - "kstem", - "kstem", - "lovins", - "nysiis", - "odatatype", - "Phonetik", - "Piqd", - "reranker", - "Rslp", - "sorani", - "Sorani", - "Vectorizable", - "vectorizer", - "vectorizers" + "qeidcertshash", + "qeidcrlhash", + "qeidhash", + "tcbinfocertshash", + "tcbinfocrlhash", + "tcbinfohash" ] }, { - "filename": "sdk/keyvault/keyvault-keys/review/**/*.md", + "filename": "sdk/communication/communication-call-automation/review/**/*.md", "words": [ - "ECHSM", - "OKPHSM", - "RSAHSM", - "RSNULL", - "Rsnull" + "Ssml", + "answeredby", + "playsourcacheid", + "playsourcecacheid", + "sipuui", + "sipx", + "ssml" ] }, { - "filename": 
"sdk/keyvault/keyvault-certificates/review/**/*.md", - "words": [ - "ECHSM", - "ekus", - "RSAHSM", - "upns" - ] + "filename": "sdk/communication/communication-common/review/**/*.md", + "words": ["gcch"] }, { - "filename": "sdk/digitaltwins/digital-twins-core/review/**/*.md", - "words": [ - "dtdl" - ] + "filename": "sdk/communication/communication-email/review/**/*.md", + "words": ["rpmsg", "xlsb"] + }, + { + "filename": "sdk/containerregistry/container-registry/review/**/*.md", + "words": ["Illumos", "illumos", "mipsle", "riscv"] + }, + { + "filename": "sdk/core/core-amqp/review/**/*.md", + "words": ["EHOSTDOWN", "ENONET", "sastoken"] }, { "filename": "sdk/cosmosdb/cosmos/review/**/*.md", "words": [ - "colls", "Parition", + "colls", "pkranges", "sproc", "sprocs", @@ -254,233 +221,181 @@ ] }, { - "filename": "sdk/attestation/attestation/review/**/*.md", - "words": [ - "qeidcertshash", - "qeidcrlhash", - "qeidhash", - "tcbinfocertshash", - "tcbinfocrlhash", - "tcbinfohash" - ] + "filename": "sdk/cosmosdb/cosmos/review/cosmos.api.md", + "words": ["Funtion"] }, { - "filename": "sdk/formrecognizer/ai-form-recognizer/README.md", - "words": [ - "iddocument" - ] + "filename": "sdk/digitaltwins/digital-twins-core/review/**/*.md", + "words": ["dtdl"] }, { - "filename": "sdk/formrecognizer/ai-form-recognizer/review/**/*.md", - "words": [ - "WDLABCD", - "presentationml", - "spreadsheetml", - "wordprocessingml", - "heif", - "copays", - "Upca", - "Upce" - ] + "filename": "sdk/digitaltwins/digital-twins-core/review/digital-twins-core.api.md", + "words": ["dependecies"] }, { - "filename": "sdk/core/core-amqp/review/**/*.md", - "words": [ - "EHOSTDOWN", - "ENONET", - "sastoken" - ] + "filename": "sdk/documentintelligence/ai-document-intelligence-rest/review/ai-document-intelligence.api.md", + "words": ["presentationml", "spreadsheetml", "wordprocessingml"] }, { - "filename": "sdk/containerregistry/container-registry/review/**/*.md", + "filename": 
"sdk/easm/defender-easm-rest/review/defender-easm.api.md", "words": [ - "illumos", - "Illumos", - "mipsle", - "riscv" + "Alexa", + "Asns", + "Easm", + "Whois", + "alexa", + "asns", + "cnames", + "easm", + "nxdomain", + "whois" ] }, { - "filename": "sdk/communication/communication-call-automation/review/**/*.md", - "words": [ - "ssml", - "Ssml", - "answeredby", - "playsourcacheid", - "playsourcecacheid", - "sipx", - "sipuui" - ] + "filename": "sdk/eventgrid/eventgrid/review/**/*.md", + "words": ["Dicom", "Gcch", "gcch"] }, { - "filename": "sdk/communication/communication-common/review/**/*.md", - "words": [ - "gcch" - ] + "filename": "sdk/formrecognizer/ai-form-recognizer/README.md", + "words": ["iddocument"] }, { - "filename": "sdk/communication/communication-email/review/**/*.md", + "filename": "sdk/formrecognizer/ai-form-recognizer/review/**/*.md", "words": [ - "rpmsg", - "xlsb" + "Upca", + "Upce", + "WDLABCD", + "copays", + "heif", + "presentationml", + "spreadsheetml", + "wordprocessingml" ] }, { - "filename": "sdk/eventgrid/eventgrid/review/**/*.md", - "words": [ - "Dicom", - "Gcch", - "gcch" - ] + "filename": "sdk/healthinsights/azure-healthinsights-radiologyinsights/**", + "words": ["ctxt", "mros", "nify"] }, { "filename": "sdk/identity/**/*.md", - "words": [ - "MSAL", - "PKCE" - ] + "words": ["MSAL", "PKCE"] }, { "filename": "sdk/iot/iot-modelsrepository/review/**/*.md", - "words": [ - "Dtmi", - "dtmis" - ] - }, - { - "filename": "sdk/storage/storage-blob/review/storage-blob.api.md", - "words": [ - "Uncommited" - ] + "words": ["Dtmi", "dtmis"] }, { - "filename": "sdk/search/search-documents/review/search-documents.api.md", - "words": [ - "Createor" - ] - }, - { - "filename": "sdk/monitor/monitor-query/review/monitor-query.api.md", - "words": [ - "fourty", - "Milli" - ] + "filename": "sdk/keyvault/keyvault-certificates/review/**/*.md", + "words": ["ECHSM", "RSAHSM", "ekus", "upns"] }, { - "filename": 
"sdk/digitaltwins/digital-twins-core/review/digital-twins-core.api.md", - "words": [ - "dependecies" - ] + "filename": "sdk/keyvault/keyvault-keys/review/**/*.md", + "words": ["ECHSM", "OKPHSM", "RSAHSM", "RSNULL", "Rsnull"] }, { - "filename": "sdk/cosmosdb/cosmos/review/cosmos.api.md", - "words": [ - "Funtion" - ] + "filename": "sdk/loadtestservice/load-testing-rest/review/load-testing.api.md", + "words": ["vusers"] }, { - "filename": "sdk/apimanagement/api-management-custom-widgets-scaffolder/review/api-management-custom-widgets-scaffolder.api.md", - "words": [ - "scaffolder", - "APIM" - ] + "filename": "sdk/maps/maps-common/review/maps-common.api.md", + "words": ["bbox"] }, { - "filename": "sdk/apimanagement/api-management-custom-widgets-tools/review/api-management-custom-widgets-tools.api.md", - "words": [ - "MSAPIM", - "APIM" - ] + "filename": "sdk/maps/maps-render-rest/review/maps-render.api.md", + "words": ["bbox"] }, { - "filename": "sdk/maps/maps-common/review/maps-common.api.md", - "words": [ - "bbox" - ] + "filename": "sdk/maps/maps-route-rest/review/maps-route.api.md", + "words": ["Hundredkm", "UTURN", "bbox"] }, { - "filename": "sdk/maps/maps-route-rest/review/maps-route.api.md", - "words": [ - "bbox", - "UTURN", - "Hundredkm" - ] + "filename": "sdk/maps/maps-search-rest/review/maps-search.api.md", + "words": ["Neighbourhood", "Xstr", "bbox"] }, { - "filename": "sdk/maps/maps-render-rest/review/maps-render.api.md", - "words": [ - "bbox" - ] + "filename": "sdk/monitor/monitor-query/review/monitor-query.api.md", + "words": ["Milli", "fourty"] }, { - "filename": "sdk/maps/maps-search-rest/review/maps-search.api.md", - "words": [ - "Neighbourhood", - "Xstr", - "bbox" - ] + "filename": "sdk/notificationhubs/notification-hubs/review/notification-hubs.api.md", + "words": ["fcmv"] }, { - "filename": "sdk/apimanagement/api-management-custom-widgets-scaffolder/review/api-management-custom-widgets-scaffolder.api.md", + "filename": 
"sdk/search/search-documents/review/**/*.md", "words": [ - "scaffolder", - "APIM" + "Adls", + "Bokmaal", + "Decompounder", + "Phonetik", + "Piqd", + "Rslp", + "Sorani", + "Vectorizable", + "adlsgen", + "bangla", + "beider", + "haase", + "koelner", + "kstem", + "lovins", + "nysiis", + "odatatype", + "rerank", + "reranker", + "sorani", + "vectorizer", + "vectorizers" ] }, { - "filename": "sdk/loadtestservice/load-testing-rest/review/load-testing.api.md", + "filename": "sdk/search/search-documents/review/**/*.md", "words": [ - "vusers" + "Adls", + "Bokmaal", + "Decompounder", + "Phonetik", + "Piqd", + "Rslp", + "Sorani", + "Vectorizable", + "adlsgen", + "bangla", + "beider", + "haase", + "koelner", + "kstem", + "lovins", + "nysiis", + "odatatype", + "reranker", + "sorani", + "vectorizer", + "vectorizers" ] }, { - "filename": "sdk/web-pubsub/web-pubsub-client/review/web-pubsub-client.api.md", - "words": [ - "protobuf" - ] + "filename": "sdk/search/search-documents/review/search-documents.api.md", + "words": ["Createor"] }, { - "filename": "sdk/web-pubsub/web-pubsub-client-protobuf/review/web-pubsub-client-protobuf.api.md", - "words": [ - "protobuf" - ] + "filename": "sdk/storage/storage-blob/review/**/*.md", + "words": ["RAGRS"] }, { - "filename": "sdk/easm/defender-easm-rest/review/defender-easm.api.md", - "words": [ - "Alexa", - "alexa", - "Asns", - "asns", - "cnames", - "Easm", - "easm", - "nxdomain", - "Whois", - "whois" - ] + "filename": "sdk/storage/storage-blob/review/storage-blob.api.md", + "words": ["Uncommited"] }, { - "filename": "sdk/documentintelligence/ai-document-intelligence-rest/review/ai-document-intelligence.api.md", - "words": [ - "wordprocessingml", - "spreadsheetml", - "presentationml" - ] + "filename": "sdk/videoanalyzer/video-analyzer-edge/review/**/*.md", + "words": ["Abgr", "Argb", "Bgra", "Grpc", "Onvif", "abgr", "argb", "bgra", "onvif"] }, { - "filename": "sdk/healthinsights/azure-healthinsights-radiologyinsights/**", - "words": [ - "ctxt", 
- "mros", - "nify" - ] + "filename": "sdk/web-pubsub/web-pubsub-client-protobuf/review/web-pubsub-client-protobuf.api.md", + "words": ["protobuf"] }, { - "filename": "sdk/notificationhubs/notification-hubs/review/notification-hubs.api.md", - "words": [ - "fcmv" - ] + "filename": "sdk/web-pubsub/web-pubsub-client/review/web-pubsub-client.api.md", + "words": ["protobuf"] } ] } diff --git a/sdk/search/perf-tests/search-documents/package.json b/sdk/search/perf-tests/search-documents/package.json index 0a4ae52496eb..bac570a8227d 100644 --- a/sdk/search/perf-tests/search-documents/package.json +++ b/sdk/search/perf-tests/search-documents/package.json @@ -9,7 +9,7 @@ "license": "ISC", "dependencies": { "@azure/identity": "^4.0.1", - "@azure/search-documents": "12.0.0-beta.4", + "@azure/search-documents": "12.1.0-beta.1", "@azure/test-utils-perf": "^1.0.0", "dotenv": "^16.0.0" }, diff --git a/sdk/search/search-documents/.eslintrc.json b/sdk/search/search-documents/.eslintrc.json new file mode 100644 index 000000000000..0149781a33a9 --- /dev/null +++ b/sdk/search/search-documents/.eslintrc.json @@ -0,0 +1,14 @@ +{ + "overrides": [ + { + "files": ["samples-dev/**.ts"], + "rules": { + // Suppresses errors for the custom TSDoc syntax we use for docs + "tsdoc/syntax": "off", + // Suppresses spurious missing dependency error as ESLint thinks the sample's runtime deps + // should be runtime deps for us too + "import/no-extraneous-dependencies": "off" + } + } + ] +} diff --git a/sdk/search/search-documents/.vscode/settings.json b/sdk/search/search-documents/.vscode/settings.json deleted file mode 100644 index 0d6ed784aae2..000000000000 --- a/sdk/search/search-documents/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "cSpell.words": ["hnsw", "openai"] -} diff --git a/sdk/search/search-documents/CHANGELOG.md b/sdk/search/search-documents/CHANGELOG.md index 6bfd637e16b7..f8395e4fdb7b 100644 --- a/sdk/search/search-documents/CHANGELOG.md +++ 
b/sdk/search/search-documents/CHANGELOG.md @@ -1,5 +1,97 @@ # Release History +## 12.1.0-beta.1 (2024-02-06) + +### Breaking Changes + +- Refactor in alignment with v12 [#28576](https://github.com/Azure/azure-sdk-for-js/pull/28576) + - Remove or replace the following types/properties + - Use `ExhaustiveKnnAlgorithmConfiguration` in place of + - `ExhaustiveKnnVectorSearchAlgorithmConfiguration` + - Use `HnswAlgorithmConfiguration` in place of + - `HnswVectorSearchAlgorithmConfiguration` + - Use `PIIDetectionSkill.categories` in place of + - `PIIDetectionSkill.piiCategories` + - Use `QueryAnswer` in place of + - `Answers` + - `AnswersOptions` + - `QueryAnswerType` + - Use `QueryAnswerResult` in place of + - `AnswerResult` + - Use `QueryCaption` in place of + - `Captions` + - `QueryCaptionType` + - Use `QueryCaptionResult` in place of + - `CaptionResult` + - Use `SearchRequestOptions.VectorSearchOptions.filterMode` in place of + - `SearchRequestOptions.vectorFilterMode` + - Use `SearchRequestOptions.VectorSearchOptions.queries` in place of + - `SearchRequestOptions.vectorQueries` + - Use `SearchRequestOptions.semanticSearchOptions.answers` in place of + - `SearchRequestOptions.answers` + - Use `SearchRequestOptions.semanticSearchOptions.captions` in place of + - `SearchRequestOptions.captions` + - Use `SearchRequestOptions.semanticSearchOptions.configurationName` in place of + - `SearchRequestOptions.semanticConfiguration` + - Use `SearchRequestOptions.semanticSearchOptions.debugMode` in place of + - `SearchRequestOptions.debugMode` + - Use `SearchRequestOptions.semanticSearchOptions.errorMode` in place of + - `SearchRequestOptions.semanticErrorHandlingMode` + - Use `SearchRequestOptions.semanticSearchOptions.maxWaitInMilliseconds` in place of + - `SearchRequestOptions.semanticMaxWaitInMilliseconds` + - Use `SearchRequestOptions.semanticSearchOptions.semanticFields` in place of + - `SearchRequestOptions.semanticFields` + - Use 
`SearchRequestOptions.semanticSearchOptions.semanticQuery` in place of + - `SearchRequestOptions.semanticQuery` + - Use `SemanticErrorMode` in place of + - `SemanticErrorHandlingMode` + - Use `SemanticErrorReason` in place of + - `SemanticPartialResponseReason` + - Use `SemanticPrioritizedFields` in place of + - `PrioritizedFields` + - Use `SemanticSearch` in place of + - `SemanticSettings` + - Use `SemanticSearchResultsType` in place of + - `SemanticPartialResponseType` + - Use `SimpleField.vectorSearchProfileName` in place of + - `SimpleField.vectorSearchProfile` + - Use `VectorSearchProfile.algorithmConfigurationName` in place of + - `VectorSearchProfile.algorithm` + - Narrow some enum property types to the respective string literal union + - `BlobIndexerDataToExtract` + - `BlobIndexerImageAction` + - `BlobIndexerParsingMode` + - `BlobIndexerPDFTextRotationAlgorithm` + - `CustomEntityLookupSkillLanguage` + - `EntityCategory` + - `EntityRecognitionSkillLanguage` + - `ImageAnalysisSkillLanguage` + - `ImageDetail` + - `IndexerExecutionEnvironment` + - `KeyPhraseExtractionSkillLanguage` + - `OcrSkillLanguage` + - `RegexFlags` + - `SearchIndexerDataSourceType` + - `SentimentSkillLanguage` + - `SplitSkillLanguage` + - `TextSplitMode` + - `TextTranslationSkillLanguage` + - `VisualFeature` + - Remove `KnownLexicalAnalyzerName` as a duplicate of `KnownAnalyzerNames` + - Remove `KnownCharFilterName` as a duplicate of `KnownCharFilterNames` + - Remove `KnownTokenFilterName` as a duplicate of `KnownTokenFilterNames` + - Remove `SearchRequest` as a duplicate of `SearchRequestOptions` + +### Features Added + +- Add vector compression [#28772](https://github.com/Azure/azure-sdk-for-js/pull/28772) + - Service-side scalar quantization of your vector data + - Optional reranking with full-precision vectors + - Optional oversampling of documents when reranking compressed vectors +- Add `Edm.Half`, `Edm.Int16`, and `Edm.SByte` vector spaces 
[#28772](https://github.com/Azure/azure-sdk-for-js/pull/28772) +- Add non-persistent vector usage through `SimpleField.stored` [#28772](https://github.com/Azure/azure-sdk-for-js/pull/28772) +- Expose the internal HTTP pipeline to allow users to send raw requests with it + ## 12.0.0-beta.4 (2023-10-11) ### Features Added diff --git a/sdk/search/search-documents/README.md b/sdk/search/search-documents/README.md index ea4102243066..1205f66565a3 100644 --- a/sdk/search/search-documents/README.md +++ b/sdk/search/search-documents/README.md @@ -17,7 +17,7 @@ The Azure AI Search service is well suited for the following application scenari * In a search client application, implement query logic and user experiences similar to commercial web search engines and chat-style apps. -Use the Azure.Search.Documents client library to: +Use the @azure/search-documents client library to: * Submit queries using vector, keyword, and hybrid query forms. * Implement filtered queries for metadata, geospatial search, faceted navigation, @@ -349,14 +349,14 @@ interface Hotel { hotelId?: string; hotelName?: string | null; description?: string | null; - descriptionVector?: Array | null; + descriptionVector?: Array; parkingIncluded?: boolean | null; lastRenovationDate?: Date | null; rating?: number | null; rooms?: Array<{ beds?: number | null; description?: string | null; - } | null>; + }>; } const client = new SearchClient( diff --git a/sdk/search/search-documents/api-extractor.json b/sdk/search/search-documents/api-extractor.json index 5f593659b1e3..b8a764c0c59a 100644 --- a/sdk/search/search-documents/api-extractor.json +++ b/sdk/search/search-documents/api-extractor.json @@ -10,15 +10,10 @@ }, "dtsRollup": { "enabled": true, - "untrimmedFilePath": "", - "publicTrimmedFilePath": "./types/search-documents.d.ts" + "publicTrimmedFilePath": "./types/search-documents.d.ts", + "untrimmedFilePath": "" }, "messages": { - "tsdocMessageReporting": { - "default": { - "logLevel": "none" 
- } - }, "extractorMessageReporting": { "ae-missing-release-tag": { "logLevel": "none" @@ -26,6 +21,11 @@ "ae-unresolved-link": { "logLevel": "none" } + }, + "tsdocMessageReporting": { + "default": { + "logLevel": "none" + } } } } diff --git a/sdk/search/search-documents/assets.json b/sdk/search/search-documents/assets.json index e373b7adce0d..27cf47b60609 100644 --- a/sdk/search/search-documents/assets.json +++ b/sdk/search/search-documents/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "js", "TagPrefix": "js/search/search-documents", - "Tag": "js/search/search-documents_f8e9f163e2" + "Tag": "js/search/search-documents_b75f2ec5af" } diff --git a/sdk/search/search-documents/openai-patch.diff b/sdk/search/search-documents/openai-patch.diff deleted file mode 100644 index cfc7beb7faf2..000000000000 --- a/sdk/search/search-documents/openai-patch.diff +++ /dev/null @@ -1,4 +0,0 @@ -6c6 -< "main": "dist/index.js", ---- -> "main": "dist/index.cjs", diff --git a/sdk/search/search-documents/package.json b/sdk/search/search-documents/package.json index 1ad535945142..20dcc97af993 100644 --- a/sdk/search/search-documents/package.json +++ b/sdk/search/search-documents/package.json @@ -1,6 +1,6 @@ { "name": "@azure/search-documents", - "version": "12.0.0-beta.4", + "version": "12.1.0-beta.1", "description": "Azure client library to use Cognitive Search for node.js and browser.", "sdk-type": "client", "main": "dist/index.js", @@ -8,37 +8,37 @@ "types": "types/search-documents.d.ts", "scripts": { "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local", "build:browser": "tsc -p . && dev-tool run bundle", "build:node": "tsc -p . 
&& dev-tool run bundle", "build:samples": "echo Obsolete.", - "execute:samples": "dev-tool samples run samples-dev", "build:test": "tsc -p . && dev-tool run bundle", - "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local", "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "dev-tool samples run samples-dev", "extract-api": "tsc -p . && api-extractor run --local", "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", "generate:client": "autorest --typescript swagger/Service.md & autorest --typescript swagger/Data.md & wait", "generate:embeddings": "ts-node scripts/generateSampleEmbeddings.ts", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", "integration-test:browser": "dev-tool run test:browser", "integration-test:node": "dev-tool run test:node-js-input -- --timeout 5000000 'dist-esm/test/**/*.spec.js'", - "integration-test": "npm run integration-test:node && npm run integration-test:browser", - "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", - "lint": "eslint package.json api-extractor.json src test --ext .ts", + "lint": "eslint package.json api-extractor.json src test samples-dev --ext .ts", + "lint:fix": "eslint package.json api-extractor.json src test samples-dev --ext .ts --fix --fix-type [problem,suggestion]", "pack": "npm pack 2>&1", + "test": "npm run build:test && npm run unit-test", "test:browser": "npm run build:test && npm run unit-test:browser", "test:node": "npm run build:test && npm run unit-test:node", - "test": "npm run 
build:test && npm run unit-test", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", "unit-test:browser": "dev-tool run test:browser", - "unit-test:node": "dev-tool run test:node-ts-input -- --timeout 1200000 \"test/**/*.spec.ts\" \"test/**/**/*.spec.ts\"", - "unit-test": "npm run unit-test:node && npm run unit-test:browser" + "unit-test:node": "dev-tool run test:node-ts-input -- --timeout 1200000 \"test/**/*.spec.ts\" \"test/**/**/*.spec.ts\"" }, "files": [ - "dist/", - "dist-esm/src/", - "types/search-documents.d.ts", + "LICENSE", "README.md", - "LICENSE" + "dist-esm/src/", + "dist/", + "types/search-documents.d.ts" ], "browser": { "./dist-esm/src/base64.js": "./dist-esm/src/base64.browser.js", @@ -47,12 +47,8 @@ "//metadata": { "constantPaths": [ { - "path": "swagger/Service.md", - "prefix": "package-version" - }, - { - "path": "swagger/Data.md", - "prefix": "package-version" + "path": "src/constants.ts", + "prefix": "SDK_VERSION" }, { "path": "src/generated/data/searchClient.ts", @@ -63,8 +59,12 @@ "prefix": "packageDetails" }, { - "path": "src/constants.ts", - "prefix": "SDK_VERSION" + "path": "swagger/Data.md", + "prefix": "package-version" + }, + { + "path": "swagger/Service.md", + "prefix": "package-version" } ] }, @@ -84,31 +84,34 @@ "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/search/search-documents/", "sideEffects": false, "dependencies": { - "@azure/core-client": "^1.3.0", "@azure/core-auth": "^1.3.0", + "@azure/core-client": "^1.3.0", + "@azure/core-http-compat": "^2.0.1", "@azure/core-paging": "^1.1.1", - "@azure/core-tracing": "^1.0.0", "@azure/core-rest-pipeline": "^1.3.0", - "@azure/core-http-compat": "^2.0.1", + "@azure/core-tracing": "^1.0.0", "@azure/logger": "^1.0.0", - "tslib": "^2.2.0", - "events": "^3.0.0" + "events": "^3.0.0", + "tslib": "^2.2.0" }, "devDependencies": { - "@azure/openai": "1.0.0-beta.12", - "@azure/test-utils": "^1.0.0", + "@azure-tools/test-recorder": "^3.0.0", + 
"@azure/core-util": "^1.6.1", "@azure/dev-tool": "^1.0.0", "@azure/eslint-plugin-azure-sdk": "^3.0.0", - "@azure-tools/test-recorder": "^3.0.0", + "@azure/openai": "1.0.0-beta.12", + "@azure/test-utils": "^1.0.0", "@microsoft/api-extractor": "^7.31.1", "@types/chai": "^4.1.6", "@types/mocha": "^10.0.0", "@types/node": "^18.0.0", "@types/sinon": "^17.0.0", + "c8": "^8.0.0", "chai": "^4.2.0", "cross-env": "^7.0.2", "dotenv": "^16.0.0", "eslint": "^8.0.0", + "esm": "^3.2.18", "inherits": "^2.0.3", "karma": "^6.2.0", "karma-chrome-launcher": "^3.0.0", @@ -122,13 +125,11 @@ "karma-mocha-reporter": "^2.2.5", "karma-sourcemap-loader": "^0.3.8", "mocha": "^10.0.0", - "c8": "^8.0.0", "rimraf": "^5.0.5", "sinon": "^17.0.0", "ts-node": "^10.0.0", "typescript": "~5.3.3", - "util": "^0.12.1", - "esm": "^3.2.18" + "util": "^0.12.1" }, "//sampleConfiguration": { "productName": "Azure Search Documents", diff --git a/sdk/search/search-documents/review/search-documents.api.md b/sdk/search/search-documents/review/search-documents.api.md index 5621ebf3918c..556eb019c8a4 100644 --- a/sdk/search/search-documents/review/search-documents.api.md +++ b/sdk/search/search-documents/review/search-documents.api.md @@ -11,6 +11,7 @@ import { ExtendedCommonClientOptions } from '@azure/core-http-compat'; import { KeyCredential } from '@azure/core-auth'; import { OperationOptions } from '@azure/core-client'; import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { Pipeline } from '@azure/core-rest-pipeline'; import { RestError } from '@azure/core-rest-pipeline'; import { TokenCredential } from '@azure/core-auth'; @@ -27,12 +28,12 @@ export interface AnalyzedTokenInfo { // @public export interface AnalyzeRequest { - analyzerName?: string; - charFilters?: string[]; + analyzerName?: LexicalAnalyzerName; + charFilters?: CharFilterName[]; normalizerName?: LexicalNormalizerName; text: string; - tokenFilters?: string[]; - tokenizerName?: string; + tokenFilters?: TokenFilterName[]; + 
tokenizerName?: LexicalTokenizerName; } // @public @@ -44,31 +45,10 @@ export interface AnalyzeResult { export type AnalyzeTextOptions = OperationOptions & AnalyzeRequest; // @public -export interface AnswerResult { - [property: string]: any; - readonly highlights?: string; - readonly key: string; - readonly score: number; - readonly text: string; -} - -// @public -export type Answers = string; - -// @public -export type AnswersOptions = { - answers: "extractive"; - count?: number; - threshold?: number; -} | { - answers: "none"; -}; - -// @public -export type AsciiFoldingTokenFilter = BaseTokenFilter & { +export interface AsciiFoldingTokenFilter extends BaseTokenFilter { odatatype: "#Microsoft.Azure.Search.AsciiFoldingTokenFilter"; preserveOriginal?: boolean; -}; +} // @public export interface AutocompleteItem { @@ -109,15 +89,15 @@ export interface AzureActiveDirectoryApplicationCredentials { export { AzureKeyCredential } // @public -export type AzureMachineLearningSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Custom.AmlSkill"; - scoringUri?: string; +export interface AzureMachineLearningSkill extends BaseSearchIndexerSkill { authenticationKey?: string; + degreeOfParallelism?: number; + odatatype: "#Microsoft.Skills.Custom.AmlSkill"; + region?: string; resourceId?: string; + scoringUri?: string; timeout?: string; - region?: string; - degreeOfParallelism?: number; -}; +} // @public export interface AzureOpenAIEmbeddingSkill extends BaseSearchIndexerSkill { @@ -205,6 +185,31 @@ export interface BaseSearchIndexerSkill { outputs: OutputFieldMappingEntry[]; } +// @public +export interface BaseSearchRequestOptions = SelectFields> { + facets?: string[]; + filter?: string; + highlightFields?: string; + highlightPostTag?: string; + highlightPreTag?: string; + includeTotalCount?: boolean; + minimumCoverage?: number; + orderBy?: string[]; + queryLanguage?: QueryLanguage; + queryType?: QueryType; + scoringParameters?: string[]; + scoringProfile?: string; 
+ scoringStatistics?: ScoringStatistics; + searchFields?: SearchFieldArray; + searchMode?: SearchMode; + select?: SelectArray; + sessionId?: string; + skip?: number; + speller?: Speller; + top?: number; + vectorSearchOptions?: VectorSearchOptions; +} + // @public export interface BaseTokenFilter { name: string; @@ -217,6 +222,7 @@ export interface BaseVectorQuery { fields?: SearchFieldArray; kind: VectorQueryKind; kNearestNeighborsCount?: number; + oversampling?: number; } // @public @@ -225,41 +231,39 @@ export interface BaseVectorSearchAlgorithmConfiguration { name: string; } +// @public +export interface BaseVectorSearchCompressionConfiguration { + defaultOversampling?: number; + kind: "scalarQuantization"; + name: string; + rerankWithOriginalVectors?: boolean; +} + // @public export interface BaseVectorSearchVectorizer { kind: VectorSearchVectorizerKind; name: string; } -// @public -export type BlobIndexerDataToExtract = string; +// @public (undocumented) +export type BlobIndexerDataToExtract = "storageMetadata" | "allMetadata" | "contentAndMetadata"; -// @public -export type BlobIndexerImageAction = string; +// @public (undocumented) +export type BlobIndexerImageAction = "none" | "generateNormalizedImages" | "generateNormalizedImagePerPage"; -// @public -export type BlobIndexerParsingMode = string; +// @public (undocumented) +export type BlobIndexerParsingMode = "default" | "text" | "delimitedText" | "json" | "jsonArray" | "jsonLines"; -// @public -export type BlobIndexerPDFTextRotationAlgorithm = string; +// @public (undocumented) +export type BlobIndexerPDFTextRotationAlgorithm = "none" | "detectAngles"; // @public -export type BM25Similarity = Similarity & { - odatatype: "#Microsoft.Azure.Search.BM25Similarity"; - k1?: number; +export interface BM25Similarity extends Similarity { b?: number; -}; - -// @public -export interface CaptionResult { - [property: string]: any; - readonly highlights?: string; - readonly text?: string; + k1?: number; + odatatype: 
"#Microsoft.Azure.Search.BM25Similarity"; } -// @public -export type Captions = string; - // @public export type CharFilter = MappingCharFilter | PatternReplaceCharFilter; @@ -267,42 +271,42 @@ export type CharFilter = MappingCharFilter | PatternReplaceCharFilter; export type CharFilterName = string; // @public -export type CjkBigramTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.CjkBigramTokenFilter"; +export interface CjkBigramTokenFilter extends BaseTokenFilter { ignoreScripts?: CjkBigramTokenFilterScripts[]; + odatatype: "#Microsoft.Azure.Search.CjkBigramTokenFilter"; outputUnigrams?: boolean; -}; +} // @public export type CjkBigramTokenFilterScripts = "han" | "hiragana" | "katakana" | "hangul"; // @public -export type ClassicSimilarity = Similarity & { +export interface ClassicSimilarity extends Similarity { odatatype: "#Microsoft.Azure.Search.ClassicSimilarity"; -}; +} // @public -export type ClassicTokenizer = BaseLexicalTokenizer & { - odatatype: "#Microsoft.Azure.Search.ClassicTokenizer"; +export interface ClassicTokenizer extends BaseLexicalTokenizer { maxTokenLength?: number; -}; + odatatype: "#Microsoft.Azure.Search.ClassicTokenizer"; +} // @public export type CognitiveServicesAccount = DefaultCognitiveServicesAccount | CognitiveServicesAccountKey; // @public -export type CognitiveServicesAccountKey = BaseCognitiveServicesAccount & { - odatatype: "#Microsoft.Azure.Search.CognitiveServicesByKey"; +export interface CognitiveServicesAccountKey extends BaseCognitiveServicesAccount { key: string; -}; + odatatype: "#Microsoft.Azure.Search.CognitiveServicesByKey"; +} // @public -export type CommonGramTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.CommonGramTokenFilter"; +export interface CommonGramTokenFilter extends BaseTokenFilter { commonWords: string[]; ignoreCase?: boolean; + odatatype: "#Microsoft.Azure.Search.CommonGramTokenFilter"; useQueryMode?: boolean; -}; +} // @public export type ComplexDataType = 
"Edm.ComplexType" | "Collection(Edm.ComplexType)"; @@ -315,9 +319,9 @@ export interface ComplexField { } // @public -export type ConditionalSkill = BaseSearchIndexerSkill & { +export interface ConditionalSkill extends BaseSearchIndexerSkill { odatatype: "#Microsoft.Skills.Util.ConditionalSkill"; -}; +} // @public export interface CorsOptions { @@ -387,11 +391,11 @@ export type CreateSynonymMapOptions = OperationOptions; // @public export interface CustomAnalyzer { - charFilters?: string[]; + charFilters?: CharFilterName[]; name: string; odatatype: "#Microsoft.Azure.Search.CustomAnalyzer"; - tokenFilters?: string[]; - tokenizerName: string; + tokenFilters?: TokenFilterName[]; + tokenizerName: LexicalTokenizerName; } // @public @@ -419,25 +423,25 @@ export interface CustomEntityAlias { } // @public -export type CustomEntityLookupSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.CustomEntityLookupSkill"; +export interface CustomEntityLookupSkill extends BaseSearchIndexerSkill { defaultLanguageCode?: CustomEntityLookupSkillLanguage; entitiesDefinitionUri?: string; - inlineEntitiesDefinition?: CustomEntity[]; - globalDefaultCaseSensitive?: boolean; globalDefaultAccentSensitive?: boolean; + globalDefaultCaseSensitive?: boolean; globalDefaultFuzzyEditDistance?: number; -}; + inlineEntitiesDefinition?: CustomEntity[]; + odatatype: "#Microsoft.Skills.Text.CustomEntityLookupSkill"; +} -// @public -export type CustomEntityLookupSkillLanguage = string; +// @public (undocumented) +export type CustomEntityLookupSkillLanguage = "da" | "de" | "en" | "es" | "fi" | "fr" | "it" | "ko" | "pt"; // @public -export type CustomNormalizer = BaseLexicalNormalizer & { +export interface CustomNormalizer extends BaseLexicalNormalizer { + charFilters?: CharFilterName[]; odatatype: "#Microsoft.Azure.Search.CustomNormalizer"; tokenFilters?: TokenFilterName[]; - charFilters?: CharFilterName[]; -}; +} // @public export type CustomVectorizer = BaseVectorSearchVectorizer & { @@ 
-471,9 +475,9 @@ export const DEFAULT_FLUSH_WINDOW: number; export const DEFAULT_RETRY_COUNT: number; // @public -export type DefaultCognitiveServicesAccount = BaseCognitiveServicesAccount & { +export interface DefaultCognitiveServicesAccount extends BaseCognitiveServicesAccount { odatatype: "#Microsoft.Azure.Search.DefaultCognitiveServices"; -}; +} // @public export interface DeleteAliasOptions extends OperationOptions { @@ -509,20 +513,20 @@ export interface DeleteSynonymMapOptions extends OperationOptions { } // @public -export type DictionaryDecompounderTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter"; - wordList: string[]; - minWordSize?: number; - minSubwordSize?: number; +export interface DictionaryDecompounderTokenFilter extends BaseTokenFilter { maxSubwordSize?: number; + minSubwordSize?: number; + minWordSize?: number; + odatatype: "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter"; onlyLongestMatch?: boolean; -}; + wordList: string[]; +} // @public -export type DistanceScoringFunction = BaseScoringFunction & { - type: "distance"; +export interface DistanceScoringFunction extends BaseScoringFunction { parameters: DistanceScoringParameters; -}; + type: "distance"; +} // @public export interface DistanceScoringParameters { @@ -536,14 +540,14 @@ export interface DocumentDebugInfo { } // @public -export type DocumentExtractionSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Util.DocumentExtractionSkill"; - parsingMode?: string; - dataToExtract?: string; +export interface DocumentExtractionSkill extends BaseSearchIndexerSkill { configuration?: { [propertyName: string]: any; }; -}; + dataToExtract?: string; + odatatype: "#Microsoft.Skills.Util.DocumentExtractionSkill"; + parsingMode?: string; +} // @public export interface EdgeNGramTokenFilter { @@ -558,70 +562,86 @@ export interface EdgeNGramTokenFilter { export type EdgeNGramTokenFilterSide = "front" | "back"; // @public 
-export type EdgeNGramTokenizer = BaseLexicalTokenizer & { - odatatype: "#Microsoft.Azure.Search.EdgeNGramTokenizer"; - minGram?: number; +export interface EdgeNGramTokenizer extends BaseLexicalTokenizer { maxGram?: number; + minGram?: number; + odatatype: "#Microsoft.Azure.Search.EdgeNGramTokenizer"; tokenChars?: TokenCharacterKind[]; -}; +} // @public -export type ElisionTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.ElisionTokenFilter"; +export interface ElisionTokenFilter extends BaseTokenFilter { articles?: string[]; -}; + odatatype: "#Microsoft.Azure.Search.ElisionTokenFilter"; +} -// @public -export type EntityCategory = string; +// @public (undocumented) +export type EntityCategory = "location" | "organization" | "person" | "quantity" | "datetime" | "url" | "email"; // @public -export type EntityLinkingSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.V3.EntityLinkingSkill"; +export interface EntityLinkingSkill extends BaseSearchIndexerSkill { defaultLanguageCode?: string; minimumPrecision?: number; modelVersion?: string; -}; + odatatype: "#Microsoft.Skills.Text.V3.EntityLinkingSkill"; +} // @public @deprecated -export type EntityRecognitionSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.EntityRecognitionSkill"; +export interface EntityRecognitionSkill extends BaseSearchIndexerSkill { categories?: EntityCategory[]; defaultLanguageCode?: EntityRecognitionSkillLanguage; includeTypelessEntities?: boolean; minimumPrecision?: number; -}; + odatatype: "#Microsoft.Skills.Text.EntityRecognitionSkill"; +} -// @public -export type EntityRecognitionSkillLanguage = string; +// @public (undocumented) +export type EntityRecognitionSkillLanguage = "ar" | "cs" | "zh-Hans" | "zh-Hant" | "da" | "nl" | "en" | "fi" | "fr" | "de" | "el" | "hu" | "it" | "ja" | "ko" | "no" | "pl" | "pt-PT" | "pt-BR" | "ru" | "es" | "sv" | "tr"; // @public -export type EntityRecognitionSkillV3 = BaseSearchIndexerSkill & { - 
odatatype: "#Microsoft.Skills.Text.V3.EntityRecognitionSkill"; +export interface EntityRecognitionSkillV3 extends BaseSearchIndexerSkill { categories?: string[]; defaultLanguageCode?: string; minimumPrecision?: number; modelVersion?: string; -}; + odatatype: "#Microsoft.Skills.Text.V3.EntityRecognitionSkill"; +} // @public (undocumented) export type ExcludedODataTypes = Date | GeographyPoint; // @public -export interface ExhaustiveKnnParameters { - metric?: VectorSearchAlgorithmMetric; -} - -// @public -export type ExhaustiveKnnVectorSearchAlgorithmConfiguration = BaseVectorSearchAlgorithmConfiguration & { +export type ExhaustiveKnnAlgorithmConfiguration = BaseVectorSearchAlgorithmConfiguration & { kind: "exhaustiveKnn"; parameters?: ExhaustiveKnnParameters; }; +// @public +export interface ExhaustiveKnnParameters { + metric?: VectorSearchAlgorithmMetric; +} + // @public (undocumented) export type ExtractDocumentKey = { [K in keyof TModel as TModel[K] extends string | undefined ? K : never]: TModel[K]; }; +// @public +export interface ExtractiveQueryAnswer { + // (undocumented) + answerType: "extractive"; + count?: number; + threshold?: number; +} + +// @public +export interface ExtractiveQueryCaption { + // (undocumented) + captionType: "extractive"; + // (undocumented) + highlight?: boolean; +} + // @public export interface FacetResult { [property: string]: any; @@ -644,10 +664,10 @@ export interface FieldMappingFunction { } // @public -export type FreshnessScoringFunction = BaseScoringFunction & { - type: "freshness"; +export interface FreshnessScoringFunction extends BaseScoringFunction { parameters: FreshnessScoringParameters; -}; + type: "freshness"; +} // @public export interface FreshnessScoringParameters { @@ -698,9 +718,15 @@ export type GetSkillSetOptions = OperationOptions; export type GetSynonymMapsOptions = OperationOptions; // @public -export type HighWaterMarkChangeDetectionPolicy = BaseDataChangeDetectionPolicy & { - odatatype: 
"#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy"; +export interface HighWaterMarkChangeDetectionPolicy extends BaseDataChangeDetectionPolicy { highWaterMarkColumnName: string; + odatatype: "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy"; +} + +// @public +export type HnswAlgorithmConfiguration = BaseVectorSearchAlgorithmConfiguration & { + kind: "hnsw"; + parameters?: HnswParameters; }; // @public @@ -712,47 +738,49 @@ export interface HnswParameters { } // @public -export type HnswVectorSearchAlgorithmConfiguration = BaseVectorSearchAlgorithmConfiguration & { - kind: "hnsw"; - parameters?: HnswParameters; -}; +export interface ImageAnalysisSkill extends BaseSearchIndexerSkill { + defaultLanguageCode?: ImageAnalysisSkillLanguage; + details?: ImageDetail[]; + odatatype: "#Microsoft.Skills.Vision.ImageAnalysisSkill"; + visualFeatures?: VisualFeature[]; +} // @public -export type ImageAnalysisSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Vision.ImageAnalysisSkill"; +export interface ImageAnalysisSkill extends BaseSearchIndexerSkill { defaultLanguageCode?: ImageAnalysisSkillLanguage; - visualFeatures?: VisualFeature[]; details?: ImageDetail[]; -}; + odatatype: "#Microsoft.Skills.Vision.ImageAnalysisSkill"; + visualFeatures?: VisualFeature[]; +} -// @public -export type ImageAnalysisSkillLanguage = string; +// @public (undocumented) +export type ImageAnalysisSkillLanguage = "ar" | "az" | "bg" | "bs" | "ca" | "cs" | "cy" | "da" | "de" | "el" | "en" | "es" | "et" | "eu" | "fi" | "fr" | "ga" | "gl" | "he" | "hi" | "hr" | "hu" | "id" | "it" | "ja" | "kk" | "ko" | "lt" | "lv" | "mk" | "ms" | "nb" | "nl" | "pl" | "prs" | "pt-BR" | "pt" | "pt-PT" | "ro" | "ru" | "sk" | "sl" | "sr-Cyrl" | "sr-Latn" | "sv" | "th" | "tr" | "uk" | "vi" | "zh" | "zh-Hans" | "zh-Hant"; -// @public -export type ImageDetail = string; +// @public (undocumented) +export type ImageDetail = "celebrities" | "landmarks"; // @public export type IndexActionType = 
"upload" | "merge" | "mergeOrUpload" | "delete"; // @public -export type IndexDocumentsAction = { +export type IndexDocumentsAction = { __actionType: IndexActionType; -} & Partial; +} & Partial; // @public -export class IndexDocumentsBatch { - constructor(actions?: IndexDocumentsAction[]); - readonly actions: IndexDocumentsAction[]; - delete(keyName: keyof T, keyValues: string[]): void; - delete(documents: T[]): void; - merge(documents: T[]): void; - mergeOrUpload(documents: T[]): void; - upload(documents: T[]): void; +export class IndexDocumentsBatch { + constructor(actions?: IndexDocumentsAction[]); + readonly actions: IndexDocumentsAction[]; + delete(keyName: keyof TModel, keyValues: string[]): void; + delete(documents: TModel[]): void; + merge(documents: TModel[]): void; + mergeOrUpload(documents: TModel[]): void; + upload(documents: TModel[]): void; } // @public -export interface IndexDocumentsClient { - indexDocuments(batch: IndexDocumentsBatch, options: IndexDocumentsOptions): Promise; +export interface IndexDocumentsClient { + indexDocuments(batch: IndexDocumentsBatch, options: IndexDocumentsOptions): Promise; } // @public @@ -765,8 +793,8 @@ export interface IndexDocumentsResult { readonly results: IndexingResult[]; } -// @public -export type IndexerExecutionEnvironment = string; +// @public (undocumented) +export type IndexerExecutionEnvironment = "standard" | "private"; // @public export interface IndexerExecutionResult { @@ -857,7 +885,7 @@ export type IndexIterator = PagedAsyncIterableIterator; // @public -export type IndexProjectionMode = "skipIndexingParentDocuments" | "includeIndexingParentDocuments"; +export type IndexProjectionMode = string; // @public export interface InputFieldMappingEntry { @@ -868,29 +896,29 @@ export interface InputFieldMappingEntry { } // @public -export type KeepTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.KeepTokenFilter"; +export interface KeepTokenFilter extends BaseTokenFilter { keepWords: 
string[]; lowerCaseKeepWords?: boolean; -}; + odatatype: "#Microsoft.Azure.Search.KeepTokenFilter"; +} // @public -export type KeyPhraseExtractionSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.KeyPhraseExtractionSkill"; +export interface KeyPhraseExtractionSkill extends BaseSearchIndexerSkill { defaultLanguageCode?: KeyPhraseExtractionSkillLanguage; maxKeyPhraseCount?: number; modelVersion?: string; -}; + odatatype: "#Microsoft.Skills.Text.KeyPhraseExtractionSkill"; +} -// @public -export type KeyPhraseExtractionSkillLanguage = string; +// @public (undocumented) +export type KeyPhraseExtractionSkillLanguage = "da" | "nl" | "en" | "fi" | "fr" | "de" | "it" | "ja" | "ko" | "no" | "pl" | "pt-PT" | "pt-BR" | "ru" | "es" | "sv"; // @public -export type KeywordMarkerTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.KeywordMarkerTokenFilter"; - keywords: string[]; +export interface KeywordMarkerTokenFilter extends BaseTokenFilter { ignoreCase?: boolean; -}; + keywords: string[]; + odatatype: "#Microsoft.Azure.Search.KeywordMarkerTokenFilter"; +} // @public export interface KeywordTokenizer { @@ -996,12 +1024,6 @@ export enum KnownAnalyzerNames { ZhHantMicrosoft = "zh-Hant.microsoft" } -// @public -export enum KnownAnswers { - Extractive = "extractive", - None = "none" -} - // @public export enum KnownBlobIndexerDataToExtract { AllMetadata = "allMetadata", @@ -1155,6 +1177,12 @@ export enum KnownImageDetail { Landmarks = "landmarks" } +// @public +export enum KnownIndexerExecutionEnvironment { + Private = "private", + Standard = "standard" +} + // @public export enum KnownIndexerExecutionStatusDetail { ResetDocs = "resetDocs" @@ -1166,6 +1194,12 @@ export enum KnownIndexingMode { IndexingResetDocs = "indexingResetDocs" } +// @public +export enum KnownIndexProjectionMode { + IncludeIndexingParentDocuments = "includeIndexingParentDocuments", + SkipIndexingParentDocuments = "skipIndexingParentDocuments" +} + // @public export 
enum KnownKeyPhraseExtractionSkillLanguage { Da = "da", @@ -1284,29 +1318,48 @@ export enum KnownLexicalAnalyzerName { } // @public -export enum KnownLexicalNormalizerName { +enum KnownLexicalNormalizerName { AsciiFolding = "asciifolding", Elision = "elision", Lowercase = "lowercase", Standard = "standard", Uppercase = "uppercase" } +export { KnownLexicalNormalizerName } +export { KnownLexicalNormalizerName as KnownNormalizerNames } // @public -export enum KnownLineEnding { - CarriageReturn = "carriageReturn", - CarriageReturnLineFeed = "carriageReturnLineFeed", - LineFeed = "lineFeed", - Space = "space" -} - -// @public -export enum KnownOcrSkillLanguage { - Af = "af", - Anp = "anp", - Ar = "ar", - Ast = "ast", - Awa = "awa", +export enum KnownLexicalTokenizerName { + Classic = "classic", + EdgeNGram = "edgeNGram", + Keyword = "keyword_v2", + Letter = "letter", + Lowercase = "lowercase", + MicrosoftLanguageStemmingTokenizer = "microsoft_language_stemming_tokenizer", + MicrosoftLanguageTokenizer = "microsoft_language_tokenizer", + NGram = "nGram", + PathHierarchy = "path_hierarchy_v2", + Pattern = "pattern", + Standard = "standard_v2", + UaxUrlEmail = "uax_url_email", + Whitespace = "whitespace" +} + +// @public +export enum KnownLineEnding { + CarriageReturn = "carriageReturn", + CarriageReturnLineFeed = "carriageReturnLineFeed", + LineFeed = "lineFeed", + Space = "space" +} + +// @public +export enum KnownOcrSkillLanguage { + Af = "af", + Anp = "anp", + Ar = "ar", + Ast = "ast", + Awa = "awa", Az = "az", Be = "be", BeCyrl = "be-cyrl", @@ -1481,15 +1534,9 @@ export enum KnownPIIDetectionSkillMaskingMode { } // @public -export enum KnownQueryAnswerType { - Extractive = "extractive", - None = "none" -} - -// @public -export enum KnownQueryCaptionType { - Extractive = "extractive", - None = "none" +export enum KnownQueryDebugMode { + Disabled = "disabled", + Semantic = "semantic" } // @public @@ -1603,6 +1650,32 @@ export enum KnownSearchIndexerDataSourceType { MySql 
= "mysql" } +// @public +export enum KnownSemanticErrorMode { + Fail = "fail", + Partial = "partial" +} + +// @public +export enum KnownSemanticErrorReason { + CapacityOverloaded = "capacityOverloaded", + MaxWaitExceeded = "maxWaitExceeded", + Transient = "transient" +} + +// @public +export enum KnownSemanticFieldState { + Partial = "partial", + Unused = "unused", + Used = "used" +} + +// @public +export enum KnownSemanticSearchResultsType { + BaseResults = "baseResults", + RerankedResults = "rerankedResults" +} + // @public export enum KnownSentimentSkillLanguage { Da = "da", @@ -1630,15 +1703,39 @@ export enum KnownSpeller { // @public export enum KnownSplitSkillLanguage { + Am = "am", + Bs = "bs", + Cs = "cs", Da = "da", De = "de", En = "en", Es = "es", + Et = "et", Fi = "fi", Fr = "fr", + He = "he", + Hi = "hi", + Hr = "hr", + Hu = "hu", + Id = "id", + Is = "is", It = "it", + Ja = "ja", Ko = "ko", - Pt = "pt" + Lv = "lv", + Nb = "nb", + Nl = "nl", + Pl = "pl", + Pt = "pt", + PtBr = "pt-br", + Ru = "ru", + Sk = "sk", + Sl = "sl", + Sr = "sr", + Sv = "sv", + Tr = "tr", + Ur = "ur", + Zh = "zh" } // @public @@ -1816,6 +1913,28 @@ export enum KnownTokenizerNames { Whitespace = "whitespace" } +// @public +export enum KnownVectorQueryKind { + $DO_NOT_NORMALIZE$_text = "text", + Vector = "vector" +} + +// @public +export enum KnownVectorSearchCompressionKind { + ScalarQuantization = "scalarQuantization" +} + +// @public +export enum KnownVectorSearchCompressionTargetDataType { + Int8 = "int8" +} + +// @public +export enum KnownVectorSearchVectorizerKind { + AzureOpenAI = "azureOpenAI", + CustomWebApi = "customWebApi" +} + // @public export enum KnownVisualFeature { Adult = "adult", @@ -1828,18 +1947,18 @@ export enum KnownVisualFeature { } // @public -export type LanguageDetectionSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.LanguageDetectionSkill"; +export interface LanguageDetectionSkill extends BaseSearchIndexerSkill { 
defaultCountryHint?: string; modelVersion?: string; -}; + odatatype: "#Microsoft.Skills.Text.LanguageDetectionSkill"; +} // @public -export type LengthTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.LengthTokenFilter"; - minLength?: number; +export interface LengthTokenFilter extends BaseTokenFilter { maxLength?: number; -}; + minLength?: number; + odatatype: "#Microsoft.Azure.Search.LengthTokenFilter"; +} // @public export type LexicalAnalyzer = CustomAnalyzer | PatternAnalyzer | LuceneStandardAnalyzer | StopAnalyzer; @@ -1857,11 +1976,14 @@ export type LexicalNormalizerName = string; export type LexicalTokenizer = ClassicTokenizer | EdgeNGramTokenizer | KeywordTokenizer | MicrosoftLanguageTokenizer | MicrosoftLanguageStemmingTokenizer | NGramTokenizer | PathHierarchyTokenizer | PatternTokenizer | LuceneStandardTokenizer | UaxUrlEmailTokenizer; // @public -export type LimitTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.LimitTokenFilter"; - maxTokenCount?: number; +export type LexicalTokenizerName = string; + +// @public +export interface LimitTokenFilter extends BaseTokenFilter { consumeAllTokens?: boolean; -}; + maxTokenCount?: number; + odatatype: "#Microsoft.Azure.Search.LimitTokenFilter"; +} // @public export type LineEnding = string; @@ -1890,11 +2012,11 @@ export type ListSkillsetsOptions = OperationOptions; export type ListSynonymMapsOptions = OperationOptions; // @public -export type LuceneStandardAnalyzer = BaseLexicalAnalyzer & { - odatatype: "#Microsoft.Azure.Search.StandardAnalyzer"; +export interface LuceneStandardAnalyzer extends BaseLexicalAnalyzer { maxTokenLength?: number; + odatatype: "#Microsoft.Azure.Search.StandardAnalyzer"; stopwords?: string[]; -}; +} // @public export interface LuceneStandardTokenizer { @@ -1904,10 +2026,10 @@ export interface LuceneStandardTokenizer { } // @public -export type MagnitudeScoringFunction = BaseScoringFunction & { - type: "magnitude"; +export interface 
MagnitudeScoringFunction extends BaseScoringFunction { parameters: MagnitudeScoringParameters; -}; + type: "magnitude"; +} // @public export interface MagnitudeScoringParameters { @@ -1917,10 +2039,10 @@ export interface MagnitudeScoringParameters { } // @public -export type MappingCharFilter = BaseCharFilter & { - odatatype: "#Microsoft.Azure.Search.MappingCharFilter"; +export interface MappingCharFilter extends BaseCharFilter { mappings: string[]; -}; + odatatype: "#Microsoft.Azure.Search.MappingCharFilter"; +} // @public export type MergeDocumentsOptions = IndexDocumentsOptions; @@ -1929,27 +2051,27 @@ export type MergeDocumentsOptions = IndexDocumentsOptions; export type MergeOrUploadDocumentsOptions = IndexDocumentsOptions; // @public -export type MergeSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.MergeSkill"; - insertPreTag?: string; +export interface MergeSkill extends BaseSearchIndexerSkill { insertPostTag?: string; -}; + insertPreTag?: string; + odatatype: "#Microsoft.Skills.Text.MergeSkill"; +} // @public -export type MicrosoftLanguageStemmingTokenizer = BaseLexicalTokenizer & { - odatatype: "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer"; - maxTokenLength?: number; +export interface MicrosoftLanguageStemmingTokenizer extends BaseLexicalTokenizer { isSearchTokenizer?: boolean; language?: MicrosoftStemmingTokenizerLanguage; -}; + maxTokenLength?: number; + odatatype: "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer"; +} // @public -export type MicrosoftLanguageTokenizer = BaseLexicalTokenizer & { - odatatype: "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer"; - maxTokenLength?: number; +export interface MicrosoftLanguageTokenizer extends BaseLexicalTokenizer { isSearchTokenizer?: boolean; language?: MicrosoftTokenizerLanguage; -}; + maxTokenLength?: number; + odatatype: "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer"; +} // @public export type MicrosoftStemmingTokenizerLanguage = "arabic" | "bangla" 
| "bulgarian" | "catalan" | "croatian" | "czech" | "danish" | "dutch" | "english" | "estonian" | "finnish" | "french" | "german" | "greek" | "gujarati" | "hebrew" | "hindi" | "hungarian" | "icelandic" | "indonesian" | "italian" | "kannada" | "latvian" | "lithuanian" | "malay" | "malayalam" | "marathi" | "norwegianBokmaal" | "polish" | "portuguese" | "portugueseBrazilian" | "punjabi" | "romanian" | "russian" | "serbianCyrillic" | "serbianLatin" | "slovak" | "slovenian" | "spanish" | "swedish" | "tamil" | "telugu" | "turkish" | "ukrainian" | "urdu"; @@ -1961,9 +2083,9 @@ export type MicrosoftTokenizerLanguage = "bangla" | "bulgarian" | "catalan" | "c export type NarrowedModel = SelectFields> = (() => T extends TModel ? true : false) extends () => T extends never ? true : false ? TModel : (() => T extends TModel ? true : false) extends () => T extends object ? true : false ? TModel : (() => T extends TModel ? true : false) extends () => T extends any ? true : false ? TModel : (() => T extends TModel ? true : false) extends () => T extends unknown ? true : false ? TModel : (() => T extends TFields ? true : false) extends () => T extends never ? true : false ? never : (() => T extends TFields ? true : false) extends () => T extends SelectFields ? true : false ? 
TModel : SearchPick; // @public -export type NativeBlobSoftDeleteDeletionDetectionPolicy = BaseDataDeletionDetectionPolicy & { +export interface NativeBlobSoftDeleteDeletionDetectionPolicy extends BaseDataDeletionDetectionPolicy { odatatype: "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy"; -}; +} // @public export interface NGramTokenFilter { @@ -1974,23 +2096,22 @@ export interface NGramTokenFilter { } // @public -export type NGramTokenizer = BaseLexicalTokenizer & { - odatatype: "#Microsoft.Azure.Search.NGramTokenizer"; - minGram?: number; +export interface NGramTokenizer extends BaseLexicalTokenizer { maxGram?: number; + minGram?: number; + odatatype: "#Microsoft.Azure.Search.NGramTokenizer"; tokenChars?: TokenCharacterKind[]; -}; +} // @public -export type OcrSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Vision.OcrSkill"; +export interface OcrSkill extends BaseSearchIndexerSkill { defaultLanguageCode?: OcrSkillLanguage; + odatatype: "#Microsoft.Skills.Vision.OcrSkill"; shouldDetectOrientation?: boolean; - lineEnding?: LineEnding; -}; +} -// @public -export type OcrSkillLanguage = string; +// @public (undocumented) +export type OcrSkillLanguage = "af" | "sq" | "anp" | "ar" | "ast" | "awa" | "az" | "bfy" | "eu" | "be" | "be-cyrl" | "be-latn" | "bho" | "bi" | "brx" | "bs" | "bra" | "br" | "bg" | "bns" | "bua" | "ca" | "ceb" | "rab" | "ch" | "hne" | "zh-Hans" | "zh-Hant" | "kw" | "co" | "crh" | "hr" | "cs" | "da" | "prs" | "dhi" | "doi" | "nl" | "en" | "myv" | "et" | "fo" | "fj" | "fil" | "fi" | "fr" | "fur" | "gag" | "gl" | "de" | "gil" | "gon" | "el" | "kl" | "gvr" | "ht" | "hlb" | "hni" | "bgc" | "haw" | "hi" | "mww" | "hoc" | "hu" | "is" | "smn" | "id" | "ia" | "iu" | "ga" | "it" | "ja" | "Jns" | "jv" | "kea" | "kac" | "xnr" | "krc" | "kaa-cyrl" | "kaa" | "csb" | "kk-cyrl" | "kk-latn" | "klr" | "kha" | "quc" | "ko" | "kfq" | "kpy" | "kos" | "kum" | "ku-arab" | "ku-latn" | "kru" | "ky" | "lkt" | "la" | "lt" | "dsb" | 
"smj" | "lb" | "bfz" | "ms" | "mt" | "kmj" | "gv" | "mi" | "mr" | "mn" | "cnr-cyrl" | "cnr-latn" | "nap" | "ne" | "niu" | "nog" | "sme" | "nb" | "no" | "oc" | "os" | "ps" | "fa" | "pl" | "pt" | "pa" | "ksh" | "ro" | "rm" | "ru" | "sck" | "sm" | "sa" | "sat" | "sco" | "gd" | "sr" | "sr-Cyrl" | "sr-Latn" | "xsr" | "srx" | "sms" | "sk" | "sl" | "so" | "sma" | "es" | "sw" | "sv" | "tg" | "tt" | "tet" | "thf" | "to" | "tr" | "tk" | "tyv" | "hsb" | "ur" | "ug" | "uz-arab" | "uz-cyrl" | "uz" | "vo" | "wae" | "cy" | "fy" | "yua" | "za" | "zu" | "unk"; // @public export function odata(strings: TemplateStringsArray, ...values: unknown[]): string; @@ -2002,14 +2123,14 @@ export interface OutputFieldMappingEntry { } // @public -export type PathHierarchyTokenizer = BaseLexicalTokenizer & { - odatatype: "#Microsoft.Azure.Search.PathHierarchyTokenizerV2"; +export interface PathHierarchyTokenizer extends BaseLexicalTokenizer { delimiter?: string; - replacement?: string; maxTokenLength?: number; - reverseTokenOrder?: boolean; numberOfTokensToSkip?: number; -}; + odatatype: "#Microsoft.Azure.Search.PathHierarchyTokenizerV2"; + replacement?: string; + reverseTokenOrder?: boolean; +} // @public export interface PatternAnalyzer { @@ -2022,25 +2143,25 @@ export interface PatternAnalyzer { } // @public -export type PatternCaptureTokenFilter = BaseTokenFilter & { +export interface PatternCaptureTokenFilter extends BaseTokenFilter { odatatype: "#Microsoft.Azure.Search.PatternCaptureTokenFilter"; patterns: string[]; preserveOriginal?: boolean; -}; +} // @public -export type PatternReplaceCharFilter = BaseCharFilter & { +export interface PatternReplaceCharFilter extends BaseCharFilter { odatatype: "#Microsoft.Azure.Search.PatternReplaceCharFilter"; pattern: string; replacement: string; -}; +} // @public -export type PatternReplaceTokenFilter = BaseTokenFilter & { +export interface PatternReplaceTokenFilter extends BaseTokenFilter { odatatype: 
"#Microsoft.Azure.Search.PatternReplaceTokenFilter"; pattern: string; replacement: string; -}; +} // @public export interface PatternTokenizer { @@ -2055,42 +2176,51 @@ export interface PatternTokenizer { export type PhoneticEncoder = "metaphone" | "doubleMetaphone" | "soundex" | "refinedSoundex" | "caverphone1" | "caverphone2" | "cologne" | "nysiis" | "koelnerPhonetik" | "haasePhonetik" | "beiderMorse"; // @public -export type PhoneticTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.PhoneticTokenFilter"; +export interface PhoneticTokenFilter extends BaseTokenFilter { encoder?: PhoneticEncoder; + odatatype: "#Microsoft.Azure.Search.PhoneticTokenFilter"; replaceOriginalTokens?: boolean; -}; +} // @public -export type PIIDetectionSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.PIIDetectionSkill"; +export interface PIIDetectionSkill extends BaseSearchIndexerSkill { + categories?: string[]; defaultLanguageCode?: string; - minimumPrecision?: number; - maskingMode?: PIIDetectionSkillMaskingMode; + domain?: string; maskingCharacter?: string; + maskingMode?: PIIDetectionSkillMaskingMode; + minimumPrecision?: number; modelVersion?: string; - piiCategories?: string[]; - domain?: string; -}; + odatatype: "#Microsoft.Skills.Text.PIIDetectionSkill"; +} + +// @public (undocumented) +export type PIIDetectionSkillMaskingMode = "none" | "replace"; // @public -export type PIIDetectionSkillMaskingMode = string; +export type QueryAnswer = ExtractiveQueryAnswer; // @public -export interface PrioritizedFields { - prioritizedContentFields?: SemanticField[]; - prioritizedKeywordsFields?: SemanticField[]; - titleField?: SemanticField; +export interface QueryAnswerResult { + [property: string]: any; + readonly highlights?: string; + readonly key: string; + readonly score: number; + readonly text: string; } // @public -export type QueryAnswerType = string; +export type QueryCaption = ExtractiveQueryCaption; // @public -export type 
QueryCaptionType = string; +export interface QueryCaptionResult { + [property: string]: any; + readonly highlights?: string; + readonly text?: string; +} // @public -export type QueryDebugMode = "disabled" | "semantic"; +export type QueryDebugMode = string; // @public export type QueryLanguage = string; @@ -2114,14 +2244,8 @@ export type QuerySpellerType = string; // @public export type QueryType = "simple" | "full" | "semantic"; -// @public -export interface RawVectorQuery extends BaseVectorQuery { - kind: "vector"; - vector?: number[]; -} - -// @public -export type RegexFlags = string; +// @public (undocumented) +export type RegexFlags = "CANON_EQ" | "CASE_INSENSITIVE" | "COMMENTS" | "DOTALL" | "LITERAL" | "MULTILINE" | "UNICODE_CASE" | "UNIX_LINES"; // @public export interface ResetDocumentsOptions extends OperationOptions { @@ -2147,6 +2271,17 @@ export interface ResourceCounter { // @public export type RunIndexerOptions = OperationOptions; +// @public +export interface ScalarQuantizationCompressionConfiguration extends BaseVectorSearchCompressionConfiguration { + kind: "scalarQuantization"; + parameters?: ScalarQuantizationParameters; +} + +// @public +export interface ScalarQuantizationParameters { + quantizedDataType?: VectorSearchCompressionTargetDataType; +} + // @public export type ScoringFunction = DistanceScoringFunction | FreshnessScoringFunction | MagnitudeScoringFunction | TagScoringFunction; @@ -2189,6 +2324,7 @@ export class SearchClient implements IndexDocumentsClient readonly indexName: string; mergeDocuments(documents: TModel[], options?: MergeDocumentsOptions): Promise; mergeOrUploadDocuments(documents: TModel[], options?: MergeOrUploadDocumentsOptions): Promise; + readonly pipeline: Pipeline; search>(searchText?: string, options?: SearchOptions): Promise>; readonly serviceVersion: string; suggest = never>(searchText: string, suggesterName: string, options?: SuggestOptions): Promise>; @@ -2216,14 +2352,14 @@ export interface 
SearchDocumentsResult = (() => T extends TModel ? true : false) extends () => T extends object ? true : false ? readonly string[] : readonly SelectFields[]; // @public -export type SearchFieldDataType = "Edm.String" | "Edm.Int32" | "Edm.Int64" | "Edm.Double" | "Edm.Boolean" | "Edm.DateTimeOffset" | "Edm.GeographyPoint" | "Collection(Edm.String)" | "Collection(Edm.Int32)" | "Collection(Edm.Int64)" | "Collection(Edm.Double)" | "Collection(Edm.Boolean)" | "Collection(Edm.DateTimeOffset)" | "Collection(Edm.GeographyPoint)" | "Collection(Edm.Single)"; +export type SearchFieldDataType = "Edm.String" | "Edm.Int32" | "Edm.Int64" | "Edm.Double" | "Edm.Boolean" | "Edm.DateTimeOffset" | "Edm.GeographyPoint" | "Collection(Edm.String)" | "Collection(Edm.Int32)" | "Collection(Edm.Int64)" | "Collection(Edm.Double)" | "Collection(Edm.Boolean)" | "Collection(Edm.DateTimeOffset)" | "Collection(Edm.GeographyPoint)" | "Collection(Edm.Single)" | "Collection(Edm.Half)" | "Collection(Edm.Int16)" | "Collection(Edm.SByte)"; // @public export interface SearchIndex { @@ -2247,7 +2383,7 @@ export interface SearchIndex { name: string; normalizers?: LexicalNormalizer[]; scoringProfiles?: ScoringProfile[]; - semanticSettings?: SemanticSettings; + semanticSearch?: SemanticSearch; similarity?: SimilarityAlgorithm; suggesters?: SearchSuggester[]; tokenFilters?: TokenFilter[]; @@ -2285,6 +2421,7 @@ export class SearchIndexClient { listIndexesNames(options?: ListIndexesOptions): IndexNameIterator; listSynonymMaps(options?: ListSynonymMapsOptions): Promise>; listSynonymMapsNames(options?: ListSynonymMapsOptions): Promise>; + readonly pipeline: Pipeline; readonly serviceVersion: string; } @@ -2345,6 +2482,7 @@ export class SearchIndexerClient { listIndexersNames(options?: ListIndexersOptions): Promise>; listSkillsets(options?: ListSkillsetsOptions): Promise>; listSkillsetsNames(options?: ListSkillsetsOptions): Promise>; + readonly pipeline: Pipeline; resetDocuments(indexerName: string, options?: 
ResetDocumentsOptions): Promise; resetIndexer(indexerName: string, options?: ResetIndexerOptions): Promise; resetSkills(skillsetName: string, options?: ResetSkillsOptions): Promise; @@ -2370,9 +2508,9 @@ export interface SearchIndexerDataContainer { export type SearchIndexerDataIdentity = SearchIndexerDataNoneIdentity | SearchIndexerDataUserAssignedIdentity; // @public -export type SearchIndexerDataNoneIdentity = BaseSearchIndexerDataIdentity & { +export interface SearchIndexerDataNoneIdentity extends BaseSearchIndexerDataIdentity { odatatype: "#Microsoft.Azure.Search.DataNoneIdentity"; -}; +} // @public export interface SearchIndexerDataSourceConnection { @@ -2388,14 +2526,14 @@ export interface SearchIndexerDataSourceConnection { type: SearchIndexerDataSourceType; } -// @public -export type SearchIndexerDataSourceType = string; +// @public (undocumented) +export type SearchIndexerDataSourceType = "azuresql" | "cosmosdb" | "azureblob" | "azuretable" | "mysql" | "adlsgen2"; // @public -export type SearchIndexerDataUserAssignedIdentity = BaseSearchIndexerDataIdentity & { +export interface SearchIndexerDataUserAssignedIdentity extends BaseSearchIndexerDataIdentity { odatatype: "#Microsoft.Azure.Search.DataUserAssignedIdentity"; userAssignedIdentity: string; -}; +} // @public export interface SearchIndexerError { @@ -2435,15 +2573,17 @@ export interface SearchIndexerKnowledgeStore { } // @public -export type SearchIndexerKnowledgeStoreBlobProjectionSelector = SearchIndexerKnowledgeStoreProjectionSelector & { +export interface SearchIndexerKnowledgeStoreBlobProjectionSelector extends SearchIndexerKnowledgeStoreProjectionSelector { storageContainer: string; -}; +} // @public -export type SearchIndexerKnowledgeStoreFileProjectionSelector = SearchIndexerKnowledgeStoreBlobProjectionSelector & {}; +export interface SearchIndexerKnowledgeStoreFileProjectionSelector extends SearchIndexerKnowledgeStoreBlobProjectionSelector { +} // @public -export type 
SearchIndexerKnowledgeStoreObjectProjectionSelector = SearchIndexerKnowledgeStoreBlobProjectionSelector & {}; +export interface SearchIndexerKnowledgeStoreObjectProjectionSelector extends SearchIndexerKnowledgeStoreBlobProjectionSelector { +} // @public export interface SearchIndexerKnowledgeStoreParameters { @@ -2468,9 +2608,9 @@ export interface SearchIndexerKnowledgeStoreProjectionSelector { } // @public -export type SearchIndexerKnowledgeStoreTableProjectionSelector = SearchIndexerKnowledgeStoreProjectionSelector & { +export interface SearchIndexerKnowledgeStoreTableProjectionSelector extends SearchIndexerKnowledgeStoreProjectionSelector { tableName: string; -}; +} // @public (undocumented) export interface SearchIndexerLimits { @@ -2480,7 +2620,7 @@ export interface SearchIndexerLimits { } // @public -export type SearchIndexerSkill = ConditionalSkill | KeyPhraseExtractionSkill | OcrSkill | ImageAnalysisSkill | LanguageDetectionSkill | ShaperSkill | MergeSkill | EntityRecognitionSkill | SentimentSkill | SplitSkill | PIIDetectionSkill | EntityRecognitionSkillV3 | EntityLinkingSkill | SentimentSkillV3 | CustomEntityLookupSkill | TextTranslationSkill | DocumentExtractionSkill | WebApiSkill | AzureMachineLearningSkill | AzureOpenAIEmbeddingSkill; +export type SearchIndexerSkill = AzureMachineLearningSkill | AzureOpenAIEmbeddingSkill | ConditionalSkill | CustomEntityLookupSkill | DocumentExtractionSkill | EntityLinkingSkill | EntityRecognitionSkill | EntityRecognitionSkillV3 | ImageAnalysisSkill | KeyPhraseExtractionSkill | LanguageDetectionSkill | MergeSkill | OcrSkill | PIIDetectionSkill | SentimentSkill | SentimentSkillV3 | ShaperSkill | SplitSkill | TextTranslationSkill | WebApiSkill; // @public export interface SearchIndexerSkillset { @@ -2565,7 +2705,7 @@ export type SearchIndexingBufferedSenderUploadDocumentsOptions = OperationOption export interface SearchIndexStatistics { readonly documentCount: number; readonly storageSize: number; - readonly 
vectorIndexSize?: number; + readonly vectorIndexSize: number; } // @public @@ -2586,73 +2726,15 @@ UnionToIntersection | Extract : never> & {}; // @public -export interface SearchRequest { - answers?: QueryAnswerType; - captions?: QueryCaptionType; - debugMode?: QueryDebugMode; - facets?: string[]; - filter?: string; - highlightFields?: string; - highlightPostTag?: string; - highlightPreTag?: string; - includeTotalCount?: boolean; - minimumCoverage?: number; - orderBy?: string; - queryLanguage?: QueryLanguage; - queryType?: QueryType; - scoringParameters?: string[]; - scoringProfile?: string; - scoringStatistics?: ScoringStatistics; - searchFields?: string; - searchMode?: SearchMode; - searchText?: string; - select?: string; - semanticConfiguration?: string; - semanticErrorHandlingMode?: SemanticErrorHandlingMode; - semanticFields?: string; - semanticMaxWaitInMilliseconds?: number; - semanticQuery?: string; - sessionId?: string; - skip?: number; - speller?: QuerySpellerType; - top?: number; - vectorFilterMode?: VectorFilterMode; - vectorQueries?: VectorQuery[]; -} +export type SearchRequestOptions = SelectFields> = BaseSearchRequestOptions & SearchRequestQueryTypeOptions; -// @public -export interface SearchRequestOptions = SelectFields> { - answers?: Answers | AnswersOptions; - captions?: Captions; - debugMode?: QueryDebugMode; - facets?: string[]; - filter?: string; - highlightFields?: string; - highlightPostTag?: string; - highlightPreTag?: string; - includeTotalCount?: boolean; - minimumCoverage?: number; - orderBy?: string[]; - queryLanguage?: QueryLanguage; - queryType?: QueryType; - scoringParameters?: string[]; - scoringProfile?: string; - scoringStatistics?: ScoringStatistics; - searchFields?: SearchFieldArray; - searchMode?: SearchMode; - select?: SelectArray; - semanticConfiguration?: string; - semanticErrorHandlingMode?: SemanticErrorHandlingMode; - semanticFields?: string[]; - semanticMaxWaitInMilliseconds?: number; - semanticQuery?: string; - 
sessionId?: string; - skip?: number; - speller?: Speller; - top?: number; - vectorFilterMode?: VectorFilterMode; - vectorQueries?: VectorQuery[]; -} +// @public (undocumented) +export type SearchRequestQueryTypeOptions = { + queryType: "semantic"; + semanticSearchOptions: SemanticSearchOptions; +} | { + queryType?: "simple" | "full"; +}; // @public export interface SearchResourceEncryptionKey { @@ -2671,7 +2753,7 @@ export type SearchResult]?: string[]; }; - readonly captions?: CaptionResult[]; + readonly captions?: QueryCaptionResult[]; document: NarrowedModel; readonly documentDebugInfo?: DocumentDebugInfo[]; }; @@ -2700,7 +2782,7 @@ export type SelectFields = (() => T extends TModel ? t // @public export interface SemanticConfiguration { name: string; - prioritizedFields: PrioritizedFields; + prioritizedFields: SemanticPrioritizedFields; } // @public @@ -2711,46 +2793,65 @@ export interface SemanticDebugInfo { readonly titleField?: QueryResultDocumentSemanticField; } -// @public -export type SemanticErrorHandlingMode = "partial" | "fail"; +// @public (undocumented) +export type SemanticErrorMode = "partial" | "fail"; + +// @public (undocumented) +export type SemanticErrorReason = "maxWaitExceeded" | "capacityOverloaded" | "transient"; // @public export interface SemanticField { // (undocumented) - name?: string; + name: string; } // @public -export type SemanticFieldState = "used" | "unused" | "partial"; +export type SemanticFieldState = string; // @public -export type SemanticPartialResponseReason = "maxWaitExceeded" | "capacityOverloaded" | "transient"; +export interface SemanticPrioritizedFields { + contentFields?: SemanticField[]; + keywordsFields?: SemanticField[]; + titleField?: SemanticField; +} // @public -export type SemanticPartialResponseType = "baseResults" | "rerankedResults"; +export interface SemanticSearch { + configurations?: SemanticConfiguration[]; + defaultConfigurationName?: string; +} // @public -export interface SemanticSettings { - 
configurations?: SemanticConfiguration[]; - defaultConfiguration?: string; +export interface SemanticSearchOptions { + answers?: QueryAnswer; + captions?: QueryCaption; + configurationName?: string; + debugMode?: QueryDebugMode; + errorMode?: SemanticErrorMode; + maxWaitInMilliseconds?: number; + semanticFields?: string[]; + semanticQuery?: string; } +// @public (undocumented) +export type SemanticSearchResultsType = "baseResults" | "rerankedResults"; + // @public @deprecated -export type SentimentSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.SentimentSkill"; +export interface SentimentSkill extends BaseSearchIndexerSkill { defaultLanguageCode?: SentimentSkillLanguage; -}; + odatatype: "#Microsoft.Skills.Text.SentimentSkill"; +} -// @public -export type SentimentSkillLanguage = string; +// @public (undocumented) +export type SentimentSkillLanguage = "da" | "nl" | "en" | "fi" | "fr" | "de" | "el" | "it" | "no" | "pl" | "pt-PT" | "ru" | "es" | "sv" | "tr"; // @public -export type SentimentSkillV3 = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.V3.SentimentSkill"; +export interface SentimentSkillV3 extends BaseSearchIndexerSkill { defaultLanguageCode?: string; includeOpinionMining?: boolean; modelVersion?: string; -}; + odatatype: "#Microsoft.Skills.Text.V3.SentimentSkill"; +} // @public export interface ServiceCounters { @@ -2774,20 +2875,20 @@ export interface ServiceLimits { } // @public -export type ShaperSkill = BaseSearchIndexerSkill & { +export interface ShaperSkill extends BaseSearchIndexerSkill { odatatype: "#Microsoft.Skills.Util.ShaperSkill"; -}; +} // @public -export type ShingleTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.ShingleTokenFilter"; +export interface ShingleTokenFilter extends BaseTokenFilter { + filterToken?: string; maxShingleSize?: number; minShingleSize?: number; + odatatype: "#Microsoft.Azure.Search.ShingleTokenFilter"; outputUnigrams?: boolean; 
outputUnigramsIfNoShingles?: boolean; tokenSeparator?: string; - filterToken?: string; -}; +} // @public export interface Similarity { @@ -2810,81 +2911,80 @@ export interface SimpleField { searchable?: boolean; searchAnalyzerName?: LexicalAnalyzerName; sortable?: boolean; + stored?: boolean; synonymMapNames?: string[]; type: SearchFieldDataType; vectorSearchDimensions?: number; - vectorSearchProfile?: string; + vectorSearchProfileName?: string; } // @public -export type SnowballTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.SnowballTokenFilter"; +export interface SnowballTokenFilter extends BaseTokenFilter { language: SnowballTokenFilterLanguage; -}; + odatatype: "#Microsoft.Azure.Search.SnowballTokenFilter"; +} // @public export type SnowballTokenFilterLanguage = "armenian" | "basque" | "catalan" | "danish" | "dutch" | "english" | "finnish" | "french" | "german" | "german2" | "hungarian" | "italian" | "kp" | "lovins" | "norwegian" | "porter" | "portuguese" | "romanian" | "russian" | "spanish" | "swedish" | "turkish"; // @public -export type SoftDeleteColumnDeletionDetectionPolicy = BaseDataDeletionDetectionPolicy & { +export interface SoftDeleteColumnDeletionDetectionPolicy extends BaseDataDeletionDetectionPolicy { odatatype: "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy"; softDeleteColumnName?: string; softDeleteMarkerValue?: string; -}; +} // @public export type Speller = string; // @public -export type SplitSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.SplitSkill"; +export interface SplitSkill extends BaseSearchIndexerSkill { defaultLanguageCode?: SplitSkillLanguage; - textSplitMode?: TextSplitMode; maxPageLength?: number; - pageOverlapLength?: number; - maximumPagesToTake?: number; -}; + odatatype: "#Microsoft.Skills.Text.SplitSkill"; + textSplitMode?: TextSplitMode; +} -// @public -export type SplitSkillLanguage = string; +// @public (undocumented) +export type SplitSkillLanguage = "am" | 
"bs" | "cs" | "da" | "de" | "en" | "es" | "et" | "fi" | "fr" | "he" | "hi" | "hr" | "hu" | "id" | "is" | "it" | "ja" | "ko" | "lv" | "nb" | "nl" | "pl" | "pt" | "pt-br" | "ru" | "sk" | "sl" | "sr" | "sv" | "tr" | "ur" | "zh"; // @public -export type SqlIntegratedChangeTrackingPolicy = BaseDataChangeDetectionPolicy & { +export interface SqlIntegratedChangeTrackingPolicy extends BaseDataChangeDetectionPolicy { odatatype: "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy"; -}; +} // @public -export type StemmerOverrideTokenFilter = BaseTokenFilter & { +export interface StemmerOverrideTokenFilter extends BaseTokenFilter { odatatype: "#Microsoft.Azure.Search.StemmerOverrideTokenFilter"; rules: string[]; -}; +} // @public -export type StemmerTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.StemmerTokenFilter"; +export interface StemmerTokenFilter extends BaseTokenFilter { language: StemmerTokenFilterLanguage; -}; + odatatype: "#Microsoft.Azure.Search.StemmerTokenFilter"; +} // @public export type StemmerTokenFilterLanguage = "arabic" | "armenian" | "basque" | "brazilian" | "bulgarian" | "catalan" | "czech" | "danish" | "dutch" | "dutchKp" | "english" | "lightEnglish" | "minimalEnglish" | "possessiveEnglish" | "porter2" | "lovins" | "finnish" | "lightFinnish" | "french" | "lightFrench" | "minimalFrench" | "galician" | "minimalGalician" | "german" | "german2" | "lightGerman" | "minimalGerman" | "greek" | "hindi" | "hungarian" | "lightHungarian" | "indonesian" | "irish" | "italian" | "lightItalian" | "sorani" | "latvian" | "norwegian" | "lightNorwegian" | "minimalNorwegian" | "lightNynorsk" | "minimalNynorsk" | "portuguese" | "lightPortuguese" | "minimalPortuguese" | "portugueseRslp" | "romanian" | "russian" | "lightRussian" | "spanish" | "lightSpanish" | "swedish" | "lightSwedish" | "turkish"; // @public -export type StopAnalyzer = BaseLexicalAnalyzer & { +export interface StopAnalyzer extends BaseLexicalAnalyzer { odatatype: 
"#Microsoft.Azure.Search.StopAnalyzer"; stopwords?: string[]; -}; +} // @public export type StopwordsList = "arabic" | "armenian" | "basque" | "brazilian" | "bulgarian" | "catalan" | "czech" | "danish" | "dutch" | "english" | "finnish" | "french" | "galician" | "german" | "greek" | "hindi" | "hungarian" | "indonesian" | "irish" | "italian" | "latvian" | "norwegian" | "persian" | "portuguese" | "romanian" | "russian" | "sorani" | "spanish" | "swedish" | "thai" | "turkish"; // @public -export type StopwordsTokenFilter = BaseTokenFilter & { +export interface StopwordsTokenFilter extends BaseTokenFilter { + ignoreCase?: boolean; odatatype: "#Microsoft.Azure.Search.StopwordsTokenFilter"; + removeTrailingStopWords?: boolean; stopwords?: string[]; stopwordsList?: StopwordsList; - ignoreCase?: boolean; - removeTrailingStopWords?: boolean; -}; +} // @public export interface SuggestDocumentsResult = SelectFields> { @@ -2926,37 +3026,37 @@ export interface SynonymMap { } // @public -export type SynonymTokenFilter = BaseTokenFilter & { +export interface SynonymTokenFilter extends BaseTokenFilter { + expand?: boolean; + ignoreCase?: boolean; odatatype: "#Microsoft.Azure.Search.SynonymTokenFilter"; synonyms: string[]; - ignoreCase?: boolean; - expand?: boolean; -}; +} // @public -export type TagScoringFunction = BaseScoringFunction & { - type: "tag"; +export interface TagScoringFunction extends BaseScoringFunction { parameters: TagScoringParameters; -}; + type: "tag"; +} // @public export interface TagScoringParameters { tagsParameter: string; } -// @public -export type TextSplitMode = string; +// @public (undocumented) +export type TextSplitMode = "pages" | "sentences"; // @public -export type TextTranslationSkill = BaseSearchIndexerSkill & { - odatatype: "#Microsoft.Skills.Text.TranslationSkill"; - defaultToLanguageCode: TextTranslationSkillLanguage; +export interface TextTranslationSkill extends BaseSearchIndexerSkill { defaultFromLanguageCode?: TextTranslationSkillLanguage; 
+ defaultToLanguageCode: TextTranslationSkillLanguage; + odatatype: "#Microsoft.Skills.Text.TranslationSkill"; suggestedFrom?: TextTranslationSkillLanguage; -}; +} -// @public -export type TextTranslationSkillLanguage = string; +// @public (undocumented) +export type TextTranslationSkillLanguage = "af" | "ar" | "bn" | "bs" | "bg" | "yue" | "ca" | "zh-Hans" | "zh-Hant" | "hr" | "cs" | "da" | "nl" | "en" | "et" | "fj" | "fil" | "fi" | "fr" | "de" | "el" | "ht" | "he" | "hi" | "mww" | "hu" | "is" | "id" | "it" | "ja" | "sw" | "tlh" | "tlh-Latn" | "tlh-Piqd" | "ko" | "lv" | "lt" | "mg" | "ms" | "mt" | "nb" | "fa" | "pl" | "pt" | "pt-br" | "pt-PT" | "otq" | "ro" | "ru" | "sm" | "sr-Cyrl" | "sr-Latn" | "sk" | "sl" | "es" | "sv" | "ty" | "ta" | "te" | "th" | "to" | "tr" | "uk" | "ur" | "vi" | "cy" | "yua" | "ga" | "kn" | "mi" | "ml" | "pa"; // @public export interface TextWeights { @@ -2975,30 +3075,30 @@ export type TokenFilter = AsciiFoldingTokenFilter | CjkBigramTokenFilter | Commo export type TokenFilterName = string; // @public -export type TruncateTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.TruncateTokenFilter"; +export interface TruncateTokenFilter extends BaseTokenFilter { length?: number; -}; + odatatype: "#Microsoft.Azure.Search.TruncateTokenFilter"; +} // @public -export type UaxUrlEmailTokenizer = BaseLexicalTokenizer & { - odatatype: "#Microsoft.Azure.Search.UaxUrlEmailTokenizer"; +export interface UaxUrlEmailTokenizer extends BaseLexicalTokenizer { maxTokenLength?: number; -}; + odatatype: "#Microsoft.Azure.Search.UaxUrlEmailTokenizer"; +} // @public (undocumented) export type UnionToIntersection = (Union extends unknown ? (_: Union) => unknown : never) extends (_: infer I) => unknown ? 
I : never; // @public -export type UniqueTokenFilter = BaseTokenFilter & { +export interface UniqueTokenFilter extends BaseTokenFilter { odatatype: "#Microsoft.Azure.Search.UniqueTokenFilter"; onlyOnSamePosition?: boolean; -}; +} // @public export type UploadDocumentsOptions = IndexDocumentsOptions; -// @public +// @public (undocumented) export type VectorFilterMode = "postFilter" | "preFilter"; // @public @@ -3008,7 +3108,13 @@ export interface VectorizableTextQuery extends BaseVector } // @public -export type VectorQuery = RawVectorQuery | VectorizableTextQuery; +export interface VectorizedQuery extends BaseVectorQuery { + kind: "vector"; + vector: number[]; +} + +// @public +export type VectorQuery = VectorizedQuery | VectorizableTextQuery; // @public (undocumented) export type VectorQueryKind = "vector" | "text"; @@ -3016,22 +3122,39 @@ export type VectorQueryKind = "vector" | "text"; // @public export interface VectorSearch { algorithms?: VectorSearchAlgorithmConfiguration[]; + compressions?: VectorSearchCompressionConfiguration[]; profiles?: VectorSearchProfile[]; vectorizers?: VectorSearchVectorizer[]; } // @public -export type VectorSearchAlgorithmConfiguration = HnswVectorSearchAlgorithmConfiguration | ExhaustiveKnnVectorSearchAlgorithmConfiguration; +export type VectorSearchAlgorithmConfiguration = HnswAlgorithmConfiguration | ExhaustiveKnnAlgorithmConfiguration; // @public (undocumented) export type VectorSearchAlgorithmKind = "hnsw" | "exhaustiveKnn"; -// @public +// @public (undocumented) export type VectorSearchAlgorithmMetric = "cosine" | "euclidean" | "dotProduct"; +// @public +export type VectorSearchCompressionConfiguration = ScalarQuantizationCompressionConfiguration; + +// @public +export type VectorSearchCompressionKind = string; + +// @public +export type VectorSearchCompressionTargetDataType = string; + +// @public +export interface VectorSearchOptions { + filterMode?: VectorFilterMode; + queries: VectorQuery[]; +} + // @public export 
interface VectorSearchProfile { - algorithm: string; + algorithmConfigurationName: string; + compressionConfigurationName?: string; name: string; vectorizer?: string; } @@ -3042,8 +3165,8 @@ export type VectorSearchVectorizer = AzureOpenAIVectorizer | CustomVectorizer; // @public (undocumented) export type VectorSearchVectorizerKind = "azureOpenAI" | "customWebApi"; -// @public -export type VisualFeature = string; +// @public (undocumented) +export type VisualFeature = "adult" | "brands" | "categories" | "description" | "faces" | "objects" | "tags"; // @public export interface WebApiSkill extends BaseSearchIndexerSkill { @@ -3061,19 +3184,19 @@ export interface WebApiSkill extends BaseSearchIndexerSkill { } // @public -export type WordDelimiterTokenFilter = BaseTokenFilter & { - odatatype: "#Microsoft.Azure.Search.WordDelimiterTokenFilter"; - generateWordParts?: boolean; - generateNumberParts?: boolean; - catenateWords?: boolean; - catenateNumbers?: boolean; +export interface WordDelimiterTokenFilter extends BaseTokenFilter { catenateAll?: boolean; - splitOnCaseChange?: boolean; + catenateNumbers?: boolean; + catenateWords?: boolean; + generateNumberParts?: boolean; + generateWordParts?: boolean; + odatatype: "#Microsoft.Azure.Search.WordDelimiterTokenFilter"; preserveOriginal?: boolean; + protectedWords?: string[]; + splitOnCaseChange?: boolean; splitOnNumerics?: boolean; stemEnglishPossessive?: boolean; - protectedWords?: string[]; -}; +} // (No @packageDocumentation comment for this package) diff --git a/sdk/search/search-documents/sample.env b/sdk/search/search-documents/sample.env index 7ed5a179d66e..86f0916725d2 100644 --- a/sdk/search/search-documents/sample.env +++ b/sdk/search/search-documents/sample.env @@ -8,13 +8,13 @@ SEARCH_API_ADMIN_KEY_ALT= ENDPOINT= # The endpoint for the OpenAI service. -OPENAI_ENDPOINT= +AZURE_OPENAI_ENDPOINT= # The key for the OpenAI service. 
-OPENAI_KEY= +AZURE_OPENAI_KEY= # The name of the OpenAI deployment you'd like your tests to use. -OPENAI_DEPLOYMENT_NAME= +AZURE_OPENAI_DEPLOYMENT_NAME= # Our tests assume that TEST_MODE is "playback" by default. You can # change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. diff --git a/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushSize.ts b/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushSize.ts index 0141cc035720..d873c2d86bf2 100644 --- a/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushSize.ts +++ b/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushSize.ts @@ -6,14 +6,14 @@ */ import { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, + SearchIndexingBufferedSender, } from "@azure/search-documents"; -import { createIndex, documentKeyRetriever, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, documentKeyRetriever, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -58,7 +58,7 @@ function getDocumentsArray(size: number): Hotel[] { return array; } -async function main() { +async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; @@ -105,9 +105,9 @@ async function main() { }); const documents: Hotel[] = getDocumentsArray(1001); - bufferedClient.uploadDocuments(documents); + await bufferedClient.uploadDocuments(documents); - await WAIT_TIME; + await delay(WAIT_TIME); let count = await searchClient.getDocumentsCount(); while (count !== documents.length) { @@ -122,7 +122,6 @@ async function main() { } finally { await indexClient.deleteIndex(TEST_INDEX_NAME); } - await delay(WAIT_TIME); } main(); diff --git a/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushTimer.ts 
b/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushTimer.ts index db58ae2cd0fb..9ac74a28c295 100644 --- a/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushTimer.ts +++ b/sdk/search/search-documents/samples-dev/bufferedSenderAutoFlushTimer.ts @@ -6,15 +6,15 @@ */ import { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, + DEFAULT_FLUSH_WINDOW, GeographyPoint, + SearchClient, SearchIndexClient, - DEFAULT_FLUSH_WINDOW, + SearchIndexingBufferedSender, } from "@azure/search-documents"; -import { createIndex, documentKeyRetriever, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, documentKeyRetriever, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -30,7 +30,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = "example-index-sample-5"; -export async function main() { +export async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; @@ -76,7 +76,7 @@ export async function main() { console.log(response); }); - bufferedClient.uploadDocuments([ + await bufferedClient.uploadDocuments([ { hotelId: "1", description: @@ -112,7 +112,6 @@ export async function main() { } finally { await indexClient.deleteIndex(TEST_INDEX_NAME); } - await delay(WAIT_TIME); } main(); diff --git a/sdk/search/search-documents/samples-dev/bufferedSenderManualFlush.ts b/sdk/search/search-documents/samples-dev/bufferedSenderManualFlush.ts index 30cdea51244b..889cd5856fe7 100644 --- a/sdk/search/search-documents/samples-dev/bufferedSenderManualFlush.ts +++ b/sdk/search/search-documents/samples-dev/bufferedSenderManualFlush.ts @@ -6,14 +6,14 @@ */ import { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, + 
SearchIndexingBufferedSender, } from "@azure/search-documents"; -import { createIndex, documentKeyRetriever, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, documentKeyRetriever, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -27,7 +27,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = "example-index-sample-6"; -export async function main() { +export async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; diff --git a/sdk/search/search-documents/samples-dev/dataSourceConnectionOperations.ts b/sdk/search/search-documents/samples-dev/dataSourceConnectionOperations.ts index e534e770a04a..1ca1ce4d5048 100644 --- a/sdk/search/search-documents/samples-dev/dataSourceConnectionOperations.ts +++ b/sdk/search/search-documents/samples-dev/dataSourceConnectionOperations.ts @@ -6,8 +6,8 @@ */ import { - SearchIndexerClient, AzureKeyCredential, + SearchIndexerClient, SearchIndexerDataSourceConnection, } from "@azure/search-documents"; @@ -17,12 +17,12 @@ dotenv.config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const connectionString = process.env.CONNECTION_STRING || ""; -const dataSourceConnectionName = "example-ds-connection-sample-1"; +const TEST_DATA_SOURCE_CONNECTION_NAME = "example-ds-connection-sample-1"; async function createDataSourceConnection( dataSourceConnectionName: string, client: SearchIndexerClient, -) { +): Promise { console.log(`Creating DS Connection Operation`); const dataSourceConnection: SearchIndexerDataSourceConnection = { name: dataSourceConnectionName, @@ -39,7 +39,7 @@ async function createDataSourceConnection( async function getAndUpdateDataSourceConnection( dataSourceConnectionName: string, client: 
SearchIndexerClient, -) { +): Promise { console.log(`Get And Update DS Connection Operation`); const ds: SearchIndexerDataSourceConnection = await client.getDataSourceConnection(dataSourceConnectionName); @@ -48,14 +48,14 @@ async function getAndUpdateDataSourceConnection( await client.createOrUpdateDataSourceConnection(ds); } -async function listDataSourceConnections(client: SearchIndexerClient) { +async function listDataSourceConnections(client: SearchIndexerClient): Promise { console.log(`List DS Connection Operation`); const listOfDataSourceConnections: Array = await client.listDataSourceConnections(); console.log(`List of Data Source Connections`); console.log(`*******************************`); - for (let ds of listOfDataSourceConnections) { + for (const ds of listOfDataSourceConnections) { console.log(`Name: ${ds.name}`); console.log(`Description: ${ds.description}`); console.log(`Connection String: ${ds.connectionString}`); @@ -72,12 +72,12 @@ async function listDataSourceConnections(client: SearchIndexerClient) { async function deleteDataSourceConnection( dataSourceConnectionName: string, client: SearchIndexerClient, -) { +): Promise { console.log(`Deleting DS Connection Operation`); await client.deleteDataSourceConnection(dataSourceConnectionName); } -async function main() { +async function main(): Promise { console.log(`Running DS Connection Operations Sample....`); if (!endpoint || !apiKey || !connectionString) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -85,11 +85,11 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createDataSourceConnection(dataSourceConnectionName, client); - await getAndUpdateDataSourceConnection(dataSourceConnectionName, client); + await createDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); + await getAndUpdateDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); await 
listDataSourceConnections(client); } finally { - await deleteDataSourceConnection(dataSourceConnectionName, client); + await deleteDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); } } diff --git a/sdk/search/search-documents/samples-dev/indexOperations.ts b/sdk/search/search-documents/samples-dev/indexOperations.ts index 92d47b8680a2..7774897f3fbc 100644 --- a/sdk/search/search-documents/samples-dev/indexOperations.ts +++ b/sdk/search/search-documents/samples-dev/indexOperations.ts @@ -6,9 +6,9 @@ */ import { - SearchIndexClient, AzureKeyCredential, SearchIndex, + SearchIndexClient, SearchIndexStatistics, } from "@azure/search-documents"; @@ -17,9 +17,9 @@ dotenv.config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const indexName = "example-index-sample-1"; +const TEST_INDEX_NAME = "example-index-sample-1"; -async function createIndex(indexName: string, client: SearchIndexClient) { +async function createIndex(indexName: string, client: SearchIndexClient): Promise { console.log(`Creating Index Operation`); const index: SearchIndex = { name: indexName, @@ -62,7 +62,7 @@ async function createIndex(indexName: string, client: SearchIndexClient) { await client.createIndex(index); } -async function getAndUpdateIndex(indexName: string, client: SearchIndexClient) { +async function getAndUpdateIndex(indexName: string, client: SearchIndexClient): Promise { console.log(`Get And Update Index Operation`); const index: SearchIndex = await client.getIndex(indexName); index.fields.push({ @@ -73,14 +73,14 @@ async function getAndUpdateIndex(indexName: string, client: SearchIndexClient) { await client.createOrUpdateIndex(index); } -async function getIndexStatistics(indexName: string, client: SearchIndexClient) { +async function getIndexStatistics(indexName: string, client: SearchIndexClient): Promise { console.log(`Get Index Statistics Operation`); const statistics: SearchIndexStatistics = await 
client.getIndexStatistics(indexName); console.log(`Document Count: ${statistics.documentCount}`); console.log(`Storage Size: ${statistics.storageSize}`); } -async function getServiceStatistics(client: SearchIndexClient) { +async function getServiceStatistics(client: SearchIndexClient): Promise { console.log(`Get Service Statistics Operation`); const { counters, limits } = await client.getServiceStatistics(); console.log(`Counters`); @@ -116,7 +116,7 @@ async function getServiceStatistics(client: SearchIndexClient) { ); } -async function listIndexes(client: SearchIndexClient) { +async function listIndexes(client: SearchIndexClient): Promise { console.log(`List Indexes Operation`); const result = await client.listIndexes(); let listOfIndexes = await result.next(); @@ -132,12 +132,12 @@ async function listIndexes(client: SearchIndexClient) { } } -async function deleteIndex(indexName: string, client: SearchIndexClient) { +async function deleteIndex(indexName: string, client: SearchIndexClient): Promise { console.log(`Deleting Index Operation`); await client.deleteIndex(indexName); } -async function main() { +async function main(): Promise { console.log(`Running Index Operations Sample....`); if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -145,13 +145,13 @@ async function main() { } const client = new SearchIndexClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createIndex(indexName, client); - await getAndUpdateIndex(indexName, client); - await getIndexStatistics(indexName, client); + await createIndex(TEST_INDEX_NAME, client); + await getAndUpdateIndex(TEST_INDEX_NAME, client); + await getIndexStatistics(TEST_INDEX_NAME, client); await getServiceStatistics(client); await listIndexes(client); } finally { - await deleteIndex(indexName, client); + await deleteIndex(TEST_INDEX_NAME, client); } } diff --git a/sdk/search/search-documents/samples-dev/indexerOperations.ts 
b/sdk/search/search-documents/samples-dev/indexerOperations.ts index 2ab6b5b43696..5cb8ddd8e62d 100644 --- a/sdk/search/search-documents/samples-dev/indexerOperations.ts +++ b/sdk/search/search-documents/samples-dev/indexerOperations.ts @@ -6,9 +6,9 @@ */ import { - SearchIndexerClient, AzureKeyCredential, SearchIndexer, + SearchIndexerClient, SearchIndexerStatus, } from "@azure/search-documents"; @@ -20,9 +20,9 @@ const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const dataSourceName = process.env.DATA_SOURCE_NAME || ""; const targetIndexName = process.env.TARGET_INDEX_NAME || ""; -const indexerName = "example-indexer-sample-1"; +const TEST_INDEXER_NAME = "example-indexer-sample-1"; -async function createIndexer(indexerName: string, client: SearchIndexerClient) { +async function createIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Creating Indexer Operation`); const indexer: SearchIndexer = { name: indexerName, @@ -34,7 +34,10 @@ async function createIndexer(indexerName: string, client: SearchIndexerClient) { await client.createIndexer(indexer); } -async function getAndUpdateIndexer(indexerName: string, client: SearchIndexerClient) { +async function getAndUpdateIndexer( + indexerName: string, + client: SearchIndexerClient, +): Promise { console.log(`Get And Update Indexer Operation`); const indexer: SearchIndexer = await client.getIndexer(indexerName); indexer.isDisabled = true; @@ -43,7 +46,7 @@ async function getAndUpdateIndexer(indexerName: string, client: SearchIndexerCli await client.createOrUpdateIndexer(indexer); } -async function getIndexerStatus(indexerName: string, client: SearchIndexerClient) { +async function getIndexerStatus(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Get Indexer Status Operation`); const indexerStatus: SearchIndexerStatus = await client.getIndexerStatus(indexerName); console.log(`Status: ${indexerStatus.status}`); @@ -56,7 +59,7 @@ async function 
getIndexerStatus(indexerName: string, client: SearchIndexerClient console.log(`MaxRunTime: ${indexerStatus.limits.maxRunTime}`); } -async function listIndexers(client: SearchIndexerClient) { +async function listIndexers(client: SearchIndexerClient): Promise { console.log(`List Indexers Operation`); const listOfIndexers: Array = await client.listIndexers(); @@ -82,22 +85,22 @@ async function listIndexers(client: SearchIndexerClient) { } } -async function resetIndexer(indexerName: string, client: SearchIndexerClient) { +async function resetIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Reset Indexer Operation`); await client.resetIndexer(indexerName); } -async function deleteIndexer(indexerName: string, client: SearchIndexerClient) { +async function deleteIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Deleting Indexer Operation`); await client.deleteIndexer(indexerName); } -async function runIndexer(indexerName: string, client: SearchIndexerClient) { +async function runIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Run Indexer Operation`); await client.runIndexer(indexerName); } -async function main() { +async function main(): Promise { console.log(`Running Indexer Operations Sample....`); if (!endpoint || !apiKey || !dataSourceName || !targetIndexName) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -105,14 +108,14 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createIndexer(indexerName, client); - await getAndUpdateIndexer(indexerName, client); - await getIndexerStatus(indexerName, client); + await createIndexer(TEST_INDEXER_NAME, client); + await getAndUpdateIndexer(TEST_INDEXER_NAME, client); + await getIndexerStatus(TEST_INDEXER_NAME, client); await listIndexers(client); - await resetIndexer(indexerName, client); - await 
runIndexer(indexerName, client); + await resetIndexer(TEST_INDEXER_NAME, client); + await runIndexer(TEST_INDEXER_NAME, client); } finally { - await deleteIndexer(indexerName, client); + await deleteIndexer(TEST_INDEXER_NAME, client); } } diff --git a/sdk/search/search-documents/samples-dev/interfaces.ts b/sdk/search/search-documents/samples-dev/interfaces.ts index 9a75788ca0a2..494148e11c3c 100644 --- a/sdk/search/search-documents/samples-dev/interfaces.ts +++ b/sdk/search/search-documents/samples-dev/interfaces.ts @@ -14,11 +14,11 @@ export interface Hotel { hotelId?: string; hotelName?: string | null; description?: string | null; - descriptionVectorEn?: number[] | null; - descriptionVectorFr?: number[] | null; + descriptionVectorEn?: number[]; + descriptionVectorFr?: number[]; descriptionFr?: string | null; category?: string | null; - tags?: string[] | null; + tags?: string[]; parkingIncluded?: boolean | null; smokingAllowed?: boolean | null; lastRenovationDate?: Date | null; @@ -40,5 +40,5 @@ export interface Hotel { sleepsCount?: number | null; smokingAllowed?: boolean | null; tags?: string[] | null; - }> | null; + }>; } diff --git a/sdk/search/search-documents/samples-dev/searchClientOperations.ts b/sdk/search/search-documents/samples-dev/searchClientOperations.ts index df00cd6b8bc4..ced0541bb0fc 100644 --- a/sdk/search/search-documents/samples-dev/searchClientOperations.ts +++ b/sdk/search/search-documents/samples-dev/searchClientOperations.ts @@ -7,13 +7,13 @@ import { AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, SelectFields, } from "@azure/search-documents"; -import { createIndex, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -25,7 +25,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = 
"example-index-sample-2"; -async function main() { +async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; diff --git a/sdk/search/search-documents/samples-dev/setup.ts b/sdk/search/search-documents/samples-dev/setup.ts index 0cdafdf01a85..eb6322bcb704 100644 --- a/sdk/search/search-documents/samples-dev/setup.ts +++ b/sdk/search/search-documents/samples-dev/setup.ts @@ -6,9 +6,9 @@ * @azsdk-util */ -import { SearchIndexClient, SearchIndex, KnownAnalyzerNames } from "@azure/search-documents"; -import { Hotel } from "./interfaces"; +import { KnownAnalyzerNames, SearchIndex, SearchIndexClient } from "@azure/search-documents"; import { env } from "process"; +import { Hotel } from "./interfaces"; export const WAIT_TIME = 4000; @@ -55,14 +55,14 @@ export async function createIndex(client: SearchIndexClient, name: string): Prom name: "descriptionVectorEn", searchable: true, vectorSearchDimensions: 1536, - vectorSearchProfile: "vector-search-profile", + vectorSearchProfileName: "vector-search-profile", }, { type: "Collection(Edm.Single)", name: "descriptionVectorFr", searchable: true, vectorSearchDimensions: 1536, - vectorSearchProfile: "vector-search-profile", + vectorSearchProfileName: "vector-search-profile", }, { type: "Edm.String", @@ -255,16 +255,16 @@ export async function createIndex(client: SearchIndexClient, name: string): Prom name: "vector-search-vectorizer", kind: "azureOpenAI", azureOpenAIParameters: { - resourceUri: env.OPENAI_ENDPOINT, - apiKey: env.OPENAI_KEY, - deploymentId: env.OPENAI_DEPLOYMENT_NAME, + resourceUri: env.AZURE_OPENAI_ENDPOINT, + apiKey: env.AZURE_OPENAI_KEY, + deploymentId: env.AZURE_OPENAI_DEPLOYMENT_NAME, }, }, ], profiles: [ { name: "vector-search-profile", - algorithm: "vector-search-algorithm", + algorithmConfigurationName: "vector-search-algorithm", vectorizer: "vector-search-vectorizer", }, ], diff --git 
a/sdk/search/search-documents/samples-dev/skillSetOperations.ts b/sdk/search/search-documents/samples-dev/skillSetOperations.ts index c8fc4162aa9b..e9c5fcee5511 100644 --- a/sdk/search/search-documents/samples-dev/skillSetOperations.ts +++ b/sdk/search/search-documents/samples-dev/skillSetOperations.ts @@ -6,8 +6,8 @@ */ import { - SearchIndexerClient, AzureKeyCredential, + SearchIndexerClient, SearchIndexerSkillset, } from "@azure/search-documents"; @@ -17,9 +17,9 @@ dotenv.config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const skillsetName = "example-skillset-sample-1"; +const TEST_SKILLSET_NAME = "example-skillset-sample-1"; -async function createSkillset(skillsetName: string, client: SearchIndexerClient) { +async function createSkillset(skillsetName: string, client: SearchIndexerClient): Promise { console.log(`Creating Skillset Operation`); const skillset: SearchIndexerSkillset = { name: skillsetName, @@ -57,7 +57,10 @@ async function createSkillset(skillsetName: string, client: SearchIndexerClient) await client.createSkillset(skillset); } -async function getAndUpdateSkillset(skillsetName: string, client: SearchIndexerClient) { +async function getAndUpdateSkillset( + skillsetName: string, + client: SearchIndexerClient, +): Promise { console.log(`Get And Update Skillset Operation`); const skillset: SearchIndexerSkillset = await client.getSkillset(skillsetName); @@ -75,26 +78,26 @@ async function getAndUpdateSkillset(skillsetName: string, client: SearchIndexerC await client.createOrUpdateSkillset(skillset); } -async function listSkillsets(client: SearchIndexerClient) { +async function listSkillsets(client: SearchIndexerClient): Promise { console.log(`List Skillset Operation`); const listOfSkillsets: Array = await client.listSkillsets(); console.log(`\tList of Skillsets`); console.log(`\t******************`); - for (let skillset of listOfSkillsets) { + for (const skillset of listOfSkillsets) { 
console.log(`Name: ${skillset.name}`); console.log(`Description: ${skillset.description}`); console.log(`Skills`); console.log(`******`); - for (let skill of skillset.skills) { + for (const skill of skillset.skills) { console.log(`ODataType: ${skill.odatatype}`); console.log(`Inputs`); - for (let input of skill.inputs) { + for (const input of skill.inputs) { console.log(`\tName: ${input.name}`); console.log(`\tSource: ${input.source}`); } console.log(`Outputs`); - for (let output of skill.outputs) { + for (const output of skill.outputs) { console.log(`\tName: ${output.name}`); console.log(`\tTarget Name: ${output.targetName}`); } @@ -102,12 +105,12 @@ async function listSkillsets(client: SearchIndexerClient) { } } -async function deleteSkillset(skillsetName: string, client: SearchIndexerClient) { +async function deleteSkillset(skillsetName: string, client: SearchIndexerClient): Promise { console.log(`Deleting Skillset Operation`); await client.deleteSkillset(skillsetName); } -async function main() { +async function main(): Promise { console.log(`Running Skillset Operations Sample....`); if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -115,11 +118,11 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createSkillset(skillsetName, client); - await getAndUpdateSkillset(skillsetName, client); + await createSkillset(TEST_SKILLSET_NAME, client); + await getAndUpdateSkillset(TEST_SKILLSET_NAME, client); await listSkillsets(client); } finally { - await deleteSkillset(skillsetName, client); + await deleteSkillset(TEST_SKILLSET_NAME, client); } } diff --git a/sdk/search/search-documents/samples-dev/stickySession.ts b/sdk/search/search-documents/samples-dev/stickySession.ts new file mode 100644 index 000000000000..8f91d1a0fa97 --- /dev/null +++ b/sdk/search/search-documents/samples-dev/stickySession.ts @@ -0,0 +1,84 @@ +// 
Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +/** + * @summary Demonstrates user sticky sessions, a way to reduce inconsistent behavior by targeting a + * single replica. + */ + +import { + AzureKeyCredential, + odata, + SearchClient, + SearchIndexClient, +} from "@azure/search-documents"; +import { Hotel } from "./interfaces"; +import { createIndex, delay, WAIT_TIME } from "./setup"; + +import * as dotenv from "dotenv"; +dotenv.config(); + +/** + * If you're querying a replicated index, Azure AI Search may target any replica with your queries. + * As these replicas may not be in a consistent state, the service may appear to have inconsistent + * states between distinct queries. To avoid this, you can use a sticky session. A sticky session + * is used to indicate to the Azure AI Search service that you'd like all requests with the same + * `sessionId` to be directed to the same replica. The service will then make a best effort to do + * so. + * + * Please see the + * {@link https://learn.microsoft.com/en-us/azure/search/index-similarity-and-scoring#scoring-statistics-and-sticky-sessions | documentation} + * for more information. + */ +const endpoint = process.env.ENDPOINT || ""; +const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; +const TEST_INDEX_NAME = "example-index-sample-3"; + +async function main(): Promise { + if (!endpoint || !apiKey) { + console.error( + "Be sure to set valid values for `endpoint` and `apiKey` with proper authorization.", + ); + return; + } + + const credential = new AzureKeyCredential(apiKey); + const indexClient: SearchIndexClient = new SearchIndexClient(endpoint, credential); + const searchClient: SearchClient = indexClient.getSearchClient(TEST_INDEX_NAME); + + // The session id is defined by the user. 
+ const sessionId = "session1"; + + try { + await createIndex(indexClient, TEST_INDEX_NAME); + await delay(WAIT_TIME); + + // The service will make a best effort attempt to direct these queries to the same replica. As + // this overrides load balancing, excessive use of the same `sessionId` may result in + // performance degradation. Be sure to use a distinct `sessionId` for each sticky session. + const ratingQueries = [2, 4]; + for (const rating of ratingQueries) { + const response = await searchClient.search("*", { + filter: odata`rating ge ${rating}`, + sessionId, + }); + + const hotelNames = []; + for await (const result of response.results) { + const hotelName = result.document.hotelName; + if (typeof hotelName === "string") { + hotelNames.push(hotelName); + } + } + + if (hotelNames.length) { + console.log(`Hotels with at least a rating of ${rating}:`); + hotelNames.forEach(console.log); + } + } + } finally { + await indexClient.deleteIndex(TEST_INDEX_NAME); + } +} + +main(); diff --git a/sdk/search/search-documents/samples-dev/synonymMapOperations.ts b/sdk/search/search-documents/samples-dev/synonymMapOperations.ts index 56bcf98f75c5..b7fbfb174a3c 100644 --- a/sdk/search/search-documents/samples-dev/synonymMapOperations.ts +++ b/sdk/search/search-documents/samples-dev/synonymMapOperations.ts @@ -5,16 +5,16 @@ * @summary Demonstrates the SynonymMap Operations. 
*/ -import { SearchIndexClient, AzureKeyCredential, SynonymMap } from "@azure/search-documents"; +import { AzureKeyCredential, SearchIndexClient, SynonymMap } from "@azure/search-documents"; import * as dotenv from "dotenv"; dotenv.config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const synonymMapName = "example-synonymmap-sample-1"; +const TEST_SYNONYM_MAP_NAME = "example-synonymmap-sample-1"; -async function createSynonymMap(synonymMapName: string, client: SearchIndexClient) { +async function createSynonymMap(synonymMapName: string, client: SearchIndexClient): Promise { console.log(`Creating SynonymMap Operation`); const sm: SynonymMap = { name: synonymMapName, @@ -23,7 +23,10 @@ async function createSynonymMap(synonymMapName: string, client: SearchIndexClien await client.createSynonymMap(sm); } -async function getAndUpdateSynonymMap(synonymMapName: string, client: SearchIndexClient) { +async function getAndUpdateSynonymMap( + synonymMapName: string, + client: SearchIndexClient, +): Promise { console.log(`Get And Update SynonymMap Operation`); const sm: SynonymMap = await client.getSynonymMap(synonymMapName); console.log(`Update synonyms Synonym Map my-synonymmap`); @@ -31,27 +34,27 @@ async function getAndUpdateSynonymMap(synonymMapName: string, client: SearchInde await client.createOrUpdateSynonymMap(sm); } -async function listSynonymMaps(client: SearchIndexClient) { +async function listSynonymMaps(client: SearchIndexClient): Promise { console.log(`List SynonymMaps Operation`); const listOfSynonymMaps: Array = await client.listSynonymMaps(); console.log(`List of SynonymMaps`); console.log(`*******************`); - for (let sm of listOfSynonymMaps) { + for (const sm of listOfSynonymMaps) { console.log(`Name: ${sm.name}`); console.log(`Synonyms`); - for (let synonym of sm.synonyms) { + for (const synonym of sm.synonyms) { console.log(synonym); } } } -async function deleteSynonymMap(synonymMapName: 
string, client: SearchIndexClient) { +async function deleteSynonymMap(synonymMapName: string, client: SearchIndexClient): Promise { console.log(`Deleting SynonymMap Operation`); await client.deleteSynonymMap(synonymMapName); } -async function main() { +async function main(): Promise { console.log(`Running Index Operations Sample....`); if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -59,11 +62,11 @@ async function main() { } const client = new SearchIndexClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createSynonymMap(synonymMapName, client); - await getAndUpdateSynonymMap(synonymMapName, client); + await createSynonymMap(TEST_SYNONYM_MAP_NAME, client); + await getAndUpdateSynonymMap(TEST_SYNONYM_MAP_NAME, client); await listSynonymMaps(client); } finally { - await deleteSynonymMap(synonymMapName, client); + await deleteSynonymMap(TEST_SYNONYM_MAP_NAME, client); } } diff --git a/sdk/search/search-documents/samples-dev/vectorSearch.ts b/sdk/search/search-documents/samples-dev/vectorSearch.ts index 1fe322bde927..c08d6831381d 100644 --- a/sdk/search/search-documents/samples-dev/vectorSearch.ts +++ b/sdk/search/search-documents/samples-dev/vectorSearch.ts @@ -7,12 +7,12 @@ import { AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, } from "@azure/search-documents"; -import { createIndex, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; import { fancyStayEnVector, fancyStayFrVector, luxuryQueryVector } from "./vectors"; @@ -25,7 +25,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = "example-index-sample-7"; -async function main() { +async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint 
and apiKey with proper authorization."); return; @@ -81,30 +81,32 @@ async function main() { await delay(WAIT_TIME); const searchResults = await searchClient.search("*", { - vectorQueries: [ - { - kind: "vector", - fields: ["descriptionVectorEn"], - kNearestNeighborsCount: 3, - // An embedding of the query "What are the most luxurious hotels?" - vector: luxuryQueryVector, - }, - // Multi-vector search is supported - { - kind: "vector", - fields: ["descriptionVectorFr"], - kNearestNeighborsCount: 3, - vector: luxuryQueryVector, - }, - // The index can be configured with a vectorizer to generate text embeddings - // from a text query - { - kind: "text", - fields: ["descriptionVectorFr"], - kNearestNeighborsCount: 3, - text: "What are the most luxurious hotels?", - }, - ], + vectorSearchOptions: { + queries: [ + { + kind: "vector", + fields: ["descriptionVectorEn"], + kNearestNeighborsCount: 3, + // An embedding of the query "What are the most luxurious hotels?" + vector: luxuryQueryVector, + }, + // Multi-vector search is supported + { + kind: "vector", + fields: ["descriptionVectorFr"], + kNearestNeighborsCount: 3, + vector: luxuryQueryVector, + }, + // The index can be configured with a vectorizer to generate text embeddings + // from a text query + { + kind: "text", + fields: ["descriptionVectorFr"], + kNearestNeighborsCount: 3, + text: "What are the most luxurious hotels?", + }, + ], + }, }); for await (const result of searchResults.results) { diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/README.md b/sdk/search/search-documents/samples/v12-beta/javascript/README.md index 167160ba32eb..0b8fcfa0bb45 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/README.md +++ b/sdk/search/search-documents/samples/v12-beta/javascript/README.md @@ -13,18 +13,19 @@ urlFragment: search-documents-javascript-beta These sample programs show how to use the JavaScript client libraries for Azure Search Documents in some common scenarios. 
-| **File Name** | **Description** | -| ------------------------------------------------------------------- | ---------------------------------------------------------------------------- | -| [bufferedSenderAutoFlushSize.js][bufferedsenderautoflushsize] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on size. | -| [bufferedSenderAutoFlushTimer.js][bufferedsenderautoflushtimer] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on timer. | -| [bufferedSenderManualFlush.js][bufferedsendermanualflush] | Demonstrates the SearchIndexingBufferedSender with Manual Flush. | -| [dataSourceConnectionOperations.js][datasourceconnectionoperations] | Demonstrates the DataSource Connection Operations. | -| [indexOperations.js][indexoperations] | Demonstrates the Index Operations. | -| [indexerOperations.js][indexeroperations] | Demonstrates the Indexer Operations. | -| [searchClientOperations.js][searchclientoperations] | Demonstrates the SearchClient. | -| [skillSetOperations.js][skillsetoperations] | Demonstrates the Skillset Operations. | -| [synonymMapOperations.js][synonymmapoperations] | Demonstrates the SynonymMap Operations. | -| [vectorSearch.js][vectorsearch] | Demonstrates vector search | +| **File Name** | **Description** | +| ------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| [bufferedSenderAutoFlushSize.js][bufferedsenderautoflushsize] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on size. | +| [bufferedSenderAutoFlushTimer.js][bufferedsenderautoflushtimer] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on timer. | +| [bufferedSenderManualFlush.js][bufferedsendermanualflush] | Demonstrates the SearchIndexingBufferedSender with Manual Flush. 
| +| [dataSourceConnectionOperations.js][datasourceconnectionoperations] | Demonstrates the DataSource Connection Operations. | +| [indexOperations.js][indexoperations] | Demonstrates the Index Operations. | +| [indexerOperations.js][indexeroperations] | Demonstrates the Indexer Operations. | +| [searchClientOperations.js][searchclientoperations] | Demonstrates the SearchClient. | +| [skillSetOperations.js][skillsetoperations] | Demonstrates the Skillset Operations. | +| [stickySession.js][stickysession] | Demonstrates user sticky sessions, a way to reduce inconsistent behavior by targeting a single replica. | +| [synonymMapOperations.js][synonymmapoperations] | Demonstrates the SynonymMap Operations. | +| [vectorSearch.js][vectorsearch] | Demonstrates vector search | ## Prerequisites @@ -74,6 +75,7 @@ Take a look at our [API Documentation][apiref] for more information about the AP [indexeroperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/javascript/indexerOperations.js [searchclientoperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/javascript/searchClientOperations.js [skillsetoperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/javascript/skillSetOperations.js +[stickysession]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/javascript/stickySession.js [synonymmapoperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/javascript/synonymMapOperations.js [vectorsearch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/javascript/vectorSearch.js [apiref]: https://docs.microsoft.com/javascript/api/@azure/search-documents diff --git 
a/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushSize.js b/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushSize.js index c3a7a3975c5a..cc9418f9c822 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushSize.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushSize.js @@ -6,13 +6,13 @@ */ const { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, + SearchIndexingBufferedSender, } = require("@azure/search-documents"); -const { createIndex, documentKeyRetriever, WAIT_TIME, delay } = require("./setup"); +const { createIndex, delay, documentKeyRetriever, WAIT_TIME } = require("./setup"); require("dotenv").config(); @@ -95,9 +95,9 @@ async function main() { }); const documents = getDocumentsArray(1001); - bufferedClient.uploadDocuments(documents); + await bufferedClient.uploadDocuments(documents); - await WAIT_TIME; + await delay(WAIT_TIME); let count = await searchClient.getDocumentsCount(); while (count !== documents.length) { @@ -112,7 +112,6 @@ async function main() { } finally { await indexClient.deleteIndex(TEST_INDEX_NAME); } - await delay(WAIT_TIME); } main(); diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushTimer.js b/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushTimer.js index 215ec97af8e8..c0009fc9b021 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushTimer.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderAutoFlushTimer.js @@ -6,14 +6,14 @@ */ const { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, + DEFAULT_FLUSH_WINDOW, GeographyPoint, + SearchClient, SearchIndexClient, - DEFAULT_FLUSH_WINDOW, + SearchIndexingBufferedSender, } = require("@azure/search-documents"); -const { createIndex, 
documentKeyRetriever, WAIT_TIME, delay } = require("./setup"); +const { createIndex, delay, documentKeyRetriever, WAIT_TIME } = require("./setup"); require("dotenv").config(); @@ -66,7 +66,7 @@ async function main() { console.log(response); }); - bufferedClient.uploadDocuments([ + await bufferedClient.uploadDocuments([ { hotelId: "1", description: @@ -102,7 +102,6 @@ async function main() { } finally { await indexClient.deleteIndex(TEST_INDEX_NAME); } - await delay(WAIT_TIME); } main(); diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderManualFlush.js b/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderManualFlush.js index bdbb2d8ad5d1..974e5a073373 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderManualFlush.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/bufferedSenderManualFlush.js @@ -6,13 +6,13 @@ */ const { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, + SearchIndexingBufferedSender, } = require("@azure/search-documents"); -const { createIndex, documentKeyRetriever, WAIT_TIME, delay } = require("./setup"); +const { createIndex, delay, documentKeyRetriever, WAIT_TIME } = require("./setup"); require("dotenv").config(); diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/dataSourceConnectionOperations.js b/sdk/search/search-documents/samples/v12-beta/javascript/dataSourceConnectionOperations.js index 7a6cbda2e070..5b7e8b336b7b 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/dataSourceConnectionOperations.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/dataSourceConnectionOperations.js @@ -5,14 +5,14 @@ * @summary Demonstrates the DataSource Connection Operations. 
*/ -const { SearchIndexerClient, AzureKeyCredential } = require("@azure/search-documents"); +const { AzureKeyCredential, SearchIndexerClient } = require("@azure/search-documents"); require("dotenv").config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const connectionString = process.env.CONNECTION_STRING || ""; -const dataSourceConnectionName = "example-ds-connection-sample-1"; +const TEST_DATA_SOURCE_CONNECTION_NAME = "example-ds-connection-sample-1"; async function createDataSourceConnection(dataSourceConnectionName, client) { console.log(`Creating DS Connection Operation`); @@ -42,7 +42,7 @@ async function listDataSourceConnections(client) { console.log(`List of Data Source Connections`); console.log(`*******************************`); - for (let ds of listOfDataSourceConnections) { + for (const ds of listOfDataSourceConnections) { console.log(`Name: ${ds.name}`); console.log(`Description: ${ds.description}`); console.log(`Connection String: ${ds.connectionString}`); @@ -69,11 +69,11 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createDataSourceConnection(dataSourceConnectionName, client); - await getAndUpdateDataSourceConnection(dataSourceConnectionName, client); + await createDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); + await getAndUpdateDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); await listDataSourceConnections(client); } finally { - await deleteDataSourceConnection(dataSourceConnectionName, client); + await deleteDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/indexOperations.js b/sdk/search/search-documents/samples/v12-beta/javascript/indexOperations.js index 612da181eca3..c1a5239c3ba9 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/indexOperations.js +++ 
b/sdk/search/search-documents/samples/v12-beta/javascript/indexOperations.js @@ -5,13 +5,13 @@ * @summary Demonstrates the Index Operations. */ -const { SearchIndexClient, AzureKeyCredential } = require("@azure/search-documents"); +const { AzureKeyCredential, SearchIndexClient } = require("@azure/search-documents"); require("dotenv").config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const indexName = "example-index-sample-1"; +const TEST_INDEX_NAME = "example-index-sample-1"; async function createIndex(indexName, client) { console.log(`Creating Index Operation`); @@ -103,10 +103,10 @@ async function getServiceStatistics(client) { console.log(`\tMax Fields Per Index: ${limits.maxFieldsPerIndex}`); console.log(`\tMax Field Nesting Depth Per Index: ${limits.maxFieldNestingDepthPerIndex}`); console.log( - `\tMax Complex Collection Fields Per Index: ${limits.maxComplexCollectionFieldsPerIndex}` + `\tMax Complex Collection Fields Per Index: ${limits.maxComplexCollectionFieldsPerIndex}`, ); console.log( - `\tMax Complex Objects In Collections Per Document: ${limits.maxComplexObjectsInCollectionsPerDocument}` + `\tMax Complex Objects In Collections Per Document: ${limits.maxComplexObjectsInCollectionsPerDocument}`, ); } @@ -139,13 +139,13 @@ async function main() { } const client = new SearchIndexClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createIndex(indexName, client); - await getAndUpdateIndex(indexName, client); - await getIndexStatistics(indexName, client); + await createIndex(TEST_INDEX_NAME, client); + await getAndUpdateIndex(TEST_INDEX_NAME, client); + await getIndexStatistics(TEST_INDEX_NAME, client); await getServiceStatistics(client); await listIndexes(client); } finally { - await deleteIndex(indexName, client); + await deleteIndex(TEST_INDEX_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/indexerOperations.js 
b/sdk/search/search-documents/samples/v12-beta/javascript/indexerOperations.js index 52dffff86848..59b549220540 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/indexerOperations.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/indexerOperations.js @@ -5,7 +5,7 @@ * @summary Demonstrates the Indexer Operations. */ -const { SearchIndexerClient, AzureKeyCredential } = require("@azure/search-documents"); +const { AzureKeyCredential, SearchIndexerClient } = require("@azure/search-documents"); require("dotenv").config(); @@ -14,7 +14,7 @@ const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const dataSourceName = process.env.DATA_SOURCE_NAME || ""; const targetIndexName = process.env.TARGET_INDEX_NAME || ""; -const indexerName = "example-indexer-sample-1"; +const TEST_INDEXER_NAME = "example-indexer-sample-1"; async function createIndexer(indexerName, client) { console.log(`Creating Indexer Operation`); @@ -44,7 +44,7 @@ async function getIndexerStatus(indexerName, client) { console.log(`Limits`); console.log(`******`); console.log( - `MaxDocumentContentCharactersToExtract: ${indexerStatus.limits.maxDocumentContentCharactersToExtract}` + `MaxDocumentContentCharactersToExtract: ${indexerStatus.limits.maxDocumentContentCharactersToExtract}`, ); console.log(`MaxDocumentExtractionSize: ${indexerStatus.limits.maxDocumentExtractionSize}`); console.log(`MaxRunTime: ${indexerStatus.limits.maxRunTime}`); @@ -99,14 +99,14 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createIndexer(indexerName, client); - await getAndUpdateIndexer(indexerName, client); - await getIndexerStatus(indexerName, client); + await createIndexer(TEST_INDEXER_NAME, client); + await getAndUpdateIndexer(TEST_INDEXER_NAME, client); + await getIndexerStatus(TEST_INDEXER_NAME, client); await listIndexers(client); - await resetIndexer(indexerName, client); - await runIndexer(indexerName, client); + 
await resetIndexer(TEST_INDEXER_NAME, client); + await runIndexer(TEST_INDEXER_NAME, client); } finally { - await deleteIndexer(indexerName, client); + await deleteIndexer(TEST_INDEXER_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/sample.env b/sdk/search/search-documents/samples/v12-beta/javascript/sample.env index 13954cec21bd..86f0916725d2 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/sample.env +++ b/sdk/search/search-documents/samples/v12-beta/javascript/sample.env @@ -11,10 +11,10 @@ ENDPOINT= AZURE_OPENAI_ENDPOINT= # The key for the OpenAI service. -OPENAI_KEY= +AZURE_OPENAI_KEY= # The name of the OpenAI deployment you'd like your tests to use. -OPENAI_DEPLOYMENT_NAME= +AZURE_OPENAI_DEPLOYMENT_NAME= # Our tests assume that TEST_MODE is "playback" by default. You can # change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/searchClientOperations.js b/sdk/search/search-documents/samples/v12-beta/javascript/searchClientOperations.js index d212bcbe400b..0745f6807406 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/searchClientOperations.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/searchClientOperations.js @@ -7,11 +7,11 @@ const { AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, } = require("@azure/search-documents"); -const { createIndex, WAIT_TIME, delay } = require("./setup"); +const { createIndex, delay, WAIT_TIME } = require("./setup"); require("dotenv").config(); diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/setup.js b/sdk/search/search-documents/samples/v12-beta/javascript/setup.js index 52540e166a54..450c2f392ba1 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/setup.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/setup.js @@ -53,14 +53,14 @@ async 
function createIndex(client, name) { name: "descriptionVectorEn", searchable: true, vectorSearchDimensions: 1536, - vectorSearchProfile: "vector-search-profile", + vectorSearchProfileName: "vector-search-profile", }, { type: "Collection(Edm.Single)", name: "descriptionVectorFr", searchable: true, vectorSearchDimensions: 1536, - vectorSearchProfile: "vector-search-profile", + vectorSearchProfileName: "vector-search-profile", }, { type: "Edm.String", @@ -254,15 +254,15 @@ async function createIndex(client, name) { kind: "azureOpenAI", azureOpenAIParameters: { resourceUri: env.AZURE_OPENAI_ENDPOINT, - apiKey: env.OPENAI_KEY, - deploymentId: env.OPENAI_DEPLOYMENT_NAME, + apiKey: env.AZURE_OPENAI_KEY, + deploymentId: env.AZURE_OPENAI_DEPLOYMENT_NAME, }, }, ], profiles: [ { name: "vector-search-profile", - algorithm: "vector-search-algorithm", + algorithmConfigurationName: "vector-search-algorithm", vectorizer: "vector-search-vectorizer", }, ], diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/skillSetOperations.js b/sdk/search/search-documents/samples/v12-beta/javascript/skillSetOperations.js index 1111b5817434..fc8edb586a66 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/skillSetOperations.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/skillSetOperations.js @@ -5,14 +5,14 @@ * @summary Demonstrates the Skillset Operations. 
*/ -const { SearchIndexerClient, AzureKeyCredential } = require("@azure/search-documents"); +const { AzureKeyCredential, SearchIndexerClient } = require("@azure/search-documents"); require("dotenv").config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const skillsetName = "example-skillset-sample-1"; +const TEST_SKILLSET_NAME = "example-skillset-sample-1"; async function createSkillset(skillsetName, client) { console.log(`Creating Skillset Operation`); @@ -76,20 +76,20 @@ async function listSkillsets(client) { console.log(`\tList of Skillsets`); console.log(`\t******************`); - for (let skillset of listOfSkillsets) { + for (const skillset of listOfSkillsets) { console.log(`Name: ${skillset.name}`); console.log(`Description: ${skillset.description}`); console.log(`Skills`); console.log(`******`); - for (let skill of skillset.skills) { + for (const skill of skillset.skills) { console.log(`ODataType: ${skill.odatatype}`); console.log(`Inputs`); - for (let input of skill.inputs) { + for (const input of skill.inputs) { console.log(`\tName: ${input.name}`); console.log(`\tSource: ${input.source}`); } console.log(`Outputs`); - for (let output of skill.outputs) { + for (const output of skill.outputs) { console.log(`\tName: ${output.name}`); console.log(`\tTarget Name: ${output.targetName}`); } @@ -110,11 +110,11 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createSkillset(skillsetName, client); - await getAndUpdateSkillset(skillsetName, client); + await createSkillset(TEST_SKILLSET_NAME, client); + await getAndUpdateSkillset(TEST_SKILLSET_NAME, client); await listSkillsets(client); } finally { - await deleteSkillset(skillsetName, client); + await deleteSkillset(TEST_SKILLSET_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/stickySession.js 
b/sdk/search/search-documents/samples/v12-beta/javascript/stickySession.js new file mode 100644 index 000000000000..9f4955fbf4d0 --- /dev/null +++ b/sdk/search/search-documents/samples/v12-beta/javascript/stickySession.js @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +/** + * @summary Demonstrates user sticky sessions, a way to reduce inconsistent behavior by targeting a + * single replica. + */ + +const { AzureKeyCredential, odata, SearchIndexClient } = require("@azure/search-documents"); +const { createIndex, delay, WAIT_TIME } = require("./setup"); + +require("dotenv").config(); + +/** + * If you're querying a replicated index, Azure AI Search may target any replica with your queries. + * As these replicas may not be in a consistent state, the service may appear to have inconsistent + * states between distinct queries. To avoid this, you can use a sticky session. A sticky session + * is used to indicate to the Azure AI Search service that you'd like all requests with the same + * `sessionId` to be directed to the same replica. The service will then make a best effort to do + * so. + * + * Please see the + * {@link https://learn.microsoft.com/en-us/azure/search/index-similarity-and-scoring#scoring-statistics-and-sticky-sessions | documentation} + * for more information. + */ +const endpoint = process.env.ENDPOINT || ""; +const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; +const TEST_INDEX_NAME = "example-index-sample-3"; + +async function main() { + if (!endpoint || !apiKey) { + console.error( + "Be sure to set valid values for `endpoint` and `apiKey` with proper authorization.", + ); + return; + } + + const credential = new AzureKeyCredential(apiKey); + const indexClient = new SearchIndexClient(endpoint, credential); + const searchClient = indexClient.getSearchClient(TEST_INDEX_NAME); + + // The session id is defined by the user. 
+ const sessionId = "session1"; + + try { + await createIndex(indexClient, TEST_INDEX_NAME); + await delay(WAIT_TIME); + + // The service will make a best effort attempt to direct these queries to the same replica. As + // this overrides load balancing, excessive use of the same `sessionId` may result in + // performance degradation. Be sure to use a distinct `sessionId` for each sticky session. + const ratingQueries = [2, 4]; + for (const rating of ratingQueries) { + const response = await searchClient.search("*", { + filter: odata`rating ge ${rating}`, + sessionId, + }); + + const hotelNames = []; + for await (const result of response.results) { + const hotelName = result.document.hotelName; + if (typeof hotelName === "string") { + hotelNames.push(hotelName); + } + } + + if (hotelNames.length) { + console.log(`Hotels with at least a rating of ${rating}:`); + hotelNames.forEach(console.log); + } + } + } finally { + await indexClient.deleteIndex(TEST_INDEX_NAME); + } +} + +main(); diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/synonymMapOperations.js b/sdk/search/search-documents/samples/v12-beta/javascript/synonymMapOperations.js index 65860f6d7b86..272bf7d39057 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/synonymMapOperations.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/synonymMapOperations.js @@ -5,13 +5,13 @@ * @summary Demonstrates the SynonymMap Operations. 
*/ -const { SearchIndexClient, AzureKeyCredential } = require("@azure/search-documents"); +const { AzureKeyCredential, SearchIndexClient } = require("@azure/search-documents"); require("dotenv").config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const synonymMapName = "example-synonymmap-sample-1"; +const TEST_SYNONYM_MAP_NAME = "example-synonymmap-sample-1"; async function createSynonymMap(synonymMapName, client) { console.log(`Creating SynonymMap Operation`); @@ -36,10 +36,10 @@ async function listSynonymMaps(client) { console.log(`List of SynonymMaps`); console.log(`*******************`); - for (let sm of listOfSynonymMaps) { + for (const sm of listOfSynonymMaps) { console.log(`Name: ${sm.name}`); console.log(`Synonyms`); - for (let synonym of sm.synonyms) { + for (const synonym of sm.synonyms) { console.log(synonym); } } @@ -58,11 +58,11 @@ async function main() { } const client = new SearchIndexClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createSynonymMap(synonymMapName, client); - await getAndUpdateSynonymMap(synonymMapName, client); + await createSynonymMap(TEST_SYNONYM_MAP_NAME, client); + await getAndUpdateSynonymMap(TEST_SYNONYM_MAP_NAME, client); await listSynonymMaps(client); } finally { - await deleteSynonymMap(synonymMapName, client); + await deleteSynonymMap(TEST_SYNONYM_MAP_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/javascript/vectorSearch.js b/sdk/search/search-documents/samples/v12-beta/javascript/vectorSearch.js index 79c1fe2fb086..c7a42109b136 100644 --- a/sdk/search/search-documents/samples/v12-beta/javascript/vectorSearch.js +++ b/sdk/search/search-documents/samples/v12-beta/javascript/vectorSearch.js @@ -7,11 +7,11 @@ const { AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, } = require("@azure/search-documents"); -const { createIndex, WAIT_TIME, delay } = require("./setup"); +const { 
createIndex, delay, WAIT_TIME } = require("./setup"); const dotenv = require("dotenv"); const { fancyStayEnVector, fancyStayFrVector, luxuryQueryVector } = require("./vectors"); @@ -76,30 +76,32 @@ async function main() { await delay(WAIT_TIME); const searchResults = await searchClient.search("*", { - vectorQueries: [ - { - kind: "vector", - fields: ["descriptionVectorEn"], - kNearestNeighborsCount: 3, - // An embedding of the query "What are the most luxurious hotels?" - vector: luxuryQueryVector, - }, - // Multi-vector search is supported - { - kind: "vector", - fields: ["descriptionVectorFr"], - kNearestNeighborsCount: 3, - vector: luxuryQueryVector, - }, - // The index can be configured with a vectorizer to generate text embeddings - // from a text query - { - kind: "text", - fields: ["descriptionVectorFr"], - kNearestNeighborsCount: 3, - text: "What are the most luxurious hotels?", - }, - ], + vectorSearchOptions: { + queries: [ + { + kind: "vector", + fields: ["descriptionVectorEn"], + kNearestNeighborsCount: 3, + // An embedding of the query "What are the most luxurious hotels?" 
+ vector: luxuryQueryVector, + }, + // Multi-vector search is supported + { + kind: "vector", + fields: ["descriptionVectorFr"], + kNearestNeighborsCount: 3, + vector: luxuryQueryVector, + }, + // The index can be configured with a vectorizer to generate text embeddings + // from a text query + { + kind: "text", + fields: ["descriptionVectorFr"], + kNearestNeighborsCount: 3, + text: "What are the most luxurious hotels?", + }, + ], + }, }); for await (const result of searchResults.results) { diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/README.md b/sdk/search/search-documents/samples/v12-beta/typescript/README.md index da0592d5a261..9bb632d3e01c 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/README.md +++ b/sdk/search/search-documents/samples/v12-beta/typescript/README.md @@ -13,18 +13,19 @@ urlFragment: search-documents-typescript-beta These sample programs show how to use the TypeScript client libraries for Azure Search Documents in some common scenarios. -| **File Name** | **Description** | -| ------------------------------------------------------------------- | ---------------------------------------------------------------------------- | -| [bufferedSenderAutoFlushSize.ts][bufferedsenderautoflushsize] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on size. | -| [bufferedSenderAutoFlushTimer.ts][bufferedsenderautoflushtimer] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on timer. | -| [bufferedSenderManualFlush.ts][bufferedsendermanualflush] | Demonstrates the SearchIndexingBufferedSender with Manual Flush. | -| [dataSourceConnectionOperations.ts][datasourceconnectionoperations] | Demonstrates the DataSource Connection Operations. | -| [indexOperations.ts][indexoperations] | Demonstrates the Index Operations. | -| [indexerOperations.ts][indexeroperations] | Demonstrates the Indexer Operations. 
| -| [searchClientOperations.ts][searchclientoperations] | Demonstrates the SearchClient. | -| [skillSetOperations.ts][skillsetoperations] | Demonstrates the Skillset Operations. | -| [synonymMapOperations.ts][synonymmapoperations] | Demonstrates the SynonymMap Operations. | -| [vectorSearch.ts][vectorsearch] | Demonstrates vector search | +| **File Name** | **Description** | +| ------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| [bufferedSenderAutoFlushSize.ts][bufferedsenderautoflushsize] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on size. | +| [bufferedSenderAutoFlushTimer.ts][bufferedsenderautoflushtimer] | Demonstrates the SearchIndexingBufferedSender with Autoflush based on timer. | +| [bufferedSenderManualFlush.ts][bufferedsendermanualflush] | Demonstrates the SearchIndexingBufferedSender with Manual Flush. | +| [dataSourceConnectionOperations.ts][datasourceconnectionoperations] | Demonstrates the DataSource Connection Operations. | +| [indexOperations.ts][indexoperations] | Demonstrates the Index Operations. | +| [indexerOperations.ts][indexeroperations] | Demonstrates the Indexer Operations. | +| [searchClientOperations.ts][searchclientoperations] | Demonstrates the SearchClient. | +| [skillSetOperations.ts][skillsetoperations] | Demonstrates the Skillset Operations. | +| [stickySession.ts][stickysession] | Demonstrates user sticky sessions, a way to reduce inconsistent behavior by targeting a single replica. | +| [synonymMapOperations.ts][synonymmapoperations] | Demonstrates the SynonymMap Operations. 
| +| [vectorSearch.ts][vectorsearch] | Demonstrates vector search | ## Prerequisites @@ -86,6 +87,7 @@ Take a look at our [API Documentation][apiref] for more information about the AP [indexeroperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/typescript/src/indexerOperations.ts [searchclientoperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/typescript/src/searchClientOperations.ts [skillsetoperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/typescript/src/skillSetOperations.ts +[stickysession]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/typescript/src/stickySession.ts [synonymmapoperations]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/typescript/src/synonymMapOperations.ts [vectorsearch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/search/search-documents/samples/v12-beta/typescript/src/vectorSearch.ts [apiref]: https://docs.microsoft.com/javascript/api/@azure/search-documents diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/sample.env b/sdk/search/search-documents/samples/v12-beta/typescript/sample.env index 13954cec21bd..86f0916725d2 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/sample.env +++ b/sdk/search/search-documents/samples/v12-beta/typescript/sample.env @@ -11,10 +11,10 @@ ENDPOINT= AZURE_OPENAI_ENDPOINT= # The key for the OpenAI service. -OPENAI_KEY= +AZURE_OPENAI_KEY= # The name of the OpenAI deployment you'd like your tests to use. -OPENAI_DEPLOYMENT_NAME= +AZURE_OPENAI_DEPLOYMENT_NAME= # Our tests assume that TEST_MODE is "playback" by default. You can # change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. 
diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushSize.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushSize.ts index 6d8f7d22509f..d873c2d86bf2 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushSize.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushSize.ts @@ -6,14 +6,14 @@ */ import { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, + SearchIndexingBufferedSender, } from "@azure/search-documents"; -import { createIndex, documentKeyRetriever, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, documentKeyRetriever, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -58,7 +58,7 @@ function getDocumentsArray(size: number): Hotel[] { return array; } -async function main() { +async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; @@ -70,7 +70,7 @@ async function main() { const searchClient: SearchClient = new SearchClient( endpoint, TEST_INDEX_NAME, - credential + credential, ); const indexClient: SearchIndexClient = new SearchIndexClient(endpoint, credential); @@ -83,7 +83,7 @@ async function main() { documentKeyRetriever, { autoFlush: true, - } + }, ); bufferedClient.on("batchAdded", (response: any) => { @@ -105,9 +105,9 @@ async function main() { }); const documents: Hotel[] = getDocumentsArray(1001); - bufferedClient.uploadDocuments(documents); + await bufferedClient.uploadDocuments(documents); - await WAIT_TIME; + await delay(WAIT_TIME); let count = await searchClient.getDocumentsCount(); while (count !== documents.length) { @@ -122,7 +122,6 @@ async function main() { } finally { await indexClient.deleteIndex(TEST_INDEX_NAME); } - 
await delay(WAIT_TIME); } main(); diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushTimer.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushTimer.ts index 9b4c33e51157..9ac74a28c295 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushTimer.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderAutoFlushTimer.ts @@ -6,15 +6,15 @@ */ import { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, + DEFAULT_FLUSH_WINDOW, GeographyPoint, + SearchClient, SearchIndexClient, - DEFAULT_FLUSH_WINDOW, + SearchIndexingBufferedSender, } from "@azure/search-documents"; -import { createIndex, documentKeyRetriever, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, documentKeyRetriever, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -30,7 +30,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = "example-index-sample-5"; -export async function main() { +export async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; @@ -42,7 +42,7 @@ export async function main() { const searchClient: SearchClient = new SearchClient( endpoint, TEST_INDEX_NAME, - credential + credential, ); const indexClient: SearchIndexClient = new SearchIndexClient(endpoint, credential); @@ -55,7 +55,7 @@ export async function main() { documentKeyRetriever, { autoFlush: true, - } + }, ); bufferedClient.on("batchAdded", (response: any) => { @@ -76,7 +76,7 @@ export async function main() { console.log(response); }); - bufferedClient.uploadDocuments([ + await bufferedClient.uploadDocuments([ { hotelId: "1", description: @@ -112,7 +112,6 @@ export async function main() { } finally { 
await indexClient.deleteIndex(TEST_INDEX_NAME); } - await delay(WAIT_TIME); } main(); diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderManualFlush.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderManualFlush.ts index 6c77acff8e2e..889cd5856fe7 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderManualFlush.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/bufferedSenderManualFlush.ts @@ -6,14 +6,14 @@ */ import { - SearchIndexingBufferedSender, AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, + SearchIndexingBufferedSender, } from "@azure/search-documents"; -import { createIndex, documentKeyRetriever, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, documentKeyRetriever, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -27,7 +27,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = "example-index-sample-6"; -export async function main() { +export async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; @@ -39,7 +39,7 @@ export async function main() { const searchClient: SearchClient = new SearchClient( endpoint, TEST_INDEX_NAME, - credential + credential, ); const indexClient: SearchIndexClient = new SearchIndexClient(endpoint, credential); @@ -52,7 +52,7 @@ export async function main() { documentKeyRetriever, { autoFlush: false, - } + }, ); bufferedClient.on("batchAdded", (response: any) => { diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/dataSourceConnectionOperations.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/dataSourceConnectionOperations.ts index 49c45e886cc3..1ca1ce4d5048 100644 --- 
a/sdk/search/search-documents/samples/v12-beta/typescript/src/dataSourceConnectionOperations.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/dataSourceConnectionOperations.ts @@ -6,8 +6,8 @@ */ import { - SearchIndexerClient, AzureKeyCredential, + SearchIndexerClient, SearchIndexerDataSourceConnection, } from "@azure/search-documents"; @@ -17,12 +17,12 @@ dotenv.config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const connectionString = process.env.CONNECTION_STRING || ""; -const dataSourceConnectionName = "example-ds-connection-sample-1"; +const TEST_DATA_SOURCE_CONNECTION_NAME = "example-ds-connection-sample-1"; async function createDataSourceConnection( dataSourceConnectionName: string, - client: SearchIndexerClient -) { + client: SearchIndexerClient, +): Promise { console.log(`Creating DS Connection Operation`); const dataSourceConnection: SearchIndexerDataSourceConnection = { name: dataSourceConnectionName, @@ -38,25 +38,24 @@ async function createDataSourceConnection( async function getAndUpdateDataSourceConnection( dataSourceConnectionName: string, - client: SearchIndexerClient -) { + client: SearchIndexerClient, +): Promise { console.log(`Get And Update DS Connection Operation`); - const ds: SearchIndexerDataSourceConnection = await client.getDataSourceConnection( - dataSourceConnectionName - ); + const ds: SearchIndexerDataSourceConnection = + await client.getDataSourceConnection(dataSourceConnectionName); ds.container.name = "Listings_5K_KingCounty_WA"; console.log(`Updating Container Name of Datasource Connection ${dataSourceConnectionName}`); await client.createOrUpdateDataSourceConnection(ds); } -async function listDataSourceConnections(client: SearchIndexerClient) { +async function listDataSourceConnections(client: SearchIndexerClient): Promise { console.log(`List DS Connection Operation`); const listOfDataSourceConnections: Array = await 
client.listDataSourceConnections(); console.log(`List of Data Source Connections`); console.log(`*******************************`); - for (let ds of listOfDataSourceConnections) { + for (const ds of listOfDataSourceConnections) { console.log(`Name: ${ds.name}`); console.log(`Description: ${ds.description}`); console.log(`Connection String: ${ds.connectionString}`); @@ -72,13 +71,13 @@ async function listDataSourceConnections(client: SearchIndexerClient) { async function deleteDataSourceConnection( dataSourceConnectionName: string, - client: SearchIndexerClient -) { + client: SearchIndexerClient, +): Promise { console.log(`Deleting DS Connection Operation`); await client.deleteDataSourceConnection(dataSourceConnectionName); } -async function main() { +async function main(): Promise { console.log(`Running DS Connection Operations Sample....`); if (!endpoint || !apiKey || !connectionString) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -86,11 +85,11 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createDataSourceConnection(dataSourceConnectionName, client); - await getAndUpdateDataSourceConnection(dataSourceConnectionName, client); + await createDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); + await getAndUpdateDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); await listDataSourceConnections(client); } finally { - await deleteDataSourceConnection(dataSourceConnectionName, client); + await deleteDataSourceConnection(TEST_DATA_SOURCE_CONNECTION_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/indexOperations.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/indexOperations.ts index 9ba3e3da83b9..7774897f3fbc 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/indexOperations.ts +++ 
b/sdk/search/search-documents/samples/v12-beta/typescript/src/indexOperations.ts @@ -6,9 +6,9 @@ */ import { - SearchIndexClient, AzureKeyCredential, SearchIndex, + SearchIndexClient, SearchIndexStatistics, } from "@azure/search-documents"; @@ -17,9 +17,9 @@ dotenv.config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const indexName = "example-index-sample-1"; +const TEST_INDEX_NAME = "example-index-sample-1"; -async function createIndex(indexName: string, client: SearchIndexClient) { +async function createIndex(indexName: string, client: SearchIndexClient): Promise { console.log(`Creating Index Operation`); const index: SearchIndex = { name: indexName, @@ -62,7 +62,7 @@ async function createIndex(indexName: string, client: SearchIndexClient) { await client.createIndex(index); } -async function getAndUpdateIndex(indexName: string, client: SearchIndexClient) { +async function getAndUpdateIndex(indexName: string, client: SearchIndexClient): Promise { console.log(`Get And Update Index Operation`); const index: SearchIndex = await client.getIndex(indexName); index.fields.push({ @@ -73,14 +73,14 @@ async function getAndUpdateIndex(indexName: string, client: SearchIndexClient) { await client.createOrUpdateIndex(index); } -async function getIndexStatistics(indexName: string, client: SearchIndexClient) { +async function getIndexStatistics(indexName: string, client: SearchIndexClient): Promise { console.log(`Get Index Statistics Operation`); const statistics: SearchIndexStatistics = await client.getIndexStatistics(indexName); console.log(`Document Count: ${statistics.documentCount}`); console.log(`Storage Size: ${statistics.storageSize}`); } -async function getServiceStatistics(client: SearchIndexClient) { +async function getServiceStatistics(client: SearchIndexClient): Promise { console.log(`Get Service Statistics Operation`); const { counters, limits } = await client.getServiceStatistics(); console.log(`Counters`); 
@@ -109,14 +109,14 @@ async function getServiceStatistics(client: SearchIndexClient) { console.log(`\tMax Fields Per Index: ${limits.maxFieldsPerIndex}`); console.log(`\tMax Field Nesting Depth Per Index: ${limits.maxFieldNestingDepthPerIndex}`); console.log( - `\tMax Complex Collection Fields Per Index: ${limits.maxComplexCollectionFieldsPerIndex}` + `\tMax Complex Collection Fields Per Index: ${limits.maxComplexCollectionFieldsPerIndex}`, ); console.log( - `\tMax Complex Objects In Collections Per Document: ${limits.maxComplexObjectsInCollectionsPerDocument}` + `\tMax Complex Objects In Collections Per Document: ${limits.maxComplexObjectsInCollectionsPerDocument}`, ); } -async function listIndexes(client: SearchIndexClient) { +async function listIndexes(client: SearchIndexClient): Promise { console.log(`List Indexes Operation`); const result = await client.listIndexes(); let listOfIndexes = await result.next(); @@ -132,12 +132,12 @@ async function listIndexes(client: SearchIndexClient) { } } -async function deleteIndex(indexName: string, client: SearchIndexClient) { +async function deleteIndex(indexName: string, client: SearchIndexClient): Promise { console.log(`Deleting Index Operation`); await client.deleteIndex(indexName); } -async function main() { +async function main(): Promise { console.log(`Running Index Operations Sample....`); if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -145,13 +145,13 @@ async function main() { } const client = new SearchIndexClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createIndex(indexName, client); - await getAndUpdateIndex(indexName, client); - await getIndexStatistics(indexName, client); + await createIndex(TEST_INDEX_NAME, client); + await getAndUpdateIndex(TEST_INDEX_NAME, client); + await getIndexStatistics(TEST_INDEX_NAME, client); await getServiceStatistics(client); await listIndexes(client); } finally { - await 
deleteIndex(indexName, client); + await deleteIndex(TEST_INDEX_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/indexerOperations.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/indexerOperations.ts index 1e61c2374071..5cb8ddd8e62d 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/indexerOperations.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/indexerOperations.ts @@ -6,9 +6,9 @@ */ import { - SearchIndexerClient, AzureKeyCredential, SearchIndexer, + SearchIndexerClient, SearchIndexerStatus, } from "@azure/search-documents"; @@ -20,9 +20,9 @@ const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const dataSourceName = process.env.DATA_SOURCE_NAME || ""; const targetIndexName = process.env.TARGET_INDEX_NAME || ""; -const indexerName = "example-indexer-sample-1"; +const TEST_INDEXER_NAME = "example-indexer-sample-1"; -async function createIndexer(indexerName: string, client: SearchIndexerClient) { +async function createIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Creating Indexer Operation`); const indexer: SearchIndexer = { name: indexerName, @@ -34,7 +34,10 @@ async function createIndexer(indexerName: string, client: SearchIndexerClient) { await client.createIndexer(indexer); } -async function getAndUpdateIndexer(indexerName: string, client: SearchIndexerClient) { +async function getAndUpdateIndexer( + indexerName: string, + client: SearchIndexerClient, +): Promise { console.log(`Get And Update Indexer Operation`); const indexer: SearchIndexer = await client.getIndexer(indexerName); indexer.isDisabled = true; @@ -43,20 +46,20 @@ async function getAndUpdateIndexer(indexerName: string, client: SearchIndexerCli await client.createOrUpdateIndexer(indexer); } -async function getIndexerStatus(indexerName: string, client: SearchIndexerClient) { +async function getIndexerStatus(indexerName: string, client: SearchIndexerClient): 
Promise { console.log(`Get Indexer Status Operation`); const indexerStatus: SearchIndexerStatus = await client.getIndexerStatus(indexerName); console.log(`Status: ${indexerStatus.status}`); console.log(`Limits`); console.log(`******`); console.log( - `MaxDocumentContentCharactersToExtract: ${indexerStatus.limits.maxDocumentContentCharactersToExtract}` + `MaxDocumentContentCharactersToExtract: ${indexerStatus.limits.maxDocumentContentCharactersToExtract}`, ); console.log(`MaxDocumentExtractionSize: ${indexerStatus.limits.maxDocumentExtractionSize}`); console.log(`MaxRunTime: ${indexerStatus.limits.maxRunTime}`); } -async function listIndexers(client: SearchIndexerClient) { +async function listIndexers(client: SearchIndexerClient): Promise { console.log(`List Indexers Operation`); const listOfIndexers: Array = await client.listIndexers(); @@ -82,22 +85,22 @@ async function listIndexers(client: SearchIndexerClient) { } } -async function resetIndexer(indexerName: string, client: SearchIndexerClient) { +async function resetIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Reset Indexer Operation`); await client.resetIndexer(indexerName); } -async function deleteIndexer(indexerName: string, client: SearchIndexerClient) { +async function deleteIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Deleting Indexer Operation`); await client.deleteIndexer(indexerName); } -async function runIndexer(indexerName: string, client: SearchIndexerClient) { +async function runIndexer(indexerName: string, client: SearchIndexerClient): Promise { console.log(`Run Indexer Operation`); await client.runIndexer(indexerName); } -async function main() { +async function main(): Promise { console.log(`Running Indexer Operations Sample....`); if (!endpoint || !apiKey || !dataSourceName || !targetIndexName) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -105,14 +108,14 @@ async 
function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createIndexer(indexerName, client); - await getAndUpdateIndexer(indexerName, client); - await getIndexerStatus(indexerName, client); + await createIndexer(TEST_INDEXER_NAME, client); + await getAndUpdateIndexer(TEST_INDEXER_NAME, client); + await getIndexerStatus(TEST_INDEXER_NAME, client); await listIndexers(client); - await resetIndexer(indexerName, client); - await runIndexer(indexerName, client); + await resetIndexer(TEST_INDEXER_NAME, client); + await runIndexer(TEST_INDEXER_NAME, client); } finally { - await deleteIndexer(indexerName, client); + await deleteIndexer(TEST_INDEXER_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/interfaces.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/interfaces.ts index fc116d4805ed..f6ac5440b95e 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/interfaces.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/interfaces.ts @@ -11,11 +11,11 @@ export interface Hotel { hotelId?: string; hotelName?: string | null; description?: string | null; - descriptionVectorEn?: number[] | null; - descriptionVectorFr?: number[] | null; + descriptionVectorEn?: number[]; + descriptionVectorFr?: number[]; descriptionFr?: string | null; category?: string | null; - tags?: string[] | null; + tags?: string[]; parkingIncluded?: boolean | null; smokingAllowed?: boolean | null; lastRenovationDate?: Date | null; @@ -37,5 +37,5 @@ export interface Hotel { sleepsCount?: number | null; smokingAllowed?: boolean | null; tags?: string[] | null; - }> | null; + }>; } diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/searchClientOperations.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/searchClientOperations.ts index 4949728c3e32..ced0541bb0fc 100644 --- 
a/sdk/search/search-documents/samples/v12-beta/typescript/src/searchClientOperations.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/searchClientOperations.ts @@ -7,13 +7,13 @@ import { AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, SelectFields, } from "@azure/search-documents"; -import { createIndex, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; dotenv.config(); @@ -25,7 +25,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = "example-index-sample-2"; -async function main() { +async function main(): Promise { if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; @@ -40,7 +40,7 @@ async function main() { const searchClient: SearchClient = new SearchClient( endpoint, TEST_INDEX_NAME, - credential + credential, ); const indexClient: SearchIndexClient = new SearchIndexClient(endpoint, credential); diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/setup.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/setup.ts index a876b9f0a44b..fabc4db10450 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/setup.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/setup.ts @@ -5,9 +5,9 @@ * Defines the utility methods. 
*/ -import { SearchIndexClient, SearchIndex, KnownAnalyzerNames } from "@azure/search-documents"; -import { Hotel } from "./interfaces"; +import { KnownAnalyzerNames, SearchIndex, SearchIndexClient } from "@azure/search-documents"; import { env } from "process"; +import { Hotel } from "./interfaces"; export const WAIT_TIME = 4000; @@ -54,14 +54,14 @@ export async function createIndex(client: SearchIndexClient, name: string): Prom name: "descriptionVectorEn", searchable: true, vectorSearchDimensions: 1536, - vectorSearchProfile: "vector-search-profile", + vectorSearchProfileName: "vector-search-profile", }, { type: "Collection(Edm.Single)", name: "descriptionVectorFr", searchable: true, vectorSearchDimensions: 1536, - vectorSearchProfile: "vector-search-profile", + vectorSearchProfileName: "vector-search-profile", }, { type: "Edm.String", @@ -255,15 +255,15 @@ export async function createIndex(client: SearchIndexClient, name: string): Prom kind: "azureOpenAI", azureOpenAIParameters: { resourceUri: env.AZURE_OPENAI_ENDPOINT, - apiKey: env.OPENAI_KEY, - deploymentId: env.OPENAI_DEPLOYMENT_NAME, + apiKey: env.AZURE_OPENAI_KEY, + deploymentId: env.AZURE_OPENAI_DEPLOYMENT_NAME, }, }, ], profiles: [ { name: "vector-search-profile", - algorithm: "vector-search-algorithm", + algorithmConfigurationName: "vector-search-algorithm", vectorizer: "vector-search-vectorizer", }, ], diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/skillSetOperations.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/skillSetOperations.ts index c8fc4162aa9b..e9c5fcee5511 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/skillSetOperations.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/skillSetOperations.ts @@ -6,8 +6,8 @@ */ import { - SearchIndexerClient, AzureKeyCredential, + SearchIndexerClient, SearchIndexerSkillset, } from "@azure/search-documents"; @@ -17,9 +17,9 @@ dotenv.config(); const endpoint = 
process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const skillsetName = "example-skillset-sample-1"; +const TEST_SKILLSET_NAME = "example-skillset-sample-1"; -async function createSkillset(skillsetName: string, client: SearchIndexerClient) { +async function createSkillset(skillsetName: string, client: SearchIndexerClient): Promise { console.log(`Creating Skillset Operation`); const skillset: SearchIndexerSkillset = { name: skillsetName, @@ -57,7 +57,10 @@ async function createSkillset(skillsetName: string, client: SearchIndexerClient) await client.createSkillset(skillset); } -async function getAndUpdateSkillset(skillsetName: string, client: SearchIndexerClient) { +async function getAndUpdateSkillset( + skillsetName: string, + client: SearchIndexerClient, +): Promise { console.log(`Get And Update Skillset Operation`); const skillset: SearchIndexerSkillset = await client.getSkillset(skillsetName); @@ -75,26 +78,26 @@ async function getAndUpdateSkillset(skillsetName: string, client: SearchIndexerC await client.createOrUpdateSkillset(skillset); } -async function listSkillsets(client: SearchIndexerClient) { +async function listSkillsets(client: SearchIndexerClient): Promise { console.log(`List Skillset Operation`); const listOfSkillsets: Array = await client.listSkillsets(); console.log(`\tList of Skillsets`); console.log(`\t******************`); - for (let skillset of listOfSkillsets) { + for (const skillset of listOfSkillsets) { console.log(`Name: ${skillset.name}`); console.log(`Description: ${skillset.description}`); console.log(`Skills`); console.log(`******`); - for (let skill of skillset.skills) { + for (const skill of skillset.skills) { console.log(`ODataType: ${skill.odatatype}`); console.log(`Inputs`); - for (let input of skill.inputs) { + for (const input of skill.inputs) { console.log(`\tName: ${input.name}`); console.log(`\tSource: ${input.source}`); } console.log(`Outputs`); - for (let output of skill.outputs) { + for 
(const output of skill.outputs) { console.log(`\tName: ${output.name}`); console.log(`\tTarget Name: ${output.targetName}`); } @@ -102,12 +105,12 @@ async function listSkillsets(client: SearchIndexerClient) { } } -async function deleteSkillset(skillsetName: string, client: SearchIndexerClient) { +async function deleteSkillset(skillsetName: string, client: SearchIndexerClient): Promise { console.log(`Deleting Skillset Operation`); await client.deleteSkillset(skillsetName); } -async function main() { +async function main(): Promise { console.log(`Running Skillset Operations Sample....`); if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -115,11 +118,11 @@ async function main() { } const client = new SearchIndexerClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createSkillset(skillsetName, client); - await getAndUpdateSkillset(skillsetName, client); + await createSkillset(TEST_SKILLSET_NAME, client); + await getAndUpdateSkillset(TEST_SKILLSET_NAME, client); await listSkillsets(client); } finally { - await deleteSkillset(skillsetName, client); + await deleteSkillset(TEST_SKILLSET_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/stickySession.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/stickySession.ts new file mode 100644 index 000000000000..8f91d1a0fa97 --- /dev/null +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/stickySession.ts @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +/** + * @summary Demonstrates user sticky sessions, a way to reduce inconsistent behavior by targeting a + * single replica. 
+ */ + +import { + AzureKeyCredential, + odata, + SearchClient, + SearchIndexClient, +} from "@azure/search-documents"; +import { Hotel } from "./interfaces"; +import { createIndex, delay, WAIT_TIME } from "./setup"; + +import * as dotenv from "dotenv"; +dotenv.config(); + +/** + * If you're querying a replicated index, Azure AI Search may target any replica with your queries. + * As these replicas may not be in a consistent state, the service may appear to have inconsistent + * states between distinct queries. To avoid this, you can use a sticky session. A sticky session + * is used to indicate to the Azure AI Search service that you'd like all requests with the same + * `sessionId` to be directed to the same replica. The service will then make a best effort to do + * so. + * + * Please see the + * {@link https://learn.microsoft.com/en-us/azure/search/index-similarity-and-scoring#scoring-statistics-and-sticky-sessions | documentation} + * for more information. + */ +const endpoint = process.env.ENDPOINT || ""; +const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; +const TEST_INDEX_NAME = "example-index-sample-3"; + +async function main(): Promise { + if (!endpoint || !apiKey) { + console.error( + "Be sure to set valid values for `endpoint` and `apiKey` with proper authorization.", + ); + return; + } + + const credential = new AzureKeyCredential(apiKey); + const indexClient: SearchIndexClient = new SearchIndexClient(endpoint, credential); + const searchClient: SearchClient = indexClient.getSearchClient(TEST_INDEX_NAME); + + // The session id is defined by the user. + const sessionId = "session1"; + + try { + await createIndex(indexClient, TEST_INDEX_NAME); + await delay(WAIT_TIME); + + // The service will make a best effort attempt to direct these queries to the same replica. As + // this overrides load balancing, excessive use of the same `sessionId` may result in + // performance degradation. Be sure to use a distinct `sessionId` for each sticky session. 
+ const ratingQueries = [2, 4]; + for (const rating of ratingQueries) { + const response = await searchClient.search("*", { + filter: odata`rating ge ${rating}`, + sessionId, + }); + + const hotelNames = []; + for await (const result of response.results) { + const hotelName = result.document.hotelName; + if (typeof hotelName === "string") { + hotelNames.push(hotelName); + } + } + + if (hotelNames.length) { + console.log(`Hotels with at least a rating of ${rating}:`); + hotelNames.forEach(console.log); + } + } + } finally { + await indexClient.deleteIndex(TEST_INDEX_NAME); + } +} + +main(); diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/synonymMapOperations.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/synonymMapOperations.ts index 56bcf98f75c5..b7fbfb174a3c 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/synonymMapOperations.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/synonymMapOperations.ts @@ -5,16 +5,16 @@ * @summary Demonstrates the SynonymMap Operations. 
*/ -import { SearchIndexClient, AzureKeyCredential, SynonymMap } from "@azure/search-documents"; +import { AzureKeyCredential, SearchIndexClient, SynonymMap } from "@azure/search-documents"; import * as dotenv from "dotenv"; dotenv.config(); const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; -const synonymMapName = "example-synonymmap-sample-1"; +const TEST_SYNONYM_MAP_NAME = "example-synonymmap-sample-1"; -async function createSynonymMap(synonymMapName: string, client: SearchIndexClient) { +async function createSynonymMap(synonymMapName: string, client: SearchIndexClient): Promise { console.log(`Creating SynonymMap Operation`); const sm: SynonymMap = { name: synonymMapName, @@ -23,7 +23,10 @@ async function createSynonymMap(synonymMapName: string, client: SearchIndexClien await client.createSynonymMap(sm); } -async function getAndUpdateSynonymMap(synonymMapName: string, client: SearchIndexClient) { +async function getAndUpdateSynonymMap( + synonymMapName: string, + client: SearchIndexClient, +): Promise { console.log(`Get And Update SynonymMap Operation`); const sm: SynonymMap = await client.getSynonymMap(synonymMapName); console.log(`Update synonyms Synonym Map my-synonymmap`); @@ -31,27 +34,27 @@ async function getAndUpdateSynonymMap(synonymMapName: string, client: SearchInde await client.createOrUpdateSynonymMap(sm); } -async function listSynonymMaps(client: SearchIndexClient) { +async function listSynonymMaps(client: SearchIndexClient): Promise { console.log(`List SynonymMaps Operation`); const listOfSynonymMaps: Array = await client.listSynonymMaps(); console.log(`List of SynonymMaps`); console.log(`*******************`); - for (let sm of listOfSynonymMaps) { + for (const sm of listOfSynonymMaps) { console.log(`Name: ${sm.name}`); console.log(`Synonyms`); - for (let synonym of sm.synonyms) { + for (const synonym of sm.synonyms) { console.log(synonym); } } } -async function deleteSynonymMap(synonymMapName: 
string, client: SearchIndexClient) { +async function deleteSynonymMap(synonymMapName: string, client: SearchIndexClient): Promise { console.log(`Deleting SynonymMap Operation`); await client.deleteSynonymMap(synonymMapName); } -async function main() { +async function main(): Promise { console.log(`Running Index Operations Sample....`); if (!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); @@ -59,11 +62,11 @@ async function main() { } const client = new SearchIndexClient(endpoint, new AzureKeyCredential(apiKey)); try { - await createSynonymMap(synonymMapName, client); - await getAndUpdateSynonymMap(synonymMapName, client); + await createSynonymMap(TEST_SYNONYM_MAP_NAME, client); + await getAndUpdateSynonymMap(TEST_SYNONYM_MAP_NAME, client); await listSynonymMaps(client); } finally { - await deleteSynonymMap(synonymMapName, client); + await deleteSynonymMap(TEST_SYNONYM_MAP_NAME, client); } } diff --git a/sdk/search/search-documents/samples/v12-beta/typescript/src/vectorSearch.ts b/sdk/search/search-documents/samples/v12-beta/typescript/src/vectorSearch.ts index 7f1771a94898..c08d6831381d 100644 --- a/sdk/search/search-documents/samples/v12-beta/typescript/src/vectorSearch.ts +++ b/sdk/search/search-documents/samples/v12-beta/typescript/src/vectorSearch.ts @@ -7,12 +7,12 @@ import { AzureKeyCredential, - SearchClient, GeographyPoint, + SearchClient, SearchIndexClient, } from "@azure/search-documents"; -import { createIndex, WAIT_TIME, delay } from "./setup"; import { Hotel } from "./interfaces"; +import { createIndex, delay, WAIT_TIME } from "./setup"; import * as dotenv from "dotenv"; import { fancyStayEnVector, fancyStayFrVector, luxuryQueryVector } from "./vectors"; @@ -25,7 +25,7 @@ const endpoint = process.env.ENDPOINT || ""; const apiKey = process.env.SEARCH_API_ADMIN_KEY || ""; const TEST_INDEX_NAME = "example-index-sample-7"; -async function main() { +async function main(): Promise { if 
(!endpoint || !apiKey) { console.log("Make sure to set valid values for endpoint and apiKey with proper authorization."); return; @@ -36,7 +36,7 @@ async function main() { const searchClient: SearchClient = new SearchClient( endpoint, TEST_INDEX_NAME, - credential + credential, ); const indexClient: SearchIndexClient = new SearchIndexClient(endpoint, credential); @@ -81,30 +81,32 @@ async function main() { await delay(WAIT_TIME); const searchResults = await searchClient.search("*", { - vectorQueries: [ - { - kind: "vector", - fields: ["descriptionVectorEn"], - kNearestNeighborsCount: 3, - // An embedding of the query "What are the most luxurious hotels?" - vector: luxuryQueryVector, - }, - // Multi-vector search is supported - { - kind: "vector", - fields: ["descriptionVectorFr"], - kNearestNeighborsCount: 3, - vector: luxuryQueryVector, - }, - // The index can be configured with a vectorizer to generate text embeddings - // from a text query - { - kind: "text", - fields: ["descriptionVectorFr"], - kNearestNeighborsCount: 3, - text: "What are the most luxurious hotels?", - }, - ], + vectorSearchOptions: { + queries: [ + { + kind: "vector", + fields: ["descriptionVectorEn"], + kNearestNeighborsCount: 3, + // An embedding of the query "What are the most luxurious hotels?" 
+ vector: luxuryQueryVector, + }, + // Multi-vector search is supported + { + kind: "vector", + fields: ["descriptionVectorFr"], + kNearestNeighborsCount: 3, + vector: luxuryQueryVector, + }, + // The index can be configured with a vectorizer to generate text embeddings + // from a text query + { + kind: "text", + fields: ["descriptionVectorFr"], + kNearestNeighborsCount: 3, + text: "What are the most luxurious hotels?", + }, + ], + }, }); for await (const result of searchResults.results) { diff --git a/sdk/search/search-documents/samples/v12/javascript/sample.env b/sdk/search/search-documents/samples/v12/javascript/sample.env index 13954cec21bd..86f0916725d2 100644 --- a/sdk/search/search-documents/samples/v12/javascript/sample.env +++ b/sdk/search/search-documents/samples/v12/javascript/sample.env @@ -11,10 +11,10 @@ ENDPOINT= AZURE_OPENAI_ENDPOINT= # The key for the OpenAI service. -OPENAI_KEY= +AZURE_OPENAI_KEY= # The name of the OpenAI deployment you'd like your tests to use. -OPENAI_DEPLOYMENT_NAME= +AZURE_OPENAI_DEPLOYMENT_NAME= # Our tests assume that TEST_MODE is "playback" by default. You can # change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. diff --git a/sdk/search/search-documents/samples/v12/typescript/sample.env b/sdk/search/search-documents/samples/v12/typescript/sample.env index 13954cec21bd..86f0916725d2 100644 --- a/sdk/search/search-documents/samples/v12/typescript/sample.env +++ b/sdk/search/search-documents/samples/v12/typescript/sample.env @@ -11,10 +11,10 @@ ENDPOINT= AZURE_OPENAI_ENDPOINT= # The key for the OpenAI service. -OPENAI_KEY= +AZURE_OPENAI_KEY= # The name of the OpenAI deployment you'd like your tests to use. -OPENAI_DEPLOYMENT_NAME= +AZURE_OPENAI_DEPLOYMENT_NAME= # Our tests assume that TEST_MODE is "playback" by default. You can # change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. 
diff --git a/sdk/search/search-documents/scripts/generateSampleEmbeddings.ts b/sdk/search/search-documents/scripts/generateSampleEmbeddings.ts index e907782a02a3..e6e49ef8582b 100644 --- a/sdk/search/search-documents/scripts/generateSampleEmbeddings.ts +++ b/sdk/search/search-documents/scripts/generateSampleEmbeddings.ts @@ -29,8 +29,8 @@ const inputs = [ async function main() { const client = new OpenAIClient( - process.env.OPENAI_ENDPOINT!, - new AzureKeyCredential(process.env.OPENAI_KEY!) + process.env.AZURE_OPENAI_ENDPOINT!, + new AzureKeyCredential(process.env.AZURE_OPENAI_KEY!) ); const writeStream = createWriteStream(outputPath, { mode: 0o755 }); @@ -43,7 +43,7 @@ async function main() { const expressions = await Promise.all( inputs.map(async ({ ident, text, comment }) => { - const result = await client.getEmbeddings(process.env.OPENAI_DEPLOYMENT_NAME!, [text]); + const result = await client.getEmbeddings(process.env.AZURE_OPENAI_DEPLOYMENT_NAME!, [text]); const embedding = result.data[0].embedding; return `// ${comment}\nexport const ${ident} = [${embedding.toString()}];\n\n`; }) diff --git a/sdk/search/search-documents/src/constants.ts b/sdk/search/search-documents/src/constants.ts index feed49d24387..54e960727fd9 100644 --- a/sdk/search/search-documents/src/constants.ts +++ b/sdk/search/search-documents/src/constants.ts @@ -1,4 +1,4 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -export const SDK_VERSION: string = "12.0.0-beta.4"; +const SDK_VERSION: string = "12.1.0-beta.1"; diff --git a/sdk/search/search-documents/src/errorModels.ts b/sdk/search/search-documents/src/errorModels.ts new file mode 100644 index 000000000000..fa0dc909d9da --- /dev/null +++ b/sdk/search/search-documents/src/errorModels.ts @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +/** + * Common error response for all Azure Resource Manager APIs to return error details for failed + * operations. 
(This also follows the OData error response format.). + */ +export interface ErrorResponse { + /** The error object. */ + error?: ErrorDetail; +} + +/** The error detail. */ +export interface ErrorDetail { + /** + * The error code. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly code?: string; + /** + * The error message. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly message?: string; + /** + * The error target. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly target?: string; + /** + * The error details. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly details?: ErrorDetail[]; + /** + * The error additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly additionalInfo?: ErrorAdditionalInfo[]; +} + +/** The resource management error additional info. */ +export interface ErrorAdditionalInfo { + /** + * The additional info type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * The additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly info?: Record; +} diff --git a/sdk/search/search-documents/src/generated/data/models/index.ts b/sdk/search/search-documents/src/generated/data/models/index.ts index b47e7ca25446..4a59236c0149 100644 --- a/sdk/search/search-documents/src/generated/data/models/index.ts +++ b/sdk/search/search-documents/src/generated/data/models/index.ts @@ -11,32 +11,62 @@ import * as coreHttpCompat from "@azure/core-http-compat"; export type VectorQueryUnion = | VectorQuery - | RawVectorQuery + | VectorizedQuery | VectorizableTextQuery; -/** Describes an error condition for the Azure Cognitive Search API. 
*/ -export interface SearchError { +/** Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). */ +export interface ErrorResponse { + /** The error object. */ + error?: ErrorDetail; +} + +/** The error detail. */ +export interface ErrorDetail { /** - * One of a server-defined set of error codes. + * The error code. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly code?: string; /** - * A human-readable representation of the error. + * The error message. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly message?: string; + /** + * The error target. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly message: string; + readonly target?: string; /** - * An array of details about specific errors that led to this reported error. + * The error details. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly details?: SearchError[]; + readonly details?: ErrorDetail[]; + /** + * The error additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly additionalInfo?: ErrorAdditionalInfo[]; +} + +/** The resource management error additional info. */ +export interface ErrorAdditionalInfo { + /** + * The additional info type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * The additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly info?: Record; } /** Response containing search results from an index. */ export interface SearchDocumentsResult { /** - * The total count of results found by the search operation, or null if the count was not requested. 
If present, the count may be greater than the number of results in this response. This can happen if you use the $top or $skip parameters, or if Azure Cognitive Search can't return all the requested documents in a single Search response. + * The total count of results found by the search operation, or null if the count was not requested. If present, the count may be greater than the number of results in this response. This can happen if you use the $top or $skip parameters, or if the query can't return all the requested documents in a single response. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly count?: number; @@ -54,29 +84,29 @@ export interface SearchDocumentsResult { * The answers query results for the search operation; null if the answers query parameter was not specified or set to 'none'. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly answers?: AnswerResult[]; + readonly answers?: QueryAnswerResult[]; /** - * Continuation JSON payload returned when Azure Cognitive Search can't return all the requested results in a single Search response. You can use this JSON along with @odata.nextLink to formulate another POST Search request to get the next part of the search response. + * Continuation JSON payload returned when the query can't return all the requested results in a single response. You can use this JSON along with @odata.nextLink to formulate another POST Search request to get the next part of the search response. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly nextPageParameters?: SearchRequest; /** - * Reason that a partial response was returned for a semantic search request. + * Reason that a partial response was returned for a semantic ranking request. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ - readonly semanticPartialResponseReason?: SemanticPartialResponseReason; + readonly semanticPartialResponseReason?: SemanticErrorReason; /** - * Type of partial response that was returned for a semantic search request. + * Type of partial response that was returned for a semantic ranking request. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly semanticPartialResponseType?: SemanticPartialResponseType; + readonly semanticPartialResponseType?: SemanticSearchResultsType; /** * The sequence of results returned by the query. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly results: SearchResult[]; /** - * Continuation URL returned when Azure Cognitive Search can't return all the requested results in a single Search response. You can use this URL to formulate another GET or POST Search request to get the next part of the search response. Make sure to use the same verb (GET or POST) as the request that produced this response. + * Continuation URL returned when the query can't return all the requested results in a single response. You can use this URL to formulate another GET or POST Search request to get the next part of the search response. Make sure to use the same verb (GET or POST) as the request that produced this response. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly nextLink?: string; @@ -94,7 +124,7 @@ export interface FacetResult { } /** An answer is a text passage extracted from the contents of the most relevant documents that matched the query. Answers are extracted from the top search results. Answer candidates are scored and the top answers are selected. */ -export interface AnswerResult { +export interface QueryAnswerResult { /** Describes unknown properties. The value of an unknown property can be of "any" type. 
*/ [property: string]: any; /** @@ -150,12 +180,12 @@ export interface SearchRequest { /** Allows setting a separate search query that will be solely used for semantic reranking, semantic captions and semantic answers. Is useful for scenarios where there is a need to use different queries between the base retrieval and ranking phase, and the L2 semantic phase. */ semanticQuery?: string; /** The name of a semantic configuration that will be used when processing documents for queries of type semantic. */ - semanticConfiguration?: string; + semanticConfigurationName?: string; /** Allows the user to choose whether a semantic call should fail completely, or to return partial results (default). */ - semanticErrorHandling?: SemanticErrorHandling; + semanticErrorHandling?: SemanticErrorMode; /** Allows the user to set an upper bound on the amount of time it takes for semantic enrichment to finish processing before the request fails. */ semanticMaxWaitInMilliseconds?: number; - /** Enables a debugging tool that can be used to further explore your Semantic search results. */ + /** Enables a debugging tool that can be used to further explore your reranked results. */ debug?: QueryDebugMode; /** A full-text search query expression; Use "*" or omit this parameter to match all documents. */ searchText?: string; @@ -177,7 +207,7 @@ export interface SearchRequest { top?: number; /** A value that specifies whether captions should be returned as part of the search response. */ captions?: QueryCaptionType; - /** The comma-separated list of field names used for semantic search. */ + /** The comma-separated list of field names used for semantic ranking. */ semanticFields?: string; /** The query parameters for vector and hybrid search queries. */ vectorQueries?: VectorQueryUnion[]; @@ -195,6 +225,8 @@ export interface VectorQuery { fields?: string; /** When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. 
Useful for scenarios where exact matches are critical, such as determining ground truth values. */ exhaustive?: boolean; + /** Oversampling factor. Minimum value is 1. It overrides the 'defaultOversampling' parameter configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is true. This parameter is only permitted when a compression method is used on the underlying vector field. */ + oversampling?: number; } /** Contains a document found by a search query, plus associated metadata. */ @@ -210,7 +242,7 @@ export interface SearchResult { * The relevance score computed by the semantic ranker for the top search results. Search results are sorted by the RerankerScore first and then by the Score. RerankerScore is only returned for queries of type 'semantic'. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly rerankerScore?: number; + readonly _rerankerScore?: number; /** * Text fragments from the document that indicate the matching search terms, organized by each applicable field; null if hit highlighting was not enabled for the query. * NOTE: This property will not be serialized. It can only be populated by the server. @@ -220,7 +252,7 @@ export interface SearchResult { * Captions are the most representative passages from the document relatively to the search query. They are often used as document summary. Captions are only returned for queries of type 'semantic'. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly captions?: CaptionResult[]; + readonly _captions?: QueryCaptionResult[]; /** * Contains debugging information that can be used to further explore your search results. * NOTE: This property will not be serialized. It can only be populated by the server. @@ -229,7 +261,7 @@ export interface SearchResult { } /** Captions are the most representative passages from the document relatively to the search query. 
They are often used as document summary. Captions are only returned for queries of type 'semantic'.. */ -export interface CaptionResult { +export interface QueryCaptionResult { /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** @@ -247,7 +279,7 @@ export interface CaptionResult { /** Contains debugging information that can be used to further explore your search results. */ export interface DocumentDebugInfo { /** - * Contains debugging information specific to semantic search queries. + * Contains debugging information specific to semantic ranking requests. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly semantic?: SemanticDebugInfo; @@ -460,20 +492,20 @@ export interface AutocompleteRequest { } /** The query parameters to use for vector search when a raw vector value is provided. */ -export type RawVectorQuery = VectorQuery & { +export interface VectorizedQuery extends VectorQuery { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "vector"; /** The vector representation of a search query. */ - vector?: number[]; -}; + vector: number[]; +} /** The query parameters to use for vector search when a text value that needs to be vectorized is provided. */ -export type VectorizableTextQuery = VectorQuery & { +export interface VectorizableTextQuery extends VectorQuery { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "text"; /** The text to be vectorized to perform a vector search query. 
*/ - text?: string; -}; + text: string; +} /** Parameter group */ export interface SearchOptions { @@ -504,7 +536,7 @@ export interface SearchOptions { /** The name of the semantic configuration that lists which fields should be used for semantic ranking, captions, highlights, and answers */ semanticConfiguration?: string; /** Allows the user to choose whether a semantic call should fail completely, or to return partial results (default). */ - semanticErrorHandling?: SemanticErrorHandling; + semanticErrorHandling?: SemanticErrorMode; /** Allows the user to set an upper bound on the amount of time it takes for semantic enrichment to finish processing before the request fails. */ semanticMaxWaitInMilliseconds?: number; /** Enables a debugging tool that can be used to further explore your search results. */ @@ -515,8 +547,8 @@ export interface SearchOptions { queryLanguage?: QueryLanguage; /** Improve search recall by spell-correcting individual search query terms. */ speller?: Speller; - /** This parameter is only valid if the query type is 'semantic'. If set, the query returns answers extracted from key passages in the highest ranked documents. The number of answers returned can be configured by appending the pipe character '|' followed by the 'count-' option after the answers parameter value, such as 'extractive|count-3'. Default count is 1. The confidence threshold can be configured by appending the pipe character '|' followed by the 'threshold-' option after the answers parameter value, such as 'extractive|threshold-0.9'. Default threshold is 0.7. */ - answers?: Answers; + /** This parameter is only valid if the query type is `semantic`. If set, the query returns answers extracted from key passages in the highest ranked documents. The number of answers returned can be configured by appending the pipe character `|` followed by the `count-` option after the answers parameter value, such as `extractive|count-3`. Default count is 1. 
The confidence threshold can be configured by appending the pipe character `|` followed by the `threshold-` option after the answers parameter value, such as `extractive|threshold-0.9`. Default threshold is 0.7. */ + answers?: QueryAnswerType; /** A value that specifies whether any or all of the search terms must be matched in order to count the document as a match. */ searchMode?: SearchMode; /** A value that specifies whether we want to calculate scoring statistics (such as document frequency) globally for more consistent scoring, or locally, for lower latency. */ @@ -529,9 +561,9 @@ export interface SearchOptions { skip?: number; /** The number of search results to retrieve. This can be used in conjunction with $skip to implement client-side paging of search results. If results are truncated due to server-side paging, the response will include a continuation token that can be used to issue another Search request for the next page of results. */ top?: number; - /** This parameter is only valid if the query type is 'semantic'. If set, the query returns captions extracted from key passages in the highest ranked documents. When Captions is set to 'extractive', highlighting is enabled by default, and can be configured by appending the pipe character '|' followed by the 'highlight-' option, such as 'extractive|highlight-true'. Defaults to 'None'. */ - captions?: Captions; - /** The list of field names used for semantic search. */ + /** This parameter is only valid if the query type is `semantic`. If set, the query returns captions extracted from key passages in the highest ranked documents. When Captions is set to `extractive`, highlighting is enabled by default, and can be configured by appending the pipe character `|` followed by the `highlight-` option, such as `extractive|highlight-true`. Defaults to `None`. */ + captions?: QueryCaptionType; + /** The list of field names used for semantic ranking. 
*/ semanticFields?: string[]; } @@ -577,45 +609,45 @@ export interface AutocompleteOptions { top?: number; } -/** Known values of {@link ApiVersion20231001Preview} that the service accepts. */ -export enum KnownApiVersion20231001Preview { - /** Api Version '2023-10-01-Preview' */ - TwoThousandTwentyThree1001Preview = "2023-10-01-Preview" +/** Known values of {@link ApiVersion20240301Preview} that the service accepts. */ +export enum KnownApiVersion20240301Preview { + /** Api Version '2024-03-01-Preview' */ + TwoThousandTwentyFour0301Preview = "2024-03-01-Preview", } /** - * Defines values for ApiVersion20231001Preview. \ - * {@link KnownApiVersion20231001Preview} can be used interchangeably with ApiVersion20231001Preview, + * Defines values for ApiVersion20240301Preview. \ + * {@link KnownApiVersion20240301Preview} can be used interchangeably with ApiVersion20240301Preview, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **2023-10-01-Preview**: Api Version '2023-10-01-Preview' + * **2024-03-01-Preview**: Api Version '2024-03-01-Preview' */ -export type ApiVersion20231001Preview = string; +export type ApiVersion20240301Preview = string; -/** Known values of {@link SemanticErrorHandling} that the service accepts. */ -export enum KnownSemanticErrorHandling { +/** Known values of {@link SemanticErrorMode} that the service accepts. */ +export enum KnownSemanticErrorMode { /** If the semantic processing fails, partial results still return. The definition of partial results depends on what semantic step failed and what was the reason for failure. */ Partial = "partial", /** If there is an exception during the semantic processing step, the query will fail and return the appropriate HTTP code depending on the error. */ - Fail = "fail" + Fail = "fail", } /** - * Defines values for SemanticErrorHandling. 
\ - * {@link KnownSemanticErrorHandling} can be used interchangeably with SemanticErrorHandling, + * Defines values for SemanticErrorMode. \ + * {@link KnownSemanticErrorMode} can be used interchangeably with SemanticErrorMode, * this enum contains the known values that the service supports. * ### Known values supported by the service * **partial**: If the semantic processing fails, partial results still return. The definition of partial results depends on what semantic step failed and what was the reason for failure. \ * **fail**: If there is an exception during the semantic processing step, the query will fail and return the appropriate HTTP code depending on the error. */ -export type SemanticErrorHandling = string; +export type SemanticErrorMode = string; /** Known values of {@link QueryDebugMode} that the service accepts. */ export enum KnownQueryDebugMode { /** No query debugging information will be returned. */ Disabled = "disabled", - /** Allows the user to further explore their Semantic search results. */ - Semantic = "semantic" + /** Allows the user to further explore their reranked results. */ + Semantic = "semantic", } /** @@ -624,7 +656,7 @@ export enum KnownQueryDebugMode { * this enum contains the known values that the service supports. * ### Known values supported by the service * **disabled**: No query debugging information will be returned. \ - * **semantic**: Allows the user to further explore their Semantic search results. + * **semantic**: Allows the user to further explore their reranked results. */ export type QueryDebugMode = string; @@ -732,7 +764,7 @@ export enum KnownQueryLanguage { LvLv = "lv-lv", /** Query language value for Estonian (Estonia). */ EtEe = "et-ee", - /** Query language value for Catalan (Spain). */ + /** Query language value for Catalan. */ CaEs = "ca-es", /** Query language value for Finnish (Finland). 
*/ FiFi = "fi-fi", @@ -750,9 +782,9 @@ export enum KnownQueryLanguage { HyAm = "hy-am", /** Query language value for Bengali (India). */ BnIn = "bn-in", - /** Query language value for Basque (Spain). */ + /** Query language value for Basque. */ EuEs = "eu-es", - /** Query language value for Galician (Spain). */ + /** Query language value for Galician. */ GlEs = "gl-es", /** Query language value for Gujarati (India). */ GuIn = "gu-in", @@ -773,7 +805,7 @@ export enum KnownQueryLanguage { /** Query language value for Telugu (India). */ TeIn = "te-in", /** Query language value for Urdu (Pakistan). */ - UrPk = "ur-pk" + UrPk = "ur-pk", } /** @@ -832,7 +864,7 @@ export enum KnownQueryLanguage { * **uk-ua**: Query language value for Ukrainian (Ukraine). \ * **lv-lv**: Query language value for Latvian (Latvia). \ * **et-ee**: Query language value for Estonian (Estonia). \ - * **ca-es**: Query language value for Catalan (Spain). \ + * **ca-es**: Query language value for Catalan. \ * **fi-fi**: Query language value for Finnish (Finland). \ * **sr-ba**: Query language value for Serbian (Bosnia and Herzegovina). \ * **sr-me**: Query language value for Serbian (Montenegro). \ @@ -841,8 +873,8 @@ export enum KnownQueryLanguage { * **nb-no**: Query language value for Norwegian (Norway). \ * **hy-am**: Query language value for Armenian (Armenia). \ * **bn-in**: Query language value for Bengali (India). \ - * **eu-es**: Query language value for Basque (Spain). \ - * **gl-es**: Query language value for Galician (Spain). \ + * **eu-es**: Query language value for Basque. \ + * **gl-es**: Query language value for Galician. \ * **gu-in**: Query language value for Gujarati (India). \ * **he-il**: Query language value for Hebrew (Israel). \ * **ga-ie**: Query language value for Irish (Ireland). \ @@ -861,7 +893,7 @@ export enum KnownSpeller { /** Speller not enabled. 
*/ None = "none", /** Speller corrects individual query terms using a static lexicon for the language specified by the queryLanguage parameter. */ - Lexicon = "lexicon" + Lexicon = "lexicon", } /** @@ -874,48 +906,48 @@ export enum KnownSpeller { */ export type Speller = string; -/** Known values of {@link Answers} that the service accepts. */ -export enum KnownAnswers { +/** Known values of {@link QueryAnswerType} that the service accepts. */ +export enum KnownQueryAnswerType { /** Do not return answers for the query. */ None = "none", /** Extracts answer candidates from the contents of the documents returned in response to a query expressed as a question in natural language. */ - Extractive = "extractive" + Extractive = "extractive", } /** - * Defines values for Answers. \ - * {@link KnownAnswers} can be used interchangeably with Answers, + * Defines values for QueryAnswerType. \ + * {@link KnownQueryAnswerType} can be used interchangeably with QueryAnswerType, * this enum contains the known values that the service supports. * ### Known values supported by the service * **none**: Do not return answers for the query. \ * **extractive**: Extracts answer candidates from the contents of the documents returned in response to a query expressed as a question in natural language. */ -export type Answers = string; +export type QueryAnswerType = string; -/** Known values of {@link Captions} that the service accepts. */ -export enum KnownCaptions { +/** Known values of {@link QueryCaptionType} that the service accepts. */ +export enum KnownQueryCaptionType { /** Do not return captions for the query. */ None = "none", /** Extracts captions from the matching documents that contain passages relevant to the search query. */ - Extractive = "extractive" + Extractive = "extractive", } /** - * Defines values for Captions. \ - * {@link KnownCaptions} can be used interchangeably with Captions, + * Defines values for QueryCaptionType. 
\ + * {@link KnownQueryCaptionType} can be used interchangeably with QueryCaptionType, * this enum contains the known values that the service supports. * ### Known values supported by the service * **none**: Do not return captions for the query. \ * **extractive**: Extracts captions from the matching documents that contain passages relevant to the search query. */ -export type Captions = string; +export type QueryCaptionType = string; /** Known values of {@link QuerySpellerType} that the service accepts. */ export enum KnownQuerySpellerType { /** Speller not enabled. */ None = "none", /** Speller corrects individual query terms using a static lexicon for the language specified by the queryLanguage parameter. */ - Lexicon = "lexicon" + Lexicon = "lexicon", } /** @@ -928,48 +960,12 @@ export enum KnownQuerySpellerType { */ export type QuerySpellerType = string; -/** Known values of {@link QueryAnswerType} that the service accepts. */ -export enum KnownQueryAnswerType { - /** Do not return answers for the query. */ - None = "none", - /** Extracts answer candidates from the contents of the documents returned in response to a query expressed as a question in natural language. */ - Extractive = "extractive" -} - -/** - * Defines values for QueryAnswerType. \ - * {@link KnownQueryAnswerType} can be used interchangeably with QueryAnswerType, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **none**: Do not return answers for the query. \ - * **extractive**: Extracts answer candidates from the contents of the documents returned in response to a query expressed as a question in natural language. - */ -export type QueryAnswerType = string; - -/** Known values of {@link QueryCaptionType} that the service accepts. */ -export enum KnownQueryCaptionType { - /** Do not return captions for the query. 
*/ - None = "none", - /** Extracts captions from the matching documents that contain passages relevant to the search query. */ - Extractive = "extractive" -} - -/** - * Defines values for QueryCaptionType. \ - * {@link KnownQueryCaptionType} can be used interchangeably with QueryCaptionType, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **none**: Do not return captions for the query. \ - * **extractive**: Extracts captions from the matching documents that contain passages relevant to the search query. - */ -export type QueryCaptionType = string; - /** Known values of {@link VectorQueryKind} that the service accepts. */ export enum KnownVectorQueryKind { /** Vector query where a raw vector value is provided. */ Vector = "vector", /** Vector query where a text value that needs to be vectorized is provided. */ - $DO_NOT_NORMALIZE$_text = "text" + $DO_NOT_NORMALIZE$_text = "text", } /** @@ -987,7 +983,7 @@ export enum KnownVectorFilterMode { /** The filter will be applied after the candidate set of vector results is returned. Depending on the filter selectivity, this can result in fewer results than requested by the parameter 'k'. */ PostFilter = "postFilter", /** The filter will be applied before the search query. */ - PreFilter = "preFilter" + PreFilter = "preFilter", } /** @@ -1000,44 +996,44 @@ export enum KnownVectorFilterMode { */ export type VectorFilterMode = string; -/** Known values of {@link SemanticPartialResponseReason} that the service accepts. */ -export enum KnownSemanticPartialResponseReason { +/** Known values of {@link SemanticErrorReason} that the service accepts. */ +export enum KnownSemanticErrorReason { /** If 'semanticMaxWaitInMilliseconds' was set and the semantic processing duration exceeded that value. Only the base results were returned. */ MaxWaitExceeded = "maxWaitExceeded", /** The request was throttled. Only the base results were returned. 
*/ CapacityOverloaded = "capacityOverloaded", /** At least one step of the semantic process failed. */ - Transient = "transient" + Transient = "transient", } /** - * Defines values for SemanticPartialResponseReason. \ - * {@link KnownSemanticPartialResponseReason} can be used interchangeably with SemanticPartialResponseReason, + * Defines values for SemanticErrorReason. \ + * {@link KnownSemanticErrorReason} can be used interchangeably with SemanticErrorReason, * this enum contains the known values that the service supports. * ### Known values supported by the service * **maxWaitExceeded**: If 'semanticMaxWaitInMilliseconds' was set and the semantic processing duration exceeded that value. Only the base results were returned. \ * **capacityOverloaded**: The request was throttled. Only the base results were returned. \ * **transient**: At least one step of the semantic process failed. */ -export type SemanticPartialResponseReason = string; +export type SemanticErrorReason = string; -/** Known values of {@link SemanticPartialResponseType} that the service accepts. */ -export enum KnownSemanticPartialResponseType { +/** Known values of {@link SemanticSearchResultsType} that the service accepts. */ +export enum KnownSemanticSearchResultsType { /** Results without any semantic enrichment or reranking. */ BaseResults = "baseResults", /** Results have been reranked with the reranker model and will include semantic captions. They will not include any answers, answers highlights or caption highlights. */ - RerankedResults = "rerankedResults" + RerankedResults = "rerankedResults", } /** - * Defines values for SemanticPartialResponseType. \ - * {@link KnownSemanticPartialResponseType} can be used interchangeably with SemanticPartialResponseType, + * Defines values for SemanticSearchResultsType. \ + * {@link KnownSemanticSearchResultsType} can be used interchangeably with SemanticSearchResultsType, * this enum contains the known values that the service supports. 
* ### Known values supported by the service * **baseResults**: Results without any semantic enrichment or reranking. \ * **rerankedResults**: Results have been reranked with the reranker model and will include semantic captions. They will not include any answers, answers highlights or caption highlights. */ -export type SemanticPartialResponseType = string; +export type SemanticSearchResultsType = string; /** Known values of {@link SemanticFieldState} that the service accepts. */ export enum KnownSemanticFieldState { @@ -1046,7 +1042,7 @@ export enum KnownSemanticFieldState { /** The field was not used for semantic enrichment. */ Unused = "unused", /** The field was partially used for semantic enrichment. */ - Partial = "partial" + Partial = "partial", } /** diff --git a/sdk/search/search-documents/src/generated/data/models/mappers.ts b/sdk/search/search-documents/src/generated/data/models/mappers.ts index 55e0804bd400..ea5538d25ccb 100644 --- a/sdk/search/search-documents/src/generated/data/models/mappers.ts +++ b/sdk/search/search-documents/src/generated/data/models/mappers.ts @@ -8,25 +8,47 @@ import * as coreClient from "@azure/core-client"; -export const SearchError: coreClient.CompositeMapper = { +export const ErrorResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SearchError", + className: "ErrorResponse", + modelProperties: { + error: { + serializedName: "error", + type: { + name: "Composite", + className: "ErrorDetail", + }, + }, + }, + }, +}; + +export const ErrorDetail: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorDetail", modelProperties: { code: { serializedName: "code", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, message: { serializedName: "message", - required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, + }, + target: { + serializedName: "target", + readOnly: true, + type: { + name: "String", + }, }, details: { serializedName: 
"details", @@ -36,13 +58,50 @@ export const SearchError: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchError" - } - } - } - } - } - } + className: "ErrorDetail", + }, + }, + }, + }, + additionalInfo: { + serializedName: "additionalInfo", + readOnly: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ErrorAdditionalInfo", + }, + }, + }, + }, + }, + }, +}; + +export const ErrorAdditionalInfo: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorAdditionalInfo", + modelProperties: { + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String", + }, + }, + info: { + serializedName: "info", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "any" } }, + }, + }, + }, + }, }; export const SearchDocumentsResult: coreClient.CompositeMapper = { @@ -54,15 +113,15 @@ export const SearchDocumentsResult: coreClient.CompositeMapper = { serializedName: "@odata\\.count", readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, coverage: { serializedName: "@search\\.coverage", readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, facets: { serializedName: "@search\\.facets", @@ -72,10 +131,12 @@ export const SearchDocumentsResult: coreClient.CompositeMapper = { value: { type: { name: "Sequence", - element: { type: { name: "Composite", className: "FacetResult" } } - } - } - } + element: { + type: { name: "Composite", className: "FacetResult" }, + }, + }, + }, + }, }, answers: { serializedName: "@search\\.answers", @@ -86,31 +147,31 @@ export const SearchDocumentsResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "AnswerResult" - } - } - } + className: "QueryAnswerResult", + }, + }, + }, }, nextPageParameters: { serializedName: "@search\\.nextPageParameters", type: { name: "Composite", - className: "SearchRequest" - } + className: "SearchRequest", + }, }, 
semanticPartialResponseReason: { serializedName: "@search\\.semanticPartialResponseReason", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, semanticPartialResponseType: { serializedName: "@search\\.semanticPartialResponseType", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, results: { serializedName: "value", @@ -121,20 +182,20 @@ export const SearchDocumentsResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchResult" - } - } - } + className: "SearchResult", + }, + }, + }, }, nextLink: { serializedName: "@odata\\.nextLink", readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const FacetResult: coreClient.CompositeMapper = { @@ -147,17 +208,17 @@ export const FacetResult: coreClient.CompositeMapper = { serializedName: "count", readOnly: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; -export const AnswerResult: coreClient.CompositeMapper = { +export const QueryAnswerResult: coreClient.CompositeMapper = { type: { name: "Composite", - className: "AnswerResult", + className: "QueryAnswerResult", additionalProperties: { type: { name: "Object" } }, modelProperties: { score: { @@ -165,35 +226,35 @@ export const AnswerResult: coreClient.CompositeMapper = { required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, key: { serializedName: "key", required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, }, text: { serializedName: "text", required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, }, highlights: { serializedName: "highlights", readOnly: true, nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SearchRequest: coreClient.CompositeMapper = { @@ -204,8 +265,8 @@ export const SearchRequest: coreClient.CompositeMapper = { includeTotalResultCount: { 
serializedName: "count", type: { - name: "Boolean" - } + name: "Boolean", + }, }, facets: { serializedName: "facets", @@ -213,66 +274,66 @@ export const SearchRequest: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, filter: { serializedName: "filter", type: { - name: "String" - } + name: "String", + }, }, highlightFields: { serializedName: "highlight", type: { - name: "String" - } + name: "String", + }, }, highlightPostTag: { serializedName: "highlightPostTag", type: { - name: "String" - } + name: "String", + }, }, highlightPreTag: { serializedName: "highlightPreTag", type: { - name: "String" - } + name: "String", + }, }, minimumCoverage: { serializedName: "minimumCoverage", type: { - name: "Number" - } + name: "Number", + }, }, orderBy: { serializedName: "orderby", type: { - name: "String" - } + name: "String", + }, }, queryType: { serializedName: "queryType", type: { name: "Enum", - allowedValues: ["simple", "full", "semantic"] - } + allowedValues: ["simple", "full", "semantic"], + }, }, scoringStatistics: { serializedName: "scoringStatistics", type: { name: "Enum", - allowedValues: ["local", "global"] - } + allowedValues: ["local", "global"], + }, }, sessionId: { serializedName: "sessionId", type: { - name: "String" - } + name: "String", + }, }, scoringParameters: { serializedName: "scoringParameters", @@ -280,117 +341,117 @@ export const SearchRequest: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, scoringProfile: { serializedName: "scoringProfile", type: { - name: "String" - } + name: "String", + }, }, semanticQuery: { serializedName: "semanticQuery", type: { - name: "String" - } + name: "String", + }, }, - semanticConfiguration: { + semanticConfigurationName: { serializedName: "semanticConfiguration", type: { - name: "String" - } + name: "String", + }, }, semanticErrorHandling: { 
serializedName: "semanticErrorHandling", type: { - name: "String" - } + name: "String", + }, }, semanticMaxWaitInMilliseconds: { constraints: { - InclusiveMinimum: 700 + InclusiveMinimum: 700, }, serializedName: "semanticMaxWaitInMilliseconds", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, debug: { serializedName: "debug", type: { - name: "String" - } + name: "String", + }, }, searchText: { serializedName: "search", type: { - name: "String" - } + name: "String", + }, }, searchFields: { serializedName: "searchFields", type: { - name: "String" - } + name: "String", + }, }, searchMode: { serializedName: "searchMode", type: { name: "Enum", - allowedValues: ["any", "all"] - } + allowedValues: ["any", "all"], + }, }, queryLanguage: { serializedName: "queryLanguage", type: { - name: "String" - } + name: "String", + }, }, speller: { serializedName: "speller", type: { - name: "String" - } + name: "String", + }, }, answers: { serializedName: "answers", type: { - name: "String" - } + name: "String", + }, }, select: { serializedName: "select", type: { - name: "String" - } + name: "String", + }, }, skip: { serializedName: "skip", type: { - name: "Number" - } + name: "Number", + }, }, top: { serializedName: "top", type: { - name: "Number" - } + name: "Number", + }, }, captions: { serializedName: "captions", type: { - name: "String" - } + name: "String", + }, }, semanticFields: { serializedName: "semanticFields", type: { - name: "String" - } + name: "String", + }, }, vectorQueries: { serializedName: "vectorQueries", @@ -399,19 +460,19 @@ export const SearchRequest: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "VectorQuery" - } - } - } + className: "VectorQuery", + }, + }, + }, }, vectorFilterMode: { serializedName: "vectorFilterMode", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const VectorQuery: coreClient.CompositeMapper = { @@ -421,36 +482,42 @@ export const VectorQuery: 
coreClient.CompositeMapper = { uberParent: "VectorQuery", polymorphicDiscriminator: { serializedName: "kind", - clientName: "kind" + clientName: "kind", }, modelProperties: { kind: { serializedName: "kind", required: true, type: { - name: "String" - } + name: "String", + }, }, kNearestNeighborsCount: { serializedName: "k", type: { - name: "Number" - } + name: "Number", + }, }, fields: { serializedName: "fields", type: { - name: "String" - } + name: "String", + }, }, exhaustive: { serializedName: "exhaustive", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + oversampling: { + serializedName: "oversampling", + type: { + name: "Number", + }, + }, + }, + }, }; export const SearchResult: coreClient.CompositeMapper = { @@ -464,16 +531,16 @@ export const SearchResult: coreClient.CompositeMapper = { required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, - rerankerScore: { + _rerankerScore: { serializedName: "@search\\.rerankerScore", readOnly: true, nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, _highlights: { serializedName: "@search\\.highlights", @@ -481,11 +548,11 @@ export const SearchResult: coreClient.CompositeMapper = { type: { name: "Dictionary", value: { - type: { name: "Sequence", element: { type: { name: "String" } } } - } - } + type: { name: "Sequence", element: { type: { name: "String" } } }, + }, + }, }, - captions: { + _captions: { serializedName: "@search\\.captions", readOnly: true, nullable: true, @@ -494,10 +561,10 @@ export const SearchResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "CaptionResult" - } - } - } + className: "QueryCaptionResult", + }, + }, + }, }, documentDebugInfo: { serializedName: "@search\\.documentDebugInfo", @@ -508,38 +575,38 @@ export const SearchResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "DocumentDebugInfo" - } - } - } - } - } - } + className: 
"DocumentDebugInfo", + }, + }, + }, + }, + }, + }, }; -export const CaptionResult: coreClient.CompositeMapper = { +export const QueryCaptionResult: coreClient.CompositeMapper = { type: { name: "Composite", - className: "CaptionResult", + className: "QueryCaptionResult", additionalProperties: { type: { name: "Object" } }, modelProperties: { text: { serializedName: "text", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, highlights: { serializedName: "highlights", readOnly: true, nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const DocumentDebugInfo: coreClient.CompositeMapper = { @@ -551,11 +618,11 @@ export const DocumentDebugInfo: coreClient.CompositeMapper = { serializedName: "semantic", type: { name: "Composite", - className: "SemanticDebugInfo" - } - } - } - } + className: "SemanticDebugInfo", + }, + }, + }, + }, }; export const SemanticDebugInfo: coreClient.CompositeMapper = { @@ -567,8 +634,8 @@ export const SemanticDebugInfo: coreClient.CompositeMapper = { serializedName: "titleField", type: { name: "Composite", - className: "QueryResultDocumentSemanticField" - } + className: "QueryResultDocumentSemanticField", + }, }, contentFields: { serializedName: "contentFields", @@ -578,10 +645,10 @@ export const SemanticDebugInfo: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "QueryResultDocumentSemanticField" - } - } - } + className: "QueryResultDocumentSemanticField", + }, + }, + }, }, keywordFields: { serializedName: "keywordFields", @@ -591,20 +658,20 @@ export const SemanticDebugInfo: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "QueryResultDocumentSemanticField" - } - } - } + className: "QueryResultDocumentSemanticField", + }, + }, + }, }, rerankerInput: { serializedName: "rerankerInput", type: { name: "Composite", - className: "QueryResultDocumentRerankerInput" - } - } - } - } + className: 
"QueryResultDocumentRerankerInput", + }, + }, + }, + }, }; export const QueryResultDocumentSemanticField: coreClient.CompositeMapper = { @@ -616,18 +683,18 @@ export const QueryResultDocumentSemanticField: coreClient.CompositeMapper = { serializedName: "name", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, state: { serializedName: "state", readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const QueryResultDocumentRerankerInput: coreClient.CompositeMapper = { @@ -639,25 +706,25 @@ export const QueryResultDocumentRerankerInput: coreClient.CompositeMapper = { serializedName: "title", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, content: { serializedName: "content", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, keywords: { serializedName: "keywords", readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SuggestDocumentsResult: coreClient.CompositeMapper = { @@ -674,20 +741,20 @@ export const SuggestDocumentsResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SuggestResult" - } - } - } + className: "SuggestResult", + }, + }, + }, }, coverage: { serializedName: "@search\\.coverage", readOnly: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const SuggestResult: coreClient.CompositeMapper = { @@ -701,11 +768,11 @@ export const SuggestResult: coreClient.CompositeMapper = { required: true, readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SuggestRequest: coreClient.CompositeMapper = { @@ -716,73 +783,73 @@ export const SuggestRequest: coreClient.CompositeMapper = { filter: { serializedName: "filter", type: { - name: "String" - } + name: "String", + }, }, useFuzzyMatching: { serializedName: "fuzzy", type: { - name: "Boolean" - } + name: "Boolean", 
+ }, }, highlightPostTag: { serializedName: "highlightPostTag", type: { - name: "String" - } + name: "String", + }, }, highlightPreTag: { serializedName: "highlightPreTag", type: { - name: "String" - } + name: "String", + }, }, minimumCoverage: { serializedName: "minimumCoverage", type: { - name: "Number" - } + name: "Number", + }, }, orderBy: { serializedName: "orderby", type: { - name: "String" - } + name: "String", + }, }, searchText: { serializedName: "search", required: true, type: { - name: "String" - } + name: "String", + }, }, searchFields: { serializedName: "searchFields", type: { - name: "String" - } + name: "String", + }, }, select: { serializedName: "select", type: { - name: "String" - } + name: "String", + }, }, suggesterName: { serializedName: "suggesterName", required: true, type: { - name: "String" - } + name: "String", + }, }, top: { serializedName: "top", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const IndexBatch: coreClient.CompositeMapper = { @@ -798,13 +865,13 @@ export const IndexBatch: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "IndexAction" - } - } - } - } - } - } + className: "IndexAction", + }, + }, + }, + }, + }, + }, }; export const IndexAction: coreClient.CompositeMapper = { @@ -818,11 +885,11 @@ export const IndexAction: coreClient.CompositeMapper = { required: true, type: { name: "Enum", - allowedValues: ["upload", "merge", "mergeOrUpload", "delete"] - } - } - } - } + allowedValues: ["upload", "merge", "mergeOrUpload", "delete"], + }, + }, + }, + }, }; export const IndexDocumentsResult: coreClient.CompositeMapper = { @@ -839,13 +906,13 @@ export const IndexDocumentsResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "IndexingResult" - } - } - } - } - } - } + className: "IndexingResult", + }, + }, + }, + }, + }, + }, }; export const IndexingResult: coreClient.CompositeMapper = { @@ -858,34 +925,34 @@ export const 
IndexingResult: coreClient.CompositeMapper = { required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, }, errorMessage: { serializedName: "errorMessage", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, succeeded: { serializedName: "status", required: true, readOnly: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, statusCode: { serializedName: "statusCode", required: true, readOnly: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const AutocompleteResult: coreClient.CompositeMapper = { @@ -897,8 +964,8 @@ export const AutocompleteResult: coreClient.CompositeMapper = { serializedName: "@search\\.coverage", readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, results: { serializedName: "value", @@ -909,13 +976,13 @@ export const AutocompleteResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "AutocompleteItem" - } - } - } - } - } - } + className: "AutocompleteItem", + }, + }, + }, + }, + }, + }, }; export const AutocompleteItem: coreClient.CompositeMapper = { @@ -928,19 +995,19 @@ export const AutocompleteItem: coreClient.CompositeMapper = { required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, }, queryPlusText: { serializedName: "queryPlusText", required: true, readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AutocompleteRequest: coreClient.CompositeMapper = { @@ -952,91 +1019,92 @@ export const AutocompleteRequest: coreClient.CompositeMapper = { serializedName: "search", required: true, type: { - name: "String" - } + name: "String", + }, }, autocompleteMode: { serializedName: "autocompleteMode", type: { name: "Enum", - allowedValues: ["oneTerm", "twoTerms", "oneTermWithContext"] - } + allowedValues: ["oneTerm", "twoTerms", "oneTermWithContext"], + }, }, filter: { serializedName: "filter", type: { - name: 
"String" - } + name: "String", + }, }, useFuzzyMatching: { serializedName: "fuzzy", type: { - name: "Boolean" - } + name: "Boolean", + }, }, highlightPostTag: { serializedName: "highlightPostTag", type: { - name: "String" - } + name: "String", + }, }, highlightPreTag: { serializedName: "highlightPreTag", type: { - name: "String" - } + name: "String", + }, }, minimumCoverage: { serializedName: "minimumCoverage", type: { - name: "Number" - } + name: "Number", + }, }, searchFields: { serializedName: "searchFields", type: { - name: "String" - } + name: "String", + }, }, suggesterName: { serializedName: "suggesterName", required: true, type: { - name: "String" - } + name: "String", + }, }, top: { serializedName: "top", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; -export const RawVectorQuery: coreClient.CompositeMapper = { +export const VectorizedQuery: coreClient.CompositeMapper = { serializedName: "vector", type: { name: "Composite", - className: "RawVectorQuery", + className: "VectorizedQuery", uberParent: "VectorQuery", polymorphicDiscriminator: VectorQuery.type.polymorphicDiscriminator, modelProperties: { ...VectorQuery.type.modelProperties, vector: { serializedName: "vector", + required: true, type: { name: "Sequence", element: { type: { - name: "Number" - } - } - } - } - } - } + name: "Number", + }, + }, + }, + }, + }, + }, }; export const VectorizableTextQuery: coreClient.CompositeMapper = { @@ -1050,16 +1118,17 @@ export const VectorizableTextQuery: coreClient.CompositeMapper = { ...VectorQuery.type.modelProperties, text: { serializedName: "text", + required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export let discriminators = { VectorQuery: VectorQuery, - "VectorQuery.vector": RawVectorQuery, - "VectorQuery.text": VectorizableTextQuery + "VectorQuery.vector": VectorizedQuery, + "VectorQuery.text": VectorizableTextQuery, }; diff --git 
a/sdk/search/search-documents/src/generated/data/models/parameters.ts b/sdk/search/search-documents/src/generated/data/models/parameters.ts index c44b306974f9..81a3eb705736 100644 --- a/sdk/search/search-documents/src/generated/data/models/parameters.ts +++ b/sdk/search/search-documents/src/generated/data/models/parameters.ts @@ -9,13 +9,13 @@ import { OperationParameter, OperationURLParameter, - OperationQueryParameter + OperationQueryParameter, } from "@azure/core-client"; import { SearchRequest as SearchRequestMapper, SuggestRequest as SuggestRequestMapper, IndexBatch as IndexBatchMapper, - AutocompleteRequest as AutocompleteRequestMapper + AutocompleteRequest as AutocompleteRequestMapper, } from "../models/mappers"; export const accept: OperationParameter = { @@ -25,9 +25,9 @@ export const accept: OperationParameter = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const endpoint: OperationURLParameter = { @@ -36,10 +36,10 @@ export const endpoint: OperationURLParameter = { serializedName: "endpoint", required: true, type: { - name: "String" - } + name: "String", + }, }, - skipEncoding: true + skipEncoding: true, }; export const indexName: OperationURLParameter = { @@ -48,9 +48,9 @@ export const indexName: OperationURLParameter = { serializedName: "indexName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const apiVersion: OperationQueryParameter = { @@ -59,9 +59,9 @@ export const apiVersion: OperationQueryParameter = { serializedName: "api-version", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const searchText: OperationQueryParameter = { @@ -69,9 +69,9 @@ export const searchText: OperationQueryParameter = { mapper: { serializedName: "search", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const includeTotalResultCount: OperationQueryParameter = { @@ -79,9 +79,9 @@ export const 
includeTotalResultCount: OperationQueryParameter = { mapper: { serializedName: "$count", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const facets: OperationQueryParameter = { @@ -92,12 +92,12 @@ export const facets: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "Multi" + collectionFormat: "Multi", }; export const filter: OperationQueryParameter = { @@ -105,9 +105,9 @@ export const filter: OperationQueryParameter = { mapper: { serializedName: "$filter", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const highlightFields: OperationQueryParameter = { @@ -118,12 +118,12 @@ export const highlightFields: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const highlightPostTag: OperationQueryParameter = { @@ -131,9 +131,9 @@ export const highlightPostTag: OperationQueryParameter = { mapper: { serializedName: "highlightPostTag", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const highlightPreTag: OperationQueryParameter = { @@ -141,9 +141,9 @@ export const highlightPreTag: OperationQueryParameter = { mapper: { serializedName: "highlightPreTag", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const minimumCoverage: OperationQueryParameter = { @@ -151,9 +151,9 @@ export const minimumCoverage: OperationQueryParameter = { mapper: { serializedName: "minimumCoverage", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const orderBy: OperationQueryParameter = { @@ -164,12 +164,12 @@ export const orderBy: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const queryType: 
OperationQueryParameter = { @@ -178,9 +178,9 @@ export const queryType: OperationQueryParameter = { serializedName: "queryType", type: { name: "Enum", - allowedValues: ["simple", "full", "semantic"] - } - } + allowedValues: ["simple", "full", "semantic"], + }, + }, }; export const scoringParameters: OperationQueryParameter = { @@ -191,12 +191,12 @@ export const scoringParameters: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "Multi" + collectionFormat: "Multi", }; export const scoringProfile: OperationQueryParameter = { @@ -204,9 +204,9 @@ export const scoringProfile: OperationQueryParameter = { mapper: { serializedName: "scoringProfile", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const semanticQuery: OperationQueryParameter = { @@ -214,9 +214,9 @@ export const semanticQuery: OperationQueryParameter = { mapper: { serializedName: "semanticQuery", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const semanticConfiguration: OperationQueryParameter = { @@ -224,9 +224,9 @@ export const semanticConfiguration: OperationQueryParameter = { mapper: { serializedName: "semanticConfiguration", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const semanticErrorHandling: OperationQueryParameter = { @@ -234,22 +234,22 @@ export const semanticErrorHandling: OperationQueryParameter = { mapper: { serializedName: "semanticErrorHandling", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const semanticMaxWaitInMilliseconds: OperationQueryParameter = { parameterPath: ["options", "searchOptions", "semanticMaxWaitInMilliseconds"], mapper: { constraints: { - InclusiveMinimum: 700 + InclusiveMinimum: 700, }, serializedName: "semanticMaxWaitInMilliseconds", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const debug: OperationQueryParameter = { @@ -257,9 +257,9 @@ export 
const debug: OperationQueryParameter = { mapper: { serializedName: "debug", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const searchFields: OperationQueryParameter = { @@ -270,12 +270,12 @@ export const searchFields: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const queryLanguage: OperationQueryParameter = { @@ -283,9 +283,9 @@ export const queryLanguage: OperationQueryParameter = { mapper: { serializedName: "queryLanguage", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const speller: OperationQueryParameter = { @@ -293,9 +293,9 @@ export const speller: OperationQueryParameter = { mapper: { serializedName: "speller", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const answers: OperationQueryParameter = { @@ -303,9 +303,9 @@ export const answers: OperationQueryParameter = { mapper: { serializedName: "answers", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const searchMode: OperationQueryParameter = { @@ -314,9 +314,9 @@ export const searchMode: OperationQueryParameter = { serializedName: "searchMode", type: { name: "Enum", - allowedValues: ["any", "all"] - } - } + allowedValues: ["any", "all"], + }, + }, }; export const scoringStatistics: OperationQueryParameter = { @@ -325,9 +325,9 @@ export const scoringStatistics: OperationQueryParameter = { serializedName: "scoringStatistics", type: { name: "Enum", - allowedValues: ["local", "global"] - } - } + allowedValues: ["local", "global"], + }, + }, }; export const sessionId: OperationQueryParameter = { @@ -335,9 +335,9 @@ export const sessionId: OperationQueryParameter = { mapper: { serializedName: "sessionId", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const select: OperationQueryParameter = { @@ -348,12 +348,12 @@ export const select: 
OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const skip: OperationQueryParameter = { @@ -361,9 +361,9 @@ export const skip: OperationQueryParameter = { mapper: { serializedName: "$skip", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const top: OperationQueryParameter = { @@ -371,9 +371,9 @@ export const top: OperationQueryParameter = { mapper: { serializedName: "$top", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const captions: OperationQueryParameter = { @@ -381,9 +381,9 @@ export const captions: OperationQueryParameter = { mapper: { serializedName: "captions", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const semanticFields: OperationQueryParameter = { @@ -394,12 +394,12 @@ export const semanticFields: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const contentType: OperationParameter = { @@ -409,14 +409,14 @@ export const contentType: OperationParameter = { isConstant: true, serializedName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const searchRequest: OperationParameter = { parameterPath: "searchRequest", - mapper: SearchRequestMapper + mapper: SearchRequestMapper, }; export const key: OperationURLParameter = { @@ -425,9 +425,9 @@ export const key: OperationURLParameter = { serializedName: "key", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const selectedFields: OperationQueryParameter = { @@ -438,12 +438,12 @@ export const selectedFields: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + 
collectionFormat: "CSV", }; export const searchText1: OperationQueryParameter = { @@ -452,9 +452,9 @@ export const searchText1: OperationQueryParameter = { serializedName: "search", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const suggesterName: OperationQueryParameter = { @@ -463,9 +463,9 @@ export const suggesterName: OperationQueryParameter = { serializedName: "suggesterName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const filter1: OperationQueryParameter = { @@ -473,9 +473,9 @@ export const filter1: OperationQueryParameter = { mapper: { serializedName: "$filter", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const useFuzzyMatching: OperationQueryParameter = { @@ -483,9 +483,9 @@ export const useFuzzyMatching: OperationQueryParameter = { mapper: { serializedName: "fuzzy", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const highlightPostTag1: OperationQueryParameter = { @@ -493,9 +493,9 @@ export const highlightPostTag1: OperationQueryParameter = { mapper: { serializedName: "highlightPostTag", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const highlightPreTag1: OperationQueryParameter = { @@ -503,9 +503,9 @@ export const highlightPreTag1: OperationQueryParameter = { mapper: { serializedName: "highlightPreTag", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const minimumCoverage1: OperationQueryParameter = { @@ -513,9 +513,9 @@ export const minimumCoverage1: OperationQueryParameter = { mapper: { serializedName: "minimumCoverage", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const orderBy1: OperationQueryParameter = { @@ -526,12 +526,12 @@ export const orderBy1: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export 
const searchFields1: OperationQueryParameter = { @@ -542,12 +542,12 @@ export const searchFields1: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const select1: OperationQueryParameter = { @@ -558,12 +558,12 @@ export const select1: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const top1: OperationQueryParameter = { @@ -571,19 +571,19 @@ export const top1: OperationQueryParameter = { mapper: { serializedName: "$top", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const suggestRequest: OperationParameter = { parameterPath: "suggestRequest", - mapper: SuggestRequestMapper + mapper: SuggestRequestMapper, }; export const batch: OperationParameter = { parameterPath: "batch", - mapper: IndexBatchMapper + mapper: IndexBatchMapper, }; export const autocompleteMode: OperationQueryParameter = { @@ -592,9 +592,9 @@ export const autocompleteMode: OperationQueryParameter = { serializedName: "autocompleteMode", type: { name: "Enum", - allowedValues: ["oneTerm", "twoTerms", "oneTermWithContext"] - } - } + allowedValues: ["oneTerm", "twoTerms", "oneTermWithContext"], + }, + }, }; export const filter2: OperationQueryParameter = { @@ -602,9 +602,9 @@ export const filter2: OperationQueryParameter = { mapper: { serializedName: "$filter", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const useFuzzyMatching1: OperationQueryParameter = { @@ -612,9 +612,9 @@ export const useFuzzyMatching1: OperationQueryParameter = { mapper: { serializedName: "fuzzy", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const highlightPostTag2: OperationQueryParameter = { @@ -622,9 +622,9 @@ export const highlightPostTag2: OperationQueryParameter = 
{ mapper: { serializedName: "highlightPostTag", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const highlightPreTag2: OperationQueryParameter = { @@ -632,9 +632,9 @@ export const highlightPreTag2: OperationQueryParameter = { mapper: { serializedName: "highlightPreTag", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const minimumCoverage2: OperationQueryParameter = { @@ -642,9 +642,9 @@ export const minimumCoverage2: OperationQueryParameter = { mapper: { serializedName: "minimumCoverage", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const searchFields2: OperationQueryParameter = { @@ -655,12 +655,12 @@ export const searchFields2: OperationQueryParameter = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: "CSV" + collectionFormat: "CSV", }; export const top2: OperationQueryParameter = { @@ -668,12 +668,12 @@ export const top2: OperationQueryParameter = { mapper: { serializedName: "$top", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const autocompleteRequest: OperationParameter = { parameterPath: "autocompleteRequest", - mapper: AutocompleteRequestMapper + mapper: AutocompleteRequestMapper, }; diff --git a/sdk/search/search-documents/src/generated/data/operations/documents.ts b/sdk/search/search-documents/src/generated/data/operations/documents.ts index b5983df1ca11..45e2d4a84660 100644 --- a/sdk/search/search-documents/src/generated/data/operations/documents.ts +++ b/sdk/search/search-documents/src/generated/data/operations/documents.ts @@ -33,7 +33,7 @@ import { DocumentsAutocompleteGetResponse, AutocompleteRequest, DocumentsAutocompletePostOptionalParams, - DocumentsAutocompletePostResponse + DocumentsAutocompletePostResponse, } from "../models"; /** Class containing Documents operations. 
*/ @@ -53,7 +53,7 @@ export class DocumentsImpl implements Documents { * @param options The options parameters. */ count( - options?: DocumentsCountOptionalParams + options?: DocumentsCountOptionalParams, ): Promise { return this.client.sendOperationRequest({ options }, countOperationSpec); } @@ -63,11 +63,11 @@ export class DocumentsImpl implements Documents { * @param options The options parameters. */ searchGet( - options?: DocumentsSearchGetOptionalParams + options?: DocumentsSearchGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { options }, - searchGetOperationSpec + searchGetOperationSpec, ); } @@ -78,11 +78,11 @@ export class DocumentsImpl implements Documents { */ searchPost( searchRequest: SearchRequest, - options?: DocumentsSearchPostOptionalParams + options?: DocumentsSearchPostOptionalParams, ): Promise { return this.client.sendOperationRequest( { searchRequest, options }, - searchPostOperationSpec + searchPostOperationSpec, ); } @@ -93,7 +93,7 @@ export class DocumentsImpl implements Documents { */ get( key: string, - options?: DocumentsGetOptionalParams + options?: DocumentsGetOptionalParams, ): Promise { return this.client.sendOperationRequest({ key, options }, getOperationSpec); } @@ -109,11 +109,11 @@ export class DocumentsImpl implements Documents { suggestGet( searchText: string, suggesterName: string, - options?: DocumentsSuggestGetOptionalParams + options?: DocumentsSuggestGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { searchText, suggesterName, options }, - suggestGetOperationSpec + suggestGetOperationSpec, ); } @@ -124,11 +124,11 @@ export class DocumentsImpl implements Documents { */ suggestPost( suggestRequest: SuggestRequest, - options?: DocumentsSuggestPostOptionalParams + options?: DocumentsSuggestPostOptionalParams, ): Promise { return this.client.sendOperationRequest( { suggestRequest, options }, - suggestPostOperationSpec + suggestPostOperationSpec, ); } @@ -139,11 +139,11 @@ export 
class DocumentsImpl implements Documents { */ index( batch: IndexBatch, - options?: DocumentsIndexOptionalParams + options?: DocumentsIndexOptionalParams, ): Promise { return this.client.sendOperationRequest( { batch, options }, - indexOperationSpec + indexOperationSpec, ); } @@ -157,11 +157,11 @@ export class DocumentsImpl implements Documents { autocompleteGet( searchText: string, suggesterName: string, - options?: DocumentsAutocompleteGetOptionalParams + options?: DocumentsAutocompleteGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { searchText, suggesterName, options }, - autocompleteGetOperationSpec + autocompleteGetOperationSpec, ); } @@ -172,11 +172,11 @@ export class DocumentsImpl implements Documents { */ autocompletePost( autocompleteRequest: AutocompleteRequest, - options?: DocumentsAutocompletePostOptionalParams + options?: DocumentsAutocompletePostOptionalParams, ): Promise { return this.client.sendOperationRequest( { autocompleteRequest, options }, - autocompletePostOperationSpec + autocompletePostOperationSpec, ); } } @@ -188,27 +188,27 @@ const countOperationSpec: coreClient.OperationSpec = { httpMethod: "GET", responses: { 200: { - bodyMapper: { type: { name: "Number" } } + bodyMapper: { type: { name: "Number" } }, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept], - serializer + serializer, }; const searchGetOperationSpec: coreClient.OperationSpec = { path: "/docs", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SearchDocumentsResult + bodyMapper: Mappers.SearchDocumentsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [ Parameters.apiVersion, @@ -240,29 +240,29 @@ const searchGetOperationSpec: coreClient.OperationSpec = { Parameters.skip, Parameters.top, 
Parameters.captions, - Parameters.semanticFields + Parameters.semanticFields, ], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept], - serializer + serializer, }; const searchPostOperationSpec: coreClient.OperationSpec = { path: "/docs/search.post.search", httpMethod: "POST", responses: { 200: { - bodyMapper: Mappers.SearchDocumentsResult + bodyMapper: Mappers.SearchDocumentsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.searchRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", - serializer + serializer, }; const getOperationSpec: coreClient.OperationSpec = { path: "/docs('{key}')", @@ -270,28 +270,28 @@ const getOperationSpec: coreClient.OperationSpec = { responses: { 200: { bodyMapper: { - type: { name: "Dictionary", value: { type: { name: "any" } } } - } + type: { name: "Dictionary", value: { type: { name: "any" } } }, + }, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion, Parameters.selectedFields], urlParameters: [Parameters.endpoint, Parameters.indexName, Parameters.key], headerParameters: [Parameters.accept], - serializer + serializer, }; const suggestGetOperationSpec: coreClient.OperationSpec = { path: "/docs/search.suggest", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SuggestDocumentsResult + bodyMapper: Mappers.SuggestDocumentsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [ Parameters.apiVersion, @@ -305,61 +305,61 @@ const suggestGetOperationSpec: coreClient.OperationSpec = { Parameters.orderBy1, Parameters.searchFields1, Parameters.select1, - Parameters.top1 + Parameters.top1, ], urlParameters: 
[Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept], - serializer + serializer, }; const suggestPostOperationSpec: coreClient.OperationSpec = { path: "/docs/search.post.suggest", httpMethod: "POST", responses: { 200: { - bodyMapper: Mappers.SuggestDocumentsResult + bodyMapper: Mappers.SuggestDocumentsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.suggestRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", - serializer + serializer, }; const indexOperationSpec: coreClient.OperationSpec = { path: "/docs/search.index", httpMethod: "POST", responses: { 200: { - bodyMapper: Mappers.IndexDocumentsResult + bodyMapper: Mappers.IndexDocumentsResult, }, 207: { - bodyMapper: Mappers.IndexDocumentsResult + bodyMapper: Mappers.IndexDocumentsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.batch, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", - serializer + serializer, }; const autocompleteGetOperationSpec: coreClient.OperationSpec = { path: "/docs/search.autocomplete", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.AutocompleteResult + bodyMapper: Mappers.AutocompleteResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [ Parameters.apiVersion, @@ -372,27 +372,27 @@ const autocompleteGetOperationSpec: coreClient.OperationSpec = { Parameters.highlightPreTag2, Parameters.minimumCoverage2, Parameters.searchFields2, - Parameters.top2 + Parameters.top2, ], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: 
[Parameters.accept], - serializer + serializer, }; const autocompletePostOperationSpec: coreClient.OperationSpec = { path: "/docs/search.post.autocomplete", httpMethod: "POST", responses: { 200: { - bodyMapper: Mappers.AutocompleteResult + bodyMapper: Mappers.AutocompleteResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.autocompleteRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generated/data/operationsInterfaces/documents.ts b/sdk/search/search-documents/src/generated/data/operationsInterfaces/documents.ts index 7ec698d585c8..2cedcc7c4163 100644 --- a/sdk/search/search-documents/src/generated/data/operationsInterfaces/documents.ts +++ b/sdk/search/search-documents/src/generated/data/operationsInterfaces/documents.ts @@ -28,7 +28,7 @@ import { DocumentsAutocompleteGetResponse, AutocompleteRequest, DocumentsAutocompletePostOptionalParams, - DocumentsAutocompletePostResponse + DocumentsAutocompletePostResponse, } from "../models"; /** Interface representing a Documents. */ @@ -38,14 +38,14 @@ export interface Documents { * @param options The options parameters. */ count( - options?: DocumentsCountOptionalParams + options?: DocumentsCountOptionalParams, ): Promise; /** * Searches for documents in the index. * @param options The options parameters. */ searchGet( - options?: DocumentsSearchGetOptionalParams + options?: DocumentsSearchGetOptionalParams, ): Promise; /** * Searches for documents in the index. @@ -54,7 +54,7 @@ export interface Documents { */ searchPost( searchRequest: SearchRequest, - options?: DocumentsSearchPostOptionalParams + options?: DocumentsSearchPostOptionalParams, ): Promise; /** * Retrieves a document from the index. 
@@ -63,7 +63,7 @@ export interface Documents { */ get( key: string, - options?: DocumentsGetOptionalParams + options?: DocumentsGetOptionalParams, ): Promise; /** * Suggests documents in the index that match the given partial query text. @@ -76,7 +76,7 @@ export interface Documents { suggestGet( searchText: string, suggesterName: string, - options?: DocumentsSuggestGetOptionalParams + options?: DocumentsSuggestGetOptionalParams, ): Promise; /** * Suggests documents in the index that match the given partial query text. @@ -85,7 +85,7 @@ export interface Documents { */ suggestPost( suggestRequest: SuggestRequest, - options?: DocumentsSuggestPostOptionalParams + options?: DocumentsSuggestPostOptionalParams, ): Promise; /** * Sends a batch of document write actions to the index. @@ -94,7 +94,7 @@ export interface Documents { */ index( batch: IndexBatch, - options?: DocumentsIndexOptionalParams + options?: DocumentsIndexOptionalParams, ): Promise; /** * Autocompletes incomplete query terms based on input text and matching terms in the index. @@ -106,7 +106,7 @@ export interface Documents { autocompleteGet( searchText: string, suggesterName: string, - options?: DocumentsAutocompleteGetOptionalParams + options?: DocumentsAutocompleteGetOptionalParams, ): Promise; /** * Autocompletes incomplete query terms based on input text and matching terms in the index. 
@@ -115,6 +115,6 @@ export interface Documents { */ autocompletePost( autocompleteRequest: AutocompleteRequest, - options?: DocumentsAutocompletePostOptionalParams + options?: DocumentsAutocompletePostOptionalParams, ): Promise; } diff --git a/sdk/search/search-documents/src/generated/data/searchClient.ts b/sdk/search/search-documents/src/generated/data/searchClient.ts index 6058360aa395..72f9e9f4f56f 100644 --- a/sdk/search/search-documents/src/generated/data/searchClient.ts +++ b/sdk/search/search-documents/src/generated/data/searchClient.ts @@ -7,18 +7,23 @@ */ import * as coreHttpCompat from "@azure/core-http-compat"; +import { + PipelineRequest, + PipelineResponse, + SendRequest, +} from "@azure/core-rest-pipeline"; import { DocumentsImpl } from "./operations"; import { Documents } from "./operationsInterfaces"; import { - ApiVersion20231001Preview, - SearchClientOptionalParams + ApiVersion20240301Preview, + SearchClientOptionalParams, } from "./models"; /** @internal */ export class SearchClient extends coreHttpCompat.ExtendedServiceClient { endpoint: string; indexName: string; - apiVersion: ApiVersion20231001Preview; + apiVersion: ApiVersion20240301Preview; /** * Initializes a new instance of the SearchClient class. 
@@ -30,8 +35,8 @@ export class SearchClient extends coreHttpCompat.ExtendedServiceClient { constructor( endpoint: string, indexName: string, - apiVersion: ApiVersion20231001Preview, - options?: SearchClientOptionalParams + apiVersion: ApiVersion20240301Preview, + options?: SearchClientOptionalParams, ) { if (endpoint === undefined) { throw new Error("'endpoint' cannot be null"); @@ -48,10 +53,10 @@ export class SearchClient extends coreHttpCompat.ExtendedServiceClient { options = {}; } const defaults: SearchClientOptionalParams = { - requestContentType: "application/json; charset=utf-8" + requestContentType: "application/json; charset=utf-8", }; - const packageDetails = `azsdk-js-search-documents/12.0.0-beta.4`; + const packageDetails = `azsdk-js-search-documents/12.1.0-beta.1`; const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` @@ -61,12 +66,12 @@ export class SearchClient extends coreHttpCompat.ExtendedServiceClient { ...defaults, ...options, userAgentOptions: { - userAgentPrefix + userAgentPrefix, }, - baseUri: + endpoint: options.endpoint ?? options.baseUri ?? - "{endpoint}/indexes('{indexName}')" + "{endpoint}/indexes('{indexName}')", }; super(optionsWithDefaults); // Parameter assignments @@ -74,6 +79,35 @@ export class SearchClient extends coreHttpCompat.ExtendedServiceClient { this.indexName = indexName; this.apiVersion = apiVersion; this.documents = new DocumentsImpl(this); + this.addCustomApiVersionPolicy(apiVersion); + } + + /** A function that adds a policy that sets the api-version (or equivalent) to reflect the library version. 
*/ + private addCustomApiVersionPolicy(apiVersion?: string) { + if (!apiVersion) { + return; + } + const apiVersionPolicy = { + name: "CustomApiVersionPolicy", + async sendRequest( + request: PipelineRequest, + next: SendRequest, + ): Promise { + const param = request.url.split("?"); + if (param.length > 1) { + const newParams = param[1].split("&").map((item) => { + if (item.indexOf("api-version") > -1) { + return "api-version=" + apiVersion; + } else { + return item; + } + }); + request.url = param[0] + "?" + newParams.join("&"); + } + return next(request); + }, + }; + this.pipeline.addPolicy(apiVersionPolicy); } documents: Documents; diff --git a/sdk/search/search-documents/src/generated/service/models/index.ts b/sdk/search/search-documents/src/generated/service/models/index.ts index 053e943618dc..6d61aa20aa2e 100644 --- a/sdk/search/search-documents/src/generated/service/models/index.ts +++ b/sdk/search/search-documents/src/generated/service/models/index.ts @@ -108,12 +108,15 @@ export type LexicalNormalizerUnion = LexicalNormalizer | CustomNormalizer; export type SimilarityUnion = Similarity | ClassicSimilarity | BM25Similarity; export type VectorSearchAlgorithmConfigurationUnion = | VectorSearchAlgorithmConfiguration - | HnswVectorSearchAlgorithmConfiguration - | ExhaustiveKnnVectorSearchAlgorithmConfiguration; + | HnswAlgorithmConfiguration + | ExhaustiveKnnAlgorithmConfiguration; export type VectorSearchVectorizerUnion = | VectorSearchVectorizer | AzureOpenAIVectorizer | CustomVectorizer; +export type BaseVectorSearchCompressionConfigurationUnion = + | BaseVectorSearchCompressionConfiguration + | ScalarQuantizationCompressionConfiguration; /** Represents a datasource definition, which can be used to configure an indexer. */ export interface SearchIndexerDataSource { @@ -135,13 +138,13 @@ export interface SearchIndexerDataSource { dataDeletionDetectionPolicy?: DataDeletionDetectionPolicyUnion; /** The ETag of the data source. 
*/ etag?: string; - /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your datasource definition when you want full assurance that no one, not even Microsoft, can decrypt your data source definition in Azure Cognitive Search. Once you have encrypted your data source definition, it will always remain encrypted. Azure Cognitive Search will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your datasource definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ + /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your datasource definition when you want full assurance that no one, not even Microsoft, can decrypt your data source definition. Once you have encrypted your data source definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your datasource definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ encryptionKey?: SearchResourceEncryptionKey; } /** Represents credentials that can be used to connect to a datasource. */ export interface DataSourceCredentials { - /** The connection string for the datasource. Set to '' if you do not want the connection string updated. */ + /** The connection string for the datasource. Set to `` (with brackets) if you don't want the connection string updated. Set to `` if you want to remove the connection string value from the datasource. 
*/ connectionString?: string; } @@ -177,13 +180,13 @@ export interface DataDeletionDetectionPolicy { | "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy"; } -/** A customer-managed encryption key in Azure Key Vault. Keys that you create and manage can be used to encrypt or decrypt data-at-rest in Azure Cognitive Search, such as indexes and synonym maps. */ +/** A customer-managed encryption key in Azure Key Vault. Keys that you create and manage can be used to encrypt or decrypt data-at-rest, such as indexes and synonym maps. */ export interface SearchResourceEncryptionKey { /** The name of your Azure Key Vault key to be used to encrypt your data at rest. */ keyName: string; /** The version of your Azure Key Vault key to be used to encrypt your data at rest. */ keyVersion: string; - /** The URI of your Azure Key Vault, also referred to as DNS name, that contains the key to be used to encrypt your data at rest. An example URI might be https://my-keyvault-name.vault.azure.net. */ + /** The URI of your Azure Key Vault, also referred to as DNS name, that contains the key to be used to encrypt your data at rest. An example URI might be `https://my-keyvault-name.vault.azure.net`. */ vaultUri: string; /** Optional Azure Active Directory credentials used for accessing your Azure Key Vault. Not required if using managed identity instead. */ accessCredentials?: AzureActiveDirectoryApplicationCredentials; @@ -199,23 +202,53 @@ export interface AzureActiveDirectoryApplicationCredentials { applicationSecret?: string; } -/** Describes an error condition for the Azure Cognitive Search API. */ -export interface SearchError { +/** Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). */ +export interface ErrorResponse { + /** The error object. */ + error?: ErrorDetail; +} + +/** The error detail. 
*/ +export interface ErrorDetail { /** - * One of a server-defined set of error codes. + * The error code. * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly code?: string; /** - * A human-readable representation of the error. + * The error message. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly message: string; + readonly message?: string; + /** + * The error target. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly target?: string; + /** + * The error details. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly details?: ErrorDetail[]; + /** + * The error additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly additionalInfo?: ErrorAdditionalInfo[]; +} + +/** The resource management error additional info. */ +export interface ErrorAdditionalInfo { + /** + * The additional info type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; /** - * An array of details about specific errors that led to this reported error. + * The additional info. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly details?: SearchError[]; + readonly info?: Record; } /** Response from a List Datasources request. If successful, it includes the full definitions of all datasources. */ @@ -258,7 +291,7 @@ export interface SearchIndexer { isDisabled?: boolean; /** The ETag of the indexer. */ etag?: string; - /** A description of an encryption key that you create in Azure Key Vault. 
This key is used to provide an additional level of encryption-at-rest for your indexer definition (as well as indexer execution status) when you want full assurance that no one, not even Microsoft, can decrypt them in Azure Cognitive Search. Once you have encrypted your indexer definition, it will always remain encrypted. Azure Cognitive Search will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your indexer definition (and indexer execution status) will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ + /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your indexer definition (as well as indexer execution status) when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your indexer definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your indexer definition (and indexer execution status) will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ encryptionKey?: SearchResourceEncryptionKey; /** Adds caching to an enrichment pipeline to allow for incremental modification steps without having to rebuild the index every time. */ cache?: SearchIndexerCache; @@ -574,15 +607,15 @@ export interface SearchIndexerSkillset { description?: string; /** A list of skills in the skillset. */ skills: SearchIndexerSkillUnion[]; - /** Details about cognitive services to be used when running skills. */ + /** Details about the Azure AI service to be used when running skills. 
*/ cognitiveServicesAccount?: CognitiveServicesAccountUnion; - /** Definition of additional projections to azure blob, table, or files, of enriched data. */ + /** Definition of additional projections to Azure blob, table, or files, of enriched data. */ knowledgeStore?: SearchIndexerKnowledgeStore; /** Definition of additional projections to secondary search index(es). */ indexProjections?: SearchIndexerIndexProjections; /** The ETag of the skillset. */ etag?: string; - /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your skillset definition when you want full assurance that no one, not even Microsoft, can decrypt your skillset definition in Azure Cognitive Search. Once you have encrypted your skillset definition, it will always remain encrypted. Azure Cognitive Search will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your skillset definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ + /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your skillset definition when you want full assurance that no one, not even Microsoft, can decrypt your skillset definition. Once you have encrypted your skillset definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your skillset definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. 
*/ encryptionKey?: SearchResourceEncryptionKey; } @@ -642,13 +675,13 @@ export interface OutputFieldMappingEntry { targetName?: string; } -/** Base type for describing any cognitive service resource attached to a skillset. */ +/** Base type for describing any Azure AI service resource attached to a skillset. */ export interface CognitiveServicesAccount { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: | "#Microsoft.Azure.Search.DefaultCognitiveServices" | "#Microsoft.Azure.Search.CognitiveServicesByKey"; - /** Description of the cognitive service resource attached to a skillset. */ + /** Description of the Azure AI service resource attached to a skillset. */ description?: string; } @@ -746,7 +779,7 @@ export interface SynonymMap { format: "solr"; /** A series of synonym rules in the specified synonym map format. The rules must be separated by newlines. */ synonyms: string; - /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data in Azure Cognitive Search. Once you have encrypted your data, it will always remain encrypted. Azure Cognitive Search will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ + /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. 
The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ encryptionKey?: SearchResourceEncryptionKey; /** The ETag of the synonym map. */ etag?: string; @@ -785,12 +818,12 @@ export interface SearchIndex { charFilters?: CharFilterUnion[]; /** The normalizers for the index. */ normalizers?: LexicalNormalizerUnion[]; - /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data in Azure Cognitive Search. Once you have encrypted your data, it will always remain encrypted. Azure Cognitive Search will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */ + /** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. 
*/ encryptionKey?: SearchResourceEncryptionKey; /** The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. */ similarity?: SimilarityUnion; /** Defines parameters for a search index that influence semantic capabilities. */ - semanticSettings?: SemanticSettings; + semanticSearch?: SemanticSearch; /** Contains configuration options related to vector search. */ vectorSearch?: VectorSearch; /** The ETag of the index. */ @@ -805,13 +838,15 @@ export interface SearchField { type: SearchFieldDataType; /** A value indicating whether the field uniquely identifies documents in the index. Exactly one top-level field in each index must be chosen as the key field and it must be of type Edm.String. Key fields can be used to look up documents directly and update or delete specific documents. Default is false for simple fields and null for complex fields. */ key?: boolean; - /** A value indicating whether the field can be returned in a search result. You can disable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be true for key fields, and it must be null for complex fields. This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is true for simple fields and null for complex fields. */ + /** A value indicating whether the field can be returned in a search result. You can disable this option if you want to use a field (for example, margin) as a filter, sorting, or scoring mechanism but do not want the field to be visible to the end user. This property must be true for key fields, and it must be null for complex fields. 
This property can be changed on existing fields. Enabling this property does not cause any increase in index storage requirements. Default is true for simple fields, false for vector fields, and null for complex fields. */ retrievable?: boolean; - /** A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. If you set a searchable field to a value like "sunny day", internally it will be split into the individual tokens "sunny" and "day". This enables full-text searches for these terms. Fields of type Edm.String or Collection(Edm.String) are searchable by default. This property must be false for simple fields of other non-string data types, and it must be null for complex fields. Note: searchable fields consume extra space in your index since Azure Cognitive Search will store an additional tokenized version of the field value for full-text searches. If you want to save space in your index and you don't need a field to be included in searches, set searchable to false. */ + /** An immutable value indicating whether the field will be persisted separately on disk to be returned in a search result. You can disable this option if you don't plan to return the field contents in a search response to save on storage overhead. This can only be set during index creation and only for vector fields. This property cannot be changed for existing fields or set as false for new fields. If this property is set as false, the property 'retrievable' must also be set to false. This property must be true or unset for key fields, for new fields, and for non-vector fields, and it must be null for complex fields. Disabling this property will reduce index storage requirements. The default is true for vector fields. */ + stored?: boolean; + /** A value indicating whether the field is full-text searchable. This means it will undergo analysis such as word-breaking during indexing. 
If you set a searchable field to a value like "sunny day", internally it will be split into the individual tokens "sunny" and "day". This enables full-text searches for these terms. Fields of type Edm.String or Collection(Edm.String) are searchable by default. This property must be false for simple fields of other non-string data types, and it must be null for complex fields. Note: searchable fields consume extra space in your index to accommodate additional tokenized versions of the field value for full-text searches. If you want to save space in your index and you don't need a field to be included in searches, set searchable to false. */ searchable?: boolean; /** A value indicating whether to enable the field to be referenced in $filter queries. filterable differs from searchable in how strings are handled. Fields of type Edm.String or Collection(Edm.String) that are filterable do not undergo word-breaking, so comparisons are for exact matches only. For example, if you set such a field f to "sunny day", $filter=f eq 'sunny' will find no matches, but $filter=f eq 'sunny day' will. This property must be null for complex fields. Default is true for simple fields and null for complex fields. */ filterable?: boolean; - /** A value indicating whether to enable the field to be referenced in $orderby expressions. By default Azure Cognitive Search sorts results by score, but in many experiences users will want to sort by fields in the documents. A simple field can be sortable only if it is single-valued (it has a single value in the scope of the parent document). Simple collection fields cannot be sortable, since they are multi-valued. Simple sub-fields of complex collections are also multi-valued, and therefore cannot be sortable. This is true whether it's an immediate parent field, or an ancestor field, that's the complex collection. Complex fields cannot be sortable and the sortable property must be null for such fields. 
The default for sortable is true for single-valued simple fields, false for multi-valued simple fields, and null for complex fields. */ + /** A value indicating whether to enable the field to be referenced in $orderby expressions. By default, the search engine sorts results by score, but in many experiences users will want to sort by fields in the documents. A simple field can be sortable only if it is single-valued (it has a single value in the scope of the parent document). Simple collection fields cannot be sortable, since they are multi-valued. Simple sub-fields of complex collections are also multi-valued, and therefore cannot be sortable. This is true whether it's an immediate parent field, or an ancestor field, that's the complex collection. Complex fields cannot be sortable and the sortable property must be null for such fields. The default for sortable is true for single-valued simple fields, false for multi-valued simple fields, and null for complex fields. */ sortable?: boolean; /** A value indicating whether to enable the field to be referenced in facet queries. Typically used in a presentation of search results that includes hit count by category (for example, search for digital cameras and see hits by brand, by megapixels, by price, and so on). This property must be null for complex fields. Fields of type Edm.GeographyPoint or Collection(Edm.GeographyPoint) cannot be facetable. Default is true for all other simple fields. */ facetable?: boolean; @@ -826,7 +861,7 @@ export interface SearchField { /** The dimensionality of the vector field. */ vectorSearchDimensions?: number; /** The name of the vector search profile that specifies the algorithm and vectorizer to use when searching the vector field. */ - vectorSearchProfile?: string; + vectorSearchProfileName?: string; /** A list of the names of synonym maps to associate with this field. This option can be used only with searchable fields. Currently only one synonym map per field is supported. 
Assigning a synonym map to a field ensures that query terms targeting that field are expanded at query-time using the rules in the synonym map. This attribute can be changed on existing fields. Must be null or an empty collection for complex fields. */ synonymMaps?: string[]; /** A list of sub-fields if this is a field of type Edm.ComplexType or Collection(Edm.ComplexType). Must be null or empty for simple fields. */ @@ -973,9 +1008,9 @@ export interface Similarity { } /** Defines parameters for a search index that influence semantic capabilities. */ -export interface SemanticSettings { +export interface SemanticSearch { /** Allows you to set the name of a default semantic configuration in your index, making it optional to pass it on as a query parameter every time. */ - defaultConfiguration?: string; + defaultConfigurationName?: string; /** The semantic configurations for the index. */ configurations?: SemanticConfiguration[]; } @@ -985,32 +1020,34 @@ export interface SemanticConfiguration { /** The name of the semantic configuration. */ name: string; /** Describes the title, content, and keyword fields to be used for semantic ranking, captions, highlights, and answers. At least one of the three sub properties (titleField, prioritizedKeywordsFields and prioritizedContentFields) need to be set. */ - prioritizedFields: PrioritizedFields; + prioritizedFields: SemanticPrioritizedFields; } /** Describes the title, content, and keywords fields to be used for semantic ranking, captions, highlights, and answers. */ -export interface PrioritizedFields { +export interface SemanticPrioritizedFields { /** Defines the title field to be used for semantic ranking, captions, highlights, and answers. If you don't have a title field in your index, leave this blank. */ titleField?: SemanticField; /** Defines the content fields to be used for semantic ranking, captions, highlights, and answers. For the best result, the selected fields should contain text in natural language form. 
The order of the fields in the array represents their priority. Fields with lower priority may get truncated if the content is long. */ - prioritizedContentFields?: SemanticField[]; + contentFields?: SemanticField[]; /** Defines the keyword fields to be used for semantic ranking, captions, highlights, and answers. For the best result, the selected fields should contain a list of keywords. The order of the fields in the array represents their priority. Fields with lower priority may get truncated if the content is long. */ - prioritizedKeywordsFields?: SemanticField[]; + keywordsFields?: SemanticField[]; } /** A field that is used as part of the semantic configuration. */ export interface SemanticField { - name?: string; + name: string; } /** Contains configuration options related to vector search. */ export interface VectorSearch { /** Defines combinations of configurations to use with vector search. */ profiles?: VectorSearchProfile[]; - /** Contains configuration options specific to the algorithm used during indexing and/or querying. */ + /** Contains configuration options specific to the algorithm used during indexing or querying. */ algorithms?: VectorSearchAlgorithmConfigurationUnion[]; /** Contains configuration options on how to vectorize text vector queries. */ vectorizers?: VectorSearchVectorizerUnion[]; + /** Contains configuration options specific to the compression method used during indexing or querying. */ + compressions?: BaseVectorSearchCompressionConfigurationUnion[]; } /** Defines a combination of configurations to use with vector search. */ @@ -1018,12 +1055,14 @@ export interface VectorSearchProfile { /** The name to associate with this particular vector search profile. */ name: string; /** The name of the vector search algorithm configuration that specifies the algorithm and optional parameters. 
*/ - algorithm: string; + algorithmConfigurationName: string; /** The name of the kind of vectorization method being configured for use with vector search. */ vectorizer?: string; + /** The name of the compression method configuration that specifies the compression method and optional parameters. */ + compressionConfigurationName?: string; } -/** Contains configuration options specific to the algorithm used during indexing and/or querying. */ +/** Contains configuration options specific to the algorithm used during indexing or querying. */ export interface VectorSearchAlgorithmConfiguration { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "hnsw" | "exhaustiveKnn"; @@ -1031,7 +1070,7 @@ export interface VectorSearchAlgorithmConfiguration { name: string; } -/** Contains specific details for a vectorization method to be used during query time. */ +/** Specifies the vectorization method to be used during query time. */ export interface VectorSearchVectorizer { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "azureOpenAI" | "customWebApi"; @@ -1039,6 +1078,18 @@ export interface VectorSearchVectorizer { name: string; } +/** Contains configuration options specific to the compression method used during indexing or querying. */ +export interface BaseVectorSearchCompressionConfiguration { + /** Polymorphic discriminator, which specifies the different types this object can be */ + kind: "scalarQuantization"; + /** The name to associate with this particular configuration. */ + name: string; + /** If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. */ + rerankWithOriginalVectors?: boolean; + /** Default oversampling factor. 
Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. */ + defaultOversampling?: number; +} + /** Response from a List Indexes request. If successful, it includes the full definitions of all indexes. */ export interface ListIndexesResult { /** @@ -1182,7 +1233,7 @@ export interface ServiceLimits { maxComplexObjectsInCollectionsPerDocument?: number; } -/** Contains the parameters specific to hnsw algorithm. */ +/** Contains the parameters specific to the HNSW algorithm. */ export interface HnswParameters { /** The number of bi-directional links created for every new element during construction. Increasing this parameter value may improve recall and reduce retrieval times for datasets with high intrinsic dimensionality at the expense of increased memory consumption and longer indexing time. */ m?: number; @@ -1200,25 +1251,31 @@ export interface ExhaustiveKnnParameters { metric?: VectorSearchAlgorithmMetric; } -/** Contains the parameters specific to using an Azure Open AI service for vectorization at query time. */ +/** Contains the parameters specific to Scalar Quantization. */ +export interface ScalarQuantizationParameters { + /** The quantized data type of compressed vector values. */ + quantizedDataType?: VectorSearchCompressionTargetDataType; +} + +/** Specifies the parameters for connecting to the Azure OpenAI resource. */ export interface AzureOpenAIParameters { - /** The resource uri for your Azure Open AI resource. */ + /** The resource URI of the Azure OpenAI resource. */ resourceUri?: string; - /** ID of your Azure Open AI model deployment on the designated resource. 
*/ + /** ID of the Azure OpenAI model deployment on the designated resource. */ deploymentId?: string; - /** API key for the designated Azure Open AI resource. */ + /** API key of the designated Azure OpenAI resource. */ apiKey?: string; /** The user-assigned managed identity used for outbound connections. */ authIdentity?: SearchIndexerDataIdentityUnion; } -/** Contains the parameters specific to generating vector embeddings via a custom endpoint. */ -export interface CustomVectorizerParameters { - /** The uri for the Web API. */ +/** Specifies the properties for connecting to a user-defined vectorizer. */ +export interface CustomWebApiParameters { + /** The URI of the Web API providing the vectorizer. */ uri?: string; - /** The headers required to make the http request. */ + /** The headers required to make the HTTP request. */ httpHeaders?: { [propertyName: string]: string }; - /** The method for the http request. */ + /** The method for the HTTP request. */ httpMethod?: string; /** The desired timeout for the request. Default is 30 seconds. */ timeout?: string; @@ -1299,190 +1356,196 @@ export interface CustomEntityAlias { } /** Clears the identity property of a datasource. */ -export type SearchIndexerDataNoneIdentity = SearchIndexerDataIdentity & { +export interface SearchIndexerDataNoneIdentity + extends SearchIndexerDataIdentity { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.DataNoneIdentity"; -}; +} /** Specifies the identity for a datasource to use. 
*/ -export type SearchIndexerDataUserAssignedIdentity = SearchIndexerDataIdentity & { +export interface SearchIndexerDataUserAssignedIdentity + extends SearchIndexerDataIdentity { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.DataUserAssignedIdentity"; /** The fully qualified Azure resource Id of a user assigned managed identity typically in the form "/subscriptions/12345678-1234-1234-1234-1234567890ab/resourceGroups/rg/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myId" that should have been assigned to the search service. */ userAssignedIdentity: string; -}; +} /** Defines a data change detection policy that captures changes based on the value of a high water mark column. */ -export type HighWaterMarkChangeDetectionPolicy = DataChangeDetectionPolicy & { +export interface HighWaterMarkChangeDetectionPolicy + extends DataChangeDetectionPolicy { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy"; /** The name of the high water mark column. */ highWaterMarkColumnName: string; -}; +} /** Defines a data change detection policy that captures changes using the Integrated Change Tracking feature of Azure SQL Database. */ -export type SqlIntegratedChangeTrackingPolicy = DataChangeDetectionPolicy & { +export interface SqlIntegratedChangeTrackingPolicy + extends DataChangeDetectionPolicy { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy"; -}; +} /** Defines a data deletion detection policy that implements a soft-deletion strategy. It determines whether an item should be deleted based on the value of a designated 'soft delete' column. 
*/ -export type SoftDeleteColumnDeletionDetectionPolicy = DataDeletionDetectionPolicy & { +export interface SoftDeleteColumnDeletionDetectionPolicy + extends DataDeletionDetectionPolicy { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy"; /** The name of the column to use for soft-deletion detection. */ softDeleteColumnName?: string; /** The marker value that identifies an item as deleted. */ softDeleteMarkerValue?: string; -}; +} /** Defines a data deletion detection policy utilizing Azure Blob Storage's native soft delete feature for deletion detection. */ -export type NativeBlobSoftDeleteDeletionDetectionPolicy = DataDeletionDetectionPolicy & { +export interface NativeBlobSoftDeleteDeletionDetectionPolicy + extends DataDeletionDetectionPolicy { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy"; -}; +} /** A skill that enables scenarios that require a Boolean operation to determine the data to assign to an output. */ -export type ConditionalSkill = SearchIndexerSkill & { +export interface ConditionalSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Util.ConditionalSkill"; -}; +} /** A skill that uses text analytics for key phrase extraction. */ -export type KeyPhraseExtractionSkill = SearchIndexerSkill & { +export interface KeyPhraseExtractionSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.KeyPhraseExtractionSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. 
*/ defaultLanguageCode?: KeyPhraseExtractionSkillLanguage; /** A number indicating how many key phrases to return. If absent, all identified key phrases will be returned. */ maxKeyPhraseCount?: number; /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ modelVersion?: string; -}; +} /** A skill that extracts text from image files. */ -export type OcrSkill = SearchIndexerSkill & { +export interface OcrSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Vision.OcrSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: OcrSkillLanguage; /** A value indicating to turn orientation detection on or not. Default is false. */ shouldDetectOrientation?: boolean; /** Defines the sequence of characters to use between the lines of text recognized by the OCR skill. The default value is "space". */ lineEnding?: LineEnding; -}; +} /** A skill that analyzes image files. It extracts a rich set of visual features based on the image content. */ -export type ImageAnalysisSkill = SearchIndexerSkill & { +export interface ImageAnalysisSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Vision.ImageAnalysisSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: ImageAnalysisSkillLanguage; /** A list of visual features. */ visualFeatures?: VisualFeature[]; /** A string indicating which domain-specific details to return. 
*/ details?: ImageDetail[]; -}; +} /** A skill that detects the language of input text and reports a single language code for every document submitted on the request. The language code is paired with a score indicating the confidence of the analysis. */ -export type LanguageDetectionSkill = SearchIndexerSkill & { +export interface LanguageDetectionSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.LanguageDetectionSkill"; /** A country code to use as a hint to the language detection model if it cannot disambiguate the language. */ defaultCountryHint?: string; /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ modelVersion?: string; -}; +} /** A skill for reshaping the outputs. It creates a complex type to support composite fields (also known as multipart fields). */ -export type ShaperSkill = SearchIndexerSkill & { +export interface ShaperSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Util.ShaperSkill"; -}; +} /** A skill for merging two or more strings into a single unified string, with an optional user-defined delimiter separating each component part. */ -export type MergeSkill = SearchIndexerSkill & { +export interface MergeSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.MergeSkill"; /** The tag indicates the start of the merged text. By default, the tag is an empty space. */ insertPreTag?: string; /** The tag indicates the end of the merged text. By default, the tag is an empty space. */ insertPostTag?: string; -}; +} /** * This skill is deprecated. 
Use the V3.EntityRecognitionSkill instead. * * @deprecated */ -export type EntityRecognitionSkill = SearchIndexerSkill & { +export interface EntityRecognitionSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.EntityRecognitionSkill"; /** A list of entity categories that should be extracted. */ categories?: EntityCategory[]; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: EntityRecognitionSkillLanguage; /** Determines whether or not to include entities which are well known but don't conform to a pre-defined type. If this configuration is not set (default), set to null or set to false, entities which don't conform to one of the pre-defined types will not be surfaced. */ includeTypelessEntities?: boolean; /** A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */ minimumPrecision?: number; -}; +} /** * This skill is deprecated. Use the V3.SentimentSkill instead. * * @deprecated */ -export type SentimentSkill = SearchIndexerSkill & { +export interface SentimentSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.SentimentSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: SentimentSkillLanguage; -}; +} /** Using the Text Analytics API, evaluates unstructured text and for each record, provides sentiment labels (such as "negative", "neutral" and "positive") based on the highest confidence score found by the service at a sentence and document-level. 
*/ -export type SentimentSkillV3 = SearchIndexerSkill & { +export interface SentimentSkillV3 extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.V3.SentimentSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: string; /** If set to true, the skill output will include information from Text Analytics for opinion mining, namely targets (nouns or verbs) and their associated assessment (adjective) in the text. Default is false. */ includeOpinionMining?: boolean; /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ modelVersion?: string; -}; +} /** Using the Text Analytics API, extracts linked entities from text. */ -export type EntityLinkingSkill = SearchIndexerSkill & { +export interface EntityLinkingSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.V3.EntityLinkingSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: string; /** A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */ minimumPrecision?: number; /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. 
*/ modelVersion?: string; -}; +} /** Using the Text Analytics API, extracts entities of different types from text. */ -export type EntityRecognitionSkillV3 = SearchIndexerSkill & { +export interface EntityRecognitionSkillV3 extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.V3.EntityRecognitionSkill"; /** A list of entity categories that should be extracted. */ categories?: string[]; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: string; /** A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */ minimumPrecision?: number; - /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ + /** The version of the model to use when calling the Text Analytics API. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ modelVersion?: string; -}; +} /** Using the Text Analytics API, extracts personal information from an input text and gives you the option of masking it. */ -export type PIIDetectionSkill = SearchIndexerSkill & { +export interface PIIDetectionSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.PIIDetectionSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. 
*/ defaultLanguageCode?: string; /** A value between 0 and 1 that be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */ minimumPrecision?: number; @@ -1493,16 +1556,16 @@ export type PIIDetectionSkill = SearchIndexerSkill & { /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ modelVersion?: string; /** A list of PII entity categories that should be extracted and masked. */ - piiCategories?: string[]; + categories?: string[]; /** If specified, will set the PII domain to include only a subset of the entity categories. Possible values include: 'phi', 'none'. Default is 'none'. */ domain?: string; -}; +} /** A skill to split a string into chunks of text. */ -export type SplitSkill = SearchIndexerSkill & { +export interface SplitSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.SplitSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: SplitSkillLanguage; /** A value indicating which split mode to perform. */ textSplitMode?: TextSplitMode; @@ -1512,13 +1575,13 @@ export type SplitSkill = SearchIndexerSkill & { pageOverlapLength?: number; /** Only applicable when textSplitMode is set to 'pages'. If specified, the SplitSkill will discontinue splitting after processing the first 'maximumPagesToTake' pages, in order to improve performance when only a few initial pages are needed from each document. */ maximumPagesToTake?: number; -}; +} /** A skill looks for text from a custom, user-defined list of words and phrases. 
*/ -export type CustomEntityLookupSkill = SearchIndexerSkill & { +export interface CustomEntityLookupSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.CustomEntityLookupSkill"; - /** A value indicating which language code to use. Default is en. */ + /** A value indicating which language code to use. Default is `en`. */ defaultLanguageCode?: CustomEntityLookupSkillLanguage; /** Path to a JSON or CSV file containing all the target text to match against. This entity definition is read at the beginning of an indexer run. Any updates to this file during an indexer run will not take effect until subsequent runs. This config must be accessible over HTTPS. */ entitiesDefinitionUri?: string; @@ -1530,22 +1593,22 @@ export type CustomEntityLookupSkill = SearchIndexerSkill & { globalDefaultAccentSensitive?: boolean; /** A global flag for FuzzyEditDistance. If FuzzyEditDistance is not set in CustomEntity, this value will be the default value. */ globalDefaultFuzzyEditDistance?: number; -}; +} /** A skill to translate text from one language to another. */ -export type TextTranslationSkill = SearchIndexerSkill & { +export interface TextTranslationSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.TranslationSkill"; /** The language code to translate documents into for documents that don't specify the to language explicitly. */ defaultToLanguageCode: TextTranslationSkillLanguage; /** The language code to translate documents from for documents that don't specify the from language explicitly. */ defaultFromLanguageCode?: TextTranslationSkillLanguage; - /** The language code to translate documents from when neither the fromLanguageCode input nor the defaultFromLanguageCode parameter are provided, and the automatic language detection is unsuccessful. Default is en. 
*/ + /** The language code to translate documents from when neither the fromLanguageCode input nor the defaultFromLanguageCode parameter are provided, and the automatic language detection is unsuccessful. Default is `en`. */ suggestedFrom?: TextTranslationSkillLanguage; -}; +} /** A skill that extracts content from a file within the enrichment pipeline. */ -export type DocumentExtractionSkill = SearchIndexerSkill & { +export interface DocumentExtractionSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Util.DocumentExtractionSkill"; /** The parsingMode for the skill. Will be set to 'default' if not defined. */ @@ -1554,10 +1617,10 @@ export type DocumentExtractionSkill = SearchIndexerSkill & { dataToExtract?: string; /** A dictionary of configurations for the skill. */ configuration?: { [propertyName: string]: any }; -}; +} /** A skill that can call a Web API endpoint, allowing you to extend a skillset by having it call your custom code. */ -export type WebApiSkill = SearchIndexerSkill & { +export interface WebApiSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Custom.WebApiSkill"; /** The url for the Web API. */ @@ -1576,10 +1639,10 @@ export type WebApiSkill = SearchIndexerSkill & { authResourceId?: string; /** The user-assigned managed identity used for outbound connections. If an authResourceId is provided and it's not specified, the system-assigned managed identity is used. On updates to the indexer, if the identity is unspecified, the value remains unchanged. If set to "none", the value of this property is cleared. */ authIdentity?: SearchIndexerDataIdentityUnion; -}; +} /** The AML skill allows you to extend AI enrichment with a custom Azure Machine Learning (AML) model. 
Once an AML model is trained and deployed, an AML skill integrates it into AI enrichment. */ -export type AzureMachineLearningSkill = SearchIndexerSkill & { +export interface AzureMachineLearningSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Custom.AmlSkill"; /** (Required for no authentication or key authentication) The scoring URI of the AML service to which the JSON payload will be sent. Only the https URI scheme is allowed. */ @@ -1594,82 +1657,85 @@ export type AzureMachineLearningSkill = SearchIndexerSkill & { region?: string; /** (Optional) When specified, indicates the number of calls the indexer will make in parallel to the endpoint you have provided. You can decrease this value if your endpoint is failing under too high of a request load, or raise it if your endpoint is able to accept more requests and you would like an increase in the performance of the indexer. If not set, a default value of 5 is used. The degreeOfParallelism can be set to a maximum of 10 and a minimum of 1. */ degreeOfParallelism?: number; -}; +} -/** Allows you to generate a vector embedding for a given text input using the Azure Open AI service. */ -export type AzureOpenAIEmbeddingSkill = SearchIndexerSkill & { +/** Allows you to generate a vector embedding for a given text input using the Azure OpenAI resource. */ +export interface AzureOpenAIEmbeddingSkill extends SearchIndexerSkill { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill"; - /** The resource uri for your Azure Open AI resource. */ + /** The resource URI for your Azure OpenAI resource. */ resourceUri?: string; - /** ID of your Azure Open AI model deployment on the designated resource. */ + /** ID of your Azure OpenAI model deployment on the designated resource. 
*/ deploymentId?: string; - /** API key for the designated Azure Open AI resource. */ + /** API key for the designated Azure OpenAI resource. */ apiKey?: string; /** The user-assigned managed identity used for outbound connections. */ authIdentity?: SearchIndexerDataIdentityUnion; -}; +} -/** An empty object that represents the default cognitive service resource for a skillset. */ -export type DefaultCognitiveServicesAccount = CognitiveServicesAccount & { +/** An empty object that represents the default Azure AI service resource for a skillset. */ +export interface DefaultCognitiveServicesAccount + extends CognitiveServicesAccount { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.DefaultCognitiveServices"; -}; +} -/** A cognitive service resource provisioned with a key that is attached to a skillset. */ -export type CognitiveServicesAccountKey = CognitiveServicesAccount & { +/** The multi-region account key of an Azure AI service resource that's attached to a skillset. */ +export interface CognitiveServicesAccountKey extends CognitiveServicesAccount { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.CognitiveServicesByKey"; - /** The key used to provision the cognitive service resource attached to a skillset. */ + /** The key used to provision the Azure AI service resource attached to a skillset. */ key: string; -}; +} /** Description for what data to store in Azure Tables. */ -export type SearchIndexerKnowledgeStoreTableProjectionSelector = SearchIndexerKnowledgeStoreProjectionSelector & { +export interface SearchIndexerKnowledgeStoreTableProjectionSelector + extends SearchIndexerKnowledgeStoreProjectionSelector { /** Name of the Azure table to store projected data in. */ tableName: string; -}; +} /** Abstract class to share properties between concrete selectors. 
*/ -export type SearchIndexerKnowledgeStoreBlobProjectionSelector = SearchIndexerKnowledgeStoreProjectionSelector & { +export interface SearchIndexerKnowledgeStoreBlobProjectionSelector + extends SearchIndexerKnowledgeStoreProjectionSelector { /** Blob container to store projections in. */ storageContainer: string; -}; +} /** Defines a function that boosts scores based on distance from a geographic location. */ -export type DistanceScoringFunction = ScoringFunction & { +export interface DistanceScoringFunction extends ScoringFunction { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "distance"; /** Parameter values for the distance scoring function. */ parameters: DistanceScoringParameters; -}; +} /** Defines a function that boosts scores based on the value of a date-time field. */ -export type FreshnessScoringFunction = ScoringFunction & { +export interface FreshnessScoringFunction extends ScoringFunction { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "freshness"; /** Parameter values for the freshness scoring function. */ parameters: FreshnessScoringParameters; -}; +} /** Defines a function that boosts scores based on the magnitude of a numeric field. */ -export type MagnitudeScoringFunction = ScoringFunction & { +export interface MagnitudeScoringFunction extends ScoringFunction { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "magnitude"; /** Parameter values for the magnitude scoring function. */ parameters: MagnitudeScoringParameters; -}; +} /** Defines a function that boosts scores of documents with string values matching a given list of tags. */ -export type TagScoringFunction = ScoringFunction & { +export interface TagScoringFunction extends ScoringFunction { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "tag"; /** Parameter values for the tag scoring function. 
*/ parameters: TagScoringParameters; -}; +} /** Allows you to take control over the process of converting text into indexable/searchable tokens. It's a user-defined configuration consisting of a single predefined tokenizer and one or more filters. The tokenizer is responsible for breaking text into tokens, and the filters for modifying tokens emitted by the tokenizer. */ -export type CustomAnalyzer = LexicalAnalyzer & { +export interface CustomAnalyzer extends LexicalAnalyzer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.CustomAnalyzer"; /** The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. KnownTokenizerNames is an enum containing known values. */ @@ -1678,10 +1744,10 @@ export type CustomAnalyzer = LexicalAnalyzer & { tokenFilters?: string[]; /** A list of character filters used to prepare input text before it is processed by the tokenizer. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. */ charFilters?: string[]; -}; +} /** Flexibly separates text into terms via a regular expression pattern. This analyzer is implemented using Apache Lucene. */ -export type PatternAnalyzer = LexicalAnalyzer & { +export interface PatternAnalyzer extends LexicalAnalyzer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.PatternAnalyzer"; /** A value indicating whether terms should be lower-cased. Default is true. */ @@ -1692,36 +1758,36 @@ export type PatternAnalyzer = LexicalAnalyzer & { flags?: string; /** A list of stopwords. */ stopwords?: string[]; -}; +} /** Standard Apache Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. 
*/ -export type LuceneStandardAnalyzer = LexicalAnalyzer & { +export interface LuceneStandardAnalyzer extends LexicalAnalyzer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.StandardAnalyzer"; /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */ maxTokenLength?: number; /** A list of stopwords. */ stopwords?: string[]; -}; +} /** Divides text at non-letters; Applies the lowercase and stopword token filters. This analyzer is implemented using Apache Lucene. */ -export type StopAnalyzer = LexicalAnalyzer & { +export interface StopAnalyzer extends LexicalAnalyzer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.StopAnalyzer"; /** A list of stopwords. */ stopwords?: string[]; -}; +} /** Grammar-based tokenizer that is suitable for processing most European-language documents. This tokenizer is implemented using Apache Lucene. */ -export type ClassicTokenizer = LexicalTokenizer & { +export interface ClassicTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.ClassicTokenizer"; /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */ maxTokenLength?: number; -}; +} /** Tokenizes the input from an edge into n-grams of the given size(s). This tokenizer is implemented using Apache Lucene. */ -export type EdgeNGramTokenizer = LexicalTokenizer & { +export interface EdgeNGramTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.EdgeNGramTokenizer"; /** The minimum n-gram length. Default is 1. 
Maximum is 300. Must be less than the value of maxGram. */ @@ -1730,26 +1796,26 @@ export type EdgeNGramTokenizer = LexicalTokenizer & { maxGram?: number; /** Character classes to keep in the tokens. */ tokenChars?: TokenCharacterKind[]; -}; +} /** Emits the entire input as a single token. This tokenizer is implemented using Apache Lucene. */ -export type KeywordTokenizer = LexicalTokenizer & { +export interface KeywordTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.KeywordTokenizer"; /** The read buffer size in bytes. Default is 256. */ bufferSize?: number; -}; +} /** Emits the entire input as a single token. This tokenizer is implemented using Apache Lucene. */ -export type KeywordTokenizerV2 = LexicalTokenizer & { +export interface KeywordTokenizerV2 extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.KeywordTokenizerV2"; /** The maximum token length. Default is 256. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */ maxTokenLength?: number; -}; +} /** Divides text using language-specific rules. */ -export type MicrosoftLanguageTokenizer = LexicalTokenizer & { +export interface MicrosoftLanguageTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer"; /** The maximum token length. Tokens longer than the maximum length are split. Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. 
*/ @@ -1758,10 +1824,10 @@ export type MicrosoftLanguageTokenizer = LexicalTokenizer & { isSearchTokenizer?: boolean; /** The language to use. The default is English. */ language?: MicrosoftTokenizerLanguage; -}; +} /** Divides text using language-specific rules and reduces words to their base forms. */ -export type MicrosoftLanguageStemmingTokenizer = LexicalTokenizer & { +export interface MicrosoftLanguageStemmingTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer"; /** The maximum token length. Tokens longer than the maximum length are split. Maximum token length that can be used is 300 characters. Tokens longer than 300 characters are first split into tokens of length 300 and then each of those tokens is split based on the max token length set. Default is 255. */ @@ -1770,10 +1836,10 @@ export type MicrosoftLanguageStemmingTokenizer = LexicalTokenizer & { isSearchTokenizer?: boolean; /** The language to use. The default is English. */ language?: MicrosoftStemmingTokenizerLanguage; -}; +} /** Tokenizes the input into n-grams of the given size(s). This tokenizer is implemented using Apache Lucene. */ -export type NGramTokenizer = LexicalTokenizer & { +export interface NGramTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.NGramTokenizer"; /** The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. */ @@ -1782,10 +1848,10 @@ export type NGramTokenizer = LexicalTokenizer & { maxGram?: number; /** Character classes to keep in the tokens. */ tokenChars?: TokenCharacterKind[]; -}; +} /** Tokenizer for path-like hierarchies. This tokenizer is implemented using Apache Lucene. 
*/ -export type PathHierarchyTokenizerV2 = LexicalTokenizer & { +export interface PathHierarchyTokenizerV2 extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.PathHierarchyTokenizerV2"; /** The delimiter character to use. Default is "/". */ @@ -1798,10 +1864,10 @@ export type PathHierarchyTokenizerV2 = LexicalTokenizer & { reverseTokenOrder?: boolean; /** The number of initial tokens to skip. Default is 0. */ numberOfTokensToSkip?: number; -}; +} /** Tokenizer that uses regex pattern matching to construct distinct tokens. This tokenizer is implemented using Apache Lucene. */ -export type PatternTokenizer = LexicalTokenizer & { +export interface PatternTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.PatternTokenizer"; /** A regular expression pattern to match token separators. Default is an expression that matches one or more non-word characters. */ @@ -1810,52 +1876,52 @@ export type PatternTokenizer = LexicalTokenizer & { flags?: string; /** The zero-based ordinal of the matching group in the regular expression pattern to extract into tokens. Use -1 if you want to use the entire pattern to split the input into tokens, irrespective of matching groups. Default is -1. */ group?: number; -}; +} /** Breaks text following the Unicode Text Segmentation rules. This tokenizer is implemented using Apache Lucene. */ -export type LuceneStandardTokenizer = LexicalTokenizer & { +export interface LuceneStandardTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.StandardTokenizer"; /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. 
*/ maxTokenLength?: number; -}; +} /** Breaks text following the Unicode Text Segmentation rules. This tokenizer is implemented using Apache Lucene. */ -export type LuceneStandardTokenizerV2 = LexicalTokenizer & { +export interface LuceneStandardTokenizerV2 extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.StandardTokenizerV2"; /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */ maxTokenLength?: number; -}; +} /** Tokenizes urls and emails as one token. This tokenizer is implemented using Apache Lucene. */ -export type UaxUrlEmailTokenizer = LexicalTokenizer & { +export interface UaxUrlEmailTokenizer extends LexicalTokenizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.UaxUrlEmailTokenizer"; /** The maximum token length. Default is 255. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters. */ maxTokenLength?: number; -}; +} /** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. This token filter is implemented using Apache Lucene. */ -export type AsciiFoldingTokenFilter = TokenFilter & { +export interface AsciiFoldingTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.AsciiFoldingTokenFilter"; /** A value indicating whether the original token will be kept. Default is false. */ preserveOriginal?: boolean; -}; +} /** Forms bigrams of CJK terms that are generated from the standard tokenizer. This token filter is implemented using Apache Lucene. 
*/ -export type CjkBigramTokenFilter = TokenFilter & { +export interface CjkBigramTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.CjkBigramTokenFilter"; /** The scripts to ignore. */ ignoreScripts?: CjkBigramTokenFilterScripts[]; /** A value indicating whether to output both unigrams and bigrams (if true), or just bigrams (if false). Default is false. */ outputUnigrams?: boolean; -}; +} /** Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. This token filter is implemented using Apache Lucene. */ -export type CommonGramTokenFilter = TokenFilter & { +export interface CommonGramTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.CommonGramTokenFilter"; /** The set of common words. */ @@ -1864,10 +1930,10 @@ export type CommonGramTokenFilter = TokenFilter & { ignoreCase?: boolean; /** A value that indicates whether the token filter is in query mode. When in query mode, the token filter generates bigrams and then removes common words and single terms followed by a common word. Default is false. */ useQueryMode?: boolean; -}; +} /** Decomposes compound words found in many Germanic languages. This token filter is implemented using Apache Lucene. */ -export type DictionaryDecompounderTokenFilter = TokenFilter & { +export interface DictionaryDecompounderTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter"; /** The list of words to match against. */ @@ -1880,10 +1946,10 @@ export type DictionaryDecompounderTokenFilter = TokenFilter & { maxSubwordSize?: number; /** A value indicating whether to add only the longest matching subword to the output. 
Default is false. */ onlyLongestMatch?: boolean; -}; +} /** Generates n-grams of the given size(s) starting from the front or the back of an input token. This token filter is implemented using Apache Lucene. */ -export type EdgeNGramTokenFilter = TokenFilter & { +export interface EdgeNGramTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.EdgeNGramTokenFilter"; /** The minimum n-gram length. Default is 1. Must be less than the value of maxGram. */ @@ -1892,10 +1958,10 @@ export type EdgeNGramTokenFilter = TokenFilter & { maxGram?: number; /** Specifies which side of the input the n-gram should be generated from. Default is "front". */ side?: EdgeNGramTokenFilterSide; -}; +} /** Generates n-grams of the given size(s) starting from the front or the back of an input token. This token filter is implemented using Apache Lucene. */ -export type EdgeNGramTokenFilterV2 = TokenFilter & { +export interface EdgeNGramTokenFilterV2 extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.EdgeNGramTokenFilterV2"; /** The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. */ @@ -1904,108 +1970,108 @@ export type EdgeNGramTokenFilterV2 = TokenFilter & { maxGram?: number; /** Specifies which side of the input the n-gram should be generated from. Default is "front". */ side?: EdgeNGramTokenFilterSide; -}; +} /** Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). This token filter is implemented using Apache Lucene. */ -export type ElisionTokenFilter = TokenFilter & { +export interface ElisionTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.ElisionTokenFilter"; /** The set of articles to remove. 
*/ articles?: string[]; -}; +} /** A token filter that only keeps tokens with text contained in a specified list of words. This token filter is implemented using Apache Lucene. */ -export type KeepTokenFilter = TokenFilter & { +export interface KeepTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.KeepTokenFilter"; /** The list of words to keep. */ keepWords: string[]; /** A value indicating whether to lower case all words first. Default is false. */ lowerCaseKeepWords?: boolean; -}; +} /** Marks terms as keywords. This token filter is implemented using Apache Lucene. */ -export type KeywordMarkerTokenFilter = TokenFilter & { +export interface KeywordMarkerTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.KeywordMarkerTokenFilter"; /** A list of words to mark as keywords. */ keywords: string[]; /** A value indicating whether to ignore case. If true, all words are converted to lower case first. Default is false. */ ignoreCase?: boolean; -}; +} /** Removes words that are too long or too short. This token filter is implemented using Apache Lucene. */ -export type LengthTokenFilter = TokenFilter & { +export interface LengthTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.LengthTokenFilter"; /** The minimum length in characters. Default is 0. Maximum is 300. Must be less than the value of max. */ minLength?: number; /** The maximum length in characters. Default and maximum is 300. */ maxLength?: number; -}; +} /** Limits the number of tokens while indexing. This token filter is implemented using Apache Lucene. 
*/ -export type LimitTokenFilter = TokenFilter & { +export interface LimitTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.LimitTokenFilter"; /** The maximum number of tokens to produce. Default is 1. */ maxTokenCount?: number; /** A value indicating whether all tokens from the input must be consumed even if maxTokenCount is reached. Default is false. */ consumeAllTokens?: boolean; -}; +} /** Generates n-grams of the given size(s). This token filter is implemented using Apache Lucene. */ -export type NGramTokenFilter = TokenFilter & { +export interface NGramTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.NGramTokenFilter"; /** The minimum n-gram length. Default is 1. Must be less than the value of maxGram. */ minGram?: number; /** The maximum n-gram length. Default is 2. */ maxGram?: number; -}; +} /** Generates n-grams of the given size(s). This token filter is implemented using Apache Lucene. */ -export type NGramTokenFilterV2 = TokenFilter & { +export interface NGramTokenFilterV2 extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.NGramTokenFilterV2"; /** The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram. */ minGram?: number; /** The maximum n-gram length. Default is 2. Maximum is 300. */ maxGram?: number; -}; +} /** Uses Java regexes to emit multiple tokens - one for each capture group in one or more patterns. This token filter is implemented using Apache Lucene. 
*/ -export type PatternCaptureTokenFilter = TokenFilter & { +export interface PatternCaptureTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.PatternCaptureTokenFilter"; /** A list of patterns to match against each token. */ patterns: string[]; /** A value indicating whether to return the original token even if one of the patterns matches. Default is true. */ preserveOriginal?: boolean; -}; +} /** A character filter that replaces characters in the input string. It uses a regular expression to identify character sequences to preserve and a replacement pattern to identify characters to replace. For example, given the input text "aa bb aa bb", pattern "(aa)\s+(bb)", and replacement "$1#$2", the result would be "aa#bb aa#bb". This token filter is implemented using Apache Lucene. */ -export type PatternReplaceTokenFilter = TokenFilter & { +export interface PatternReplaceTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.PatternReplaceTokenFilter"; /** A regular expression pattern. */ pattern: string; /** The replacement text. */ replacement: string; -}; +} /** Create tokens for phonetic matches. This token filter is implemented using Apache Lucene. */ -export type PhoneticTokenFilter = TokenFilter & { +export interface PhoneticTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.PhoneticTokenFilter"; /** The phonetic encoder to use. Default is "metaphone". */ encoder?: PhoneticEncoder; /** A value indicating whether encoded tokens should replace original tokens. If false, encoded tokens are added as synonyms. Default is true. */ replaceOriginalTokens?: boolean; -}; +} /** Creates combinations of tokens as a single token. 
This token filter is implemented using Apache Lucene. */ -export type ShingleTokenFilter = TokenFilter & { +export interface ShingleTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.ShingleTokenFilter"; /** The maximum shingle size. Default and minimum value is 2. */ @@ -2020,34 +2086,34 @@ export type ShingleTokenFilter = TokenFilter & { tokenSeparator?: string; /** The string to insert for each position at which there is no token. Default is an underscore ("_"). */ filterToken?: string; -}; +} /** A filter that stems words using a Snowball-generated stemmer. This token filter is implemented using Apache Lucene. */ -export type SnowballTokenFilter = TokenFilter & { +export interface SnowballTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.SnowballTokenFilter"; /** The language to use. */ language: SnowballTokenFilterLanguage; -}; +} /** Language specific stemming filter. This token filter is implemented using Apache Lucene. */ -export type StemmerTokenFilter = TokenFilter & { +export interface StemmerTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.StemmerTokenFilter"; /** The language to use. */ language: StemmerTokenFilterLanguage; -}; +} /** Provides the ability to override other stemming filters with custom dictionary-based stemming. Any dictionary-stemmed terms will be marked as keywords so that they will not be stemmed with stemmers down the chain. Must be placed before any stemming filters. This token filter is implemented using Apache Lucene. 
*/ -export type StemmerOverrideTokenFilter = TokenFilter & { +export interface StemmerOverrideTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.StemmerOverrideTokenFilter"; /** A list of stemming rules in the following format: "word => stem", for example: "ran => run". */ rules: string[]; -}; +} /** Removes stop words from a token stream. This token filter is implemented using Apache Lucene. */ -export type StopwordsTokenFilter = TokenFilter & { +export interface StopwordsTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.StopwordsTokenFilter"; /** The list of stopwords. This property and the stopwords list property cannot both be set. */ @@ -2058,10 +2124,10 @@ export type StopwordsTokenFilter = TokenFilter & { ignoreCase?: boolean; /** A value indicating whether to ignore the last search term if it's a stop word. Default is true. */ removeTrailingStopWords?: boolean; -}; +} /** Matches single or multi-word synonyms in a token stream. This token filter is implemented using Apache Lucene. */ -export type SynonymTokenFilter = TokenFilter & { +export interface SynonymTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.SynonymTokenFilter"; /** A list of synonyms in following one of two formats: 1. incredible, unbelievable, fabulous => amazing - all terms on the left side of => symbol will be replaced with all terms on its right side; 2. incredible, unbelievable, fabulous, amazing - comma separated list of equivalent words. Set the expand option to change how this list is interpreted. 
*/ @@ -2070,26 +2136,26 @@ export type SynonymTokenFilter = TokenFilter & { ignoreCase?: boolean; /** A value indicating whether all words in the list of synonyms (if => notation is not used) will map to one another. If true, all words in the list of synonyms (if => notation is not used) will map to one another. The following list: incredible, unbelievable, fabulous, amazing is equivalent to: incredible, unbelievable, fabulous, amazing => incredible, unbelievable, fabulous, amazing. If false, the following list: incredible, unbelievable, fabulous, amazing will be equivalent to: incredible, unbelievable, fabulous, amazing => incredible. Default is true. */ expand?: boolean; -}; +} /** Truncates the terms to a specific length. This token filter is implemented using Apache Lucene. */ -export type TruncateTokenFilter = TokenFilter & { +export interface TruncateTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.TruncateTokenFilter"; /** The length at which terms will be truncated. Default and maximum is 300. */ length?: number; -}; +} /** Filters out tokens with same text as the previous token. This token filter is implemented using Apache Lucene. */ -export type UniqueTokenFilter = TokenFilter & { +export interface UniqueTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.UniqueTokenFilter"; /** A value indicating whether to remove duplicates only at the same position. Default is false. */ onlyOnSamePosition?: boolean; -}; +} /** Splits words into subwords and performs optional transformations on subword groups. This token filter is implemented using Apache Lucene. 
*/ -export type WordDelimiterTokenFilter = TokenFilter & { +export interface WordDelimiterTokenFilter extends TokenFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.WordDelimiterTokenFilter"; /** A value indicating whether to generate part words. If set, causes parts of words to be generated; for example "AzureSearch" becomes "Azure" "Search". Default is true. */ @@ -2112,104 +2178,117 @@ export type WordDelimiterTokenFilter = TokenFilter & { stemEnglishPossessive?: boolean; /** A list of tokens to protect from being delimited. */ protectedWords?: string[]; -}; +} /** A character filter that applies mappings defined with the mappings option. Matching is greedy (longest pattern matching at a given point wins). Replacement is allowed to be the empty string. This character filter is implemented using Apache Lucene. */ -export type MappingCharFilter = CharFilter & { +export interface MappingCharFilter extends CharFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.MappingCharFilter"; /** A list of mappings of the following format: "a=>b" (all occurrences of the character "a" will be replaced with character "b"). */ mappings: string[]; -}; +} /** A character filter that replaces characters in the input string. It uses a regular expression to identify character sequences to preserve and a replacement pattern to identify characters to replace. For example, given the input text "aa bb aa bb", pattern "(aa)\s+(bb)", and replacement "$1#$2", the result would be "aa#bb aa#bb". This character filter is implemented using Apache Lucene. 
*/ -export type PatternReplaceCharFilter = CharFilter & { +export interface PatternReplaceCharFilter extends CharFilter { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.PatternReplaceCharFilter"; /** A regular expression pattern. */ pattern: string; /** The replacement text. */ replacement: string; -}; +} /** Allows you to configure normalization for filterable, sortable, and facetable fields, which by default operate with strict matching. This is a user-defined configuration consisting of at least one or more filters, which modify the token that is stored. */ -export type CustomNormalizer = LexicalNormalizer & { +export interface CustomNormalizer extends LexicalNormalizer { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.CustomNormalizer"; /** A list of token filters used to filter out or modify the input token. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. */ tokenFilters?: TokenFilterName[]; /** A list of character filters used to prepare input text before it is processed. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. */ charFilters?: CharFilterName[]; -}; +} /** Legacy similarity algorithm which uses the Lucene TFIDFSimilarity implementation of TF-IDF. This variation of TF-IDF introduces static document length normalization as well as coordinating factors that penalize documents that only partially match the searched queries. 
*/ -export type ClassicSimilarity = Similarity & { +export interface ClassicSimilarity extends Similarity { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.ClassicSimilarity"; -}; +} /** Ranking function based on the Okapi BM25 similarity algorithm. BM25 is a TF-IDF-like algorithm that includes length normalization (controlled by the 'b' parameter) as well as term frequency saturation (controlled by the 'k1' parameter). */ -export type BM25Similarity = Similarity & { +export interface BM25Similarity extends Similarity { /** Polymorphic discriminator, which specifies the different types this object can be */ odatatype: "#Microsoft.Azure.Search.BM25Similarity"; /** This property controls the scaling function between the term frequency of each matching terms and the final relevance score of a document-query pair. By default, a value of 1.2 is used. A value of 0.0 means the score does not scale with an increase in term frequency. */ k1?: number; /** This property controls how the length of a document affects the relevance score. By default, a value of 0.75 is used. A value of 0.0 means no length normalization is applied, while a value of 1.0 means the score is fully normalized by the length of the document. */ b?: number; -}; +} -/** Contains configuration options specific to the hnsw approximate nearest neighbors algorithm used during indexing and querying. The hnsw algorithm offers a tunable trade-off between search speed and accuracy. */ -export type HnswVectorSearchAlgorithmConfiguration = VectorSearchAlgorithmConfiguration & { +/** Contains configuration options specific to the HNSW approximate nearest neighbors algorithm used during indexing and querying. The HNSW algorithm offers a tunable trade-off between search speed and accuracy. 
*/ +export interface HnswAlgorithmConfiguration + extends VectorSearchAlgorithmConfiguration { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "hnsw"; - /** Contains the parameters specific to hnsw algorithm. */ + /** Contains the parameters specific to HNSW algorithm. */ parameters?: HnswParameters; -}; +} /** Contains configuration options specific to the exhaustive KNN algorithm used during querying, which will perform brute-force search across the entire vector index. */ -export type ExhaustiveKnnVectorSearchAlgorithmConfiguration = VectorSearchAlgorithmConfiguration & { +export interface ExhaustiveKnnAlgorithmConfiguration + extends VectorSearchAlgorithmConfiguration { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "exhaustiveKnn"; /** Contains the parameters specific to exhaustive KNN algorithm. */ parameters?: ExhaustiveKnnParameters; -}; +} -/** Contains the parameters specific to using an Azure Open AI service for vectorization at query time. */ -export type AzureOpenAIVectorizer = VectorSearchVectorizer & { +/** Specifies the Azure OpenAI resource used to vectorize a query string. */ +export interface AzureOpenAIVectorizer extends VectorSearchVectorizer { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "azureOpenAI"; - /** Contains the parameters specific to Azure Open AI embedding vectorization. */ + /** Contains the parameters specific to Azure OpenAI embedding vectorization. */ azureOpenAIParameters?: AzureOpenAIParameters; -}; +} -/** Contains the parameters specific to generating vector embeddings via a custom endpoint. */ -export type CustomVectorizer = VectorSearchVectorizer & { +/** Specifies a user-defined vectorizer for generating the vector embedding of a query string. Integration of an external vectorizer is achieved using the custom Web API interface of a skillset. 
*/ +export interface CustomVectorizer extends VectorSearchVectorizer { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "customWebApi"; - /** Contains the parameters specific to generating vector embeddings via a custom endpoint. */ - customVectorizerParameters?: CustomVectorizerParameters; -}; + /** Specifies the properties of the user-defined vectorizer. */ + customWebApiParameters?: CustomWebApiParameters; +} + +/** Contains configuration options specific to the scalar quantization compression method used during indexing and querying. */ +export interface ScalarQuantizationCompressionConfiguration + extends BaseVectorSearchCompressionConfiguration { + /** Polymorphic discriminator, which specifies the different types this object can be */ + kind: "scalarQuantization"; + /** Contains the parameters specific to Scalar Quantization. */ + parameters?: ScalarQuantizationParameters; +} /** Projection definition for what data to store in Azure Blob. */ -export type SearchIndexerKnowledgeStoreObjectProjectionSelector = SearchIndexerKnowledgeStoreBlobProjectionSelector & {}; +export interface SearchIndexerKnowledgeStoreObjectProjectionSelector + extends SearchIndexerKnowledgeStoreBlobProjectionSelector {} /** Projection definition for what data to store in Azure Files. */ -export type SearchIndexerKnowledgeStoreFileProjectionSelector = SearchIndexerKnowledgeStoreBlobProjectionSelector & {}; +export interface SearchIndexerKnowledgeStoreFileProjectionSelector + extends SearchIndexerKnowledgeStoreBlobProjectionSelector {} -/** Known values of {@link ApiVersion20231001Preview} that the service accepts. */ -export enum KnownApiVersion20231001Preview { - /** Api Version '2023-10-01-Preview' */ - TwoThousandTwentyThree1001Preview = "2023-10-01-Preview" +/** Known values of {@link ApiVersion20240301Preview} that the service accepts. 
*/ +export enum KnownApiVersion20240301Preview { + /** Api Version '2024-03-01-Preview' */ + TwoThousandTwentyFour0301Preview = "2024-03-01-Preview", } /** - * Defines values for ApiVersion20231001Preview. \ - * {@link KnownApiVersion20231001Preview} can be used interchangeably with ApiVersion20231001Preview, + * Defines values for ApiVersion20240301Preview. \ + * {@link KnownApiVersion20240301Preview} can be used interchangeably with ApiVersion20240301Preview, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **2023-10-01-Preview**: Api Version '2023-10-01-Preview' + * **2024-03-01-Preview**: Api Version '2024-03-01-Preview' */ -export type ApiVersion20231001Preview = string; +export type ApiVersion20240301Preview = string; /** Known values of {@link SearchIndexerDataSourceType} that the service accepts. */ export enum KnownSearchIndexerDataSourceType { @@ -2224,7 +2303,7 @@ export enum KnownSearchIndexerDataSourceType { /** Indicates a MySql datasource. */ MySql = "mysql", /** Indicates an ADLS Gen2 datasource. */ - AdlsGen2 = "adlsgen2" + AdlsGen2 = "adlsgen2", } /** @@ -2251,10 +2330,10 @@ export enum KnownBlobIndexerParsingMode { DelimitedText = "delimitedText", /** Set to json to extract structured content from JSON files. */ Json = "json", - /** Set to jsonArray to extract individual elements of a JSON array as separate documents in Azure Cognitive Search. */ + /** Set to jsonArray to extract individual elements of a JSON array as separate documents. */ JsonArray = "jsonArray", - /** Set to jsonLines to extract individual JSON entities, separated by a new line, as separate documents in Azure Cognitive Search. */ - JsonLines = "jsonLines" + /** Set to jsonLines to extract individual JSON entities, separated by a new line, as separate documents. 
*/ + JsonLines = "jsonLines", } /** @@ -2266,8 +2345,8 @@ export enum KnownBlobIndexerParsingMode { * **text**: Set to text to improve indexing performance on plain text files in blob storage. \ * **delimitedText**: Set to delimitedText when blobs are plain CSV files. \ * **json**: Set to json to extract structured content from JSON files. \ - * **jsonArray**: Set to jsonArray to extract individual elements of a JSON array as separate documents in Azure Cognitive Search. \ - * **jsonLines**: Set to jsonLines to extract individual JSON entities, separated by a new line, as separate documents in Azure Cognitive Search. + * **jsonArray**: Set to jsonArray to extract individual elements of a JSON array as separate documents. \ + * **jsonLines**: Set to jsonLines to extract individual JSON entities, separated by a new line, as separate documents. */ export type BlobIndexerParsingMode = string; @@ -2278,7 +2357,7 @@ export enum KnownBlobIndexerDataToExtract { /** Extracts metadata provided by the Azure blob storage subsystem and the content-type specific metadata (for example, metadata unique to just .png files are indexed). */ AllMetadata = "allMetadata", /** Extracts all metadata and textual content from each blob. */ - ContentAndMetadata = "contentAndMetadata" + ContentAndMetadata = "contentAndMetadata", } /** @@ -2299,7 +2378,7 @@ export enum KnownBlobIndexerImageAction { /** Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field. This action requires that "dataToExtract" is set to "contentAndMetadata". A normalized image refers to additional processing resulting in uniform image output, sized and rotated to promote consistent rendering when you include images in visual search results. This information is generated for each image when you use this option. 
*/ GenerateNormalizedImages = "generateNormalizedImages", /** Extracts text from images (for example, the word "STOP" from a traffic stop sign), and embeds it into the content field, but treats PDF files differently in that each page will be rendered as an image and normalized accordingly, instead of extracting embedded images. Non-PDF file types will be treated the same as if "generateNormalizedImages" was set. */ - GenerateNormalizedImagePerPage = "generateNormalizedImagePerPage" + GenerateNormalizedImagePerPage = "generateNormalizedImagePerPage", } /** @@ -2318,7 +2397,7 @@ export enum KnownBlobIndexerPDFTextRotationAlgorithm { /** Leverages normal text extraction. This is the default. */ None = "none", /** May produce better and more readable text extraction from PDF files that have rotated text within them. Note that there may be a small performance speed impact when this parameter is used. This parameter only applies to PDF files, and only to PDFs with embedded text. If the rotated text appears within an embedded image in the PDF, this parameter does not apply. */ - DetectAngles = "detectAngles" + DetectAngles = "detectAngles", } /** @@ -2333,10 +2412,10 @@ export type BlobIndexerPDFTextRotationAlgorithm = string; /** Known values of {@link IndexerExecutionEnvironment} that the service accepts. */ export enum KnownIndexerExecutionEnvironment { - /** Indicates that Azure Cognitive Search can determine where the indexer should execute. This is the default environment when nothing is specified and is the recommended value. */ + /** Indicates that the search service can determine where the indexer should execute. This is the default environment when nothing is specified and is the recommended value. */ Standard = "standard", /** Indicates that the indexer should run with the environment provisioned specifically for the search service. 
This should only be specified as the execution environment if the indexer needs to access resources securely over shared private link resources. */ - Private = "private" + Private = "private", } /** @@ -2344,7 +2423,7 @@ export enum KnownIndexerExecutionEnvironment { * {@link KnownIndexerExecutionEnvironment} can be used interchangeably with IndexerExecutionEnvironment, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **standard**: Indicates that Azure Cognitive Search can determine where the indexer should execute. This is the default environment when nothing is specified and is the recommended value. \ + * **standard**: Indicates that the search service can determine where the indexer should execute. This is the default environment when nothing is specified and is the recommended value. \ * **private**: Indicates that the indexer should run with the environment provisioned specifically for the search service. This should only be specified as the execution environment if the indexer needs to access resources securely over shared private link resources. */ export type IndexerExecutionEnvironment = string; @@ -2352,7 +2431,7 @@ export type IndexerExecutionEnvironment = string; /** Known values of {@link IndexerExecutionStatusDetail} that the service accepts. */ export enum KnownIndexerExecutionStatusDetail { /** Indicates that the reset that occurred was for a call to ResetDocs. */ - ResetDocs = "resetDocs" + ResetDocs = "resetDocs", } /** @@ -2369,7 +2448,7 @@ export enum KnownIndexingMode { /** The indexer is indexing all documents in the datasource. */ IndexingAllDocs = "indexingAllDocs", /** The indexer is indexing selective, reset documents in the datasource. The documents being indexed are defined on indexer status. 
*/ - IndexingResetDocs = "indexingResetDocs" + IndexingResetDocs = "indexingResetDocs", } /** @@ -2387,7 +2466,7 @@ export enum KnownIndexProjectionMode { /** The source document will be skipped from writing into the indexer's target index. */ SkipIndexingParentDocuments = "skipIndexingParentDocuments", /** The source document will be written into the indexer's target index. This is the default pattern. */ - IncludeIndexingParentDocuments = "includeIndexingParentDocuments" + IncludeIndexingParentDocuments = "includeIndexingParentDocuments", } /** @@ -2412,14 +2491,20 @@ export enum KnownSearchFieldDataType { Double = "Edm.Double", /** Indicates that a field contains a Boolean value (true or false). */ Boolean = "Edm.Boolean", - /** Indicates that a field contains a date/time value, including timezone information. */ + /** Indicates that a field contains a date\/time value, including timezone information. */ DateTimeOffset = "Edm.DateTimeOffset", /** Indicates that a field contains a geo-location in terms of longitude and latitude. */ GeographyPoint = "Edm.GeographyPoint", /** Indicates that a field contains one or more complex objects that in turn have sub-fields of other types. */ Complex = "Edm.ComplexType", /** Indicates that a field contains a single-precision floating point number. This is only valid when used with Collection(Edm.Single). */ - Single = "Edm.Single" + Single = "Edm.Single", + /** Indicates that a field contains a half-precision floating point number. This is only valid when used with Collection(Edm.Half). */ + Half = "Edm.Half", + /** Indicates that a field contains a 16-bit signed integer. This is only valid when used with Collection(Edm.Int16). */ + Int16 = "Edm.Int16", + /** Indicates that a field contains a 8-bit signed integer. This is only valid when used with Collection(Edm.SByte). 
*/ + SByte = "Edm.SByte", } /** @@ -2435,7 +2520,10 @@ export enum KnownSearchFieldDataType { * **Edm.DateTimeOffset**: Indicates that a field contains a date\/time value, including timezone information. \ * **Edm.GeographyPoint**: Indicates that a field contains a geo-location in terms of longitude and latitude. \ * **Edm.ComplexType**: Indicates that a field contains one or more complex objects that in turn have sub-fields of other types. \ - * **Edm.Single**: Indicates that a field contains a single-precision floating point number. This is only valid when used with Collection(Edm.Single). + * **Edm.Single**: Indicates that a field contains a single-precision floating point number. This is only valid when used with Collection(Edm.Single). \ + * **Edm.Half**: Indicates that a field contains a half-precision floating point number. This is only valid when used with Collection(Edm.Half). \ + * **Edm.Int16**: Indicates that a field contains a 16-bit signed integer. This is only valid when used with Collection(Edm.Int16). \ + * **Edm.SByte**: Indicates that a field contains a 8-bit signed integer. This is only valid when used with Collection(Edm.SByte). */ export type SearchFieldDataType = string; @@ -2615,18 +2703,18 @@ export enum KnownLexicalAnalyzerName { ViMicrosoft = "vi.microsoft", /** Standard Lucene analyzer. */ StandardLucene = "standard.lucene", - /** Standard ASCII Folding Lucene analyzer. See https://docs.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#Analyzers */ + /** Standard ASCII Folding Lucene analyzer. See https:\//docs.microsoft.com\/rest\/api\/searchservice\/Custom-analyzers-in-Azure-Search#Analyzers */ StandardAsciiFoldingLucene = "standardasciifolding.lucene", - /** Treats the entire content of a field as a single token. This is useful for data like zip codes, ids, and some product names. 
See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordAnalyzer.html */ + /** Treats the entire content of a field as a single token. This is useful for data like zip codes, ids, and some product names. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/KeywordAnalyzer.html */ Keyword = "keyword", - /** Flexibly separates text into terms via a regular expression pattern. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/PatternAnalyzer.html */ + /** Flexibly separates text into terms via a regular expression pattern. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/PatternAnalyzer.html */ Pattern = "pattern", - /** Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/SimpleAnalyzer.html */ + /** Divides text at non-letters and converts them to lower case. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/SimpleAnalyzer.html */ Simple = "simple", - /** Divides text at non-letters; Applies the lowercase and stopword token filters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopAnalyzer.html */ + /** Divides text at non-letters; Applies the lowercase and stopword token filters. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/StopAnalyzer.html */ Stop = "stop", - /** An analyzer that uses the whitespace tokenizer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceAnalyzer.html */ - Whitespace = "whitespace" + /** An analyzer that uses the whitespace tokenizer. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/WhitespaceAnalyzer.html */ + Whitespace = "whitespace", } /** @@ -2732,16 +2820,16 @@ export type LexicalAnalyzerName = string; /** Known values of {@link LexicalNormalizerName} that the service accepts. */ export enum KnownLexicalNormalizerName { - /** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html */ + /** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/ASCIIFoldingFilter.html */ AsciiFolding = "asciifolding", - /** Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html */ + /** Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/util\/ElisionFilter.html */ Elision = "elision", - /** Normalizes token text to lowercase. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html */ + /** Normalizes token text to lowercase. See https:\//lucene.apache.org\/core\/6_6_1\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/LowerCaseFilter.html */ Lowercase = "lowercase", - /** Standard normalizer, which consists of lowercase and asciifolding. 
See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html */ + /** Standard normalizer, which consists of lowercase and asciifolding. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/reverse\/ReverseStringFilter.html */ Standard = "standard", - /** Normalizes token text to uppercase. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html */ - Uppercase = "uppercase" + /** Normalizes token text to uppercase. See https:\//lucene.apache.org\/core\/6_6_1\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/UpperCaseFilter.html */ + Uppercase = "uppercase", } /** @@ -2759,10 +2847,10 @@ export type LexicalNormalizerName = string; /** Known values of {@link VectorSearchAlgorithmKind} that the service accepts. */ export enum KnownVectorSearchAlgorithmKind { - /** Hnsw (Hierarchical Navigable Small World), a type of approximate nearest neighbors algorithm. */ + /** HNSW (Hierarchical Navigable Small World), a type of approximate nearest neighbors algorithm. */ Hnsw = "hnsw", /** Exhaustive KNN algorithm which will perform brute-force search. */ - ExhaustiveKnn = "exhaustiveKnn" + ExhaustiveKnn = "exhaustiveKnn", } /** @@ -2770,17 +2858,17 @@ export enum KnownVectorSearchAlgorithmKind { * {@link KnownVectorSearchAlgorithmKind} can be used interchangeably with VectorSearchAlgorithmKind, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **hnsw**: Hnsw (Hierarchical Navigable Small World), a type of approximate nearest neighbors algorithm. \ + * **hnsw**: HNSW (Hierarchical Navigable Small World), a type of approximate nearest neighbors algorithm. \ * **exhaustiveKnn**: Exhaustive KNN algorithm which will perform brute-force search. 
*/ export type VectorSearchAlgorithmKind = string; /** Known values of {@link VectorSearchVectorizerKind} that the service accepts. */ export enum KnownVectorSearchVectorizerKind { - /** Generate embeddings using an Azure Open AI service at query time. */ + /** Generate embeddings using an Azure OpenAI resource at query time. */ AzureOpenAI = "azureOpenAI", /** Generate embeddings using a custom web endpoint at query time. */ - CustomWebApi = "customWebApi" + CustomWebApi = "customWebApi", } /** @@ -2788,81 +2876,96 @@ export enum KnownVectorSearchVectorizerKind { * {@link KnownVectorSearchVectorizerKind} can be used interchangeably with VectorSearchVectorizerKind, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **azureOpenAI**: Generate embeddings using an Azure Open AI service at query time. \ + * **azureOpenAI**: Generate embeddings using an Azure OpenAI resource at query time. \ * **customWebApi**: Generate embeddings using a custom web endpoint at query time. */ export type VectorSearchVectorizerKind = string; +/** Known values of {@link VectorSearchCompressionKind} that the service accepts. */ +export enum KnownVectorSearchCompressionKind { + /** Scalar Quantization, a type of compression method. In scalar quantization, the original vectors values are compressed to a narrower type by discretizing and representing each component of a vector using a reduced set of quantized values, thereby reducing the overall data size. */ + ScalarQuantization = "scalarQuantization", +} + +/** + * Defines values for VectorSearchCompressionKind. \ + * {@link KnownVectorSearchCompressionKind} can be used interchangeably with VectorSearchCompressionKind, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **scalarQuantization**: Scalar Quantization, a type of compression method. 
In scalar quantization, the original vectors values are compressed to a narrower type by discretizing and representing each component of a vector using a reduced set of quantized values, thereby reducing the overall data size. + */ +export type VectorSearchCompressionKind = string; + /** Known values of {@link TokenFilterName} that the service accepts. */ export enum KnownTokenFilterName { - /** A token filter that applies the Arabic normalizer to normalize the orthography. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ar/ArabicNormalizationFilter.html */ + /** A token filter that applies the Arabic normalizer to normalize the orthography. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/ar\/ArabicNormalizationFilter.html */ ArabicNormalization = "arabic_normalization", - /** Strips all characters after an apostrophe (including the apostrophe itself). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/tr/ApostropheFilter.html */ + /** Strips all characters after an apostrophe (including the apostrophe itself). See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/tr\/ApostropheFilter.html */ Apostrophe = "apostrophe", - /** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ASCIIFoldingFilter.html */ + /** Converts alphabetic, numeric, and symbolic Unicode characters which are not in the first 127 ASCII characters (the "Basic Latin" Unicode block) into their ASCII equivalents, if such equivalents exist. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/ASCIIFoldingFilter.html */ AsciiFolding = "asciifolding", - /** Forms bigrams of CJK terms that are generated from the standard tokenizer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKBigramFilter.html */ + /** Forms bigrams of CJK terms that are generated from the standard tokenizer. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/cjk\/CJKBigramFilter.html */ CjkBigram = "cjk_bigram", - /** Normalizes CJK width differences. Folds fullwidth ASCII variants into the equivalent basic Latin, and half-width Katakana variants into the equivalent Kana. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/cjk/CJKWidthFilter.html */ + /** Normalizes CJK width differences. Folds fullwidth ASCII variants into the equivalent basic Latin, and half-width Katakana variants into the equivalent Kana. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/cjk\/CJKWidthFilter.html */ CjkWidth = "cjk_width", - /** Removes English possessives, and dots from acronyms. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicFilter.html */ + /** Removes English possessives, and dots from acronyms. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/standard\/ClassicFilter.html */ Classic = "classic", - /** Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/commongrams/CommonGramsFilter.html */ + /** Construct bigrams for frequently occurring terms while indexing. Single terms are still indexed too, with bigrams overlaid. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/commongrams\/CommonGramsFilter.html */ CommonGram = "common_grams", - /** Generates n-grams of the given size(s) starting from the front or the back of an input token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.html */ + /** Generates n-grams of the given size(s) starting from the front or the back of an input token. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/ngram\/EdgeNGramTokenFilter.html */ EdgeNGram = "edgeNGram_v2", - /** Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/util/ElisionFilter.html */ + /** Removes elisions. For example, "l'avion" (the plane) will be converted to "avion" (plane). See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/util\/ElisionFilter.html */ Elision = "elision", - /** Normalizes German characters according to the heuristics of the German2 snowball algorithm. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/de/GermanNormalizationFilter.html */ + /** Normalizes German characters according to the heuristics of the German2 snowball algorithm. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/de\/GermanNormalizationFilter.html */ GermanNormalization = "german_normalization", - /** Normalizes text in Hindi to remove some differences in spelling variations. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/hi/HindiNormalizationFilter.html */ + /** Normalizes text in Hindi to remove some differences in spelling variations. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/hi\/HindiNormalizationFilter.html */ HindiNormalization = "hindi_normalization", - /** Normalizes the Unicode representation of text in Indian languages. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/in/IndicNormalizationFilter.html */ + /** Normalizes the Unicode representation of text in Indian languages. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/in\/IndicNormalizationFilter.html */ IndicNormalization = "indic_normalization", - /** Emits each incoming token twice, once as keyword and once as non-keyword. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/KeywordRepeatFilter.html */ + /** Emits each incoming token twice, once as keyword and once as non-keyword. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/KeywordRepeatFilter.html */ KeywordRepeat = "keyword_repeat", - /** A high-performance kstem filter for English. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/en/KStemFilter.html */ + /** A high-performance kstem filter for English. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/en\/KStemFilter.html */ KStem = "kstem", - /** Removes words that are too long or too short. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LengthFilter.html */ + /** Removes words that are too long or too short. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/LengthFilter.html */ Length = "length", - /** Limits the number of tokens while indexing. 
See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/LimitTokenCountFilter.html */ + /** Limits the number of tokens while indexing. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/LimitTokenCountFilter.html */ Limit = "limit", - /** Normalizes token text to lower case. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/LowerCaseFilter.html */ + /** Normalizes token text to lower case. See https:\//lucene.apache.org\/core\/6_6_1\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/LowerCaseFilter.html */ Lowercase = "lowercase", - /** Generates n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenFilter.html */ + /** Generates n-grams of the given size(s). See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/ngram\/NGramTokenFilter.html */ NGram = "nGram_v2", - /** Applies normalization for Persian. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/fa/PersianNormalizationFilter.html */ + /** Applies normalization for Persian. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/fa\/PersianNormalizationFilter.html */ PersianNormalization = "persian_normalization", - /** Create tokens for phonetic matches. See https://lucene.apache.org/core/4_10_3/analyzers-phonetic/org/apache/lucene/analysis/phonetic/package-tree.html */ + /** Create tokens for phonetic matches. See https:\//lucene.apache.org\/core\/4_10_3\/analyzers-phonetic\/org\/apache\/lucene\/analysis\/phonetic\/package-tree.html */ Phonetic = "phonetic", - /** Uses the Porter stemming algorithm to transform the token stream. See http://tartarus.org/~martin/PorterStemmer */ + /** Uses the Porter stemming algorithm to transform the token stream. 
See http:\//tartarus.org\/~martin\/PorterStemmer */ PorterStem = "porter_stem", - /** Reverses the token string. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/reverse/ReverseStringFilter.html */ + /** Reverses the token string. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/reverse\/ReverseStringFilter.html */ Reverse = "reverse", - /** Normalizes use of the interchangeable Scandinavian characters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianNormalizationFilter.html */ + /** Normalizes use of the interchangeable Scandinavian characters. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/ScandinavianNormalizationFilter.html */ ScandinavianNormalization = "scandinavian_normalization", - /** Folds Scandinavian characters åÅäæÄÆ->a and öÖøØ->o. It also discriminates against use of double vowels aa, ae, ao, oe and oo, leaving just the first one. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/ScandinavianFoldingFilter.html */ + /** Folds Scandinavian characters åÅäæÄÆ->a and öÖøØ->o. It also discriminates against use of double vowels aa, ae, ao, oe and oo, leaving just the first one. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/ScandinavianFoldingFilter.html */ ScandinavianFoldingNormalization = "scandinavian_folding", - /** Creates combinations of tokens as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/shingle/ShingleFilter.html */ + /** Creates combinations of tokens as a single token. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/shingle\/ShingleFilter.html */ Shingle = "shingle", - /** A filter that stems words using a Snowball-generated stemmer. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/snowball/SnowballFilter.html */ + /** A filter that stems words using a Snowball-generated stemmer. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/snowball\/SnowballFilter.html */ Snowball = "snowball", - /** Normalizes the Unicode representation of Sorani text. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ckb/SoraniNormalizationFilter.html */ + /** Normalizes the Unicode representation of Sorani text. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/ckb\/SoraniNormalizationFilter.html */ SoraniNormalization = "sorani_normalization", - /** Language specific stemming filter. See https://docs.microsoft.com/rest/api/searchservice/Custom-analyzers-in-Azure-Search#TokenFilters */ + /** Language specific stemming filter. See https:\//docs.microsoft.com\/rest\/api\/searchservice\/Custom-analyzers-in-Azure-Search#TokenFilters */ Stemmer = "stemmer", - /** Removes stop words from a token stream. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/StopFilter.html */ + /** Removes stop words from a token stream. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/StopFilter.html */ Stopwords = "stopwords", - /** Trims leading and trailing whitespace from tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TrimFilter.html */ + /** Trims leading and trailing whitespace from tokens. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/TrimFilter.html */ Trim = "trim", - /** Truncates the terms to a specific length. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilter.html */ + /** Truncates the terms to a specific length. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/TruncateTokenFilter.html */ Truncate = "truncate", - /** Filters out tokens with same text as the previous token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/miscellaneous/RemoveDuplicatesTokenFilter.html */ + /** Filters out tokens with same text as the previous token. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/miscellaneous\/RemoveDuplicatesTokenFilter.html */ Unique = "unique", - /** Normalizes token text to upper case. See https://lucene.apache.org/core/6_6_1/analyzers-common/org/apache/lucene/analysis/core/UpperCaseFilter.html */ + /** Normalizes token text to upper case. See https:\//lucene.apache.org\/core\/6_6_1\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/UpperCaseFilter.html */ Uppercase = "uppercase", /** Splits words into subwords and performs optional transformations on subword groups. */ - WordDelimiter = "word_delimiter" + WordDelimiter = "word_delimiter", } /** @@ -2909,8 +3012,8 @@ export type TokenFilterName = string; /** Known values of {@link CharFilterName} that the service accepts. */ export enum KnownCharFilterName { - /** A character filter that attempts to strip out HTML constructs. See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.html */ - HtmlStrip = "html_strip" + /** A character filter that attempts to strip out HTML constructs. 
See https:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/charfilter\/HTMLStripCharFilter.html */ + HtmlStrip = "html_strip", } /** @@ -2924,9 +3027,12 @@ export type CharFilterName = string; /** Known values of {@link VectorSearchAlgorithmMetric} that the service accepts. */ export enum KnownVectorSearchAlgorithmMetric { + /** Cosine */ Cosine = "cosine", + /** Euclidean */ Euclidean = "euclidean", - DotProduct = "dotProduct" + /** DotProduct */ + DotProduct = "dotProduct", } /** @@ -2940,6 +3046,21 @@ export enum KnownVectorSearchAlgorithmMetric { */ export type VectorSearchAlgorithmMetric = string; +/** Known values of {@link VectorSearchCompressionTargetDataType} that the service accepts. */ +export enum KnownVectorSearchCompressionTargetDataType { + /** Int8 */ + Int8 = "int8", +} + +/** + * Defines values for VectorSearchCompressionTargetDataType. \ + * {@link KnownVectorSearchCompressionTargetDataType} can be used interchangeably with VectorSearchCompressionTargetDataType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **int8** + */ +export type VectorSearchCompressionTargetDataType = string; + /** Known values of {@link KeyPhraseExtractionSkillLanguage} that the service accepts. */ export enum KnownKeyPhraseExtractionSkillLanguage { /** Danish */ @@ -2973,7 +3094,7 @@ export enum KnownKeyPhraseExtractionSkillLanguage { /** Spanish */ Es = "es", /** Swedish */ - Sv = "sv" + Sv = "sv", } /** @@ -3341,7 +3462,7 @@ export enum KnownOcrSkillLanguage { /** Zulu */ Zu = "zu", /** Unknown (All) */ - Unk = "unk" + Unk = "unk", } /** @@ -3531,7 +3652,7 @@ export enum KnownLineEnding { /** Lines are separated by a single line feed ('\n') character. */ LineFeed = "lineFeed", /** Lines are separated by a carriage return and a line feed ('\r\n') character. 
*/ - CarriageReturnLineFeed = "carriageReturnLineFeed" + CarriageReturnLineFeed = "carriageReturnLineFeed", } /** @@ -3651,7 +3772,7 @@ export enum KnownImageAnalysisSkillLanguage { /** Chinese Simplified */ ZhHans = "zh-Hans", /** Chinese Traditional */ - ZhHant = "zh-Hant" + ZhHant = "zh-Hant", } /** @@ -3729,7 +3850,7 @@ export enum KnownVisualFeature { /** Visual features recognized as objects. */ Objects = "objects", /** Tags. */ - Tags = "tags" + Tags = "tags", } /** @@ -3752,7 +3873,7 @@ export enum KnownImageDetail { /** Details recognized as celebrities. */ Celebrities = "celebrities", /** Details recognized as landmarks. */ - Landmarks = "landmarks" + Landmarks = "landmarks", } /** @@ -3780,7 +3901,7 @@ export enum KnownEntityCategory { /** Entities describing a URL. */ Url = "url", /** Entities describing an email address. */ - Email = "email" + Email = "email", } /** @@ -3845,7 +3966,7 @@ export enum KnownEntityRecognitionSkillLanguage { /** Swedish */ Sv = "sv", /** Turkish */ - Tr = "tr" + Tr = "tr", } /** @@ -3910,7 +4031,7 @@ export enum KnownSentimentSkillLanguage { /** Swedish */ Sv = "sv", /** Turkish */ - Tr = "tr" + Tr = "tr", } /** @@ -3941,7 +4062,7 @@ export enum KnownPIIDetectionSkillMaskingMode { /** No masking occurs and the maskedText output will not be returned. */ None = "none", /** Replaces the detected entities with the character given in the maskingCharacter parameter. The character will be repeated to the length of the detected entity so that the offsets will correctly correspond to both the input text as well as the output maskedText. */ - Replace = "replace" + Replace = "replace", } /** @@ -3956,6 +4077,12 @@ export type PIIDetectionSkillMaskingMode = string; /** Known values of {@link SplitSkillLanguage} that the service accepts. 
*/ export enum KnownSplitSkillLanguage { + /** Amharic */ + Am = "am", + /** Bosnian */ + Bs = "bs", + /** Czech */ + Cs = "cs", /** Danish */ Da = "da", /** German */ @@ -3964,16 +4091,58 @@ export enum KnownSplitSkillLanguage { En = "en", /** Spanish */ Es = "es", + /** Estonian */ + Et = "et", /** Finnish */ Fi = "fi", /** French */ Fr = "fr", + /** Hebrew */ + He = "he", + /** Hindi */ + Hi = "hi", + /** Croatian */ + Hr = "hr", + /** Hungarian */ + Hu = "hu", + /** Indonesian */ + Id = "id", + /** Icelandic */ + Is = "is", /** Italian */ It = "it", + /** Japanese */ + Ja = "ja", /** Korean */ Ko = "ko", - /** Portuguese */ - Pt = "pt" + /** Latvian */ + Lv = "lv", + /** Norwegian */ + Nb = "nb", + /** Dutch */ + Nl = "nl", + /** Polish */ + Pl = "pl", + /** Portuguese (Portugal) */ + Pt = "pt", + /** Portuguese (Brazil) */ + PtBr = "pt-br", + /** Russian */ + Ru = "ru", + /** Slovak */ + Sk = "sk", + /** Slovenian */ + Sl = "sl", + /** Serbian */ + Sr = "sr", + /** Swedish */ + Sv = "sv", + /** Turkish */ + Tr = "tr", + /** Urdu */ + Ur = "ur", + /** Chinese (Simplified) */ + Zh = "zh", } /** @@ -3981,15 +4150,39 @@ export enum KnownSplitSkillLanguage { * {@link KnownSplitSkillLanguage} can be used interchangeably with SplitSkillLanguage, * this enum contains the known values that the service supports. 
* ### Known values supported by the service + * **am**: Amharic \ + * **bs**: Bosnian \ + * **cs**: Czech \ * **da**: Danish \ * **de**: German \ * **en**: English \ * **es**: Spanish \ + * **et**: Estonian \ * **fi**: Finnish \ * **fr**: French \ + * **he**: Hebrew \ + * **hi**: Hindi \ + * **hr**: Croatian \ + * **hu**: Hungarian \ + * **id**: Indonesian \ + * **is**: Icelandic \ * **it**: Italian \ + * **ja**: Japanese \ * **ko**: Korean \ - * **pt**: Portuguese + * **lv**: Latvian \ + * **nb**: Norwegian \ + * **nl**: Dutch \ + * **pl**: Polish \ + * **pt**: Portuguese (Portugal) \ + * **pt-br**: Portuguese (Brazil) \ + * **ru**: Russian \ + * **sk**: Slovak \ + * **sl**: Slovenian \ + * **sr**: Serbian \ + * **sv**: Swedish \ + * **tr**: Turkish \ + * **ur**: Urdu \ + * **zh**: Chinese (Simplified) */ export type SplitSkillLanguage = string; @@ -3998,7 +4191,7 @@ export enum KnownTextSplitMode { /** Split the text into individual pages. */ Pages = "pages", /** Split the text into individual sentences. */ - Sentences = "sentences" + Sentences = "sentences", } /** @@ -4030,7 +4223,7 @@ export enum KnownCustomEntityLookupSkillLanguage { /** Korean */ Ko = "ko", /** Portuguese */ - Pt = "pt" + Pt = "pt", } /** @@ -4195,7 +4388,7 @@ export enum KnownTextTranslationSkillLanguage { /** Malayalam */ Ml = "ml", /** Punjabi */ - Pa = "pa" + Pa = "pa", } /** @@ -4280,32 +4473,32 @@ export type TextTranslationSkillLanguage = string; /** Known values of {@link LexicalTokenizerName} that the service accepts. */ export enum KnownLexicalTokenizerName { - /** Grammar-based tokenizer that is suitable for processing most European-language documents. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicTokenizer.html */ + /** Grammar-based tokenizer that is suitable for processing most European-language documents. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/standard\/ClassicTokenizer.html */ Classic = "classic", - /** Tokenizes the input from an edge into n-grams of the given size(s). See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenizer.html */ + /** Tokenizes the input from an edge into n-grams of the given size(s). See https:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/ngram\/EdgeNGramTokenizer.html */ EdgeNGram = "edgeNGram", - /** Emits the entire input as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordTokenizer.html */ + /** Emits the entire input as a single token. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/KeywordTokenizer.html */ Keyword = "keyword_v2", - /** Divides text at non-letters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LetterTokenizer.html */ + /** Divides text at non-letters. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/LetterTokenizer.html */ Letter = "letter", - /** Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LowerCaseTokenizer.html */ + /** Divides text at non-letters and converts them to lower case. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/LowerCaseTokenizer.html */ Lowercase = "lowercase", /** Divides text using language-specific rules. */ MicrosoftLanguageTokenizer = "microsoft_language_tokenizer", /** Divides text using language-specific rules and reduces words to their base forms. 
*/ MicrosoftLanguageStemmingTokenizer = "microsoft_language_stemming_tokenizer", - /** Tokenizes the input into n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenizer.html */ + /** Tokenizes the input into n-grams of the given size(s). See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/ngram\/NGramTokenizer.html */ NGram = "nGram", - /** Tokenizer for path-like hierarchies. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/path/PathHierarchyTokenizer.html */ + /** Tokenizer for path-like hierarchies. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/path\/PathHierarchyTokenizer.html */ PathHierarchy = "path_hierarchy_v2", - /** Tokenizer that uses regex pattern matching to construct distinct tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/pattern/PatternTokenizer.html */ + /** Tokenizer that uses regex pattern matching to construct distinct tokens. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/pattern\/PatternTokenizer.html */ Pattern = "pattern", - /** Standard Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/StandardTokenizer.html */ + /** Standard Lucene analyzer; Composed of the standard tokenizer, lowercase filter and stop filter. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/standard\/StandardTokenizer.html */ Standard = "standard_v2", - /** Tokenizes urls and emails as one token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.html */ + /** Tokenizes urls and emails as one token. 
See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/standard\/UAX29URLEmailTokenizer.html */ UaxUrlEmail = "uax_url_email", - /** Divides text at whitespace. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceTokenizer.html */ - Whitespace = "whitespace" + /** Divides text at whitespace. See http:\//lucene.apache.org\/core\/4_10_3\/analyzers-common\/org\/apache\/lucene\/analysis\/core\/WhitespaceTokenizer.html */ + Whitespace = "whitespace", } /** @@ -4346,7 +4539,7 @@ export enum KnownRegexFlags { /** Enables Unicode-aware case folding. */ UnicodeCase = "UNICODE_CASE", /** Enables Unix lines mode. */ - UnixLines = "UNIX_LINES" + UnixLines = "UNIX_LINES", } /** diff --git a/sdk/search/search-documents/src/generated/service/models/mappers.ts b/sdk/search/search-documents/src/generated/service/models/mappers.ts index ec156fac7d8e..3ac093ef6565 100644 --- a/sdk/search/search-documents/src/generated/service/models/mappers.ts +++ b/sdk/search/search-documents/src/generated/service/models/mappers.ts @@ -17,72 +17,72 @@ export const SearchIndexerDataSource: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, description: { serializedName: "description", type: { - name: "String" - } + name: "String", + }, }, type: { serializedName: "type", required: true, type: { - name: "String" - } + name: "String", + }, }, credentials: { serializedName: "credentials", type: { name: "Composite", - className: "DataSourceCredentials" - } + className: "DataSourceCredentials", + }, }, container: { serializedName: "container", type: { name: "Composite", - className: "SearchIndexerDataContainer" - } + className: "SearchIndexerDataContainer", + }, }, identity: { serializedName: "identity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } + className: "SearchIndexerDataIdentity", + }, }, 
dataChangeDetectionPolicy: { serializedName: "dataChangeDetectionPolicy", type: { name: "Composite", - className: "DataChangeDetectionPolicy" - } + className: "DataChangeDetectionPolicy", + }, }, dataDeletionDetectionPolicy: { serializedName: "dataDeletionDetectionPolicy", type: { name: "Composite", - className: "DataDeletionDetectionPolicy" - } + className: "DataDeletionDetectionPolicy", + }, }, etag: { serializedName: "@odata\\.etag", type: { - name: "String" - } + name: "String", + }, }, encryptionKey: { serializedName: "encryptionKey", type: { name: "Composite", - className: "SearchResourceEncryptionKey" - } - } - } - } + className: "SearchResourceEncryptionKey", + }, + }, + }, + }, }; export const DataSourceCredentials: coreClient.CompositeMapper = { @@ -93,11 +93,11 @@ export const DataSourceCredentials: coreClient.CompositeMapper = { connectionString: { serializedName: "connectionString", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SearchIndexerDataContainer: coreClient.CompositeMapper = { @@ -109,17 +109,17 @@ export const SearchIndexerDataContainer: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, query: { serializedName: "query", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SearchIndexerDataIdentity: coreClient.CompositeMapper = { @@ -129,18 +129,18 @@ export const SearchIndexerDataIdentity: coreClient.CompositeMapper = { uberParent: "SearchIndexerDataIdentity", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const DataChangeDetectionPolicy: coreClient.CompositeMapper = { @@ -150,18 +150,18 @@ export const DataChangeDetectionPolicy: 
coreClient.CompositeMapper = { uberParent: "DataChangeDetectionPolicy", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const DataDeletionDetectionPolicy: coreClient.CompositeMapper = { @@ -171,18 +171,18 @@ export const DataDeletionDetectionPolicy: coreClient.CompositeMapper = { uberParent: "DataDeletionDetectionPolicy", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SearchResourceEncryptionKey: coreClient.CompositeMapper = { @@ -194,82 +194,105 @@ export const SearchResourceEncryptionKey: coreClient.CompositeMapper = { serializedName: "keyVaultKeyName", required: true, type: { - name: "String" - } + name: "String", + }, }, keyVersion: { serializedName: "keyVaultKeyVersion", required: true, type: { - name: "String" - } + name: "String", + }, }, vaultUri: { serializedName: "keyVaultUri", required: true, type: { - name: "String" - } + name: "String", + }, }, accessCredentials: { serializedName: "accessCredentials", type: { name: "Composite", - className: "AzureActiveDirectoryApplicationCredentials" - } + className: "AzureActiveDirectoryApplicationCredentials", + }, }, identity: { serializedName: "identity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } - } - } - } -}; + className: "SearchIndexerDataIdentity", + }, + }, + }, + }, +}; + +export const AzureActiveDirectoryApplicationCredentials: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "AzureActiveDirectoryApplicationCredentials", + modelProperties: { + applicationId: { + 
serializedName: "applicationId", + required: true, + type: { + name: "String", + }, + }, + applicationSecret: { + serializedName: "applicationSecret", + type: { + name: "String", + }, + }, + }, + }, + }; -export const AzureActiveDirectoryApplicationCredentials: coreClient.CompositeMapper = { +export const ErrorResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "AzureActiveDirectoryApplicationCredentials", + className: "ErrorResponse", modelProperties: { - applicationId: { - serializedName: "applicationId", - required: true, + error: { + serializedName: "error", type: { - name: "String" - } + name: "Composite", + className: "ErrorDetail", + }, }, - applicationSecret: { - serializedName: "applicationSecret", - type: { - name: "String" - } - } - } - } + }, + }, }; -export const SearchError: coreClient.CompositeMapper = { +export const ErrorDetail: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SearchError", + className: "ErrorDetail", modelProperties: { code: { serializedName: "code", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, message: { serializedName: "message", - required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, + }, + target: { + serializedName: "target", + readOnly: true, + type: { + name: "String", + }, }, details: { serializedName: "details", @@ -279,13 +302,50 @@ export const SearchError: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchError" - } - } - } - } - } - } + className: "ErrorDetail", + }, + }, + }, + }, + additionalInfo: { + serializedName: "additionalInfo", + readOnly: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ErrorAdditionalInfo", + }, + }, + }, + }, + }, + }, +}; + +export const ErrorAdditionalInfo: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorAdditionalInfo", + modelProperties: { + type: { + 
serializedName: "type", + readOnly: true, + type: { + name: "String", + }, + }, + info: { + serializedName: "info", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "any" } }, + }, + }, + }, + }, }; export const ListDataSourcesResult: coreClient.CompositeMapper = { @@ -302,13 +362,13 @@ export const ListDataSourcesResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndexerDataSource" - } - } - } - } - } - } + className: "SearchIndexerDataSource", + }, + }, + }, + }, + }, + }, }; export const DocumentKeysOrIds: coreClient.CompositeMapper = { @@ -322,10 +382,10 @@ export const DocumentKeysOrIds: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, datasourceDocumentIds: { serializedName: "datasourceDocumentIds", @@ -333,13 +393,13 @@ export const DocumentKeysOrIds: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const SearchIndexer: coreClient.CompositeMapper = { @@ -351,48 +411,48 @@ export const SearchIndexer: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, description: { serializedName: "description", type: { - name: "String" - } + name: "String", + }, }, dataSourceName: { serializedName: "dataSourceName", required: true, type: { - name: "String" - } + name: "String", + }, }, skillsetName: { serializedName: "skillsetName", type: { - name: "String" - } + name: "String", + }, }, targetIndexName: { serializedName: "targetIndexName", required: true, type: { - name: "String" - } + name: "String", + }, }, schedule: { serializedName: "schedule", type: { name: "Composite", - className: "IndexingSchedule" - } + className: "IndexingSchedule", + }, }, parameters: { serializedName: "parameters", type: { name: "Composite", - className: 
"IndexingParameters" - } + className: "IndexingParameters", + }, }, fieldMappings: { serializedName: "fieldMappings", @@ -401,10 +461,10 @@ export const SearchIndexer: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "FieldMapping" - } - } - } + className: "FieldMapping", + }, + }, + }, }, outputFieldMappings: { serializedName: "outputFieldMappings", @@ -413,41 +473,41 @@ export const SearchIndexer: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "FieldMapping" - } - } - } + className: "FieldMapping", + }, + }, + }, }, isDisabled: { defaultValue: false, serializedName: "disabled", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, etag: { serializedName: "@odata\\.etag", type: { - name: "String" - } + name: "String", + }, }, encryptionKey: { serializedName: "encryptionKey", type: { name: "Composite", - className: "SearchResourceEncryptionKey" - } + className: "SearchResourceEncryptionKey", + }, }, cache: { serializedName: "cache", type: { name: "Composite", - className: "SearchIndexerCache" - } - } - } - } + className: "SearchIndexerCache", + }, + }, + }, + }, }; export const IndexingSchedule: coreClient.CompositeMapper = { @@ -459,17 +519,17 @@ export const IndexingSchedule: coreClient.CompositeMapper = { serializedName: "interval", required: true, type: { - name: "TimeSpan" - } + name: "TimeSpan", + }, }, startTime: { serializedName: "startTime", type: { - name: "DateTime" - } - } - } - } + name: "DateTime", + }, + }, + }, + }, }; export const IndexingParameters: coreClient.CompositeMapper = { @@ -481,34 +541,34 @@ export const IndexingParameters: coreClient.CompositeMapper = { serializedName: "batchSize", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, maxFailedItems: { defaultValue: 0, serializedName: "maxFailedItems", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, maxFailedItemsPerBatch: { defaultValue: 0, serializedName: 
"maxFailedItemsPerBatch", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, configuration: { serializedName: "configuration", type: { name: "Composite", - className: "IndexingParametersConfiguration" - } - } - } - } + className: "IndexingParametersConfiguration", + }, + }, + }, + }, }; export const IndexingParametersConfiguration: coreClient.CompositeMapper = { @@ -521,113 +581,113 @@ export const IndexingParametersConfiguration: coreClient.CompositeMapper = { defaultValue: "default", serializedName: "parsingMode", type: { - name: "String" - } + name: "String", + }, }, excludedFileNameExtensions: { defaultValue: "", serializedName: "excludedFileNameExtensions", type: { - name: "String" - } + name: "String", + }, }, indexedFileNameExtensions: { defaultValue: "", serializedName: "indexedFileNameExtensions", type: { - name: "String" - } + name: "String", + }, }, failOnUnsupportedContentType: { defaultValue: false, serializedName: "failOnUnsupportedContentType", type: { - name: "Boolean" - } + name: "Boolean", + }, }, failOnUnprocessableDocument: { defaultValue: false, serializedName: "failOnUnprocessableDocument", type: { - name: "Boolean" - } + name: "Boolean", + }, }, indexStorageMetadataOnlyForOversizedDocuments: { defaultValue: false, serializedName: "indexStorageMetadataOnlyForOversizedDocuments", type: { - name: "Boolean" - } + name: "Boolean", + }, }, delimitedTextHeaders: { serializedName: "delimitedTextHeaders", type: { - name: "String" - } + name: "String", + }, }, delimitedTextDelimiter: { serializedName: "delimitedTextDelimiter", type: { - name: "String" - } + name: "String", + }, }, firstLineContainsHeaders: { defaultValue: true, serializedName: "firstLineContainsHeaders", type: { - name: "Boolean" - } + name: "Boolean", + }, }, documentRoot: { serializedName: "documentRoot", type: { - name: "String" - } + name: "String", + }, }, dataToExtract: { defaultValue: "contentAndMetadata", serializedName: "dataToExtract", type: { - name: 
"String" - } + name: "String", + }, }, imageAction: { defaultValue: "none", serializedName: "imageAction", type: { - name: "String" - } + name: "String", + }, }, allowSkillsetToReadFileData: { defaultValue: false, serializedName: "allowSkillsetToReadFileData", type: { - name: "Boolean" - } + name: "Boolean", + }, }, pdfTextRotationAlgorithm: { defaultValue: "none", serializedName: "pdfTextRotationAlgorithm", type: { - name: "String" - } + name: "String", + }, }, executionEnvironment: { defaultValue: "standard", serializedName: "executionEnvironment", type: { - name: "String" - } + name: "String", + }, }, queryTimeout: { defaultValue: "00:05:00", serializedName: "queryTimeout", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const FieldMapping: coreClient.CompositeMapper = { @@ -639,24 +699,24 @@ export const FieldMapping: coreClient.CompositeMapper = { serializedName: "sourceFieldName", required: true, type: { - name: "String" - } + name: "String", + }, }, targetFieldName: { serializedName: "targetFieldName", type: { - name: "String" - } + name: "String", + }, }, mappingFunction: { serializedName: "mappingFunction", type: { name: "Composite", - className: "FieldMappingFunction" - } - } - } - } + className: "FieldMappingFunction", + }, + }, + }, + }, }; export const FieldMappingFunction: coreClient.CompositeMapper = { @@ -668,19 +728,19 @@ export const FieldMappingFunction: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, parameters: { serializedName: "parameters", nullable: true, type: { name: "Dictionary", - value: { type: { name: "any" } } - } - } - } - } + value: { type: { name: "any" } }, + }, + }, + }, + }, }; export const SearchIndexerCache: coreClient.CompositeMapper = { @@ -691,25 +751,25 @@ export const SearchIndexerCache: coreClient.CompositeMapper = { storageConnectionString: { serializedName: "storageConnectionString", type: { - name: 
"String" - } + name: "String", + }, }, enableReprocessing: { serializedName: "enableReprocessing", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, identity: { serializedName: "identity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } - } - } - } + className: "SearchIndexerDataIdentity", + }, + }, + }, + }, }; export const ListIndexersResult: coreClient.CompositeMapper = { @@ -726,13 +786,13 @@ export const ListIndexersResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndexer" - } - } - } - } - } - } + className: "SearchIndexer", + }, + }, + }, + }, + }, + }, }; export const SearchIndexerStatus: coreClient.CompositeMapper = { @@ -746,15 +806,15 @@ export const SearchIndexerStatus: coreClient.CompositeMapper = { readOnly: true, type: { name: "Enum", - allowedValues: ["unknown", "error", "running"] - } + allowedValues: ["unknown", "error", "running"], + }, }, lastResult: { serializedName: "lastResult", type: { name: "Composite", - className: "IndexerExecutionResult" - } + className: "IndexerExecutionResult", + }, }, executionHistory: { serializedName: "executionHistory", @@ -765,20 +825,20 @@ export const SearchIndexerStatus: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "IndexerExecutionResult" - } - } - } + className: "IndexerExecutionResult", + }, + }, + }, }, limits: { serializedName: "limits", type: { name: "Composite", - className: "SearchIndexerLimits" - } - } - } - } + className: "SearchIndexerLimits", + }, + }, + }, + }, }; export const IndexerExecutionResult: coreClient.CompositeMapper = { @@ -792,44 +852,44 @@ export const IndexerExecutionResult: coreClient.CompositeMapper = { readOnly: true, type: { name: "Enum", - allowedValues: ["transientFailure", "success", "inProgress", "reset"] - } + allowedValues: ["transientFailure", "success", "inProgress", "reset"], + }, }, statusDetail: { serializedName: "statusDetail", readOnly: 
true, type: { - name: "String" - } + name: "String", + }, }, currentState: { serializedName: "currentState", type: { name: "Composite", - className: "IndexerState" - } + className: "IndexerState", + }, }, errorMessage: { serializedName: "errorMessage", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, startTime: { serializedName: "startTime", readOnly: true, type: { - name: "DateTime" - } + name: "DateTime", + }, }, endTime: { serializedName: "endTime", readOnly: true, nullable: true, type: { - name: "DateTime" - } + name: "DateTime", + }, }, errors: { serializedName: "errors", @@ -840,10 +900,10 @@ export const IndexerExecutionResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndexerError" - } - } - } + className: "SearchIndexerError", + }, + }, + }, }, warnings: { serializedName: "warnings", @@ -854,43 +914,43 @@ export const IndexerExecutionResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndexerWarning" - } - } - } + className: "SearchIndexerWarning", + }, + }, + }, }, itemCount: { serializedName: "itemsProcessed", required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, failedItemCount: { serializedName: "itemsFailed", required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, initialTrackingState: { serializedName: "initialTrackingState", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, finalTrackingState: { serializedName: "finalTrackingState", readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const IndexerState: coreClient.CompositeMapper = { @@ -902,36 +962,36 @@ export const IndexerState: coreClient.CompositeMapper = { serializedName: "mode", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, allDocumentsInitialChangeTrackingState: { serializedName: "allDocsInitialChangeTrackingState", 
readOnly: true, type: { - name: "String" - } + name: "String", + }, }, allDocumentsFinalChangeTrackingState: { serializedName: "allDocsFinalChangeTrackingState", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, resetDocumentsInitialChangeTrackingState: { serializedName: "resetDocsInitialChangeTrackingState", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, resetDocumentsFinalChangeTrackingState: { serializedName: "resetDocsFinalChangeTrackingState", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, resetDocumentKeys: { serializedName: "resetDocumentKeys", @@ -940,10 +1000,10 @@ export const IndexerState: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, resetDatasourceDocumentIds: { serializedName: "resetDatasourceDocumentIds", @@ -952,13 +1012,13 @@ export const IndexerState: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const SearchIndexerError: coreClient.CompositeMapper = { @@ -970,48 +1030,48 @@ export const SearchIndexerError: coreClient.CompositeMapper = { serializedName: "key", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, errorMessage: { serializedName: "errorMessage", required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, }, statusCode: { serializedName: "statusCode", required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, name: { serializedName: "name", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, details: { serializedName: "details", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, documentationLink: { serializedName: "documentationLink", readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const 
SearchIndexerWarning: coreClient.CompositeMapper = { @@ -1023,40 +1083,40 @@ export const SearchIndexerWarning: coreClient.CompositeMapper = { serializedName: "key", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, message: { serializedName: "message", required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "name", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, details: { serializedName: "details", readOnly: true, type: { - name: "String" - } + name: "String", + }, }, documentationLink: { serializedName: "documentationLink", readOnly: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SearchIndexerLimits: coreClient.CompositeMapper = { @@ -1068,25 +1128,25 @@ export const SearchIndexerLimits: coreClient.CompositeMapper = { serializedName: "maxRunTime", readOnly: true, type: { - name: "TimeSpan" - } + name: "TimeSpan", + }, }, maxDocumentExtractionSize: { serializedName: "maxDocumentExtractionSize", readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, maxDocumentContentCharactersToExtract: { serializedName: "maxDocumentContentCharactersToExtract", readOnly: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const SearchIndexerSkillset: coreClient.CompositeMapper = { @@ -1098,14 +1158,14 @@ export const SearchIndexerSkillset: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, description: { serializedName: "description", type: { - name: "String" - } + name: "String", + }, }, skills: { serializedName: "skills", @@ -1115,47 +1175,47 @@ export const SearchIndexerSkillset: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndexerSkill" - } - } - } + className: "SearchIndexerSkill", + }, + }, + }, }, cognitiveServicesAccount: { serializedName: 
"cognitiveServices", type: { name: "Composite", - className: "CognitiveServicesAccount" - } + className: "CognitiveServicesAccount", + }, }, knowledgeStore: { serializedName: "knowledgeStore", type: { name: "Composite", - className: "SearchIndexerKnowledgeStore" - } + className: "SearchIndexerKnowledgeStore", + }, }, indexProjections: { serializedName: "indexProjections", type: { name: "Composite", - className: "SearchIndexerIndexProjections" - } + className: "SearchIndexerIndexProjections", + }, }, etag: { serializedName: "@odata\\.etag", type: { - name: "String" - } + name: "String", + }, }, encryptionKey: { serializedName: "encryptionKey", type: { name: "Composite", - className: "SearchResourceEncryptionKey" - } - } - } - } + className: "SearchResourceEncryptionKey", + }, + }, + }, + }, }; export const SearchIndexerSkill: coreClient.CompositeMapper = { @@ -1165,33 +1225,33 @@ export const SearchIndexerSkill: coreClient.CompositeMapper = { uberParent: "SearchIndexerSkill", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "name", type: { - name: "String" - } + name: "String", + }, }, description: { serializedName: "description", type: { - name: "String" - } + name: "String", + }, }, context: { serializedName: "context", type: { - name: "String" - } + name: "String", + }, }, inputs: { serializedName: "inputs", @@ -1201,10 +1261,10 @@ export const SearchIndexerSkill: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "InputFieldMappingEntry" - } - } - } + className: "InputFieldMappingEntry", + }, + }, + }, }, outputs: { serializedName: "outputs", @@ -1214,13 +1274,13 @@ export const SearchIndexerSkill: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "OutputFieldMappingEntry" - } 
- } - } - } - } - } + className: "OutputFieldMappingEntry", + }, + }, + }, + }, + }, + }, }; export const InputFieldMappingEntry: coreClient.CompositeMapper = { @@ -1232,20 +1292,20 @@ export const InputFieldMappingEntry: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, source: { serializedName: "source", type: { - name: "String" - } + name: "String", + }, }, sourceContext: { serializedName: "sourceContext", type: { - name: "String" - } + name: "String", + }, }, inputs: { serializedName: "inputs", @@ -1254,13 +1314,13 @@ export const InputFieldMappingEntry: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "InputFieldMappingEntry" - } - } - } - } - } - } + className: "InputFieldMappingEntry", + }, + }, + }, + }, + }, + }, }; export const OutputFieldMappingEntry: coreClient.CompositeMapper = { @@ -1272,17 +1332,17 @@ export const OutputFieldMappingEntry: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, targetName: { serializedName: "targetName", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const CognitiveServicesAccount: coreClient.CompositeMapper = { @@ -1292,24 +1352,24 @@ export const CognitiveServicesAccount: coreClient.CompositeMapper = { uberParent: "CognitiveServicesAccount", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } + name: "String", + }, }, description: { serializedName: "description", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SearchIndexerKnowledgeStore: coreClient.CompositeMapper = { @@ -1321,8 +1381,8 @@ export const SearchIndexerKnowledgeStore: coreClient.CompositeMapper = { serializedName: 
"storageConnectionString", required: true, type: { - name: "String" - } + name: "String", + }, }, projections: { serializedName: "projections", @@ -1332,223 +1392,229 @@ export const SearchIndexerKnowledgeStore: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndexerKnowledgeStoreProjection" - } - } - } + className: "SearchIndexerKnowledgeStoreProjection", + }, + }, + }, }, identity: { serializedName: "identity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } + className: "SearchIndexerDataIdentity", + }, }, parameters: { serializedName: "parameters", type: { name: "Composite", - className: "SearchIndexerKnowledgeStoreParameters" - } - } - } - } -}; + className: "SearchIndexerKnowledgeStoreParameters", + }, + }, + }, + }, +}; + +export const SearchIndexerKnowledgeStoreProjection: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreProjection", + modelProperties: { + tables: { + serializedName: "tables", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreTableProjectionSelector", + }, + }, + }, + }, + objects: { + serializedName: "objects", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: + "SearchIndexerKnowledgeStoreObjectProjectionSelector", + }, + }, + }, + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreFileProjectionSelector", + }, + }, + }, + }, + }, + }, + }; + +export const SearchIndexerKnowledgeStoreProjectionSelector: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreProjectionSelector", + modelProperties: { + referenceKeyName: { + serializedName: "referenceKeyName", + type: { + name: "String", + }, + }, + generatedKeyName: { + serializedName: "generatedKeyName", + type: { + 
name: "String", + }, + }, + source: { + serializedName: "source", + type: { + name: "String", + }, + }, + sourceContext: { + serializedName: "sourceContext", + type: { + name: "String", + }, + }, + inputs: { + serializedName: "inputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "InputFieldMappingEntry", + }, + }, + }, + }, + }, + }, + }; + +export const SearchIndexerKnowledgeStoreParameters: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreParameters", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + synthesizeGeneratedKeyName: { + defaultValue: false, + serializedName: "synthesizeGeneratedKeyName", + type: { + name: "Boolean", + }, + }, + }, + }, + }; -export const SearchIndexerKnowledgeStoreProjection: coreClient.CompositeMapper = { +export const SearchIndexerIndexProjections: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SearchIndexerKnowledgeStoreProjection", + className: "SearchIndexerIndexProjections", modelProperties: { - tables: { - serializedName: "tables", + selectors: { + serializedName: "selectors", + required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: "SearchIndexerKnowledgeStoreTableProjectionSelector" - } - } - } - }, - objects: { - serializedName: "objects", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreObjectProjectionSelector" - } - } - } - }, - files: { - serializedName: "files", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreFileProjectionSelector" - } - } - } - } - } - } -}; - -export const SearchIndexerKnowledgeStoreProjectionSelector: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreProjectionSelector", - modelProperties: { - referenceKeyName: { - serializedName: 
"referenceKeyName", - type: { - name: "String" - } - }, - generatedKeyName: { - serializedName: "generatedKeyName", - type: { - name: "String" - } - }, - source: { - serializedName: "source", - type: { - name: "String" - } - }, - sourceContext: { - serializedName: "sourceContext", - type: { - name: "String" - } - }, - inputs: { - serializedName: "inputs", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "InputFieldMappingEntry" - } - } - } - } - } - } -}; - -export const SearchIndexerKnowledgeStoreParameters: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreParameters", - additionalProperties: { type: { name: "Object" } }, - modelProperties: { - synthesizeGeneratedKeyName: { - defaultValue: false, - serializedName: "synthesizeGeneratedKeyName", - type: { - name: "Boolean" - } - } - } - } -}; - -export const SearchIndexerIndexProjections: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerIndexProjections", - modelProperties: { - selectors: { - serializedName: "selectors", - required: true, - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "SearchIndexerIndexProjectionSelector" - } - } - } + className: "SearchIndexerIndexProjectionSelector", + }, + }, + }, }, parameters: { serializedName: "parameters", type: { name: "Composite", - className: "SearchIndexerIndexProjectionsParameters" - } - } - } - } -}; - -export const SearchIndexerIndexProjectionSelector: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerIndexProjectionSelector", - modelProperties: { - targetIndexName: { - serializedName: "targetIndexName", - required: true, - type: { - name: "String" - } + className: "SearchIndexerIndexProjectionsParameters", + }, }, - parentKeyFieldName: { - serializedName: "parentKeyFieldName", - required: true, - type: { - name: "String" - } + }, + }, +}; + +export const 
SearchIndexerIndexProjectionSelector: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerIndexProjectionSelector", + modelProperties: { + targetIndexName: { + serializedName: "targetIndexName", + required: true, + type: { + name: "String", + }, + }, + parentKeyFieldName: { + serializedName: "parentKeyFieldName", + required: true, + type: { + name: "String", + }, + }, + sourceContext: { + serializedName: "sourceContext", + required: true, + type: { + name: "String", + }, + }, + mappings: { + serializedName: "mappings", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "InputFieldMappingEntry", + }, + }, + }, + }, }, - sourceContext: { - serializedName: "sourceContext", - required: true, - type: { - name: "String" - } + }, + }; + +export const SearchIndexerIndexProjectionsParameters: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerIndexProjectionsParameters", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + projectionMode: { + serializedName: "projectionMode", + type: { + name: "String", + }, + }, }, - mappings: { - serializedName: "mappings", - required: true, - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "InputFieldMappingEntry" - } - } - } - } - } - } -}; - -export const SearchIndexerIndexProjectionsParameters: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerIndexProjectionsParameters", - additionalProperties: { type: { name: "Object" } }, - modelProperties: { - projectionMode: { - serializedName: "projectionMode", - type: { - name: "String" - } - } - } - } -}; + }, + }; export const ListSkillsetsResult: coreClient.CompositeMapper = { type: { @@ -1564,13 +1630,13 @@ export const ListSkillsetsResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndexerSkillset" - } - } - } - } - 
} - } + className: "SearchIndexerSkillset", + }, + }, + }, + }, + }, + }, }; export const SkillNames: coreClient.CompositeMapper = { @@ -1584,13 +1650,13 @@ export const SkillNames: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const SynonymMap: coreClient.CompositeMapper = { @@ -1602,39 +1668,39 @@ export const SynonymMap: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, format: { defaultValue: "solr", isConstant: true, serializedName: "format", type: { - name: "String" - } + name: "String", + }, }, synonyms: { serializedName: "synonyms", required: true, type: { - name: "String" - } + name: "String", + }, }, encryptionKey: { serializedName: "encryptionKey", type: { name: "Composite", - className: "SearchResourceEncryptionKey" - } + className: "SearchResourceEncryptionKey", + }, }, etag: { serializedName: "@odata\\.etag", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ListSynonymMapsResult: coreClient.CompositeMapper = { @@ -1651,13 +1717,13 @@ export const ListSynonymMapsResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SynonymMap" - } - } - } - } - } - } + className: "SynonymMap", + }, + }, + }, + }, + }, + }, }; export const SearchIndex: coreClient.CompositeMapper = { @@ -1669,8 +1735,8 @@ export const SearchIndex: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, fields: { serializedName: "fields", @@ -1680,10 +1746,10 @@ export const SearchIndex: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchField" - } - } - } + className: "SearchField", + }, + }, + }, }, scoringProfiles: { serializedName: "scoringProfiles", @@ -1692,23 +1758,23 @@ export const SearchIndex: 
coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "ScoringProfile" - } - } - } + className: "ScoringProfile", + }, + }, + }, }, defaultScoringProfile: { serializedName: "defaultScoringProfile", type: { - name: "String" - } + name: "String", + }, }, corsOptions: { serializedName: "corsOptions", type: { name: "Composite", - className: "CorsOptions" - } + className: "CorsOptions", + }, }, suggesters: { serializedName: "suggesters", @@ -1717,10 +1783,10 @@ export const SearchIndex: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "Suggester" - } - } - } + className: "Suggester", + }, + }, + }, }, analyzers: { serializedName: "analyzers", @@ -1729,10 +1795,10 @@ export const SearchIndex: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "LexicalAnalyzer" - } - } - } + className: "LexicalAnalyzer", + }, + }, + }, }, tokenizers: { serializedName: "tokenizers", @@ -1741,10 +1807,10 @@ export const SearchIndex: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "LexicalTokenizer" - } - } - } + className: "LexicalTokenizer", + }, + }, + }, }, tokenFilters: { serializedName: "tokenFilters", @@ -1753,10 +1819,10 @@ export const SearchIndex: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "TokenFilter" - } - } - } + className: "TokenFilter", + }, + }, + }, }, charFilters: { serializedName: "charFilters", @@ -1765,10 +1831,10 @@ export const SearchIndex: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "CharFilter" - } - } - } + className: "CharFilter", + }, + }, + }, }, normalizers: { serializedName: "normalizers", @@ -1777,47 +1843,47 @@ export const SearchIndex: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "LexicalNormalizer" - } - } - } + className: "LexicalNormalizer", + }, + }, + }, }, encryptionKey: { serializedName: "encryptionKey", type: { 
name: "Composite", - className: "SearchResourceEncryptionKey" - } + className: "SearchResourceEncryptionKey", + }, }, similarity: { serializedName: "similarity", type: { name: "Composite", - className: "Similarity" - } + className: "Similarity", + }, }, - semanticSettings: { + semanticSearch: { serializedName: "semantic", type: { name: "Composite", - className: "SemanticSettings" - } + className: "SemanticSearch", + }, }, vectorSearch: { serializedName: "vectorSearch", type: { name: "Composite", - className: "VectorSearch" - } + className: "VectorSearch", + }, }, etag: { serializedName: "@odata\\.etag", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SearchField: coreClient.CompositeMapper = { @@ -1829,97 +1895,103 @@ export const SearchField: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, type: { serializedName: "type", required: true, type: { - name: "String" - } + name: "String", + }, }, key: { serializedName: "key", type: { - name: "Boolean" - } + name: "Boolean", + }, }, retrievable: { serializedName: "retrievable", type: { - name: "Boolean" - } + name: "Boolean", + }, + }, + stored: { + serializedName: "stored", + type: { + name: "Boolean", + }, }, searchable: { serializedName: "searchable", type: { - name: "Boolean" - } + name: "Boolean", + }, }, filterable: { serializedName: "filterable", type: { - name: "Boolean" - } + name: "Boolean", + }, }, sortable: { serializedName: "sortable", type: { - name: "Boolean" - } + name: "Boolean", + }, }, facetable: { serializedName: "facetable", type: { - name: "Boolean" - } + name: "Boolean", + }, }, analyzer: { serializedName: "analyzer", nullable: true, type: { - name: "String" - } + name: "String", + }, }, searchAnalyzer: { serializedName: "searchAnalyzer", nullable: true, type: { - name: "String" - } + name: "String", + }, }, indexAnalyzer: { serializedName: "indexAnalyzer", nullable: true, 
type: { - name: "String" - } + name: "String", + }, }, normalizer: { serializedName: "normalizer", nullable: true, type: { - name: "String" - } + name: "String", + }, }, vectorSearchDimensions: { constraints: { InclusiveMaximum: 2048, - InclusiveMinimum: 2 + InclusiveMinimum: 2, }, serializedName: "dimensions", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, - vectorSearchProfile: { + vectorSearchProfileName: { serializedName: "vectorSearchProfile", nullable: true, type: { - name: "String" - } + name: "String", + }, }, synonymMaps: { serializedName: "synonymMaps", @@ -1927,10 +1999,10 @@ export const SearchField: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, fields: { serializedName: "fields", @@ -1939,13 +2011,13 @@ export const SearchField: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchField" - } - } - } - } - } - } + className: "SearchField", + }, + }, + }, + }, + }, + }, }; export const ScoringProfile: coreClient.CompositeMapper = { @@ -1957,15 +2029,15 @@ export const ScoringProfile: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, textWeights: { serializedName: "text", type: { name: "Composite", - className: "TextWeights" - } + className: "TextWeights", + }, }, functions: { serializedName: "functions", @@ -1974,10 +2046,10 @@ export const ScoringProfile: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "ScoringFunction" - } - } - } + className: "ScoringFunction", + }, + }, + }, }, functionAggregation: { serializedName: "functionAggregation", @@ -1988,12 +2060,12 @@ export const ScoringProfile: coreClient.CompositeMapper = { "average", "minimum", "maximum", - "firstMatching" - ] - } - } - } - } + "firstMatching", + ], + }, + }, + }, + }, }; export const TextWeights: coreClient.CompositeMapper = { @@ 
-2006,11 +2078,11 @@ export const TextWeights: coreClient.CompositeMapper = { required: true, type: { name: "Dictionary", - value: { type: { name: "Number" } } - } - } - } - } + value: { type: { name: "Number" } }, + }, + }, + }, + }, }; export const ScoringFunction: coreClient.CompositeMapper = { @@ -2020,39 +2092,39 @@ export const ScoringFunction: coreClient.CompositeMapper = { uberParent: "ScoringFunction", polymorphicDiscriminator: { serializedName: "type", - clientName: "type" + clientName: "type", }, modelProperties: { type: { serializedName: "type", required: true, type: { - name: "String" - } + name: "String", + }, }, fieldName: { serializedName: "fieldName", required: true, type: { - name: "String" - } + name: "String", + }, }, boost: { serializedName: "boost", required: true, type: { - name: "Number" - } + name: "Number", + }, }, interpolation: { serializedName: "interpolation", type: { name: "Enum", - allowedValues: ["linear", "constant", "quadratic", "logarithmic"] - } - } - } - } + allowedValues: ["linear", "constant", "quadratic", "logarithmic"], + }, + }, + }, + }, }; export const CorsOptions: coreClient.CompositeMapper = { @@ -2067,20 +2139,20 @@ export const CorsOptions: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, maxAgeInSeconds: { serializedName: "maxAgeInSeconds", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const Suggester: coreClient.CompositeMapper = { @@ -2092,16 +2164,16 @@ export const Suggester: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, searchMode: { defaultValue: "analyzingInfixMatching", isConstant: true, serializedName: "searchMode", type: { - name: "String" - } + name: "String", + }, }, sourceFields: { serializedName: "sourceFields", @@ -2110,13 +2182,13 @@ export const Suggester: 
coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const LexicalAnalyzer: coreClient.CompositeMapper = { @@ -2126,25 +2198,25 @@ export const LexicalAnalyzer: coreClient.CompositeMapper = { uberParent: "LexicalAnalyzer", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "name", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const LexicalTokenizer: coreClient.CompositeMapper = { @@ -2154,25 +2226,25 @@ export const LexicalTokenizer: coreClient.CompositeMapper = { uberParent: "LexicalTokenizer", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "name", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const TokenFilter: coreClient.CompositeMapper = { @@ -2182,25 +2254,25 @@ export const TokenFilter: coreClient.CompositeMapper = { uberParent: "TokenFilter", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "name", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const CharFilter: coreClient.CompositeMapper = { @@ -2210,25 +2282,25 @@ export const CharFilter: coreClient.CompositeMapper = { uberParent: "CharFilter", 
polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "name", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const LexicalNormalizer: coreClient.CompositeMapper = { @@ -2238,25 +2310,25 @@ export const LexicalNormalizer: coreClient.CompositeMapper = { uberParent: "LexicalNormalizer", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "name", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const Similarity: coreClient.CompositeMapper = { @@ -2266,30 +2338,30 @@ export const Similarity: coreClient.CompositeMapper = { uberParent: "Similarity", polymorphicDiscriminator: { serializedName: "@odata\\.type", - clientName: "odatatype" + clientName: "odatatype", }, modelProperties: { odatatype: { serializedName: "@odata\\.type", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -export const SemanticSettings: coreClient.CompositeMapper = { +export const SemanticSearch: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SemanticSettings", + className: "SemanticSearch", modelProperties: { - defaultConfiguration: { + defaultConfigurationName: { serializedName: "defaultConfiguration", type: { - name: "String" - } + name: "String", + }, }, configurations: { serializedName: "configurations", @@ -2298,13 +2370,13 @@ export const SemanticSettings: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SemanticConfiguration" - } - } - 
} - } - } - } + className: "SemanticConfiguration", + }, + }, + }, + }, + }, + }, }; export const SemanticConfiguration: coreClient.CompositeMapper = { @@ -2316,58 +2388,58 @@ export const SemanticConfiguration: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, prioritizedFields: { serializedName: "prioritizedFields", type: { name: "Composite", - className: "PrioritizedFields" - } - } - } - } + className: "SemanticPrioritizedFields", + }, + }, + }, + }, }; -export const PrioritizedFields: coreClient.CompositeMapper = { +export const SemanticPrioritizedFields: coreClient.CompositeMapper = { type: { name: "Composite", - className: "PrioritizedFields", + className: "SemanticPrioritizedFields", modelProperties: { titleField: { serializedName: "titleField", type: { name: "Composite", - className: "SemanticField" - } + className: "SemanticField", + }, }, - prioritizedContentFields: { + contentFields: { serializedName: "prioritizedContentFields", type: { name: "Sequence", element: { type: { name: "Composite", - className: "SemanticField" - } - } - } + className: "SemanticField", + }, + }, + }, }, - prioritizedKeywordsFields: { + keywordsFields: { serializedName: "prioritizedKeywordsFields", type: { name: "Sequence", element: { type: { name: "Composite", - className: "SemanticField" - } - } - } - } - } - } + className: "SemanticField", + }, + }, + }, + }, + }, + }, }; export const SemanticField: coreClient.CompositeMapper = { @@ -2377,12 +2449,13 @@ export const SemanticField: coreClient.CompositeMapper = { modelProperties: { name: { serializedName: "fieldName", + required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const VectorSearch: coreClient.CompositeMapper = { @@ -2397,10 +2470,10 @@ export const VectorSearch: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "VectorSearchProfile" - } - } - } + className: 
"VectorSearchProfile", + }, + }, + }, }, algorithms: { serializedName: "algorithms", @@ -2409,10 +2482,10 @@ export const VectorSearch: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "VectorSearchAlgorithmConfiguration" - } - } - } + className: "VectorSearchAlgorithmConfiguration", + }, + }, + }, }, vectorizers: { serializedName: "vectorizers", @@ -2421,13 +2494,25 @@ export const VectorSearch: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "VectorSearchVectorizer" - } - } - } - } - } - } + className: "VectorSearchVectorizer", + }, + }, + }, + }, + compressions: { + serializedName: "compressions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BaseVectorSearchCompressionConfiguration", + }, + }, + }, + }, + }, + }, }; export const VectorSearchProfile: coreClient.CompositeMapper = { @@ -2439,24 +2524,30 @@ export const VectorSearchProfile: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, - algorithm: { + algorithmConfigurationName: { serializedName: "algorithm", required: true, type: { - name: "String" - } + name: "String", + }, }, vectorizer: { serializedName: "vectorizer", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + compressionConfigurationName: { + serializedName: "compression", + type: { + name: "String", + }, + }, + }, + }, }; export const VectorSearchAlgorithmConfiguration: coreClient.CompositeMapper = { @@ -2466,25 +2557,25 @@ export const VectorSearchAlgorithmConfiguration: coreClient.CompositeMapper = { uberParent: "VectorSearchAlgorithmConfiguration", polymorphicDiscriminator: { serializedName: "kind", - clientName: "kind" + clientName: "kind", }, modelProperties: { name: { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, kind: { serializedName: "kind", required: true, type: { - name: "String" - } - 
} - } - } + name: "String", + }, + }, + }, + }, }; export const VectorSearchVectorizer: coreClient.CompositeMapper = { @@ -2494,27 +2585,70 @@ export const VectorSearchVectorizer: coreClient.CompositeMapper = { uberParent: "VectorSearchVectorizer", polymorphicDiscriminator: { serializedName: "kind", - clientName: "kind" + clientName: "kind", }, modelProperties: { name: { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, kind: { serializedName: "kind", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; +export const BaseVectorSearchCompressionConfiguration: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "BaseVectorSearchCompressionConfiguration", + uberParent: "BaseVectorSearchCompressionConfiguration", + polymorphicDiscriminator: { + serializedName: "kind", + clientName: "kind", + }, + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String", + }, + }, + kind: { + serializedName: "kind", + required: true, + type: { + name: "String", + }, + }, + rerankWithOriginalVectors: { + defaultValue: true, + serializedName: "rerankWithOriginalVectors", + type: { + name: "Boolean", + }, + }, + defaultOversampling: { + serializedName: "defaultOversampling", + nullable: true, + type: { + name: "Number", + }, + }, + }, + }, + }; + export const ListIndexesResult: coreClient.CompositeMapper = { type: { name: "Composite", @@ -2529,13 +2663,13 @@ export const ListIndexesResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchIndex" - } - } - } - } - } - } + className: "SearchIndex", + }, + }, + }, + }, + }, + }, }; export const GetIndexStatisticsResult: coreClient.CompositeMapper = { @@ -2548,27 +2682,27 @@ export const GetIndexStatisticsResult: coreClient.CompositeMapper = { required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, storageSize: { 
serializedName: "storageSize", required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, vectorIndexSize: { serializedName: "vectorIndexSize", required: true, readOnly: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const AnalyzeRequest: coreClient.CompositeMapper = { @@ -2580,26 +2714,26 @@ export const AnalyzeRequest: coreClient.CompositeMapper = { serializedName: "text", required: true, type: { - name: "String" - } + name: "String", + }, }, analyzer: { serializedName: "analyzer", type: { - name: "String" - } + name: "String", + }, }, tokenizer: { serializedName: "tokenizer", type: { - name: "String" - } + name: "String", + }, }, normalizer: { serializedName: "normalizer", type: { - name: "String" - } + name: "String", + }, }, tokenFilters: { serializedName: "tokenFilters", @@ -2607,10 +2741,10 @@ export const AnalyzeRequest: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, charFilters: { serializedName: "charFilters", @@ -2618,13 +2752,13 @@ export const AnalyzeRequest: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const AnalyzeResult: coreClient.CompositeMapper = { @@ -2640,13 +2774,13 @@ export const AnalyzeResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "AnalyzedTokenInfo" - } - } - } - } - } - } + className: "AnalyzedTokenInfo", + }, + }, + }, + }, + }, + }, }; export const AnalyzedTokenInfo: coreClient.CompositeMapper = { @@ -2659,35 +2793,35 @@ export const AnalyzedTokenInfo: coreClient.CompositeMapper = { required: true, readOnly: true, type: { - name: "String" - } + name: "String", + }, }, startOffset: { serializedName: "startOffset", required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, endOffset: { 
serializedName: "endOffset", required: true, readOnly: true, type: { - name: "Number" - } + name: "Number", + }, }, position: { serializedName: "position", required: true, readOnly: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const SearchAlias: coreClient.CompositeMapper = { @@ -2699,8 +2833,8 @@ export const SearchAlias: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, indexes: { serializedName: "indexes", @@ -2709,19 +2843,19 @@ export const SearchAlias: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, etag: { serializedName: "@odata\\.etag", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ListAliasesResult: coreClient.CompositeMapper = { @@ -2738,13 +2872,13 @@ export const ListAliasesResult: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "SearchAlias" - } - } - } - } - } - } + className: "SearchAlias", + }, + }, + }, + }, + }, + }, }; export const ServiceStatistics: coreClient.CompositeMapper = { @@ -2756,18 +2890,18 @@ export const ServiceStatistics: coreClient.CompositeMapper = { serializedName: "counters", type: { name: "Composite", - className: "ServiceCounters" - } + className: "ServiceCounters", + }, }, limits: { serializedName: "limits", type: { name: "Composite", - className: "ServiceLimits" - } - } - } - } + className: "ServiceLimits", + }, + }, + }, + }, }; export const ServiceCounters: coreClient.CompositeMapper = { @@ -2779,67 +2913,67 @@ export const ServiceCounters: coreClient.CompositeMapper = { serializedName: "aliasesCount", type: { name: "Composite", - className: "ResourceCounter" - } + className: "ResourceCounter", + }, }, documentCounter: { serializedName: "documentCount", type: { name: "Composite", - className: "ResourceCounter" - } + className: 
"ResourceCounter", + }, }, indexCounter: { serializedName: "indexesCount", type: { name: "Composite", - className: "ResourceCounter" - } + className: "ResourceCounter", + }, }, indexerCounter: { serializedName: "indexersCount", type: { name: "Composite", - className: "ResourceCounter" - } + className: "ResourceCounter", + }, }, dataSourceCounter: { serializedName: "dataSourcesCount", type: { name: "Composite", - className: "ResourceCounter" - } + className: "ResourceCounter", + }, }, storageSizeCounter: { serializedName: "storageSize", type: { name: "Composite", - className: "ResourceCounter" - } + className: "ResourceCounter", + }, }, synonymMapCounter: { serializedName: "synonymMaps", type: { name: "Composite", - className: "ResourceCounter" - } + className: "ResourceCounter", + }, }, skillsetCounter: { serializedName: "skillsetCount", type: { name: "Composite", - className: "ResourceCounter" - } + className: "ResourceCounter", + }, }, vectorIndexSizeCounter: { serializedName: "vectorIndexSize", type: { name: "Composite", - className: "ResourceCounter" - } - } - } - } + className: "ResourceCounter", + }, + }, + }, + }, }; export const ResourceCounter: coreClient.CompositeMapper = { @@ -2851,18 +2985,18 @@ export const ResourceCounter: coreClient.CompositeMapper = { serializedName: "usage", required: true, type: { - name: "Number" - } + name: "Number", + }, }, quota: { serializedName: "quota", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const ServiceLimits: coreClient.CompositeMapper = { @@ -2874,32 +3008,32 @@ export const ServiceLimits: coreClient.CompositeMapper = { serializedName: "maxFieldsPerIndex", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, maxFieldNestingDepthPerIndex: { serializedName: "maxFieldNestingDepthPerIndex", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, maxComplexCollectionFieldsPerIndex: { serializedName: 
"maxComplexCollectionFieldsPerIndex", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, maxComplexObjectsInCollectionsPerDocument: { serializedName: "maxComplexObjectsInCollectionsPerDocument", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const HnswParameters: coreClient.CompositeMapper = { @@ -2911,47 +3045,47 @@ export const HnswParameters: coreClient.CompositeMapper = { defaultValue: 4, constraints: { InclusiveMaximum: 10, - InclusiveMinimum: 4 + InclusiveMinimum: 4, }, serializedName: "m", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, efConstruction: { defaultValue: 400, constraints: { InclusiveMaximum: 1000, - InclusiveMinimum: 100 + InclusiveMinimum: 100, }, serializedName: "efConstruction", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, efSearch: { defaultValue: 500, constraints: { InclusiveMaximum: 1000, - InclusiveMinimum: 100 + InclusiveMinimum: 100, }, serializedName: "efSearch", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, metric: { serializedName: "metric", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ExhaustiveKnnParameters: coreClient.CompositeMapper = { @@ -2963,11 +3097,27 @@ export const ExhaustiveKnnParameters: coreClient.CompositeMapper = { serializedName: "metric", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, +}; + +export const ScalarQuantizationParameters: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ScalarQuantizationParameters", + modelProperties: { + quantizedDataType: { + serializedName: "quantizedDataType", + nullable: true, + type: { + name: "String", + }, + }, + }, + }, }; export const AzureOpenAIParameters: coreClient.CompositeMapper = { @@ -2978,78 +3128,78 @@ export const AzureOpenAIParameters: coreClient.CompositeMapper = { 
resourceUri: { serializedName: "resourceUri", type: { - name: "String" - } + name: "String", + }, }, deploymentId: { serializedName: "deploymentId", type: { - name: "String" - } + name: "String", + }, }, apiKey: { serializedName: "apiKey", type: { - name: "String" - } + name: "String", + }, }, authIdentity: { serializedName: "authIdentity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } - } - } - } + className: "SearchIndexerDataIdentity", + }, + }, + }, + }, }; -export const CustomVectorizerParameters: coreClient.CompositeMapper = { +export const CustomWebApiParameters: coreClient.CompositeMapper = { type: { name: "Composite", - className: "CustomVectorizerParameters", + className: "CustomWebApiParameters", modelProperties: { uri: { serializedName: "uri", type: { - name: "String" - } + name: "String", + }, }, httpHeaders: { serializedName: "httpHeaders", type: { name: "Dictionary", - value: { type: { name: "String" } } - } + value: { type: { name: "String" } }, + }, }, httpMethod: { serializedName: "httpMethod", type: { - name: "String" - } + name: "String", + }, }, timeout: { serializedName: "timeout", type: { - name: "TimeSpan" - } + name: "TimeSpan", + }, }, authResourceId: { serializedName: "authResourceId", nullable: true, type: { - name: "String" - } + name: "String", + }, }, authIdentity: { serializedName: "authIdentity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } - } - } - } + className: "SearchIndexerDataIdentity", + }, + }, + }, + }, }; export const DistanceScoringParameters: coreClient.CompositeMapper = { @@ -3061,18 +3211,18 @@ export const DistanceScoringParameters: coreClient.CompositeMapper = { serializedName: "referencePointParameter", required: true, type: { - name: "String" - } + name: "String", + }, }, boostingDistance: { serializedName: "boostingDistance", required: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const 
FreshnessScoringParameters: coreClient.CompositeMapper = { @@ -3084,11 +3234,11 @@ export const FreshnessScoringParameters: coreClient.CompositeMapper = { serializedName: "boostingDuration", required: true, type: { - name: "TimeSpan" - } - } - } - } + name: "TimeSpan", + }, + }, + }, + }, }; export const MagnitudeScoringParameters: coreClient.CompositeMapper = { @@ -3100,24 +3250,24 @@ export const MagnitudeScoringParameters: coreClient.CompositeMapper = { serializedName: "boostingRangeStart", required: true, type: { - name: "Number" - } + name: "Number", + }, }, boostingRangeEnd: { serializedName: "boostingRangeEnd", required: true, type: { - name: "Number" - } + name: "Number", + }, }, shouldBoostBeyondRangeByConstant: { serializedName: "constantBoostBeyondRange", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const TagScoringParameters: coreClient.CompositeMapper = { @@ -3129,11 +3279,11 @@ export const TagScoringParameters: coreClient.CompositeMapper = { serializedName: "tagsParameter", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const CustomEntity: coreClient.CompositeMapper = { @@ -3145,78 +3295,78 @@ export const CustomEntity: coreClient.CompositeMapper = { serializedName: "name", required: true, type: { - name: "String" - } + name: "String", + }, }, description: { serializedName: "description", nullable: true, type: { - name: "String" - } + name: "String", + }, }, type: { serializedName: "type", nullable: true, type: { - name: "String" - } + name: "String", + }, }, subtype: { serializedName: "subtype", nullable: true, type: { - name: "String" - } + name: "String", + }, }, id: { serializedName: "id", nullable: true, type: { - name: "String" - } + name: "String", + }, }, caseSensitive: { serializedName: "caseSensitive", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, accentSensitive: { serializedName: "accentSensitive", nullable: 
true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, fuzzyEditDistance: { serializedName: "fuzzyEditDistance", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, defaultCaseSensitive: { serializedName: "defaultCaseSensitive", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, defaultAccentSensitive: { serializedName: "defaultAccentSensitive", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, defaultFuzzyEditDistance: { serializedName: "defaultFuzzyEditDistance", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, aliases: { serializedName: "aliases", @@ -3226,13 +3376,13 @@ export const CustomEntity: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "CustomEntityAlias" - } - } - } - } - } - } + className: "CustomEntityAlias", + }, + }, + }, + }, + }, + }, }; export const CustomEntityAlias: coreClient.CompositeMapper = { @@ -3244,32 +3394,32 @@ export const CustomEntityAlias: coreClient.CompositeMapper = { serializedName: "text", required: true, type: { - name: "String" - } + name: "String", + }, }, caseSensitive: { serializedName: "caseSensitive", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, accentSensitive: { serializedName: "accentSensitive", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, fuzzyEditDistance: { serializedName: "fuzzyEditDistance", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const SearchIndexerDataNoneIdentity: coreClient.CompositeMapper = { @@ -3278,34 +3428,35 @@ export const SearchIndexerDataNoneIdentity: coreClient.CompositeMapper = { name: "Composite", className: "SearchIndexerDataNoneIdentity", uberParent: "SearchIndexerDataIdentity", - polymorphicDiscriminator: - SearchIndexerDataIdentity.type.polymorphicDiscriminator, - modelProperties: { - ...SearchIndexerDataIdentity.type.modelProperties - } - } -}; - 
-export const SearchIndexerDataUserAssignedIdentity: coreClient.CompositeMapper = { - serializedName: "#Microsoft.Azure.Search.DataUserAssignedIdentity", - type: { - name: "Composite", - className: "SearchIndexerDataUserAssignedIdentity", - uberParent: "SearchIndexerDataIdentity", polymorphicDiscriminator: SearchIndexerDataIdentity.type.polymorphicDiscriminator, modelProperties: { ...SearchIndexerDataIdentity.type.modelProperties, - userAssignedIdentity: { - serializedName: "userAssignedIdentity", - required: true, - type: { - name: "String" - } - } - } - } -}; + }, + }, +}; + +export const SearchIndexerDataUserAssignedIdentity: coreClient.CompositeMapper = + { + serializedName: "#Microsoft.Azure.Search.DataUserAssignedIdentity", + type: { + name: "Composite", + className: "SearchIndexerDataUserAssignedIdentity", + uberParent: "SearchIndexerDataIdentity", + polymorphicDiscriminator: + SearchIndexerDataIdentity.type.polymorphicDiscriminator, + modelProperties: { + ...SearchIndexerDataIdentity.type.modelProperties, + userAssignedIdentity: { + serializedName: "userAssignedIdentity", + required: true, + type: { + name: "String", + }, + }, + }, + }, + }; export const HighWaterMarkChangeDetectionPolicy: coreClient.CompositeMapper = { serializedName: "#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy", @@ -3321,11 +3472,11 @@ export const HighWaterMarkChangeDetectionPolicy: coreClient.CompositeMapper = { serializedName: "highWaterMarkColumnName", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SqlIntegratedChangeTrackingPolicy: coreClient.CompositeMapper = { @@ -3337,52 +3488,54 @@ export const SqlIntegratedChangeTrackingPolicy: coreClient.CompositeMapper = { polymorphicDiscriminator: DataChangeDetectionPolicy.type.polymorphicDiscriminator, modelProperties: { - ...DataChangeDetectionPolicy.type.modelProperties - } - } -}; - -export const SoftDeleteColumnDeletionDetectionPolicy: 
coreClient.CompositeMapper = { - serializedName: - "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy", - type: { - name: "Composite", - className: "SoftDeleteColumnDeletionDetectionPolicy", - uberParent: "DataDeletionDetectionPolicy", - polymorphicDiscriminator: - DataDeletionDetectionPolicy.type.polymorphicDiscriminator, - modelProperties: { - ...DataDeletionDetectionPolicy.type.modelProperties, - softDeleteColumnName: { - serializedName: "softDeleteColumnName", - type: { - name: "String" - } + ...DataChangeDetectionPolicy.type.modelProperties, + }, + }, +}; + +export const SoftDeleteColumnDeletionDetectionPolicy: coreClient.CompositeMapper = + { + serializedName: + "#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy", + type: { + name: "Composite", + className: "SoftDeleteColumnDeletionDetectionPolicy", + uberParent: "DataDeletionDetectionPolicy", + polymorphicDiscriminator: + DataDeletionDetectionPolicy.type.polymorphicDiscriminator, + modelProperties: { + ...DataDeletionDetectionPolicy.type.modelProperties, + softDeleteColumnName: { + serializedName: "softDeleteColumnName", + type: { + name: "String", + }, + }, + softDeleteMarkerValue: { + serializedName: "softDeleteMarkerValue", + type: { + name: "String", + }, + }, }, - softDeleteMarkerValue: { - serializedName: "softDeleteMarkerValue", - type: { - name: "String" - } - } - } - } -}; - -export const NativeBlobSoftDeleteDeletionDetectionPolicy: coreClient.CompositeMapper = { - serializedName: - "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy", - type: { - name: "Composite", - className: "NativeBlobSoftDeleteDeletionDetectionPolicy", - uberParent: "DataDeletionDetectionPolicy", - polymorphicDiscriminator: - DataDeletionDetectionPolicy.type.polymorphicDiscriminator, - modelProperties: { - ...DataDeletionDetectionPolicy.type.modelProperties - } - } -}; + }, + }; + +export const NativeBlobSoftDeleteDeletionDetectionPolicy: coreClient.CompositeMapper = + { + 
serializedName: + "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy", + type: { + name: "Composite", + className: "NativeBlobSoftDeleteDeletionDetectionPolicy", + uberParent: "DataDeletionDetectionPolicy", + polymorphicDiscriminator: + DataDeletionDetectionPolicy.type.polymorphicDiscriminator, + modelProperties: { + ...DataDeletionDetectionPolicy.type.modelProperties, + }, + }, + }; export const ConditionalSkill: coreClient.CompositeMapper = { serializedName: "#Microsoft.Skills.Util.ConditionalSkill", @@ -3392,9 +3545,9 @@ export const ConditionalSkill: coreClient.CompositeMapper = { uberParent: "SearchIndexerSkill", polymorphicDiscriminator: SearchIndexerSkill.type.polymorphicDiscriminator, modelProperties: { - ...SearchIndexerSkill.type.modelProperties - } - } + ...SearchIndexerSkill.type.modelProperties, + }, + }, }; export const KeyPhraseExtractionSkill: coreClient.CompositeMapper = { @@ -3409,25 +3562,25 @@ export const KeyPhraseExtractionSkill: coreClient.CompositeMapper = { defaultLanguageCode: { serializedName: "defaultLanguageCode", type: { - name: "String" - } + name: "String", + }, }, maxKeyPhraseCount: { serializedName: "maxKeyPhraseCount", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, modelVersion: { serializedName: "modelVersion", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const OcrSkill: coreClient.CompositeMapper = { @@ -3442,24 +3595,24 @@ export const OcrSkill: coreClient.CompositeMapper = { defaultLanguageCode: { serializedName: "defaultLanguageCode", type: { - name: "String" - } + name: "String", + }, }, shouldDetectOrientation: { defaultValue: false, serializedName: "detectOrientation", type: { - name: "Boolean" - } + name: "Boolean", + }, }, lineEnding: { serializedName: "lineEnding", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ImageAnalysisSkill: coreClient.CompositeMapper = { @@ -3474,8 
+3627,8 @@ export const ImageAnalysisSkill: coreClient.CompositeMapper = { defaultLanguageCode: { serializedName: "defaultLanguageCode", type: { - name: "String" - } + name: "String", + }, }, visualFeatures: { serializedName: "visualFeatures", @@ -3483,10 +3636,10 @@ export const ImageAnalysisSkill: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, details: { serializedName: "details", @@ -3494,13 +3647,13 @@ export const ImageAnalysisSkill: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const LanguageDetectionSkill: coreClient.CompositeMapper = { @@ -3516,18 +3669,18 @@ export const LanguageDetectionSkill: coreClient.CompositeMapper = { serializedName: "defaultCountryHint", nullable: true, type: { - name: "String" - } + name: "String", + }, }, modelVersion: { serializedName: "modelVersion", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ShaperSkill: coreClient.CompositeMapper = { @@ -3538,9 +3691,9 @@ export const ShaperSkill: coreClient.CompositeMapper = { uberParent: "SearchIndexerSkill", polymorphicDiscriminator: SearchIndexerSkill.type.polymorphicDiscriminator, modelProperties: { - ...SearchIndexerSkill.type.modelProperties - } - } + ...SearchIndexerSkill.type.modelProperties, + }, + }, }; export const MergeSkill: coreClient.CompositeMapper = { @@ -3556,18 +3709,18 @@ export const MergeSkill: coreClient.CompositeMapper = { defaultValue: " ", serializedName: "insertPreTag", type: { - name: "String" - } + name: "String", + }, }, insertPostTag: { defaultValue: " ", serializedName: "insertPostTag", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const EntityRecognitionSkill: coreClient.CompositeMapper = { @@ -3585,33 +3738,33 @@ export const 
EntityRecognitionSkill: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, defaultLanguageCode: { serializedName: "defaultLanguageCode", type: { - name: "String" - } + name: "String", + }, }, includeTypelessEntities: { serializedName: "includeTypelessEntities", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, minimumPrecision: { serializedName: "minimumPrecision", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const SentimentSkill: coreClient.CompositeMapper = { @@ -3626,11 +3779,11 @@ export const SentimentSkill: coreClient.CompositeMapper = { defaultLanguageCode: { serializedName: "defaultLanguageCode", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SentimentSkillV3: coreClient.CompositeMapper = { @@ -3646,25 +3799,25 @@ export const SentimentSkillV3: coreClient.CompositeMapper = { serializedName: "defaultLanguageCode", nullable: true, type: { - name: "String" - } + name: "String", + }, }, includeOpinionMining: { defaultValue: false, serializedName: "includeOpinionMining", type: { - name: "Boolean" - } + name: "Boolean", + }, }, modelVersion: { serializedName: "modelVersion", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const EntityLinkingSkill: coreClient.CompositeMapper = { @@ -3680,29 +3833,29 @@ export const EntityLinkingSkill: coreClient.CompositeMapper = { serializedName: "defaultLanguageCode", nullable: true, type: { - name: "String" - } + name: "String", + }, }, minimumPrecision: { constraints: { InclusiveMaximum: 1, - InclusiveMinimum: 0 + InclusiveMinimum: 0, }, serializedName: "minimumPrecision", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, modelVersion: { serializedName: "modelVersion", nullable: true, type: { - name: "String" - } - } - } - } + name: 
"String", + }, + }, + }, + }, }; export const EntityRecognitionSkillV3: coreClient.CompositeMapper = { @@ -3720,38 +3873,38 @@ export const EntityRecognitionSkillV3: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, defaultLanguageCode: { serializedName: "defaultLanguageCode", nullable: true, type: { - name: "String" - } + name: "String", + }, }, minimumPrecision: { constraints: { InclusiveMaximum: 1, - InclusiveMinimum: 0 + InclusiveMinimum: 0, }, serializedName: "minimumPrecision", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, modelVersion: { serializedName: "modelVersion", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PIIDetectionSkill: coreClient.CompositeMapper = { @@ -3767,63 +3920,63 @@ export const PIIDetectionSkill: coreClient.CompositeMapper = { serializedName: "defaultLanguageCode", nullable: true, type: { - name: "String" - } + name: "String", + }, }, minimumPrecision: { constraints: { InclusiveMaximum: 1, - InclusiveMinimum: 0 + InclusiveMinimum: 0, }, serializedName: "minimumPrecision", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, maskingMode: { serializedName: "maskingMode", type: { - name: "String" - } + name: "String", + }, }, maskingCharacter: { constraints: { - MaxLength: 1 + MaxLength: 1, }, serializedName: "maskingCharacter", nullable: true, type: { - name: "String" - } + name: "String", + }, }, modelVersion: { serializedName: "modelVersion", nullable: true, type: { - name: "String" - } + name: "String", + }, }, - piiCategories: { + categories: { serializedName: "piiCategories", type: { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, domain: { serializedName: "domain", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SplitSkill: 
coreClient.CompositeMapper = { @@ -3838,38 +3991,38 @@ export const SplitSkill: coreClient.CompositeMapper = { defaultLanguageCode: { serializedName: "defaultLanguageCode", type: { - name: "String" - } + name: "String", + }, }, textSplitMode: { serializedName: "textSplitMode", type: { - name: "String" - } + name: "String", + }, }, maxPageLength: { serializedName: "maximumPageLength", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, pageOverlapLength: { serializedName: "pageOverlapLength", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, maximumPagesToTake: { serializedName: "maximumPagesToTake", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const CustomEntityLookupSkill: coreClient.CompositeMapper = { @@ -3885,15 +4038,15 @@ export const CustomEntityLookupSkill: coreClient.CompositeMapper = { serializedName: "defaultLanguageCode", nullable: true, type: { - name: "String" - } + name: "String", + }, }, entitiesDefinitionUri: { serializedName: "entitiesDefinitionUri", nullable: true, type: { - name: "String" - } + name: "String", + }, }, inlineEntitiesDefinition: { serializedName: "inlineEntitiesDefinition", @@ -3903,34 +4056,34 @@ export const CustomEntityLookupSkill: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "CustomEntity" - } - } - } + className: "CustomEntity", + }, + }, + }, }, globalDefaultCaseSensitive: { serializedName: "globalDefaultCaseSensitive", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, globalDefaultAccentSensitive: { serializedName: "globalDefaultAccentSensitive", nullable: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, globalDefaultFuzzyEditDistance: { serializedName: "globalDefaultFuzzyEditDistance", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const TextTranslationSkill: coreClient.CompositeMapper = { @@ 
-3946,24 +4099,24 @@ export const TextTranslationSkill: coreClient.CompositeMapper = { serializedName: "defaultToLanguageCode", required: true, type: { - name: "String" - } + name: "String", + }, }, defaultFromLanguageCode: { serializedName: "defaultFromLanguageCode", type: { - name: "String" - } + name: "String", + }, }, suggestedFrom: { serializedName: "suggestedFrom", nullable: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const DocumentExtractionSkill: coreClient.CompositeMapper = { @@ -3979,26 +4132,26 @@ export const DocumentExtractionSkill: coreClient.CompositeMapper = { serializedName: "parsingMode", nullable: true, type: { - name: "String" - } + name: "String", + }, }, dataToExtract: { serializedName: "dataToExtract", nullable: true, type: { - name: "String" - } + name: "String", + }, }, configuration: { serializedName: "configuration", nullable: true, type: { name: "Dictionary", - value: { type: { name: "any" } } - } - } - } - } + value: { type: { name: "any" } }, + }, + }, + }, + }, }; export const WebApiSkill: coreClient.CompositeMapper = { @@ -4014,58 +4167,58 @@ export const WebApiSkill: coreClient.CompositeMapper = { serializedName: "uri", required: true, type: { - name: "String" - } + name: "String", + }, }, httpHeaders: { serializedName: "httpHeaders", type: { name: "Dictionary", - value: { type: { name: "String" } } - } + value: { type: { name: "String" } }, + }, }, httpMethod: { serializedName: "httpMethod", type: { - name: "String" - } + name: "String", + }, }, timeout: { serializedName: "timeout", type: { - name: "TimeSpan" - } + name: "TimeSpan", + }, }, batchSize: { serializedName: "batchSize", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, degreeOfParallelism: { serializedName: "degreeOfParallelism", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, authResourceId: { serializedName: "authResourceId", nullable: true, type: { - name: "String" - } + 
name: "String", + }, }, authIdentity: { serializedName: "authIdentity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } - } - } - } + className: "SearchIndexerDataIdentity", + }, + }, + }, + }, }; export const AzureMachineLearningSkill: coreClient.CompositeMapper = { @@ -4081,46 +4234,46 @@ export const AzureMachineLearningSkill: coreClient.CompositeMapper = { serializedName: "uri", nullable: true, type: { - name: "String" - } + name: "String", + }, }, authenticationKey: { serializedName: "key", nullable: true, type: { - name: "String" - } + name: "String", + }, }, resourceId: { serializedName: "resourceId", nullable: true, type: { - name: "String" - } + name: "String", + }, }, timeout: { serializedName: "timeout", nullable: true, type: { - name: "TimeSpan" - } + name: "TimeSpan", + }, }, region: { serializedName: "region", nullable: true, type: { - name: "String" - } + name: "String", + }, }, degreeOfParallelism: { serializedName: "degreeOfParallelism", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const AzureOpenAIEmbeddingSkill: coreClient.CompositeMapper = { @@ -4135,30 +4288,30 @@ export const AzureOpenAIEmbeddingSkill: coreClient.CompositeMapper = { resourceUri: { serializedName: "resourceUri", type: { - name: "String" - } + name: "String", + }, }, deploymentId: { serializedName: "deploymentId", type: { - name: "String" - } + name: "String", + }, }, apiKey: { serializedName: "apiKey", type: { - name: "String" - } + name: "String", + }, }, authIdentity: { serializedName: "authIdentity", type: { name: "Composite", - className: "SearchIndexerDataIdentity" - } - } - } - } + className: "SearchIndexerDataIdentity", + }, + }, + }, + }, }; export const DefaultCognitiveServicesAccount: coreClient.CompositeMapper = { @@ -4170,9 +4323,9 @@ export const DefaultCognitiveServicesAccount: coreClient.CompositeMapper = { polymorphicDiscriminator: 
CognitiveServicesAccount.type.polymorphicDiscriminator, modelProperties: { - ...CognitiveServicesAccount.type.modelProperties - } - } + ...CognitiveServicesAccount.type.modelProperties, + }, + }, }; export const CognitiveServicesAccountKey: coreClient.CompositeMapper = { @@ -4184,51 +4337,53 @@ export const CognitiveServicesAccountKey: coreClient.CompositeMapper = { polymorphicDiscriminator: CognitiveServicesAccount.type.polymorphicDiscriminator, modelProperties: { - ...CognitiveServicesAccount.type.modelProperties, - key: { - serializedName: "key", - required: true, - type: { - name: "String" - } - } - } - } -}; - -export const SearchIndexerKnowledgeStoreTableProjectionSelector: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreTableProjectionSelector", - modelProperties: { - ...SearchIndexerKnowledgeStoreProjectionSelector.type.modelProperties, - tableName: { - serializedName: "tableName", - required: true, - type: { - name: "String" - } - } - } - } -}; - -export const SearchIndexerKnowledgeStoreBlobProjectionSelector: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreBlobProjectionSelector", - modelProperties: { - ...SearchIndexerKnowledgeStoreProjectionSelector.type.modelProperties, - storageContainer: { - serializedName: "storageContainer", + ...CognitiveServicesAccount.type.modelProperties, + key: { + serializedName: "key", required: true, type: { - name: "String" - } - } - } - } -}; + name: "String", + }, + }, + }, + }, +}; + +export const SearchIndexerKnowledgeStoreTableProjectionSelector: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreTableProjectionSelector", + modelProperties: { + ...SearchIndexerKnowledgeStoreProjectionSelector.type.modelProperties, + tableName: { + serializedName: "tableName", + required: true, + type: { + name: "String", + }, + }, + }, + }, + }; + +export const 
SearchIndexerKnowledgeStoreBlobProjectionSelector: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreBlobProjectionSelector", + modelProperties: { + ...SearchIndexerKnowledgeStoreProjectionSelector.type.modelProperties, + storageContainer: { + serializedName: "storageContainer", + required: true, + type: { + name: "String", + }, + }, + }, + }, + }; export const DistanceScoringFunction: coreClient.CompositeMapper = { serializedName: "distance", @@ -4243,11 +4398,11 @@ export const DistanceScoringFunction: coreClient.CompositeMapper = { serializedName: "distance", type: { name: "Composite", - className: "DistanceScoringParameters" - } - } - } - } + className: "DistanceScoringParameters", + }, + }, + }, + }, }; export const FreshnessScoringFunction: coreClient.CompositeMapper = { @@ -4263,11 +4418,11 @@ export const FreshnessScoringFunction: coreClient.CompositeMapper = { serializedName: "freshness", type: { name: "Composite", - className: "FreshnessScoringParameters" - } - } - } - } + className: "FreshnessScoringParameters", + }, + }, + }, + }, }; export const MagnitudeScoringFunction: coreClient.CompositeMapper = { @@ -4283,11 +4438,11 @@ export const MagnitudeScoringFunction: coreClient.CompositeMapper = { serializedName: "magnitude", type: { name: "Composite", - className: "MagnitudeScoringParameters" - } - } - } - } + className: "MagnitudeScoringParameters", + }, + }, + }, + }, }; export const TagScoringFunction: coreClient.CompositeMapper = { @@ -4303,11 +4458,11 @@ export const TagScoringFunction: coreClient.CompositeMapper = { serializedName: "tag", type: { name: "Composite", - className: "TagScoringParameters" - } - } - } - } + className: "TagScoringParameters", + }, + }, + }, + }, }; export const CustomAnalyzer: coreClient.CompositeMapper = { @@ -4323,8 +4478,8 @@ export const CustomAnalyzer: coreClient.CompositeMapper = { serializedName: "tokenizer", required: true, type: { - name: "String" - } + name: 
"String", + }, }, tokenFilters: { serializedName: "tokenFilters", @@ -4332,10 +4487,10 @@ export const CustomAnalyzer: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, charFilters: { serializedName: "charFilters", @@ -4343,13 +4498,13 @@ export const CustomAnalyzer: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const PatternAnalyzer: coreClient.CompositeMapper = { @@ -4365,21 +4520,21 @@ export const PatternAnalyzer: coreClient.CompositeMapper = { defaultValue: true, serializedName: "lowercase", type: { - name: "Boolean" - } + name: "Boolean", + }, }, pattern: { defaultValue: "W+", serializedName: "pattern", type: { - name: "String" - } + name: "String", + }, }, flags: { serializedName: "flags", type: { - name: "String" - } + name: "String", + }, }, stopwords: { serializedName: "stopwords", @@ -4387,13 +4542,13 @@ export const PatternAnalyzer: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const LuceneStandardAnalyzer: coreClient.CompositeMapper = { @@ -4408,12 +4563,12 @@ export const LuceneStandardAnalyzer: coreClient.CompositeMapper = { maxTokenLength: { defaultValue: 255, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } + name: "Number", + }, }, stopwords: { serializedName: "stopwords", @@ -4421,13 +4576,13 @@ export const LuceneStandardAnalyzer: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const StopAnalyzer: coreClient.CompositeMapper = { @@ -4445,13 +4600,13 @@ export const StopAnalyzer: coreClient.CompositeMapper = { name: "Sequence", 
element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const ClassicTokenizer: coreClient.CompositeMapper = { @@ -4466,15 +4621,15 @@ export const ClassicTokenizer: coreClient.CompositeMapper = { maxTokenLength: { defaultValue: 255, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const EdgeNGramTokenizer: coreClient.CompositeMapper = { @@ -4489,22 +4644,22 @@ export const EdgeNGramTokenizer: coreClient.CompositeMapper = { minGram: { defaultValue: 1, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "minGram", type: { - name: "Number" - } + name: "Number", + }, }, maxGram: { defaultValue: 2, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxGram", type: { - name: "Number" - } + name: "Number", + }, }, tokenChars: { serializedName: "tokenChars", @@ -4518,14 +4673,14 @@ export const EdgeNGramTokenizer: coreClient.CompositeMapper = { "digit", "whitespace", "punctuation", - "symbol" - ] - } - } - } - } - } - } + "symbol", + ], + }, + }, + }, + }, + }, + }, }; export const KeywordTokenizer: coreClient.CompositeMapper = { @@ -4541,11 +4696,11 @@ export const KeywordTokenizer: coreClient.CompositeMapper = { defaultValue: 256, serializedName: "bufferSize", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const KeywordTokenizerV2: coreClient.CompositeMapper = { @@ -4560,15 +4715,15 @@ export const KeywordTokenizerV2: coreClient.CompositeMapper = { maxTokenLength: { defaultValue: 256, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const MicrosoftLanguageTokenizer: coreClient.CompositeMapper = { @@ -4583,19 +4738,19 @@ export 
const MicrosoftLanguageTokenizer: coreClient.CompositeMapper = { maxTokenLength: { defaultValue: 255, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } + name: "Number", + }, }, isSearchTokenizer: { defaultValue: false, serializedName: "isSearchTokenizer", type: { - name: "Boolean" - } + name: "Boolean", + }, }, language: { serializedName: "language", @@ -4643,12 +4798,12 @@ export const MicrosoftLanguageTokenizer: coreClient.CompositeMapper = { "thai", "ukrainian", "urdu", - "vietnamese" - ] - } - } - } - } + "vietnamese", + ], + }, + }, + }, + }, }; export const MicrosoftLanguageStemmingTokenizer: coreClient.CompositeMapper = { @@ -4663,19 +4818,19 @@ export const MicrosoftLanguageStemmingTokenizer: coreClient.CompositeMapper = { maxTokenLength: { defaultValue: 255, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } + name: "Number", + }, }, isSearchTokenizer: { defaultValue: false, serializedName: "isSearchTokenizer", type: { - name: "Boolean" - } + name: "Boolean", + }, }, language: { serializedName: "language", @@ -4726,12 +4881,12 @@ export const MicrosoftLanguageStemmingTokenizer: coreClient.CompositeMapper = { "telugu", "turkish", "ukrainian", - "urdu" - ] - } - } - } - } + "urdu", + ], + }, + }, + }, + }, }; export const NGramTokenizer: coreClient.CompositeMapper = { @@ -4746,22 +4901,22 @@ export const NGramTokenizer: coreClient.CompositeMapper = { minGram: { defaultValue: 1, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "minGram", type: { - name: "Number" - } + name: "Number", + }, }, maxGram: { defaultValue: 2, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxGram", type: { - name: "Number" - } + name: "Number", + }, }, tokenChars: { serializedName: "tokenChars", @@ -4775,14 +4930,14 @@ export const NGramTokenizer: 
coreClient.CompositeMapper = { "digit", "whitespace", "punctuation", - "symbol" - ] - } - } - } - } - } - } + "symbol", + ], + }, + }, + }, + }, + }, + }, }; export const PathHierarchyTokenizerV2: coreClient.CompositeMapper = { @@ -4798,42 +4953,42 @@ export const PathHierarchyTokenizerV2: coreClient.CompositeMapper = { defaultValue: "/", serializedName: "delimiter", type: { - name: "String" - } + name: "String", + }, }, replacement: { defaultValue: "/", serializedName: "replacement", type: { - name: "String" - } + name: "String", + }, }, maxTokenLength: { defaultValue: 300, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } + name: "Number", + }, }, reverseTokenOrder: { defaultValue: false, serializedName: "reverse", type: { - name: "Boolean" - } + name: "Boolean", + }, }, numberOfTokensToSkip: { defaultValue: 0, serializedName: "skip", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const PatternTokenizer: coreClient.CompositeMapper = { @@ -4849,24 +5004,24 @@ export const PatternTokenizer: coreClient.CompositeMapper = { defaultValue: "W+", serializedName: "pattern", type: { - name: "String" - } + name: "String", + }, }, flags: { serializedName: "flags", type: { - name: "String" - } + name: "String", + }, }, group: { defaultValue: -1, serializedName: "group", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const LuceneStandardTokenizer: coreClient.CompositeMapper = { @@ -4882,11 +5037,11 @@ export const LuceneStandardTokenizer: coreClient.CompositeMapper = { defaultValue: 255, serializedName: "maxTokenLength", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const LuceneStandardTokenizerV2: coreClient.CompositeMapper = { @@ -4901,15 +5056,15 @@ export const LuceneStandardTokenizerV2: coreClient.CompositeMapper = { maxTokenLength: { defaultValue: 255, constraints: { 
- InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const UaxUrlEmailTokenizer: coreClient.CompositeMapper = { @@ -4924,15 +5079,15 @@ export const UaxUrlEmailTokenizer: coreClient.CompositeMapper = { maxTokenLength: { defaultValue: 255, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxTokenLength", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const AsciiFoldingTokenFilter: coreClient.CompositeMapper = { @@ -4948,11 +5103,11 @@ export const AsciiFoldingTokenFilter: coreClient.CompositeMapper = { defaultValue: false, serializedName: "preserveOriginal", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const CjkBigramTokenFilter: coreClient.CompositeMapper = { @@ -4971,20 +5126,20 @@ export const CjkBigramTokenFilter: coreClient.CompositeMapper = { element: { type: { name: "Enum", - allowedValues: ["han", "hiragana", "katakana", "hangul"] - } - } - } + allowedValues: ["han", "hiragana", "katakana", "hangul"], + }, + }, + }, }, outputUnigrams: { defaultValue: false, serializedName: "outputUnigrams", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const CommonGramTokenFilter: coreClient.CompositeMapper = { @@ -5003,27 +5158,27 @@ export const CommonGramTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, ignoreCase: { defaultValue: false, serializedName: "ignoreCase", type: { - name: "Boolean" - } + name: "Boolean", + }, }, useQueryMode: { defaultValue: false, serializedName: "queryMode", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const DictionaryDecompounderTokenFilter: coreClient.CompositeMapper = { @@ -5042,50 +5197,50 @@ export const 
DictionaryDecompounderTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, minWordSize: { defaultValue: 5, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "minWordSize", type: { - name: "Number" - } + name: "Number", + }, }, minSubwordSize: { defaultValue: 2, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "minSubwordSize", type: { - name: "Number" - } + name: "Number", + }, }, maxSubwordSize: { defaultValue: 15, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxSubwordSize", type: { - name: "Number" - } + name: "Number", + }, }, onlyLongestMatch: { defaultValue: false, serializedName: "onlyLongestMatch", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const EdgeNGramTokenFilter: coreClient.CompositeMapper = { @@ -5101,25 +5256,25 @@ export const EdgeNGramTokenFilter: coreClient.CompositeMapper = { defaultValue: 1, serializedName: "minGram", type: { - name: "Number" - } + name: "Number", + }, }, maxGram: { defaultValue: 2, serializedName: "maxGram", type: { - name: "Number" - } + name: "Number", + }, }, side: { serializedName: "side", type: { name: "Enum", - allowedValues: ["front", "back"] - } - } - } - } + allowedValues: ["front", "back"], + }, + }, + }, + }, }; export const EdgeNGramTokenFilterV2: coreClient.CompositeMapper = { @@ -5134,32 +5289,32 @@ export const EdgeNGramTokenFilterV2: coreClient.CompositeMapper = { minGram: { defaultValue: 1, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "minGram", type: { - name: "Number" - } + name: "Number", + }, }, maxGram: { defaultValue: 2, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxGram", type: { - name: "Number" - } + name: "Number", + }, }, side: { serializedName: "side", type: { name: "Enum", - 
allowedValues: ["front", "back"] - } - } - } - } + allowedValues: ["front", "back"], + }, + }, + }, + }, }; export const ElisionTokenFilter: coreClient.CompositeMapper = { @@ -5177,13 +5332,13 @@ export const ElisionTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const KeepTokenFilter: coreClient.CompositeMapper = { @@ -5202,20 +5357,20 @@ export const KeepTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, lowerCaseKeepWords: { defaultValue: false, serializedName: "keepWordsCase", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const KeywordMarkerTokenFilter: coreClient.CompositeMapper = { @@ -5234,20 +5389,20 @@ export const KeywordMarkerTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, ignoreCase: { defaultValue: false, serializedName: "ignoreCase", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const LengthTokenFilter: coreClient.CompositeMapper = { @@ -5262,25 +5417,25 @@ export const LengthTokenFilter: coreClient.CompositeMapper = { minLength: { defaultValue: 0, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "min", type: { - name: "Number" - } + name: "Number", + }, }, maxLength: { defaultValue: 300, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "max", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const LimitTokenFilter: coreClient.CompositeMapper = { @@ -5296,18 +5451,18 @@ export const LimitTokenFilter: coreClient.CompositeMapper = { defaultValue: 1, serializedName: "maxTokenCount", type: { - name: "Number" - } + name: "Number", + }, }, 
consumeAllTokens: { defaultValue: false, serializedName: "consumeAllTokens", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const NGramTokenFilter: coreClient.CompositeMapper = { @@ -5323,18 +5478,18 @@ export const NGramTokenFilter: coreClient.CompositeMapper = { defaultValue: 1, serializedName: "minGram", type: { - name: "Number" - } + name: "Number", + }, }, maxGram: { defaultValue: 2, serializedName: "maxGram", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const NGramTokenFilterV2: coreClient.CompositeMapper = { @@ -5349,25 +5504,25 @@ export const NGramTokenFilterV2: coreClient.CompositeMapper = { minGram: { defaultValue: 1, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "minGram", type: { - name: "Number" - } + name: "Number", + }, }, maxGram: { defaultValue: 2, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "maxGram", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const PatternCaptureTokenFilter: coreClient.CompositeMapper = { @@ -5386,20 +5541,20 @@ export const PatternCaptureTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, preserveOriginal: { defaultValue: true, serializedName: "preserveOriginal", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const PatternReplaceTokenFilter: coreClient.CompositeMapper = { @@ -5415,18 +5570,18 @@ export const PatternReplaceTokenFilter: coreClient.CompositeMapper = { serializedName: "pattern", required: true, type: { - name: "String" - } + name: "String", + }, }, replacement: { serializedName: "replacement", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PhoneticTokenFilter: coreClient.CompositeMapper = { @@ -5453,19 +5608,19 
@@ export const PhoneticTokenFilter: coreClient.CompositeMapper = { "nysiis", "koelnerPhonetik", "haasePhonetik", - "beiderMorse" - ] - } + "beiderMorse", + ], + }, }, replaceOriginalTokens: { defaultValue: true, serializedName: "replace", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const ShingleTokenFilter: coreClient.CompositeMapper = { @@ -5480,53 +5635,53 @@ export const ShingleTokenFilter: coreClient.CompositeMapper = { maxShingleSize: { defaultValue: 2, constraints: { - InclusiveMinimum: 2 + InclusiveMinimum: 2, }, serializedName: "maxShingleSize", type: { - name: "Number" - } + name: "Number", + }, }, minShingleSize: { defaultValue: 2, constraints: { - InclusiveMinimum: 2 + InclusiveMinimum: 2, }, serializedName: "minShingleSize", type: { - name: "Number" - } + name: "Number", + }, }, outputUnigrams: { defaultValue: true, serializedName: "outputUnigrams", type: { - name: "Boolean" - } + name: "Boolean", + }, }, outputUnigramsIfNoShingles: { defaultValue: false, serializedName: "outputUnigramsIfNoShingles", type: { - name: "Boolean" - } + name: "Boolean", + }, }, tokenSeparator: { defaultValue: " ", serializedName: "tokenSeparator", type: { - name: "String" - } + name: "String", + }, }, filterToken: { defaultValue: "_", serializedName: "filterToken", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SnowballTokenFilter: coreClient.CompositeMapper = { @@ -5565,12 +5720,12 @@ export const SnowballTokenFilter: coreClient.CompositeMapper = { "russian", "spanish", "swedish", - "turkish" - ] - } - } - } - } + "turkish", + ], + }, + }, + }, + }, }; export const StemmerTokenFilter: coreClient.CompositeMapper = { @@ -5641,12 +5796,12 @@ export const StemmerTokenFilter: coreClient.CompositeMapper = { "lightSpanish", "swedish", "lightSwedish", - "turkish" - ] - } - } - } - } + "turkish", + ], + }, + }, + }, + }, }; export const StemmerOverrideTokenFilter: 
coreClient.CompositeMapper = { @@ -5665,13 +5820,13 @@ export const StemmerOverrideTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const StopwordsTokenFilter: coreClient.CompositeMapper = { @@ -5689,10 +5844,10 @@ export const StopwordsTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, stopwordsList: { serializedName: "stopwordsList", @@ -5729,26 +5884,26 @@ export const StopwordsTokenFilter: coreClient.CompositeMapper = { "spanish", "swedish", "thai", - "turkish" - ] - } + "turkish", + ], + }, }, ignoreCase: { defaultValue: false, serializedName: "ignoreCase", type: { - name: "Boolean" - } + name: "Boolean", + }, }, removeTrailingStopWords: { defaultValue: true, serializedName: "removeTrailing", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const SynonymTokenFilter: coreClient.CompositeMapper = { @@ -5767,27 +5922,27 @@ export const SynonymTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, ignoreCase: { defaultValue: false, serializedName: "ignoreCase", type: { - name: "Boolean" - } + name: "Boolean", + }, }, expand: { defaultValue: true, serializedName: "expand", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const TruncateTokenFilter: coreClient.CompositeMapper = { @@ -5802,15 +5957,15 @@ export const TruncateTokenFilter: coreClient.CompositeMapper = { length: { defaultValue: 300, constraints: { - InclusiveMaximum: 300 + InclusiveMaximum: 300, }, serializedName: "length", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const UniqueTokenFilter: coreClient.CompositeMapper = { @@ -5826,11 +5981,11 @@ export const 
UniqueTokenFilter: coreClient.CompositeMapper = { defaultValue: false, serializedName: "onlyOnSamePosition", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const WordDelimiterTokenFilter: coreClient.CompositeMapper = { @@ -5846,64 +6001,64 @@ export const WordDelimiterTokenFilter: coreClient.CompositeMapper = { defaultValue: true, serializedName: "generateWordParts", type: { - name: "Boolean" - } + name: "Boolean", + }, }, generateNumberParts: { defaultValue: true, serializedName: "generateNumberParts", type: { - name: "Boolean" - } + name: "Boolean", + }, }, catenateWords: { defaultValue: false, serializedName: "catenateWords", type: { - name: "Boolean" - } + name: "Boolean", + }, }, catenateNumbers: { defaultValue: false, serializedName: "catenateNumbers", type: { - name: "Boolean" - } + name: "Boolean", + }, }, catenateAll: { defaultValue: false, serializedName: "catenateAll", type: { - name: "Boolean" - } + name: "Boolean", + }, }, splitOnCaseChange: { defaultValue: true, serializedName: "splitOnCaseChange", type: { - name: "Boolean" - } + name: "Boolean", + }, }, preserveOriginal: { defaultValue: false, serializedName: "preserveOriginal", type: { - name: "Boolean" - } + name: "Boolean", + }, }, splitOnNumerics: { defaultValue: true, serializedName: "splitOnNumerics", type: { - name: "Boolean" - } + name: "Boolean", + }, }, stemEnglishPossessive: { defaultValue: true, serializedName: "stemEnglishPossessive", type: { - name: "Boolean" - } + name: "Boolean", + }, }, protectedWords: { serializedName: "protectedWords", @@ -5911,13 +6066,13 @@ export const WordDelimiterTokenFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const MappingCharFilter: coreClient.CompositeMapper = { @@ -5936,13 +6091,13 @@ export const MappingCharFilter: coreClient.CompositeMapper = { name: "Sequence", element: { type: { 
- name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const PatternReplaceCharFilter: coreClient.CompositeMapper = { @@ -5958,18 +6113,18 @@ export const PatternReplaceCharFilter: coreClient.CompositeMapper = { serializedName: "pattern", required: true, type: { - name: "String" - } + name: "String", + }, }, replacement: { serializedName: "replacement", required: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const CustomNormalizer: coreClient.CompositeMapper = { @@ -5987,10 +6142,10 @@ export const CustomNormalizer: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, charFilters: { serializedName: "charFilters", @@ -5998,13 +6153,13 @@ export const CustomNormalizer: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const ClassicSimilarity: coreClient.CompositeMapper = { @@ -6015,9 +6170,9 @@ export const ClassicSimilarity: coreClient.CompositeMapper = { uberParent: "Similarity", polymorphicDiscriminator: Similarity.type.polymorphicDiscriminator, modelProperties: { - ...Similarity.type.modelProperties - } - } + ...Similarity.type.modelProperties, + }, + }, }; export const BM25Similarity: coreClient.CompositeMapper = { @@ -6033,25 +6188,25 @@ export const BM25Similarity: coreClient.CompositeMapper = { serializedName: "k1", nullable: true, type: { - name: "Number" - } + name: "Number", + }, }, b: { serializedName: "b", nullable: true, type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; -export const HnswVectorSearchAlgorithmConfiguration: coreClient.CompositeMapper = { +export const HnswAlgorithmConfiguration: coreClient.CompositeMapper = { serializedName: "hnsw", type: { name: "Composite", - className: "HnswVectorSearchAlgorithmConfiguration", + 
className: "HnswAlgorithmConfiguration", uberParent: "VectorSearchAlgorithmConfiguration", polymorphicDiscriminator: VectorSearchAlgorithmConfiguration.type.polymorphicDiscriminator, @@ -6061,18 +6216,18 @@ export const HnswVectorSearchAlgorithmConfiguration: coreClient.CompositeMapper serializedName: "hnswParameters", type: { name: "Composite", - className: "HnswParameters" - } - } - } - } + className: "HnswParameters", + }, + }, + }, + }, }; -export const ExhaustiveKnnVectorSearchAlgorithmConfiguration: coreClient.CompositeMapper = { +export const ExhaustiveKnnAlgorithmConfiguration: coreClient.CompositeMapper = { serializedName: "exhaustiveKnn", type: { name: "Composite", - className: "ExhaustiveKnnVectorSearchAlgorithmConfiguration", + className: "ExhaustiveKnnAlgorithmConfiguration", uberParent: "VectorSearchAlgorithmConfiguration", polymorphicDiscriminator: VectorSearchAlgorithmConfiguration.type.polymorphicDiscriminator, @@ -6082,11 +6237,11 @@ export const ExhaustiveKnnVectorSearchAlgorithmConfiguration: coreClient.Composi serializedName: "exhaustiveKnnParameters", type: { name: "Composite", - className: "ExhaustiveKnnParameters" - } - } - } - } + className: "ExhaustiveKnnParameters", + }, + }, + }, + }, }; export const AzureOpenAIVectorizer: coreClient.CompositeMapper = { @@ -6103,11 +6258,11 @@ export const AzureOpenAIVectorizer: coreClient.CompositeMapper = { serializedName: "azureOpenAIParameters", type: { name: "Composite", - className: "AzureOpenAIParameters" - } - } - } - } + className: "AzureOpenAIParameters", + }, + }, + }, + }, }; export const CustomVectorizer: coreClient.CompositeMapper = { @@ -6120,36 +6275,62 @@ export const CustomVectorizer: coreClient.CompositeMapper = { VectorSearchVectorizer.type.polymorphicDiscriminator, modelProperties: { ...VectorSearchVectorizer.type.modelProperties, - customVectorizerParameters: { - serializedName: "customVectorizerParameters", + customWebApiParameters: { + serializedName: "customWebApiParameters", 
type: { name: "Composite", - className: "CustomVectorizerParameters" - } - } - } - } -}; + className: "CustomWebApiParameters", + }, + }, + }, + }, +}; + +export const ScalarQuantizationCompressionConfiguration: coreClient.CompositeMapper = + { + serializedName: "scalarQuantization", + type: { + name: "Composite", + className: "ScalarQuantizationCompressionConfiguration", + uberParent: "BaseVectorSearchCompressionConfiguration", + polymorphicDiscriminator: + BaseVectorSearchCompressionConfiguration.type.polymorphicDiscriminator, + modelProperties: { + ...BaseVectorSearchCompressionConfiguration.type.modelProperties, + parameters: { + serializedName: "scalarQuantizationParameters", + type: { + name: "Composite", + className: "ScalarQuantizationParameters", + }, + }, + }, + }, + }; -export const SearchIndexerKnowledgeStoreObjectProjectionSelector: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreObjectProjectionSelector", - modelProperties: { - ...SearchIndexerKnowledgeStoreBlobProjectionSelector.type.modelProperties - } - } -}; +export const SearchIndexerKnowledgeStoreObjectProjectionSelector: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreObjectProjectionSelector", + modelProperties: { + ...SearchIndexerKnowledgeStoreBlobProjectionSelector.type + .modelProperties, + }, + }, + }; -export const SearchIndexerKnowledgeStoreFileProjectionSelector: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SearchIndexerKnowledgeStoreFileProjectionSelector", - modelProperties: { - ...SearchIndexerKnowledgeStoreBlobProjectionSelector.type.modelProperties - } - } -}; +export const SearchIndexerKnowledgeStoreFileProjectionSelector: coreClient.CompositeMapper = + { + type: { + name: "Composite", + className: "SearchIndexerKnowledgeStoreFileProjectionSelector", + modelProperties: { + ...SearchIndexerKnowledgeStoreBlobProjectionSelector.type + 
.modelProperties, + }, + }, + }; export let discriminators = { SearchIndexerDataIdentity: SearchIndexerDataIdentity, @@ -6166,86 +6347,139 @@ export let discriminators = { Similarity: Similarity, VectorSearchAlgorithmConfiguration: VectorSearchAlgorithmConfiguration, VectorSearchVectorizer: VectorSearchVectorizer, - "SearchIndexerDataIdentity.#Microsoft.Azure.Search.DataNoneIdentity": SearchIndexerDataNoneIdentity, - "SearchIndexerDataIdentity.#Microsoft.Azure.Search.DataUserAssignedIdentity": SearchIndexerDataUserAssignedIdentity, - "DataChangeDetectionPolicy.#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy": HighWaterMarkChangeDetectionPolicy, - "DataChangeDetectionPolicy.#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy": SqlIntegratedChangeTrackingPolicy, - "DataDeletionDetectionPolicy.#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy": SoftDeleteColumnDeletionDetectionPolicy, - "DataDeletionDetectionPolicy.#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy": NativeBlobSoftDeleteDeletionDetectionPolicy, - "SearchIndexerSkill.#Microsoft.Skills.Util.ConditionalSkill": ConditionalSkill, - "SearchIndexerSkill.#Microsoft.Skills.Text.KeyPhraseExtractionSkill": KeyPhraseExtractionSkill, + BaseVectorSearchCompressionConfiguration: + BaseVectorSearchCompressionConfiguration, + "SearchIndexerDataIdentity.#Microsoft.Azure.Search.DataNoneIdentity": + SearchIndexerDataNoneIdentity, + "SearchIndexerDataIdentity.#Microsoft.Azure.Search.DataUserAssignedIdentity": + SearchIndexerDataUserAssignedIdentity, + "DataChangeDetectionPolicy.#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy": + HighWaterMarkChangeDetectionPolicy, + "DataChangeDetectionPolicy.#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy": + SqlIntegratedChangeTrackingPolicy, + "DataDeletionDetectionPolicy.#Microsoft.Azure.Search.SoftDeleteColumnDeletionDetectionPolicy": + SoftDeleteColumnDeletionDetectionPolicy, + 
"DataDeletionDetectionPolicy.#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy": + NativeBlobSoftDeleteDeletionDetectionPolicy, + "SearchIndexerSkill.#Microsoft.Skills.Util.ConditionalSkill": + ConditionalSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.KeyPhraseExtractionSkill": + KeyPhraseExtractionSkill, "SearchIndexerSkill.#Microsoft.Skills.Vision.OcrSkill": OcrSkill, - "SearchIndexerSkill.#Microsoft.Skills.Vision.ImageAnalysisSkill": ImageAnalysisSkill, - "SearchIndexerSkill.#Microsoft.Skills.Text.LanguageDetectionSkill": LanguageDetectionSkill, + "SearchIndexerSkill.#Microsoft.Skills.Vision.ImageAnalysisSkill": + ImageAnalysisSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.LanguageDetectionSkill": + LanguageDetectionSkill, "SearchIndexerSkill.#Microsoft.Skills.Util.ShaperSkill": ShaperSkill, "SearchIndexerSkill.#Microsoft.Skills.Text.MergeSkill": MergeSkill, - "SearchIndexerSkill.#Microsoft.Skills.Text.EntityRecognitionSkill": EntityRecognitionSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.EntityRecognitionSkill": + EntityRecognitionSkill, "SearchIndexerSkill.#Microsoft.Skills.Text.SentimentSkill": SentimentSkill, - "SearchIndexerSkill.#Microsoft.Skills.Text.V3.SentimentSkill": SentimentSkillV3, - "SearchIndexerSkill.#Microsoft.Skills.Text.V3.EntityLinkingSkill": EntityLinkingSkill, - "SearchIndexerSkill.#Microsoft.Skills.Text.V3.EntityRecognitionSkill": EntityRecognitionSkillV3, - "SearchIndexerSkill.#Microsoft.Skills.Text.PIIDetectionSkill": PIIDetectionSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.V3.SentimentSkill": + SentimentSkillV3, + "SearchIndexerSkill.#Microsoft.Skills.Text.V3.EntityLinkingSkill": + EntityLinkingSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.V3.EntityRecognitionSkill": + EntityRecognitionSkillV3, + "SearchIndexerSkill.#Microsoft.Skills.Text.PIIDetectionSkill": + PIIDetectionSkill, "SearchIndexerSkill.#Microsoft.Skills.Text.SplitSkill": SplitSkill, - 
"SearchIndexerSkill.#Microsoft.Skills.Text.CustomEntityLookupSkill": CustomEntityLookupSkill, - "SearchIndexerSkill.#Microsoft.Skills.Text.TranslationSkill": TextTranslationSkill, - "SearchIndexerSkill.#Microsoft.Skills.Util.DocumentExtractionSkill": DocumentExtractionSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.CustomEntityLookupSkill": + CustomEntityLookupSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.TranslationSkill": + TextTranslationSkill, + "SearchIndexerSkill.#Microsoft.Skills.Util.DocumentExtractionSkill": + DocumentExtractionSkill, "SearchIndexerSkill.#Microsoft.Skills.Custom.WebApiSkill": WebApiSkill, - "SearchIndexerSkill.#Microsoft.Skills.Custom.AmlSkill": AzureMachineLearningSkill, - "SearchIndexerSkill.#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill": AzureOpenAIEmbeddingSkill, - "CognitiveServicesAccount.#Microsoft.Azure.Search.DefaultCognitiveServices": DefaultCognitiveServicesAccount, - "CognitiveServicesAccount.#Microsoft.Azure.Search.CognitiveServicesByKey": CognitiveServicesAccountKey, + "SearchIndexerSkill.#Microsoft.Skills.Custom.AmlSkill": + AzureMachineLearningSkill, + "SearchIndexerSkill.#Microsoft.Skills.Text.AzureOpenAIEmbeddingSkill": + AzureOpenAIEmbeddingSkill, + "CognitiveServicesAccount.#Microsoft.Azure.Search.DefaultCognitiveServices": + DefaultCognitiveServicesAccount, + "CognitiveServicesAccount.#Microsoft.Azure.Search.CognitiveServicesByKey": + CognitiveServicesAccountKey, "ScoringFunction.distance": DistanceScoringFunction, "ScoringFunction.freshness": FreshnessScoringFunction, "ScoringFunction.magnitude": MagnitudeScoringFunction, "ScoringFunction.tag": TagScoringFunction, "LexicalAnalyzer.#Microsoft.Azure.Search.CustomAnalyzer": CustomAnalyzer, "LexicalAnalyzer.#Microsoft.Azure.Search.PatternAnalyzer": PatternAnalyzer, - "LexicalAnalyzer.#Microsoft.Azure.Search.StandardAnalyzer": LuceneStandardAnalyzer, + "LexicalAnalyzer.#Microsoft.Azure.Search.StandardAnalyzer": + LuceneStandardAnalyzer, 
"LexicalAnalyzer.#Microsoft.Azure.Search.StopAnalyzer": StopAnalyzer, "LexicalTokenizer.#Microsoft.Azure.Search.ClassicTokenizer": ClassicTokenizer, - "LexicalTokenizer.#Microsoft.Azure.Search.EdgeNGramTokenizer": EdgeNGramTokenizer, + "LexicalTokenizer.#Microsoft.Azure.Search.EdgeNGramTokenizer": + EdgeNGramTokenizer, "LexicalTokenizer.#Microsoft.Azure.Search.KeywordTokenizer": KeywordTokenizer, - "LexicalTokenizer.#Microsoft.Azure.Search.KeywordTokenizerV2": KeywordTokenizerV2, - "LexicalTokenizer.#Microsoft.Azure.Search.MicrosoftLanguageTokenizer": MicrosoftLanguageTokenizer, - "LexicalTokenizer.#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer": MicrosoftLanguageStemmingTokenizer, + "LexicalTokenizer.#Microsoft.Azure.Search.KeywordTokenizerV2": + KeywordTokenizerV2, + "LexicalTokenizer.#Microsoft.Azure.Search.MicrosoftLanguageTokenizer": + MicrosoftLanguageTokenizer, + "LexicalTokenizer.#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer": + MicrosoftLanguageStemmingTokenizer, "LexicalTokenizer.#Microsoft.Azure.Search.NGramTokenizer": NGramTokenizer, - "LexicalTokenizer.#Microsoft.Azure.Search.PathHierarchyTokenizerV2": PathHierarchyTokenizerV2, + "LexicalTokenizer.#Microsoft.Azure.Search.PathHierarchyTokenizerV2": + PathHierarchyTokenizerV2, "LexicalTokenizer.#Microsoft.Azure.Search.PatternTokenizer": PatternTokenizer, - "LexicalTokenizer.#Microsoft.Azure.Search.StandardTokenizer": LuceneStandardTokenizer, - "LexicalTokenizer.#Microsoft.Azure.Search.StandardTokenizerV2": LuceneStandardTokenizerV2, - "LexicalTokenizer.#Microsoft.Azure.Search.UaxUrlEmailTokenizer": UaxUrlEmailTokenizer, - "TokenFilter.#Microsoft.Azure.Search.AsciiFoldingTokenFilter": AsciiFoldingTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.CjkBigramTokenFilter": CjkBigramTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.CommonGramTokenFilter": CommonGramTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter": 
DictionaryDecompounderTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.EdgeNGramTokenFilter": EdgeNGramTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.EdgeNGramTokenFilterV2": EdgeNGramTokenFilterV2, + "LexicalTokenizer.#Microsoft.Azure.Search.StandardTokenizer": + LuceneStandardTokenizer, + "LexicalTokenizer.#Microsoft.Azure.Search.StandardTokenizerV2": + LuceneStandardTokenizerV2, + "LexicalTokenizer.#Microsoft.Azure.Search.UaxUrlEmailTokenizer": + UaxUrlEmailTokenizer, + "TokenFilter.#Microsoft.Azure.Search.AsciiFoldingTokenFilter": + AsciiFoldingTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.CjkBigramTokenFilter": + CjkBigramTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.CommonGramTokenFilter": + CommonGramTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.DictionaryDecompounderTokenFilter": + DictionaryDecompounderTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.EdgeNGramTokenFilter": + EdgeNGramTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.EdgeNGramTokenFilterV2": + EdgeNGramTokenFilterV2, "TokenFilter.#Microsoft.Azure.Search.ElisionTokenFilter": ElisionTokenFilter, "TokenFilter.#Microsoft.Azure.Search.KeepTokenFilter": KeepTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.KeywordMarkerTokenFilter": KeywordMarkerTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.KeywordMarkerTokenFilter": + KeywordMarkerTokenFilter, "TokenFilter.#Microsoft.Azure.Search.LengthTokenFilter": LengthTokenFilter, "TokenFilter.#Microsoft.Azure.Search.LimitTokenFilter": LimitTokenFilter, "TokenFilter.#Microsoft.Azure.Search.NGramTokenFilter": NGramTokenFilter, "TokenFilter.#Microsoft.Azure.Search.NGramTokenFilterV2": NGramTokenFilterV2, - "TokenFilter.#Microsoft.Azure.Search.PatternCaptureTokenFilter": PatternCaptureTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.PatternReplaceTokenFilter": PatternReplaceTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.PhoneticTokenFilter": PhoneticTokenFilter, + 
"TokenFilter.#Microsoft.Azure.Search.PatternCaptureTokenFilter": + PatternCaptureTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.PatternReplaceTokenFilter": + PatternReplaceTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.PhoneticTokenFilter": + PhoneticTokenFilter, "TokenFilter.#Microsoft.Azure.Search.ShingleTokenFilter": ShingleTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.SnowballTokenFilter": SnowballTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.SnowballTokenFilter": + SnowballTokenFilter, "TokenFilter.#Microsoft.Azure.Search.StemmerTokenFilter": StemmerTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.StemmerOverrideTokenFilter": StemmerOverrideTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.StopwordsTokenFilter": StopwordsTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.StemmerOverrideTokenFilter": + StemmerOverrideTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.StopwordsTokenFilter": + StopwordsTokenFilter, "TokenFilter.#Microsoft.Azure.Search.SynonymTokenFilter": SynonymTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.TruncateTokenFilter": TruncateTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.TruncateTokenFilter": + TruncateTokenFilter, "TokenFilter.#Microsoft.Azure.Search.UniqueTokenFilter": UniqueTokenFilter, - "TokenFilter.#Microsoft.Azure.Search.WordDelimiterTokenFilter": WordDelimiterTokenFilter, + "TokenFilter.#Microsoft.Azure.Search.WordDelimiterTokenFilter": + WordDelimiterTokenFilter, "CharFilter.#Microsoft.Azure.Search.MappingCharFilter": MappingCharFilter, - "CharFilter.#Microsoft.Azure.Search.PatternReplaceCharFilter": PatternReplaceCharFilter, - "LexicalNormalizer.#Microsoft.Azure.Search.CustomNormalizer": CustomNormalizer, + "CharFilter.#Microsoft.Azure.Search.PatternReplaceCharFilter": + PatternReplaceCharFilter, + "LexicalNormalizer.#Microsoft.Azure.Search.CustomNormalizer": + CustomNormalizer, "Similarity.#Microsoft.Azure.Search.ClassicSimilarity": ClassicSimilarity, 
"Similarity.#Microsoft.Azure.Search.BM25Similarity": BM25Similarity, - "VectorSearchAlgorithmConfiguration.hnsw": HnswVectorSearchAlgorithmConfiguration, - "VectorSearchAlgorithmConfiguration.exhaustiveKnn": ExhaustiveKnnVectorSearchAlgorithmConfiguration, + "VectorSearchAlgorithmConfiguration.hnsw": HnswAlgorithmConfiguration, + "VectorSearchAlgorithmConfiguration.exhaustiveKnn": + ExhaustiveKnnAlgorithmConfiguration, "VectorSearchVectorizer.azureOpenAI": AzureOpenAIVectorizer, - "VectorSearchVectorizer.customWebApi": CustomVectorizer + "VectorSearchVectorizer.customWebApi": CustomVectorizer, + "BaseVectorSearchCompressionConfiguration.scalarQuantization": + ScalarQuantizationCompressionConfiguration, }; diff --git a/sdk/search/search-documents/src/generated/service/models/parameters.ts b/sdk/search/search-documents/src/generated/service/models/parameters.ts index 0b86642a816d..6e88bb7c4e73 100644 --- a/sdk/search/search-documents/src/generated/service/models/parameters.ts +++ b/sdk/search/search-documents/src/generated/service/models/parameters.ts @@ -9,7 +9,7 @@ import { OperationParameter, OperationURLParameter, - OperationQueryParameter + OperationQueryParameter, } from "@azure/core-client"; import { SearchIndexerDataSource as SearchIndexerDataSourceMapper, @@ -20,7 +20,7 @@ import { SynonymMap as SynonymMapMapper, SearchIndex as SearchIndexMapper, AnalyzeRequest as AnalyzeRequestMapper, - SearchAlias as SearchAliasMapper + SearchAlias as SearchAliasMapper, } from "../models/mappers"; export const contentType: OperationParameter = { @@ -30,14 +30,14 @@ export const contentType: OperationParameter = { isConstant: true, serializedName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const dataSource: OperationParameter = { parameterPath: "dataSource", - mapper: SearchIndexerDataSourceMapper + mapper: SearchIndexerDataSourceMapper, }; export const accept: OperationParameter = { @@ -47,9 +47,9 @@ export const accept: 
OperationParameter = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const endpoint: OperationURLParameter = { @@ -58,10 +58,10 @@ export const endpoint: OperationURLParameter = { serializedName: "endpoint", required: true, type: { - name: "String" - } + name: "String", + }, }, - skipEncoding: true + skipEncoding: true, }; export const dataSourceName: OperationURLParameter = { @@ -70,9 +70,9 @@ export const dataSourceName: OperationURLParameter = { serializedName: "dataSourceName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const ifMatch: OperationParameter = { @@ -80,9 +80,9 @@ export const ifMatch: OperationParameter = { mapper: { serializedName: "If-Match", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const ifNoneMatch: OperationParameter = { @@ -90,9 +90,9 @@ export const ifNoneMatch: OperationParameter = { mapper: { serializedName: "If-None-Match", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const prefer: OperationParameter = { @@ -102,9 +102,9 @@ export const prefer: OperationParameter = { isConstant: true, serializedName: "Prefer", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const apiVersion: OperationQueryParameter = { @@ -113,9 +113,9 @@ export const apiVersion: OperationQueryParameter = { serializedName: "api-version", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const skipIndexerResetRequirementForCache: OperationQueryParameter = { @@ -123,9 +123,9 @@ export const skipIndexerResetRequirementForCache: OperationQueryParameter = { mapper: { serializedName: "ignoreResetRequirements", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const select: OperationQueryParameter = { @@ -133,9 +133,9 @@ export const select: OperationQueryParameter = { mapper: { serializedName: "$select", type: { - name: "String" - 
} - } + name: "String", + }, + }, }; export const indexerName: OperationURLParameter = { @@ -144,14 +144,14 @@ export const indexerName: OperationURLParameter = { serializedName: "indexerName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const keysOrIds: OperationParameter = { parameterPath: ["options", "keysOrIds"], - mapper: DocumentKeysOrIdsMapper + mapper: DocumentKeysOrIdsMapper, }; export const overwrite: OperationQueryParameter = { @@ -160,29 +160,30 @@ export const overwrite: OperationQueryParameter = { defaultValue: false, serializedName: "overwrite", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const indexer: OperationParameter = { parameterPath: "indexer", - mapper: SearchIndexerMapper + mapper: SearchIndexerMapper, }; -export const disableCacheReprocessingChangeDetection: OperationQueryParameter = { - parameterPath: ["options", "disableCacheReprocessingChangeDetection"], - mapper: { - serializedName: "disableCacheReprocessingChangeDetection", - type: { - name: "Boolean" - } - } -}; +export const disableCacheReprocessingChangeDetection: OperationQueryParameter = + { + parameterPath: ["options", "disableCacheReprocessingChangeDetection"], + mapper: { + serializedName: "disableCacheReprocessingChangeDetection", + type: { + name: "Boolean", + }, + }, + }; export const skillset: OperationParameter = { parameterPath: "skillset", - mapper: SearchIndexerSkillsetMapper + mapper: SearchIndexerSkillsetMapper, }; export const skillsetName: OperationURLParameter = { @@ -191,19 +192,19 @@ export const skillsetName: OperationURLParameter = { serializedName: "skillsetName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const skillNames: OperationParameter = { parameterPath: "skillNames", - mapper: SkillNamesMapper + mapper: SkillNamesMapper, }; export const synonymMap: OperationParameter = { parameterPath: "synonymMap", - mapper: SynonymMapMapper + mapper: 
SynonymMapMapper, }; export const synonymMapName: OperationURLParameter = { @@ -212,14 +213,14 @@ export const synonymMapName: OperationURLParameter = { serializedName: "synonymMapName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const index: OperationParameter = { parameterPath: "index", - mapper: SearchIndexMapper + mapper: SearchIndexMapper, }; export const indexName: OperationURLParameter = { @@ -228,9 +229,9 @@ export const indexName: OperationURLParameter = { serializedName: "indexName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; export const allowIndexDowntime: OperationQueryParameter = { @@ -238,19 +239,19 @@ export const allowIndexDowntime: OperationQueryParameter = { mapper: { serializedName: "allowIndexDowntime", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const request: OperationParameter = { parameterPath: "request", - mapper: AnalyzeRequestMapper + mapper: AnalyzeRequestMapper, }; export const alias: OperationParameter = { parameterPath: "alias", - mapper: SearchAliasMapper + mapper: SearchAliasMapper, }; export const aliasName: OperationURLParameter = { @@ -259,7 +260,7 @@ export const aliasName: OperationURLParameter = { serializedName: "aliasName", required: true, type: { - name: "String" - } - } + name: "String", + }, + }, }; diff --git a/sdk/search/search-documents/src/generated/service/operations/aliases.ts b/sdk/search/search-documents/src/generated/service/operations/aliases.ts index f57ba73176cf..cd858260b466 100644 --- a/sdk/search/search-documents/src/generated/service/operations/aliases.ts +++ b/sdk/search/search-documents/src/generated/service/operations/aliases.ts @@ -21,7 +21,7 @@ import { AliasesCreateOrUpdateResponse, AliasesDeleteOptionalParams, AliasesGetOptionalParams, - AliasesGetResponse + AliasesGetResponse, } from "../models"; /** Class containing Aliases operations. 
*/ @@ -43,11 +43,11 @@ export class AliasesImpl implements Aliases { */ create( alias: SearchAlias, - options?: AliasesCreateOptionalParams + options?: AliasesCreateOptionalParams, ): Promise { return this.client.sendOperationRequest( { alias, options }, - createOperationSpec + createOperationSpec, ); } @@ -68,11 +68,11 @@ export class AliasesImpl implements Aliases { createOrUpdate( aliasName: string, alias: SearchAlias, - options?: AliasesCreateOrUpdateOptionalParams + options?: AliasesCreateOrUpdateOptionalParams, ): Promise { return this.client.sendOperationRequest( { aliasName, alias, options }, - createOrUpdateOperationSpec + createOrUpdateOperationSpec, ); } @@ -84,11 +84,11 @@ export class AliasesImpl implements Aliases { */ delete( aliasName: string, - options?: AliasesDeleteOptionalParams + options?: AliasesDeleteOptionalParams, ): Promise { return this.client.sendOperationRequest( { aliasName, options }, - deleteOperationSpec + deleteOperationSpec, ); } @@ -99,11 +99,11 @@ export class AliasesImpl implements Aliases { */ get( aliasName: string, - options?: AliasesGetOptionalParams + options?: AliasesGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { aliasName, options }, - getOperationSpec + getOperationSpec, ); } } @@ -115,48 +115,48 @@ const createOperationSpec: coreClient.OperationSpec = { httpMethod: "POST", responses: { 201: { - bodyMapper: Mappers.SearchAlias + bodyMapper: Mappers.SearchAlias, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.alias, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; const listOperationSpec: coreClient.OperationSpec = { path: "/aliases", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.ListAliasesResult + bodyMapper: Mappers.ListAliasesResult, }, default: { - bodyMapper: 
Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], - serializer + serializer, }; const createOrUpdateOperationSpec: coreClient.OperationSpec = { path: "/aliases('{aliasName}')", httpMethod: "PUT", responses: { 200: { - bodyMapper: Mappers.SearchAlias + bodyMapper: Mappers.SearchAlias, }, 201: { - bodyMapper: Mappers.SearchAlias + bodyMapper: Mappers.SearchAlias, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.alias, queryParameters: [Parameters.apiVersion], @@ -166,10 +166,10 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { Parameters.accept, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.prefer + Parameters.prefer, ], mediaType: "json", - serializer + serializer, }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/aliases('{aliasName}')", @@ -178,31 +178,31 @@ const deleteOperationSpec: coreClient.OperationSpec = { 204: {}, 404: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.aliasName], headerParameters: [ Parameters.accept, Parameters.ifMatch, - Parameters.ifNoneMatch + Parameters.ifNoneMatch, ], - serializer + serializer, }; const getOperationSpec: coreClient.OperationSpec = { path: "/aliases('{aliasName}')", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SearchAlias + bodyMapper: Mappers.SearchAlias, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.aliasName], headerParameters: [Parameters.accept], - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generated/service/operations/dataSources.ts 
b/sdk/search/search-documents/src/generated/service/operations/dataSources.ts index 40e1c9531a6c..fb0129f8b837 100644 --- a/sdk/search/search-documents/src/generated/service/operations/dataSources.ts +++ b/sdk/search/search-documents/src/generated/service/operations/dataSources.ts @@ -21,7 +21,7 @@ import { DataSourcesListOptionalParams, DataSourcesListResponse, DataSourcesCreateOptionalParams, - DataSourcesCreateResponse + DataSourcesCreateResponse, } from "../models"; /** Class containing DataSources operations. */ @@ -45,11 +45,11 @@ export class DataSourcesImpl implements DataSources { createOrUpdate( dataSourceName: string, dataSource: SearchIndexerDataSource, - options?: DataSourcesCreateOrUpdateOptionalParams + options?: DataSourcesCreateOrUpdateOptionalParams, ): Promise { return this.client.sendOperationRequest( { dataSourceName, dataSource, options }, - createOrUpdateOperationSpec + createOrUpdateOperationSpec, ); } @@ -60,11 +60,11 @@ export class DataSourcesImpl implements DataSources { */ delete( dataSourceName: string, - options?: DataSourcesDeleteOptionalParams + options?: DataSourcesDeleteOptionalParams, ): Promise { return this.client.sendOperationRequest( { dataSourceName, options }, - deleteOperationSpec + deleteOperationSpec, ); } @@ -75,11 +75,11 @@ export class DataSourcesImpl implements DataSources { */ get( dataSourceName: string, - options?: DataSourcesGetOptionalParams + options?: DataSourcesGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { dataSourceName, options }, - getOperationSpec + getOperationSpec, ); } @@ -88,7 +88,7 @@ export class DataSourcesImpl implements DataSources { * @param options The options parameters. 
*/ list( - options?: DataSourcesListOptionalParams + options?: DataSourcesListOptionalParams, ): Promise { return this.client.sendOperationRequest({ options }, listOperationSpec); } @@ -100,11 +100,11 @@ export class DataSourcesImpl implements DataSources { */ create( dataSource: SearchIndexerDataSource, - options?: DataSourcesCreateOptionalParams + options?: DataSourcesCreateOptionalParams, ): Promise { return this.client.sendOperationRequest( { dataSource, options }, - createOperationSpec + createOperationSpec, ); } } @@ -116,19 +116,19 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { httpMethod: "PUT", responses: { 200: { - bodyMapper: Mappers.SearchIndexerDataSource + bodyMapper: Mappers.SearchIndexerDataSource, }, 201: { - bodyMapper: Mappers.SearchIndexerDataSource + bodyMapper: Mappers.SearchIndexerDataSource, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.dataSource, queryParameters: [ Parameters.apiVersion, - Parameters.skipIndexerResetRequirementForCache + Parameters.skipIndexerResetRequirementForCache, ], urlParameters: [Parameters.endpoint, Parameters.dataSourceName], headerParameters: [ @@ -136,10 +136,10 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { Parameters.accept, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.prefer + Parameters.prefer, ], mediaType: "json", - serializer + serializer, }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/datasources('{dataSourceName}')", @@ -148,65 +148,65 @@ const deleteOperationSpec: coreClient.OperationSpec = { 204: {}, 404: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataSourceName], headerParameters: [ Parameters.accept, Parameters.ifMatch, - Parameters.ifNoneMatch + Parameters.ifNoneMatch, ], - serializer + serializer, }; const 
getOperationSpec: coreClient.OperationSpec = { path: "/datasources('{dataSourceName}')", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SearchIndexerDataSource + bodyMapper: Mappers.SearchIndexerDataSource, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataSourceName], headerParameters: [Parameters.accept], - serializer + serializer, }; const listOperationSpec: coreClient.OperationSpec = { path: "/datasources", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.ListDataSourcesResult + bodyMapper: Mappers.ListDataSourcesResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion, Parameters.select], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], - serializer + serializer, }; const createOperationSpec: coreClient.OperationSpec = { path: "/datasources", httpMethod: "POST", responses: { 201: { - bodyMapper: Mappers.SearchIndexerDataSource + bodyMapper: Mappers.SearchIndexerDataSource, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.dataSource, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generated/service/operations/indexers.ts b/sdk/search/search-documents/src/generated/service/operations/indexers.ts index 5a2dad5839d3..e8895a6dd85c 100644 --- a/sdk/search/search-documents/src/generated/service/operations/indexers.ts +++ b/sdk/search/search-documents/src/generated/service/operations/indexers.ts @@ -26,7 +26,7 @@ import { IndexersCreateOptionalParams, IndexersCreateResponse, IndexersGetStatusOptionalParams, - IndexersGetStatusResponse 
+ IndexersGetStatusResponse, } from "../models"; /** Class containing Indexers operations. */ @@ -48,11 +48,11 @@ export class IndexersImpl implements Indexers { */ reset( indexerName: string, - options?: IndexersResetOptionalParams + options?: IndexersResetOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexerName, options }, - resetOperationSpec + resetOperationSpec, ); } @@ -63,11 +63,11 @@ export class IndexersImpl implements Indexers { */ resetDocs( indexerName: string, - options?: IndexersResetDocsOptionalParams + options?: IndexersResetDocsOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexerName, options }, - resetDocsOperationSpec + resetDocsOperationSpec, ); } @@ -79,7 +79,7 @@ export class IndexersImpl implements Indexers { run(indexerName: string, options?: IndexersRunOptionalParams): Promise { return this.client.sendOperationRequest( { indexerName, options }, - runOperationSpec + runOperationSpec, ); } @@ -92,11 +92,11 @@ export class IndexersImpl implements Indexers { createOrUpdate( indexerName: string, indexer: SearchIndexer, - options?: IndexersCreateOrUpdateOptionalParams + options?: IndexersCreateOrUpdateOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexerName, indexer, options }, - createOrUpdateOperationSpec + createOrUpdateOperationSpec, ); } @@ -107,11 +107,11 @@ export class IndexersImpl implements Indexers { */ delete( indexerName: string, - options?: IndexersDeleteOptionalParams + options?: IndexersDeleteOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexerName, options }, - deleteOperationSpec + deleteOperationSpec, ); } @@ -122,11 +122,11 @@ export class IndexersImpl implements Indexers { */ get( indexerName: string, - options?: IndexersGetOptionalParams + options?: IndexersGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexerName, options }, - getOperationSpec + getOperationSpec, ); } @@ -145,11 +145,11 @@ 
export class IndexersImpl implements Indexers { */ create( indexer: SearchIndexer, - options?: IndexersCreateOptionalParams + options?: IndexersCreateOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexer, options }, - createOperationSpec + createOperationSpec, ); } @@ -160,11 +160,11 @@ export class IndexersImpl implements Indexers { */ getStatus( indexerName: string, - options?: IndexersGetStatusOptionalParams + options?: IndexersGetStatusOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexerName, options }, - getStatusOperationSpec + getStatusOperationSpec, ); } } @@ -177,13 +177,13 @@ const resetOperationSpec: coreClient.OperationSpec = { responses: { 204: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexerName], headerParameters: [Parameters.accept], - serializer + serializer, }; const resetDocsOperationSpec: coreClient.OperationSpec = { path: "/indexers('{indexerName}')/search.resetdocs", @@ -191,15 +191,15 @@ const resetDocsOperationSpec: coreClient.OperationSpec = { responses: { 204: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.keysOrIds, queryParameters: [Parameters.apiVersion, Parameters.overwrite], urlParameters: [Parameters.endpoint, Parameters.indexerName], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; const runOperationSpec: coreClient.OperationSpec = { path: "/indexers('{indexerName}')/search.run", @@ -207,33 +207,33 @@ const runOperationSpec: coreClient.OperationSpec = { responses: { 202: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexerName], headerParameters: 
[Parameters.accept], - serializer + serializer, }; const createOrUpdateOperationSpec: coreClient.OperationSpec = { path: "/indexers('{indexerName}')", httpMethod: "PUT", responses: { 200: { - bodyMapper: Mappers.SearchIndexer + bodyMapper: Mappers.SearchIndexer, }, 201: { - bodyMapper: Mappers.SearchIndexer + bodyMapper: Mappers.SearchIndexer, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.indexer, queryParameters: [ Parameters.apiVersion, Parameters.skipIndexerResetRequirementForCache, - Parameters.disableCacheReprocessingChangeDetection + Parameters.disableCacheReprocessingChangeDetection, ], urlParameters: [Parameters.endpoint, Parameters.indexerName], headerParameters: [ @@ -241,10 +241,10 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { Parameters.accept, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.prefer + Parameters.prefer, ], mediaType: "json", - serializer + serializer, }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/indexers('{indexerName}')", @@ -253,81 +253,81 @@ const deleteOperationSpec: coreClient.OperationSpec = { 204: {}, 404: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexerName], headerParameters: [ Parameters.accept, Parameters.ifMatch, - Parameters.ifNoneMatch + Parameters.ifNoneMatch, ], - serializer + serializer, }; const getOperationSpec: coreClient.OperationSpec = { path: "/indexers('{indexerName}')", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SearchIndexer + bodyMapper: Mappers.SearchIndexer, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexerName], headerParameters: [Parameters.accept], - serializer + serializer, }; 
const listOperationSpec: coreClient.OperationSpec = { path: "/indexers", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.ListIndexersResult + bodyMapper: Mappers.ListIndexersResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion, Parameters.select], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], - serializer + serializer, }; const createOperationSpec: coreClient.OperationSpec = { path: "/indexers", httpMethod: "POST", responses: { 201: { - bodyMapper: Mappers.SearchIndexer + bodyMapper: Mappers.SearchIndexer, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.indexer, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; const getStatusOperationSpec: coreClient.OperationSpec = { path: "/indexers('{indexerName}')/search.status", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SearchIndexerStatus + bodyMapper: Mappers.SearchIndexerStatus, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexerName], headerParameters: [Parameters.accept], - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generated/service/operations/indexes.ts b/sdk/search/search-documents/src/generated/service/operations/indexes.ts index c38c2ec54b2c..c456c969db12 100644 --- a/sdk/search/search-documents/src/generated/service/operations/indexes.ts +++ b/sdk/search/search-documents/src/generated/service/operations/indexes.ts @@ -26,7 +26,7 @@ import { IndexesGetStatisticsResponse, AnalyzeRequest, IndexesAnalyzeOptionalParams, - IndexesAnalyzeResponse + IndexesAnalyzeResponse, } from 
"../models"; /** Class containing Indexes operations. */ @@ -48,11 +48,11 @@ export class IndexesImpl implements Indexes { */ create( index: SearchIndex, - options?: IndexesCreateOptionalParams + options?: IndexesCreateOptionalParams, ): Promise { return this.client.sendOperationRequest( { index, options }, - createOperationSpec + createOperationSpec, ); } @@ -73,11 +73,11 @@ export class IndexesImpl implements Indexes { createOrUpdate( indexName: string, index: SearchIndex, - options?: IndexesCreateOrUpdateOptionalParams + options?: IndexesCreateOrUpdateOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexName, index, options }, - createOrUpdateOperationSpec + createOrUpdateOperationSpec, ); } @@ -90,11 +90,11 @@ export class IndexesImpl implements Indexes { */ delete( indexName: string, - options?: IndexesDeleteOptionalParams + options?: IndexesDeleteOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexName, options }, - deleteOperationSpec + deleteOperationSpec, ); } @@ -105,11 +105,11 @@ export class IndexesImpl implements Indexes { */ get( indexName: string, - options?: IndexesGetOptionalParams + options?: IndexesGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexName, options }, - getOperationSpec + getOperationSpec, ); } @@ -120,11 +120,11 @@ export class IndexesImpl implements Indexes { */ getStatistics( indexName: string, - options?: IndexesGetStatisticsOptionalParams + options?: IndexesGetStatisticsOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexName, options }, - getStatisticsOperationSpec + getStatisticsOperationSpec, ); } @@ -137,11 +137,11 @@ export class IndexesImpl implements Indexes { analyze( indexName: string, request: AnalyzeRequest, - options?: IndexesAnalyzeOptionalParams + options?: IndexesAnalyzeOptionalParams, ): Promise { return this.client.sendOperationRequest( { indexName, request, options }, - analyzeOperationSpec + 
analyzeOperationSpec, ); } } @@ -153,48 +153,48 @@ const createOperationSpec: coreClient.OperationSpec = { httpMethod: "POST", responses: { 201: { - bodyMapper: Mappers.SearchIndex + bodyMapper: Mappers.SearchIndex, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.index, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; const listOperationSpec: coreClient.OperationSpec = { path: "/indexes", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.ListIndexesResult + bodyMapper: Mappers.ListIndexesResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion, Parameters.select], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], - serializer + serializer, }; const createOrUpdateOperationSpec: coreClient.OperationSpec = { path: "/indexes('{indexName}')", httpMethod: "PUT", responses: { 200: { - bodyMapper: Mappers.SearchIndex + bodyMapper: Mappers.SearchIndex, }, 201: { - bodyMapper: Mappers.SearchIndex + bodyMapper: Mappers.SearchIndex, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.index, queryParameters: [Parameters.apiVersion, Parameters.allowIndexDowntime], @@ -204,10 +204,10 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { Parameters.accept, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.prefer + Parameters.prefer, ], mediaType: "json", - serializer + serializer, }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/indexes('{indexName}')", @@ -216,65 +216,65 @@ const deleteOperationSpec: coreClient.OperationSpec = { 204: {}, 404: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, 
queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [ Parameters.accept, Parameters.ifMatch, - Parameters.ifNoneMatch + Parameters.ifNoneMatch, ], - serializer + serializer, }; const getOperationSpec: coreClient.OperationSpec = { path: "/indexes('{indexName}')", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SearchIndex + bodyMapper: Mappers.SearchIndex, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept], - serializer + serializer, }; const getStatisticsOperationSpec: coreClient.OperationSpec = { path: "/indexes('{indexName}')/search.stats", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.GetIndexStatisticsResult + bodyMapper: Mappers.GetIndexStatisticsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.accept], - serializer + serializer, }; const analyzeOperationSpec: coreClient.OperationSpec = { path: "/indexes('{indexName}')/search.analyze", httpMethod: "POST", responses: { 200: { - bodyMapper: Mappers.AnalyzeResult + bodyMapper: Mappers.AnalyzeResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.request, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.indexName], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generated/service/operations/skillsets.ts b/sdk/search/search-documents/src/generated/service/operations/skillsets.ts index cf156dbb34d3..59a4347a6c09 100644 --- 
a/sdk/search/search-documents/src/generated/service/operations/skillsets.ts +++ b/sdk/search/search-documents/src/generated/service/operations/skillsets.ts @@ -23,7 +23,7 @@ import { SkillsetsCreateOptionalParams, SkillsetsCreateResponse, SkillNames, - SkillsetsResetSkillsOptionalParams + SkillsetsResetSkillsOptionalParams, } from "../models"; /** Class containing Skillsets operations. */ @@ -47,11 +47,11 @@ export class SkillsetsImpl implements Skillsets { createOrUpdate( skillsetName: string, skillset: SearchIndexerSkillset, - options?: SkillsetsCreateOrUpdateOptionalParams + options?: SkillsetsCreateOrUpdateOptionalParams, ): Promise { return this.client.sendOperationRequest( { skillsetName, skillset, options }, - createOrUpdateOperationSpec + createOrUpdateOperationSpec, ); } @@ -62,11 +62,11 @@ export class SkillsetsImpl implements Skillsets { */ delete( skillsetName: string, - options?: SkillsetsDeleteOptionalParams + options?: SkillsetsDeleteOptionalParams, ): Promise { return this.client.sendOperationRequest( { skillsetName, options }, - deleteOperationSpec + deleteOperationSpec, ); } @@ -77,11 +77,11 @@ export class SkillsetsImpl implements Skillsets { */ get( skillsetName: string, - options?: SkillsetsGetOptionalParams + options?: SkillsetsGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { skillsetName, options }, - getOperationSpec + getOperationSpec, ); } @@ -100,11 +100,11 @@ export class SkillsetsImpl implements Skillsets { */ create( skillset: SearchIndexerSkillset, - options?: SkillsetsCreateOptionalParams + options?: SkillsetsCreateOptionalParams, ): Promise { return this.client.sendOperationRequest( { skillset, options }, - createOperationSpec + createOperationSpec, ); } @@ -117,11 +117,11 @@ export class SkillsetsImpl implements Skillsets { resetSkills( skillsetName: string, skillNames: SkillNames, - options?: SkillsetsResetSkillsOptionalParams + options?: SkillsetsResetSkillsOptionalParams, ): Promise { return 
this.client.sendOperationRequest( { skillsetName, skillNames, options }, - resetSkillsOperationSpec + resetSkillsOperationSpec, ); } } @@ -133,20 +133,20 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { httpMethod: "PUT", responses: { 200: { - bodyMapper: Mappers.SearchIndexerSkillset + bodyMapper: Mappers.SearchIndexerSkillset, }, 201: { - bodyMapper: Mappers.SearchIndexerSkillset + bodyMapper: Mappers.SearchIndexerSkillset, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.skillset, queryParameters: [ Parameters.apiVersion, Parameters.skipIndexerResetRequirementForCache, - Parameters.disableCacheReprocessingChangeDetection + Parameters.disableCacheReprocessingChangeDetection, ], urlParameters: [Parameters.endpoint, Parameters.skillsetName], headerParameters: [ @@ -154,10 +154,10 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { Parameters.accept, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.prefer + Parameters.prefer, ], mediaType: "json", - serializer + serializer, }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/skillsets('{skillsetName}')", @@ -166,67 +166,67 @@ const deleteOperationSpec: coreClient.OperationSpec = { 204: {}, 404: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.skillsetName], headerParameters: [ Parameters.accept, Parameters.ifMatch, - Parameters.ifNoneMatch + Parameters.ifNoneMatch, ], - serializer + serializer, }; const getOperationSpec: coreClient.OperationSpec = { path: "/skillsets('{skillsetName}')", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SearchIndexerSkillset + bodyMapper: Mappers.SearchIndexerSkillset, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], 
urlParameters: [Parameters.endpoint, Parameters.skillsetName], headerParameters: [Parameters.accept], - serializer + serializer, }; const listOperationSpec: coreClient.OperationSpec = { path: "/skillsets", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.ListSkillsetsResult + bodyMapper: Mappers.ListSkillsetsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion, Parameters.select], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], - serializer + serializer, }; const createOperationSpec: coreClient.OperationSpec = { path: "/skillsets", httpMethod: "POST", responses: { 201: { - bodyMapper: Mappers.SearchIndexerSkillset + bodyMapper: Mappers.SearchIndexerSkillset, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.skillset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; const resetSkillsOperationSpec: coreClient.OperationSpec = { path: "/skillsets('{skillsetName}')/search.resetskills", @@ -234,13 +234,13 @@ const resetSkillsOperationSpec: coreClient.OperationSpec = { responses: { 204: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.skillNames, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.skillsetName], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generated/service/operations/synonymMaps.ts b/sdk/search/search-documents/src/generated/service/operations/synonymMaps.ts index d4e23f498e70..afde7649c7d9 100644 --- a/sdk/search/search-documents/src/generated/service/operations/synonymMaps.ts +++ 
b/sdk/search/search-documents/src/generated/service/operations/synonymMaps.ts @@ -21,7 +21,7 @@ import { SynonymMapsListOptionalParams, SynonymMapsListResponse, SynonymMapsCreateOptionalParams, - SynonymMapsCreateResponse + SynonymMapsCreateResponse, } from "../models"; /** Class containing SynonymMaps operations. */ @@ -45,11 +45,11 @@ export class SynonymMapsImpl implements SynonymMaps { createOrUpdate( synonymMapName: string, synonymMap: SynonymMap, - options?: SynonymMapsCreateOrUpdateOptionalParams + options?: SynonymMapsCreateOrUpdateOptionalParams, ): Promise { return this.client.sendOperationRequest( { synonymMapName, synonymMap, options }, - createOrUpdateOperationSpec + createOrUpdateOperationSpec, ); } @@ -60,11 +60,11 @@ export class SynonymMapsImpl implements SynonymMaps { */ delete( synonymMapName: string, - options?: SynonymMapsDeleteOptionalParams + options?: SynonymMapsDeleteOptionalParams, ): Promise { return this.client.sendOperationRequest( { synonymMapName, options }, - deleteOperationSpec + deleteOperationSpec, ); } @@ -75,11 +75,11 @@ export class SynonymMapsImpl implements SynonymMaps { */ get( synonymMapName: string, - options?: SynonymMapsGetOptionalParams + options?: SynonymMapsGetOptionalParams, ): Promise { return this.client.sendOperationRequest( { synonymMapName, options }, - getOperationSpec + getOperationSpec, ); } @@ -88,7 +88,7 @@ export class SynonymMapsImpl implements SynonymMaps { * @param options The options parameters. 
*/ list( - options?: SynonymMapsListOptionalParams + options?: SynonymMapsListOptionalParams, ): Promise { return this.client.sendOperationRequest({ options }, listOperationSpec); } @@ -100,11 +100,11 @@ export class SynonymMapsImpl implements SynonymMaps { */ create( synonymMap: SynonymMap, - options?: SynonymMapsCreateOptionalParams + options?: SynonymMapsCreateOptionalParams, ): Promise { return this.client.sendOperationRequest( { synonymMap, options }, - createOperationSpec + createOperationSpec, ); } } @@ -116,14 +116,14 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { httpMethod: "PUT", responses: { 200: { - bodyMapper: Mappers.SynonymMap + bodyMapper: Mappers.SynonymMap, }, 201: { - bodyMapper: Mappers.SynonymMap + bodyMapper: Mappers.SynonymMap, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.synonymMap, queryParameters: [Parameters.apiVersion], @@ -133,10 +133,10 @@ const createOrUpdateOperationSpec: coreClient.OperationSpec = { Parameters.accept, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.prefer + Parameters.prefer, ], mediaType: "json", - serializer + serializer, }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/synonymmaps('{synonymMapName}')", @@ -145,65 +145,65 @@ const deleteOperationSpec: coreClient.OperationSpec = { 204: {}, 404: {}, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.synonymMapName], headerParameters: [ Parameters.accept, Parameters.ifMatch, - Parameters.ifNoneMatch + Parameters.ifNoneMatch, ], - serializer + serializer, }; const getOperationSpec: coreClient.OperationSpec = { path: "/synonymmaps('{synonymMapName}')", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.SynonymMap + bodyMapper: Mappers.SynonymMap, }, default: { - bodyMapper: Mappers.SearchError - } + 
bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.synonymMapName], headerParameters: [Parameters.accept], - serializer + serializer, }; const listOperationSpec: coreClient.OperationSpec = { path: "/synonymmaps", httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.ListSynonymMapsResult + bodyMapper: Mappers.ListSynonymMapsResult, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion, Parameters.select], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], - serializer + serializer, }; const createOperationSpec: coreClient.OperationSpec = { path: "/synonymmaps", httpMethod: "POST", responses: { 201: { - bodyMapper: Mappers.SynonymMap + bodyMapper: Mappers.SynonymMap, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, requestBody: Parameters.synonymMap, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generated/service/operationsInterfaces/aliases.ts b/sdk/search/search-documents/src/generated/service/operationsInterfaces/aliases.ts index 6248725ff47f..ae616f642590 100644 --- a/sdk/search/search-documents/src/generated/service/operationsInterfaces/aliases.ts +++ b/sdk/search/search-documents/src/generated/service/operationsInterfaces/aliases.ts @@ -16,7 +16,7 @@ import { AliasesCreateOrUpdateResponse, AliasesDeleteOptionalParams, AliasesGetOptionalParams, - AliasesGetResponse + AliasesGetResponse, } from "../models"; /** Interface representing a Aliases. 
*/ @@ -28,7 +28,7 @@ export interface Aliases { */ create( alias: SearchAlias, - options?: AliasesCreateOptionalParams + options?: AliasesCreateOptionalParams, ): Promise; /** * Lists all aliases available for a search service. @@ -44,7 +44,7 @@ export interface Aliases { createOrUpdate( aliasName: string, alias: SearchAlias, - options?: AliasesCreateOrUpdateOptionalParams + options?: AliasesCreateOrUpdateOptionalParams, ): Promise; /** * Deletes a search alias and its associated mapping to an index. This operation is permanent, with no @@ -54,7 +54,7 @@ export interface Aliases { */ delete( aliasName: string, - options?: AliasesDeleteOptionalParams + options?: AliasesDeleteOptionalParams, ): Promise; /** * Retrieves an alias definition. @@ -63,6 +63,6 @@ export interface Aliases { */ get( aliasName: string, - options?: AliasesGetOptionalParams + options?: AliasesGetOptionalParams, ): Promise; } diff --git a/sdk/search/search-documents/src/generated/service/operationsInterfaces/dataSources.ts b/sdk/search/search-documents/src/generated/service/operationsInterfaces/dataSources.ts index 89c09ec35f54..801ff187e26a 100644 --- a/sdk/search/search-documents/src/generated/service/operationsInterfaces/dataSources.ts +++ b/sdk/search/search-documents/src/generated/service/operationsInterfaces/dataSources.ts @@ -16,7 +16,7 @@ import { DataSourcesListOptionalParams, DataSourcesListResponse, DataSourcesCreateOptionalParams, - DataSourcesCreateResponse + DataSourcesCreateResponse, } from "../models"; /** Interface representing a DataSources. */ @@ -30,7 +30,7 @@ export interface DataSources { createOrUpdate( dataSourceName: string, dataSource: SearchIndexerDataSource, - options?: DataSourcesCreateOrUpdateOptionalParams + options?: DataSourcesCreateOrUpdateOptionalParams, ): Promise; /** * Deletes a datasource. 
@@ -39,7 +39,7 @@ export interface DataSources { */ delete( dataSourceName: string, - options?: DataSourcesDeleteOptionalParams + options?: DataSourcesDeleteOptionalParams, ): Promise; /** * Retrieves a datasource definition. @@ -48,14 +48,14 @@ export interface DataSources { */ get( dataSourceName: string, - options?: DataSourcesGetOptionalParams + options?: DataSourcesGetOptionalParams, ): Promise; /** * Lists all datasources available for a search service. * @param options The options parameters. */ list( - options?: DataSourcesListOptionalParams + options?: DataSourcesListOptionalParams, ): Promise; /** * Creates a new datasource. @@ -64,6 +64,6 @@ export interface DataSources { */ create( dataSource: SearchIndexerDataSource, - options?: DataSourcesCreateOptionalParams + options?: DataSourcesCreateOptionalParams, ): Promise; } diff --git a/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexers.ts b/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexers.ts index 146e9f669225..95e8c3bac62e 100644 --- a/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexers.ts +++ b/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexers.ts @@ -21,7 +21,7 @@ import { IndexersCreateOptionalParams, IndexersCreateResponse, IndexersGetStatusOptionalParams, - IndexersGetStatusResponse + IndexersGetStatusResponse, } from "../models"; /** Interface representing a Indexers. */ @@ -33,7 +33,7 @@ export interface Indexers { */ reset( indexerName: string, - options?: IndexersResetOptionalParams + options?: IndexersResetOptionalParams, ): Promise; /** * Resets specific documents in the datasource to be selectively re-ingested by the indexer. @@ -42,7 +42,7 @@ export interface Indexers { */ resetDocs( indexerName: string, - options?: IndexersResetDocsOptionalParams + options?: IndexersResetDocsOptionalParams, ): Promise; /** * Runs an indexer on-demand. 
@@ -59,7 +59,7 @@ export interface Indexers { createOrUpdate( indexerName: string, indexer: SearchIndexer, - options?: IndexersCreateOrUpdateOptionalParams + options?: IndexersCreateOrUpdateOptionalParams, ): Promise; /** * Deletes an indexer. @@ -68,7 +68,7 @@ export interface Indexers { */ delete( indexerName: string, - options?: IndexersDeleteOptionalParams + options?: IndexersDeleteOptionalParams, ): Promise; /** * Retrieves an indexer definition. @@ -77,7 +77,7 @@ export interface Indexers { */ get( indexerName: string, - options?: IndexersGetOptionalParams + options?: IndexersGetOptionalParams, ): Promise; /** * Lists all indexers available for a search service. @@ -91,7 +91,7 @@ export interface Indexers { */ create( indexer: SearchIndexer, - options?: IndexersCreateOptionalParams + options?: IndexersCreateOptionalParams, ): Promise; /** * Returns the current status and execution history of an indexer. @@ -100,6 +100,6 @@ export interface Indexers { */ getStatus( indexerName: string, - options?: IndexersGetStatusOptionalParams + options?: IndexersGetStatusOptionalParams, ): Promise; } diff --git a/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexes.ts b/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexes.ts index 3c1135daeb43..dc88a3a325d4 100644 --- a/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexes.ts +++ b/sdk/search/search-documents/src/generated/service/operationsInterfaces/indexes.ts @@ -21,7 +21,7 @@ import { IndexesGetStatisticsResponse, AnalyzeRequest, IndexesAnalyzeOptionalParams, - IndexesAnalyzeResponse + IndexesAnalyzeResponse, } from "../models"; /** Interface representing a Indexes. */ @@ -33,7 +33,7 @@ export interface Indexes { */ create( index: SearchIndex, - options?: IndexesCreateOptionalParams + options?: IndexesCreateOptionalParams, ): Promise; /** * Lists all indexes available for a search service. 
@@ -49,7 +49,7 @@ export interface Indexes { createOrUpdate( indexName: string, index: SearchIndex, - options?: IndexesCreateOrUpdateOptionalParams + options?: IndexesCreateOrUpdateOptionalParams, ): Promise; /** * Deletes a search index and all the documents it contains. This operation is permanent, with no @@ -60,7 +60,7 @@ export interface Indexes { */ delete( indexName: string, - options?: IndexesDeleteOptionalParams + options?: IndexesDeleteOptionalParams, ): Promise; /** * Retrieves an index definition. @@ -69,7 +69,7 @@ export interface Indexes { */ get( indexName: string, - options?: IndexesGetOptionalParams + options?: IndexesGetOptionalParams, ): Promise; /** * Returns statistics for the given index, including a document count and storage usage. @@ -78,7 +78,7 @@ export interface Indexes { */ getStatistics( indexName: string, - options?: IndexesGetStatisticsOptionalParams + options?: IndexesGetStatisticsOptionalParams, ): Promise; /** * Shows how an analyzer breaks text into tokens. @@ -89,6 +89,6 @@ export interface Indexes { analyze( indexName: string, request: AnalyzeRequest, - options?: IndexesAnalyzeOptionalParams + options?: IndexesAnalyzeOptionalParams, ): Promise; } diff --git a/sdk/search/search-documents/src/generated/service/operationsInterfaces/skillsets.ts b/sdk/search/search-documents/src/generated/service/operationsInterfaces/skillsets.ts index 70f61999d669..96aa1e923598 100644 --- a/sdk/search/search-documents/src/generated/service/operationsInterfaces/skillsets.ts +++ b/sdk/search/search-documents/src/generated/service/operationsInterfaces/skillsets.ts @@ -18,7 +18,7 @@ import { SkillsetsCreateOptionalParams, SkillsetsCreateResponse, SkillNames, - SkillsetsResetSkillsOptionalParams + SkillsetsResetSkillsOptionalParams, } from "../models"; /** Interface representing a Skillsets. 
*/ @@ -32,7 +32,7 @@ export interface Skillsets { createOrUpdate( skillsetName: string, skillset: SearchIndexerSkillset, - options?: SkillsetsCreateOrUpdateOptionalParams + options?: SkillsetsCreateOrUpdateOptionalParams, ): Promise; /** * Deletes a skillset in a search service. @@ -41,7 +41,7 @@ export interface Skillsets { */ delete( skillsetName: string, - options?: SkillsetsDeleteOptionalParams + options?: SkillsetsDeleteOptionalParams, ): Promise; /** * Retrieves a skillset in a search service. @@ -50,7 +50,7 @@ export interface Skillsets { */ get( skillsetName: string, - options?: SkillsetsGetOptionalParams + options?: SkillsetsGetOptionalParams, ): Promise; /** * List all skillsets in a search service. @@ -64,7 +64,7 @@ export interface Skillsets { */ create( skillset: SearchIndexerSkillset, - options?: SkillsetsCreateOptionalParams + options?: SkillsetsCreateOptionalParams, ): Promise; /** * Reset an existing skillset in a search service. @@ -75,6 +75,6 @@ export interface Skillsets { resetSkills( skillsetName: string, skillNames: SkillNames, - options?: SkillsetsResetSkillsOptionalParams + options?: SkillsetsResetSkillsOptionalParams, ): Promise; } diff --git a/sdk/search/search-documents/src/generated/service/operationsInterfaces/synonymMaps.ts b/sdk/search/search-documents/src/generated/service/operationsInterfaces/synonymMaps.ts index b9000aafb98b..b26e83a49d74 100644 --- a/sdk/search/search-documents/src/generated/service/operationsInterfaces/synonymMaps.ts +++ b/sdk/search/search-documents/src/generated/service/operationsInterfaces/synonymMaps.ts @@ -16,7 +16,7 @@ import { SynonymMapsListOptionalParams, SynonymMapsListResponse, SynonymMapsCreateOptionalParams, - SynonymMapsCreateResponse + SynonymMapsCreateResponse, } from "../models"; /** Interface representing a SynonymMaps. 
*/ @@ -30,7 +30,7 @@ export interface SynonymMaps { createOrUpdate( synonymMapName: string, synonymMap: SynonymMap, - options?: SynonymMapsCreateOrUpdateOptionalParams + options?: SynonymMapsCreateOrUpdateOptionalParams, ): Promise; /** * Deletes a synonym map. @@ -39,7 +39,7 @@ export interface SynonymMaps { */ delete( synonymMapName: string, - options?: SynonymMapsDeleteOptionalParams + options?: SynonymMapsDeleteOptionalParams, ): Promise; /** * Retrieves a synonym map definition. @@ -48,14 +48,14 @@ export interface SynonymMaps { */ get( synonymMapName: string, - options?: SynonymMapsGetOptionalParams + options?: SynonymMapsGetOptionalParams, ): Promise; /** * Lists all synonym maps available for a search service. * @param options The options parameters. */ list( - options?: SynonymMapsListOptionalParams + options?: SynonymMapsListOptionalParams, ): Promise; /** * Creates a new synonym map. @@ -64,6 +64,6 @@ export interface SynonymMaps { */ create( synonymMap: SynonymMap, - options?: SynonymMapsCreateOptionalParams + options?: SynonymMapsCreateOptionalParams, ): Promise; } diff --git a/sdk/search/search-documents/src/generated/service/searchServiceClient.ts b/sdk/search/search-documents/src/generated/service/searchServiceClient.ts index 9155188a8a7d..5aa9cded0b4a 100644 --- a/sdk/search/search-documents/src/generated/service/searchServiceClient.ts +++ b/sdk/search/search-documents/src/generated/service/searchServiceClient.ts @@ -8,13 +8,18 @@ import * as coreClient from "@azure/core-client"; import * as coreHttpCompat from "@azure/core-http-compat"; +import { + PipelineRequest, + PipelineResponse, + SendRequest, +} from "@azure/core-rest-pipeline"; import { DataSourcesImpl, IndexersImpl, SkillsetsImpl, SynonymMapsImpl, IndexesImpl, - AliasesImpl + AliasesImpl, } from "./operations"; import { DataSources, @@ -22,21 +27,21 @@ import { Skillsets, SynonymMaps, Indexes, - Aliases + Aliases, } from "./operationsInterfaces"; import * as Parameters from 
"./models/parameters"; import * as Mappers from "./models/mappers"; import { - ApiVersion20231001Preview, + ApiVersion20240301Preview, SearchServiceClientOptionalParams, GetServiceStatisticsOptionalParams, - GetServiceStatisticsResponse + GetServiceStatisticsResponse, } from "./models"; /** @internal */ export class SearchServiceClient extends coreHttpCompat.ExtendedServiceClient { endpoint: string; - apiVersion: ApiVersion20231001Preview; + apiVersion: ApiVersion20240301Preview; /** * Initializes a new instance of the SearchServiceClient class. @@ -46,8 +51,8 @@ export class SearchServiceClient extends coreHttpCompat.ExtendedServiceClient { */ constructor( endpoint: string, - apiVersion: ApiVersion20231001Preview, - options?: SearchServiceClientOptionalParams + apiVersion: ApiVersion20240301Preview, + options?: SearchServiceClientOptionalParams, ) { if (endpoint === undefined) { throw new Error("'endpoint' cannot be null"); @@ -61,10 +66,10 @@ export class SearchServiceClient extends coreHttpCompat.ExtendedServiceClient { options = {}; } const defaults: SearchServiceClientOptionalParams = { - requestContentType: "application/json; charset=utf-8" + requestContentType: "application/json; charset=utf-8", }; - const packageDetails = `azsdk-js-search-documents/12.0.0-beta.4`; + const packageDetails = `azsdk-js-search-documents/12.1.0-beta.1`; const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` @@ -74,9 +79,9 @@ export class SearchServiceClient extends coreHttpCompat.ExtendedServiceClient { ...defaults, ...options, userAgentOptions: { - userAgentPrefix + userAgentPrefix, }, - baseUri: options.endpoint ?? options.baseUri ?? "{endpoint}" + endpoint: options.endpoint ?? options.baseUri ?? 
"{endpoint}", }; super(optionsWithDefaults); // Parameter assignments @@ -88,6 +93,35 @@ export class SearchServiceClient extends coreHttpCompat.ExtendedServiceClient { this.synonymMaps = new SynonymMapsImpl(this); this.indexes = new IndexesImpl(this); this.aliases = new AliasesImpl(this); + this.addCustomApiVersionPolicy(apiVersion); + } + + /** A function that adds a policy that sets the api-version (or equivalent) to reflect the library version. */ + private addCustomApiVersionPolicy(apiVersion?: string) { + if (!apiVersion) { + return; + } + const apiVersionPolicy = { + name: "CustomApiVersionPolicy", + async sendRequest( + request: PipelineRequest, + next: SendRequest, + ): Promise { + const param = request.url.split("?"); + if (param.length > 1) { + const newParams = param[1].split("&").map((item) => { + if (item.indexOf("api-version") > -1) { + return "api-version=" + apiVersion; + } else { + return item; + } + }); + request.url = param[0] + "?" + newParams.join("&"); + } + return next(request); + }, + }; + this.pipeline.addPolicy(apiVersionPolicy); } /** @@ -95,11 +129,11 @@ export class SearchServiceClient extends coreHttpCompat.ExtendedServiceClient { * @param options The options parameters. 
*/ getServiceStatistics( - options?: GetServiceStatisticsOptionalParams + options?: GetServiceStatisticsOptionalParams, ): Promise { return this.sendOperationRequest( { options }, - getServiceStatisticsOperationSpec + getServiceStatisticsOperationSpec, ); } @@ -118,14 +152,14 @@ const getServiceStatisticsOperationSpec: coreClient.OperationSpec = { httpMethod: "GET", responses: { 200: { - bodyMapper: Mappers.ServiceStatistics + bodyMapper: Mappers.ServiceStatistics, }, default: { - bodyMapper: Mappers.SearchError - } + bodyMapper: Mappers.ErrorResponse, + }, }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], - serializer + serializer, }; diff --git a/sdk/search/search-documents/src/generatedStringLiteralUnions.ts b/sdk/search/search-documents/src/generatedStringLiteralUnions.ts new file mode 100644 index 000000000000..b41fce75648e --- /dev/null +++ b/sdk/search/search-documents/src/generatedStringLiteralUnions.ts @@ -0,0 +1,458 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +export type BlobIndexerDataToExtract = "storageMetadata" | "allMetadata" | "contentAndMetadata"; +export type BlobIndexerImageAction = + | "none" + | "generateNormalizedImages" + | "generateNormalizedImagePerPage"; +export type BlobIndexerParsingMode = + | "default" + | "text" + | "delimitedText" + | "json" + | "jsonArray" + | "jsonLines"; +export type BlobIndexerPDFTextRotationAlgorithm = "none" | "detectAngles"; +export type CustomEntityLookupSkillLanguage = + | "da" + | "de" + | "en" + | "es" + | "fi" + | "fr" + | "it" + | "ko" + | "pt"; +export type EntityCategory = + | "location" + | "organization" + | "person" + | "quantity" + | "datetime" + | "url" + | "email"; +export type EntityRecognitionSkillLanguage = + | "ar" + | "cs" + | "zh-Hans" + | "zh-Hant" + | "da" + | "nl" + | "en" + | "fi" + | "fr" + | "de" + | "el" + | "hu" + | "it" + | "ja" + | "ko" + | "no" + | "pl" + | "pt-PT" + | "pt-BR" + | "ru" + | "es" + | "sv" + | "tr"; +export type ImageAnalysisSkillLanguage = + | "ar" + | "az" + | "bg" + | "bs" + | "ca" + | "cs" + | "cy" + | "da" + | "de" + | "el" + | "en" + | "es" + | "et" + | "eu" + | "fi" + | "fr" + | "ga" + | "gl" + | "he" + | "hi" + | "hr" + | "hu" + | "id" + | "it" + | "ja" + | "kk" + | "ko" + | "lt" + | "lv" + | "mk" + | "ms" + | "nb" + | "nl" + | "pl" + | "prs" + | "pt-BR" + | "pt" + | "pt-PT" + | "ro" + | "ru" + | "sk" + | "sl" + | "sr-Cyrl" + | "sr-Latn" + | "sv" + | "th" + | "tr" + | "uk" + | "vi" + | "zh" + | "zh-Hans" + | "zh-Hant"; +export type ImageDetail = "celebrities" | "landmarks"; +export type IndexerExecutionEnvironment = "standard" | "private"; +export type KeyPhraseExtractionSkillLanguage = + | "da" + | "nl" + | "en" + | "fi" + | "fr" + | "de" + | "it" + | "ja" + | "ko" + | "no" + | "pl" + | "pt-PT" + | "pt-BR" + | "ru" + | "es" + | "sv"; +export type OcrSkillLanguage = + | "af" + | "sq" + | "anp" + | "ar" + | "ast" + | "awa" + | "az" + | "bfy" + | "eu" + | "be" + | "be-cyrl" + | "be-latn" + | "bho" + | "bi" + | "brx" + | 
"bs" + | "bra" + | "br" + | "bg" + | "bns" + | "bua" + | "ca" + | "ceb" + | "rab" + | "ch" + | "hne" + | "zh-Hans" + | "zh-Hant" + | "kw" + | "co" + | "crh" + | "hr" + | "cs" + | "da" + | "prs" + | "dhi" + | "doi" + | "nl" + | "en" + | "myv" + | "et" + | "fo" + | "fj" + | "fil" + | "fi" + | "fr" + | "fur" + | "gag" + | "gl" + | "de" + | "gil" + | "gon" + | "el" + | "kl" + | "gvr" + | "ht" + | "hlb" + | "hni" + | "bgc" + | "haw" + | "hi" + | "mww" + | "hoc" + | "hu" + | "is" + | "smn" + | "id" + | "ia" + | "iu" + | "ga" + | "it" + | "ja" + | "Jns" + | "jv" + | "kea" + | "kac" + | "xnr" + | "krc" + | "kaa-cyrl" + | "kaa" + | "csb" + | "kk-cyrl" + | "kk-latn" + | "klr" + | "kha" + | "quc" + | "ko" + | "kfq" + | "kpy" + | "kos" + | "kum" + | "ku-arab" + | "ku-latn" + | "kru" + | "ky" + | "lkt" + | "la" + | "lt" + | "dsb" + | "smj" + | "lb" + | "bfz" + | "ms" + | "mt" + | "kmj" + | "gv" + | "mi" + | "mr" + | "mn" + | "cnr-cyrl" + | "cnr-latn" + | "nap" + | "ne" + | "niu" + | "nog" + | "sme" + | "nb" + | "no" + | "oc" + | "os" + | "ps" + | "fa" + | "pl" + | "pt" + | "pa" + | "ksh" + | "ro" + | "rm" + | "ru" + | "sck" + | "sm" + | "sa" + | "sat" + | "sco" + | "gd" + | "sr" + | "sr-Cyrl" + | "sr-Latn" + | "xsr" + | "srx" + | "sms" + | "sk" + | "sl" + | "so" + | "sma" + | "es" + | "sw" + | "sv" + | "tg" + | "tt" + | "tet" + | "thf" + | "to" + | "tr" + | "tk" + | "tyv" + | "hsb" + | "ur" + | "ug" + | "uz-arab" + | "uz-cyrl" + | "uz" + | "vo" + | "wae" + | "cy" + | "fy" + | "yua" + | "za" + | "zu" + | "unk"; +export type PIIDetectionSkillMaskingMode = "none" | "replace"; +export type RegexFlags = + | "CANON_EQ" + | "CASE_INSENSITIVE" + | "COMMENTS" + | "DOTALL" + | "LITERAL" + | "MULTILINE" + | "UNICODE_CASE" + | "UNIX_LINES"; +export type SearchIndexerDataSourceType = + | "azuresql" + | "cosmosdb" + | "azureblob" + | "azuretable" + | "mysql" + | "adlsgen2"; +export type SemanticErrorMode = "partial" | "fail"; +export type SemanticErrorReason = "maxWaitExceeded" | 
"capacityOverloaded" | "transient"; +export type SemanticSearchResultsType = "baseResults" | "rerankedResults"; +export type SentimentSkillLanguage = + | "da" + | "nl" + | "en" + | "fi" + | "fr" + | "de" + | "el" + | "it" + | "no" + | "pl" + | "pt-PT" + | "ru" + | "es" + | "sv" + | "tr"; +export type SplitSkillLanguage = + | "am" + | "bs" + | "cs" + | "da" + | "de" + | "en" + | "es" + | "et" + | "fi" + | "fr" + | "he" + | "hi" + | "hr" + | "hu" + | "id" + | "is" + | "it" + | "ja" + | "ko" + | "lv" + | "nb" + | "nl" + | "pl" + | "pt" + | "pt-br" + | "ru" + | "sk" + | "sl" + | "sr" + | "sv" + | "tr" + | "ur" + | "zh"; +export type TextSplitMode = "pages" | "sentences"; +export type TextTranslationSkillLanguage = + | "af" + | "ar" + | "bn" + | "bs" + | "bg" + | "yue" + | "ca" + | "zh-Hans" + | "zh-Hant" + | "hr" + | "cs" + | "da" + | "nl" + | "en" + | "et" + | "fj" + | "fil" + | "fi" + | "fr" + | "de" + | "el" + | "ht" + | "he" + | "hi" + | "mww" + | "hu" + | "is" + | "id" + | "it" + | "ja" + | "sw" + | "tlh" + | "tlh-Latn" + | "tlh-Piqd" + | "ko" + | "lv" + | "lt" + | "mg" + | "ms" + | "mt" + | "nb" + | "fa" + | "pl" + | "pt" + | "pt-br" + | "pt-PT" + | "otq" + | "ro" + | "ru" + | "sm" + | "sr-Cyrl" + | "sr-Latn" + | "sk" + | "sl" + | "es" + | "sv" + | "ty" + | "ta" + | "te" + | "th" + | "to" + | "tr" + | "uk" + | "ur" + | "vi" + | "cy" + | "yua" + | "ga" + | "kn" + | "mi" + | "ml" + | "pa"; +export type VectorFilterMode = "postFilter" | "preFilter"; +export type VectorQueryKind = "vector" | "text"; +export type VectorSearchAlgorithmKind = "hnsw" | "exhaustiveKnn"; +export type VectorSearchAlgorithmMetric = "cosine" | "euclidean" | "dotProduct"; +export type VectorSearchVectorizerKind = "azureOpenAI" | "customWebApi"; +export type VisualFeature = + | "adult" + | "brands" + | "categories" + | "description" + | "faces" + | "objects" + | "tags"; diff --git a/sdk/search/search-documents/src/index.ts b/sdk/search/search-documents/src/index.ts index 
c01feec4ab04..b008c9568869 100644 --- a/sdk/search/search-documents/src/index.ts +++ b/sdk/search/search-documents/src/index.ts @@ -1,394 +1,416 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -export { SearchClient, SearchClientOptions } from "./searchClient"; +export { AzureKeyCredential } from "@azure/core-auth"; export { - DEFAULT_BATCH_SIZE, - DEFAULT_FLUSH_WINDOW, - DEFAULT_RETRY_COUNT, -} from "./searchIndexingBufferedSender"; + AutocompleteItem, + AutocompleteMode, + AutocompleteResult, + FacetResult, + IndexActionType, + IndexDocumentsResult, + IndexingResult, + KnownQueryDebugMode, + KnownQueryLanguage, + KnownQuerySpellerType, + KnownSemanticErrorMode, + KnownSemanticErrorReason, + KnownSemanticFieldState, + KnownSemanticSearchResultsType, + KnownSpeller, + KnownVectorQueryKind, + QueryAnswerResult, + QueryCaptionResult, + QueryDebugMode, + QueryLanguage, + QueryResultDocumentRerankerInput, + QuerySpellerType, + QueryType, + ScoringStatistics, + SearchMode, + SemanticFieldState, + Speller, +} from "./generated/data/models"; +export { + AnalyzedTokenInfo, + AnalyzeResult, + AsciiFoldingTokenFilter, + AzureActiveDirectoryApplicationCredentials, + AzureMachineLearningSkill, + BaseVectorSearchCompressionConfiguration, + BM25Similarity, + CharFilter as BaseCharFilter, + CharFilterName, + CjkBigramTokenFilter, + CjkBigramTokenFilterScripts, + ClassicSimilarity, + ClassicTokenizer, + CognitiveServicesAccount as BaseCognitiveServicesAccount, + CognitiveServicesAccountKey, + CommonGramTokenFilter, + ConditionalSkill, + CorsOptions, + CustomEntity, + CustomEntityAlias, + CustomNormalizer, + DataChangeDetectionPolicy as BaseDataChangeDetectionPolicy, + DataDeletionDetectionPolicy as BaseDataDeletionDetectionPolicy, + DefaultCognitiveServicesAccount, + DictionaryDecompounderTokenFilter, + DistanceScoringFunction, + DistanceScoringParameters, + DocumentExtractionSkill, + EdgeNGramTokenFilterSide, + EdgeNGramTokenizer, + 
ElisionTokenFilter, + EntityLinkingSkill, + EntityRecognitionSkillV3, + FieldMapping, + FieldMappingFunction, + FreshnessScoringFunction, + FreshnessScoringParameters, + HighWaterMarkChangeDetectionPolicy, + IndexerExecutionResult, + IndexerExecutionStatus, + IndexerExecutionStatusDetail, + IndexerState, + IndexerStatus, + IndexingMode, + IndexingSchedule, + IndexProjectionMode, + InputFieldMappingEntry, + KeepTokenFilter, + KeywordMarkerTokenFilter, + KnownBlobIndexerDataToExtract, + KnownBlobIndexerImageAction, + KnownBlobIndexerParsingMode, + KnownBlobIndexerPDFTextRotationAlgorithm, + KnownCharFilterName, + KnownCustomEntityLookupSkillLanguage, + KnownEntityCategory, + KnownEntityRecognitionSkillLanguage, + KnownImageAnalysisSkillLanguage, + KnownImageDetail, + KnownIndexerExecutionEnvironment, + KnownIndexerExecutionStatusDetail, + KnownIndexingMode, + KnownIndexProjectionMode, + KnownKeyPhraseExtractionSkillLanguage, + KnownLexicalAnalyzerName, + KnownLexicalNormalizerName, + KnownLexicalNormalizerName as KnownNormalizerNames, + KnownLexicalTokenizerName, + KnownLineEnding, + KnownOcrSkillLanguage, + KnownPIIDetectionSkillMaskingMode, + KnownRegexFlags, + KnownSearchIndexerDataSourceType, + KnownSentimentSkillLanguage, + KnownSplitSkillLanguage, + KnownTextSplitMode, + KnownTextTranslationSkillLanguage, + KnownTokenFilterName, + KnownVectorSearchCompressionKind, + KnownVectorSearchCompressionTargetDataType, + KnownVectorSearchVectorizerKind, + KnownVisualFeature, + LanguageDetectionSkill, + LengthTokenFilter, + LexicalAnalyzer as BaseLexicalAnalyzer, + LexicalAnalyzerName, + LexicalNormalizer as BaseLexicalNormalizer, + LexicalNormalizerName, + LexicalTokenizer as BaseLexicalTokenizer, + LexicalTokenizerName, + LimitTokenFilter, + LineEnding, + LuceneStandardAnalyzer, + MagnitudeScoringFunction, + MagnitudeScoringParameters, + MappingCharFilter, + MergeSkill, + MicrosoftLanguageStemmingTokenizer, + MicrosoftLanguageTokenizer, + 
MicrosoftStemmingTokenizerLanguage, + MicrosoftTokenizerLanguage, + NativeBlobSoftDeleteDeletionDetectionPolicy, + NGramTokenizer, + OutputFieldMappingEntry, + PathHierarchyTokenizerV2 as PathHierarchyTokenizer, + PatternCaptureTokenFilter, + PatternReplaceCharFilter, + PatternReplaceTokenFilter, + PhoneticEncoder, + PhoneticTokenFilter, + ResourceCounter, + ScalarQuantizationCompressionConfiguration, + ScalarQuantizationParameters, + ScoringFunction as BaseScoringFunction, + ScoringFunctionAggregation, + ScoringFunctionInterpolation, + SearchAlias, + SearchIndexerDataContainer, + SearchIndexerDataIdentity as BaseSearchIndexerDataIdentity, + SearchIndexerDataNoneIdentity, + SearchIndexerDataUserAssignedIdentity, + SearchIndexerError, + SearchIndexerIndexProjectionSelector, + SearchIndexerKnowledgeStoreBlobProjectionSelector, + SearchIndexerKnowledgeStoreFileProjectionSelector, + SearchIndexerKnowledgeStoreObjectProjectionSelector, + SearchIndexerKnowledgeStoreProjection, + SearchIndexerKnowledgeStoreProjectionSelector, + SearchIndexerKnowledgeStoreTableProjectionSelector, + SearchIndexerLimits, + SearchIndexerSkill as BaseSearchIndexerSkill, + SearchIndexerStatus, + SearchIndexerWarning, + SemanticConfiguration, + SemanticField, + SemanticPrioritizedFields, + SemanticSearch, + SentimentSkillV3, + ServiceCounters, + ServiceLimits, + ShaperSkill, + ShingleTokenFilter, + Similarity, + SnowballTokenFilter, + SnowballTokenFilterLanguage, + SoftDeleteColumnDeletionDetectionPolicy, + SqlIntegratedChangeTrackingPolicy, + StemmerOverrideTokenFilter, + StemmerTokenFilter, + StemmerTokenFilterLanguage, + StopAnalyzer, + StopwordsList, + StopwordsTokenFilter, + Suggester as SearchSuggester, + SynonymTokenFilter, + TagScoringFunction, + TagScoringParameters, + TextWeights, + TokenCharacterKind, + TokenFilter as BaseTokenFilter, + TokenFilterName, + TruncateTokenFilter, + UaxUrlEmailTokenizer, + UniqueTokenFilter, + VectorSearchCompressionKind, + 
VectorSearchCompressionTargetDataType, + VectorSearchProfile, + WordDelimiterTokenFilter, +} from "./generated/service/models"; +export { + BlobIndexerDataToExtract, + BlobIndexerImageAction, + BlobIndexerParsingMode, + BlobIndexerPDFTextRotationAlgorithm, + CustomEntityLookupSkillLanguage, + EntityCategory, + EntityRecognitionSkillLanguage, + ImageAnalysisSkillLanguage, + ImageDetail, + IndexerExecutionEnvironment, + KeyPhraseExtractionSkillLanguage, + OcrSkillLanguage, + PIIDetectionSkillMaskingMode, + RegexFlags, + SearchIndexerDataSourceType, + SemanticErrorMode, + SemanticErrorReason, + SemanticSearchResultsType, + SentimentSkillLanguage, + SplitSkillLanguage, + TextSplitMode, + TextTranslationSkillLanguage, + VectorFilterMode, + VectorQueryKind, + VectorSearchAlgorithmKind, + VectorSearchAlgorithmMetric, + VectorSearchVectorizerKind, + VisualFeature, +} from "./generatedStringLiteralUnions"; +export { default as GeographyPoint } from "./geographyPoint"; +export { IndexDocumentsBatch } from "./indexDocumentsBatch"; export { - AutocompleteRequest, AutocompleteOptions, + AutocompleteRequest, + BaseSearchRequestOptions, + BaseVectorQuery, CountDocumentsOptions, DeleteDocumentsOptions, + DocumentDebugInfo, ExcludedODataTypes, ExtractDocumentKey, + ExtractiveQueryAnswer, + ExtractiveQueryCaption, GetDocumentOptions, IndexDocumentsAction, - ListSearchResultsPageSettings, IndexDocumentsOptions, - SearchDocumentsResultBase, - SearchDocumentsResult, - SearchDocumentsPageResult, - SearchIterator, - SearchOptions, - SearchRequestOptions, - SearchRequest, - SearchResult, - SuggestDocumentsResult, - SuggestRequest, - SuggestResult, - SuggestOptions, + ListSearchResultsPageSettings, MergeDocumentsOptions, MergeOrUploadDocumentsOptions, NarrowedModel, - UploadDocumentsOptions, - SearchIndexingBufferedSenderOptions, + QueryAnswer, + QueryCaption, + QueryResultDocumentSemanticField, + SearchDocumentsPageResult, + SearchDocumentsResult, + SearchDocumentsResultBase, + 
SearchFieldArray, SearchIndexingBufferedSenderDeleteDocumentsOptions, SearchIndexingBufferedSenderFlushDocumentsOptions, SearchIndexingBufferedSenderMergeDocumentsOptions, SearchIndexingBufferedSenderMergeOrUploadDocumentsOptions, + SearchIndexingBufferedSenderOptions, SearchIndexingBufferedSenderUploadDocumentsOptions, + SearchIterator, + SearchOptions, SearchPick, - SearchFieldArray, + SearchRequestOptions, + SearchRequestQueryTypeOptions, + SearchResult, SelectArray, SelectFields, + SemanticDebugInfo, + SemanticSearchOptions, + SuggestDocumentsResult, SuggestNarrowedModel, + SuggestOptions, + SuggestRequest, + SuggestResult, UnionToIntersection, - SemanticPartialResponseReason, - SemanticPartialResponseType, - QueryDebugMode, - SemanticErrorHandlingMode, - SemanticFieldState, - AnswersOptions, - DocumentDebugInfo, - SemanticDebugInfo, - QueryResultDocumentSemanticField, - Answers, - VectorQuery, - BaseVectorQuery, - RawVectorQuery, + UploadDocumentsOptions, VectorizableTextQuery, - VectorQueryKind, - VectorFilterMode, + VectorizedQuery, + VectorQuery, + VectorSearchOptions, } from "./indexModels"; -export { SearchIndexingBufferedSender, IndexDocumentsClient } from "./searchIndexingBufferedSender"; -export { SearchIndexClient, SearchIndexClientOptions } from "./searchIndexClient"; +export { odata } from "./odata"; +export { KnownSearchAudience } from "./searchAudience"; +export { SearchClient, SearchClientOptions } from "./searchClient"; +export { SearchIndexClient, SearchIndexClientOptions } from "./searchIndexClient"; export { SearchIndexerClient, SearchIndexerClientOptions } from "./searchIndexerClient"; export { - SearchIndex, - LexicalAnalyzer, - TokenFilter, - LexicalTokenizer, + DEFAULT_BATCH_SIZE, + DEFAULT_FLUSH_WINDOW, + DEFAULT_RETRY_COUNT, + IndexDocumentsClient, + SearchIndexingBufferedSender, +} from "./searchIndexingBufferedSender"; +export { + AliasIterator, + AnalyzeRequest, + AnalyzeTextOptions, + AzureOpenAIEmbeddingSkill, + 
AzureOpenAIParameters, + AzureOpenAIVectorizer, + BaseVectorSearchAlgorithmConfiguration, + BaseVectorSearchVectorizer, CharFilter, - ListIndexesOptions, + CognitiveServicesAccount, + ComplexDataType, + ComplexField, + CreateAliasOptions, + CreateDataSourceConnectionOptions, + CreateIndexerOptions, CreateIndexOptions, + CreateOrUpdateAliasOptions, + CreateorUpdateDataSourceConnectionOptions, + CreateorUpdateIndexerOptions, CreateOrUpdateIndexOptions, CreateOrUpdateSkillsetOptions, CreateOrUpdateSynonymMapOptions, CreateSkillsetOptions, CreateSynonymMapOptions, + CustomAnalyzer, + CustomEntityLookupSkill, + CustomVectorizer, + CustomVectorizerParameters, + DataChangeDetectionPolicy, + DataDeletionDetectionPolicy, + DeleteAliasOptions, + DeleteDataSourceConnectionOptions, + DeleteIndexerOptions, + DeleteIndexOptions, DeleteSkillsetOptions, DeleteSynonymMapOptions, - GetSkillSetOptions, - GetSynonymMapsOptions, - ListSkillsetsOptions, - SearchIndexerSkillset, - ListSynonymMapsOptions, - DeleteIndexOptions, - AnalyzeTextOptions, + EdgeNGramTokenFilter, + EntityRecognitionSkill, + ExhaustiveKnnAlgorithmConfiguration, + ExhaustiveKnnParameters, + GetAliasOptions, + GetDataSourceConnectionOptions, + GetIndexerOptions, + GetIndexerStatusOptions, GetIndexOptions, GetIndexStatisticsOptions, + GetServiceStatisticsOptions, + GetSkillSetOptions, + GetSynonymMapsOptions, + HnswAlgorithmConfiguration, + HnswParameters, + ImageAnalysisSkill, + IndexingParameters, + IndexingParametersConfiguration, + IndexIterator, + IndexNameIterator, + KeyPhraseExtractionSkill, + KeywordTokenizer, KnownAnalyzerNames, KnownCharFilterNames, KnownTokenFilterNames, KnownTokenizerNames, - ScoringFunction, - ScoringProfile, - CustomAnalyzer, - PatternAnalyzer, - PatternTokenizer, - SearchField, - SimpleField, - ComplexField, - SearchFieldDataType, - ComplexDataType, - CognitiveServicesAccount, - SearchIndexerSkill, - SynonymMap, - ListIndexersOptions, - CreateIndexerOptions, - GetIndexerOptions, - 
CreateorUpdateIndexerOptions, - DeleteIndexerOptions, - GetIndexerStatusOptions, - ResetIndexerOptions, - RunIndexerOptions, - CreateDataSourceConnectionOptions, - CreateorUpdateDataSourceConnectionOptions, - DeleteDataSourceConnectionOptions, - GetDataSourceConnectionOptions, + LexicalAnalyzer, + LexicalNormalizer, + LexicalTokenizer, + ListAliasesOptions, ListDataSourceConnectionsOptions, - SearchIndexerDataSourceConnection, - DataChangeDetectionPolicy, - DataDeletionDetectionPolicy, - GetServiceStatisticsOptions, - IndexIterator, - IndexNameIterator, - SimilarityAlgorithm, - NGramTokenFilter, + ListIndexersOptions, + ListIndexesOptions, + ListSkillsetsOptions, + ListSynonymMapsOptions, LuceneStandardTokenizer, - EdgeNGramTokenFilter, - KeywordTokenizer, - AnalyzeRequest, - SearchResourceEncryptionKey, - SearchIndexStatistics, - SearchServiceStatistics, - SearchIndexer, - LexicalNormalizer, - SearchIndexerDataIdentity, + NGramTokenFilter, + OcrSkill, + PatternAnalyzer, + PatternTokenizer, + PIIDetectionSkill, ResetDocumentsOptions, + ResetIndexerOptions, ResetSkillsOptions, + RunIndexerOptions, + ScoringFunction, + ScoringProfile, + SearchField, + SearchFieldDataType, + SearchIndex, SearchIndexAlias, - CreateAliasOptions, - CreateOrUpdateAliasOptions, - DeleteAliasOptions, - GetAliasOptions, - ListAliasesOptions, - AliasIterator, - VectorSearchAlgorithmConfiguration, - VectorSearchAlgorithmMetric, - VectorSearch, + SearchIndexer, SearchIndexerCache, - SearchIndexerKnowledgeStore, - WebApiSkill, - HnswParameters, - HnswVectorSearchAlgorithmConfiguration, + SearchIndexerDataIdentity, + SearchIndexerDataSourceConnection, SearchIndexerIndexProjections, SearchIndexerIndexProjectionsParameters, - VectorSearchVectorizer, - ExhaustiveKnnParameters, - AzureOpenAIParameters, - CustomVectorizerParameters, - VectorSearchAlgorithmKind, - VectorSearchVectorizerKind, - AzureOpenAIEmbeddingSkill, - AzureOpenAIVectorizer, - CustomVectorizer, - 
ExhaustiveKnnVectorSearchAlgorithmConfiguration, - IndexProjectionMode, - BaseVectorSearchAlgorithmConfiguration, - BaseVectorSearchVectorizer, + SearchIndexerKnowledgeStore, SearchIndexerKnowledgeStoreParameters, -} from "./serviceModels"; -export { default as GeographyPoint } from "./geographyPoint"; -export { odata } from "./odata"; -export { IndexDocumentsBatch } from "./indexDocumentsBatch"; -export { - AutocompleteResult, - AutocompleteMode, - AutocompleteItem, - FacetResult, - IndexActionType, - IndexDocumentsResult, - IndexingResult, - QueryType, - SearchMode, - ScoringStatistics, - KnownAnswers, - QueryLanguage, - KnownQueryLanguage, - Speller, - KnownSpeller, - CaptionResult, - AnswerResult, - Captions, - QueryAnswerType, - QueryCaptionType, - QuerySpellerType, - KnownQuerySpellerType, - KnownQueryAnswerType, - KnownQueryCaptionType, - QueryResultDocumentRerankerInput, -} from "./generated/data/models"; -export { - RegexFlags, - KnownRegexFlags, - LuceneStandardAnalyzer, - StopAnalyzer, - MappingCharFilter, - PatternReplaceCharFilter, - CorsOptions, - AzureActiveDirectoryApplicationCredentials, - ScoringFunctionAggregation, - ScoringFunctionInterpolation, - DistanceScoringParameters, - DistanceScoringFunction, - FreshnessScoringParameters, - FreshnessScoringFunction, - MagnitudeScoringParameters, - MagnitudeScoringFunction, - TagScoringParameters, - TagScoringFunction, - TextWeights, - AsciiFoldingTokenFilter, - CjkBigramTokenFilterScripts, - CjkBigramTokenFilter, - CommonGramTokenFilter, - DictionaryDecompounderTokenFilter, - EdgeNGramTokenFilterSide, - ElisionTokenFilter, - KeepTokenFilter, - KeywordMarkerTokenFilter, - LengthTokenFilter, - LimitTokenFilter, - PatternCaptureTokenFilter, - PatternReplaceTokenFilter, - PhoneticEncoder, - PhoneticTokenFilter, - ShingleTokenFilter, - SnowballTokenFilterLanguage, - SnowballTokenFilter, - StemmerTokenFilterLanguage, - StemmerTokenFilter, - StemmerOverrideTokenFilter, - StopwordsList, - StopwordsTokenFilter, - 
SynonymTokenFilter, - TruncateTokenFilter, - UniqueTokenFilter, - WordDelimiterTokenFilter, - ClassicTokenizer, - TokenCharacterKind, - EdgeNGramTokenizer, - MicrosoftTokenizerLanguage, - MicrosoftLanguageTokenizer, - MicrosoftStemmingTokenizerLanguage, - MicrosoftLanguageStemmingTokenizer, - NGramTokenizer, - PathHierarchyTokenizerV2 as PathHierarchyTokenizer, - UaxUrlEmailTokenizer, - Suggester as SearchSuggester, - AnalyzeResult, - AnalyzedTokenInfo, - ConditionalSkill, - KeyPhraseExtractionSkill, - OcrSkill, - ImageAnalysisSkill, - LanguageDetectionSkill, - ShaperSkill, - MergeSkill, - EntityRecognitionSkill, + SearchIndexerSkill, + SearchIndexerSkillset, + SearchIndexStatistics, + SearchResourceEncryptionKey, + SearchServiceStatistics, SentimentSkill, - CustomEntityLookupSkill, - CustomEntityLookupSkillLanguage, - KnownCustomEntityLookupSkillLanguage, - DocumentExtractionSkill, - CustomEntity, - CustomEntityAlias, + SimilarityAlgorithm, + SimpleField, SplitSkill, - PIIDetectionSkill, - EntityRecognitionSkillV3, - EntityLinkingSkill, - SentimentSkillV3, + SynonymMap, TextTranslationSkill, - AzureMachineLearningSkill, - SentimentSkillLanguage, - KnownSentimentSkillLanguage, - SplitSkillLanguage, - KnownSplitSkillLanguage, - TextSplitMode, - KnownTextSplitMode, - TextTranslationSkillLanguage, - KnownTextTranslationSkillLanguage, - DefaultCognitiveServicesAccount, - CognitiveServicesAccountKey, - InputFieldMappingEntry, - OutputFieldMappingEntry, - EntityCategory, - KnownEntityCategory, - EntityRecognitionSkillLanguage, - KnownEntityRecognitionSkillLanguage, - ImageAnalysisSkillLanguage, - KnownImageAnalysisSkillLanguage, - ImageDetail, - KnownImageDetail, - VisualFeature, - KnownVisualFeature, - KeyPhraseExtractionSkillLanguage, - KnownKeyPhraseExtractionSkillLanguage, - OcrSkillLanguage, - KnownOcrSkillLanguage, - FieldMapping, - IndexingParameters, - IndexingSchedule, - FieldMappingFunction, - SearchIndexerStatus, - IndexerExecutionResult, - 
SearchIndexerLimits, - IndexerStatus, - SearchIndexerError, - IndexerExecutionStatus, - SearchIndexerWarning, - SearchIndexerDataContainer, - SearchIndexerDataSourceType, - KnownSearchIndexerDataSourceType, - SoftDeleteColumnDeletionDetectionPolicy, - SqlIntegratedChangeTrackingPolicy, - HighWaterMarkChangeDetectionPolicy, - SearchIndexerDataUserAssignedIdentity, - SearchIndexerDataNoneIdentity, - ServiceCounters, - ServiceLimits, - ResourceCounter, - LexicalAnalyzerName, - KnownLexicalAnalyzerName, - ClassicSimilarity, - BM25Similarity, - IndexingParametersConfiguration, - BlobIndexerDataToExtract, - KnownBlobIndexerDataToExtract, - IndexerExecutionEnvironment, - BlobIndexerImageAction, - KnownBlobIndexerImageAction, - BlobIndexerParsingMode, - KnownBlobIndexerParsingMode, - BlobIndexerPDFTextRotationAlgorithm, - KnownBlobIndexerPDFTextRotationAlgorithm, - TokenFilter as BaseTokenFilter, - Similarity, - LexicalTokenizer as BaseLexicalTokenizer, - CognitiveServicesAccount as BaseCognitiveServicesAccount, - SearchIndexerSkill as BaseSearchIndexerSkill, - ScoringFunction as BaseScoringFunction, - DataChangeDetectionPolicy as BaseDataChangeDetectionPolicy, - LexicalAnalyzer as BaseLexicalAnalyzer, - CharFilter as BaseCharFilter, - DataDeletionDetectionPolicy as BaseDataDeletionDetectionPolicy, - LexicalNormalizerName, - KnownLexicalNormalizerName, - CustomNormalizer, - TokenFilterName, - KnownTokenFilterName, - CharFilterName, - KnownCharFilterName, - LexicalNormalizer as BaseLexicalNormalizer, - SearchIndexerKnowledgeStoreProjection, - SearchIndexerKnowledgeStoreFileProjectionSelector, - SearchIndexerKnowledgeStoreBlobProjectionSelector, - SearchIndexerKnowledgeStoreProjectionSelector, - SearchIndexerKnowledgeStoreObjectProjectionSelector, - SearchIndexerKnowledgeStoreTableProjectionSelector, - PIIDetectionSkillMaskingMode, - KnownPIIDetectionSkillMaskingMode, - LineEnding, - KnownLineEnding, - SearchIndexerDataIdentity as BaseSearchIndexerDataIdentity, - 
IndexerState, - IndexerExecutionStatusDetail, - KnownIndexerExecutionStatusDetail, - IndexingMode, - KnownIndexingMode, - SemanticSettings, - SemanticConfiguration, - PrioritizedFields, - SemanticField, - SearchAlias, - NativeBlobSoftDeleteDeletionDetectionPolicy, - SearchIndexerIndexProjectionSelector, - VectorSearchProfile, -} from "./generated/service/models"; -export { AzureKeyCredential } from "@azure/core-auth"; + TokenFilter, + VectorSearch, + VectorSearchAlgorithmConfiguration, + VectorSearchCompressionConfiguration, + VectorSearchVectorizer, + WebApiSkill, +} from "./serviceModels"; export { createSynonymMapFromFile } from "./synonymMapHelper"; -export { KnownSearchAudience } from "./searchAudience"; diff --git a/sdk/search/search-documents/src/indexDocumentsBatch.ts b/sdk/search/search-documents/src/indexDocumentsBatch.ts index 28910ac384f3..1122943bb701 100644 --- a/sdk/search/search-documents/src/indexDocumentsBatch.ts +++ b/sdk/search/search-documents/src/indexDocumentsBatch.ts @@ -7,13 +7,13 @@ import { IndexDocumentsAction } from "./indexModels"; * Class used to perform batch operations * with multiple documents to the index. */ -export class IndexDocumentsBatch { +export class IndexDocumentsBatch { /** * The set of actions taken in this batch. */ - public readonly actions: IndexDocumentsAction[]; + public readonly actions: IndexDocumentsAction[]; - constructor(actions: IndexDocumentsAction[] = []) { + constructor(actions: IndexDocumentsAction[] = []) { this.actions = actions; } @@ -21,8 +21,8 @@ export class IndexDocumentsBatch { * Upload an array of documents to the index. * @param documents - The documents to upload. 
*/ - public upload(documents: T[]): void { - const batch = documents.map>((doc) => { + public upload(documents: TModel[]): void { + const batch = documents.map>((doc) => { return { ...doc, __actionType: "upload", @@ -37,8 +37,8 @@ export class IndexDocumentsBatch { * For more details about how merging works, see https://docs.microsoft.com/en-us/rest/api/searchservice/AddUpdate-or-Delete-Documents * @param documents - The updated documents. */ - public merge(documents: T[]): void { - const batch = documents.map>((doc) => { + public merge(documents: TModel[]): void { + const batch = documents.map>((doc) => { return { ...doc, __actionType: "merge", @@ -53,8 +53,8 @@ export class IndexDocumentsBatch { * For more details about how merging works, see https://docs.microsoft.com/en-us/rest/api/searchservice/AddUpdate-or-Delete-Documents * @param documents - The new/updated documents. */ - public mergeOrUpload(documents: T[]): void { - const batch = documents.map>((doc) => { + public mergeOrUpload(documents: TModel[]): void { + const batch = documents.map>((doc) => { return { ...doc, __actionType: "mergeOrUpload", @@ -69,34 +69,34 @@ export class IndexDocumentsBatch { * @param keyName - The name of their primary key in the index. * @param keyValues - The primary key values of documents to delete. */ - public delete(keyName: keyof T, keyValues: string[]): void; + public delete(keyName: keyof TModel, keyValues: string[]): void; /** * Delete a set of documents. * @param documents - Documents to be deleted. 
*/ - public delete(documents: T[]): void; + public delete(documents: TModel[]): void; - public delete(keyNameOrDocuments: keyof T | T[], keyValues?: string[]): void { + public delete(keyNameOrDocuments: keyof TModel | TModel[], keyValues?: string[]): void { if (keyValues) { - const keyName = keyNameOrDocuments as keyof T; + const keyName = keyNameOrDocuments as keyof TModel; - const batch = keyValues.map>((keyValue) => { + const batch = keyValues.map>((keyValue) => { return { __actionType: "delete", [keyName]: keyValue, - } as IndexDocumentsAction; + } as IndexDocumentsAction; }); this.actions.push(...batch); } else { - const documents = keyNameOrDocuments as T[]; + const documents = keyNameOrDocuments as TModel[]; - const batch = documents.map>((document) => { + const batch = documents.map>((document) => { return { __actionType: "delete", ...document, - } as IndexDocumentsAction; + } as IndexDocumentsAction; }); this.actions.push(...batch); diff --git a/sdk/search/search-documents/src/indexModels.ts b/sdk/search/search-documents/src/indexModels.ts index 713b4180c024..14907ea0110d 100644 --- a/sdk/search/search-documents/src/indexModels.ts +++ b/sdk/search/search-documents/src/indexModels.ts @@ -2,24 +2,29 @@ // Licensed under the MIT license. 
import { OperationOptions } from "@azure/core-client"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { - AnswerResult, AutocompleteMode, - CaptionResult, - Captions, FacetResult, IndexActionType, - QueryAnswerType, - QueryCaptionType, + QueryAnswerResult, + QueryCaptionResult, + QueryDebugMode, QueryLanguage, QueryResultDocumentRerankerInput, - QuerySpellerType, QueryType, ScoringStatistics, SearchMode, + SemanticFieldState, Speller, } from "./generated/data/models"; -import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { + SemanticErrorMode, + SemanticErrorReason, + SemanticSearchResultsType, + VectorFilterMode, + VectorQueryKind, +} from "./generatedStringLiteralUnions"; import GeographyPoint from "./geographyPoint"; /** @@ -180,16 +185,9 @@ export type SearchIterator< ListSearchResultsPageSettings >; -export type VectorQueryKind = "vector" | "text"; - -/** - * Determines whether or not filters are applied before or after the vector search is performed. - */ -export type VectorFilterMode = "postFilter" | "preFilter"; - /** The query parameters for vector and hybrid search queries. */ export type VectorQuery = - | RawVectorQuery + | VectorizedQuery | VectorizableTextQuery; /** The query parameters for vector and hybrid search queries. */ @@ -200,16 +198,27 @@ export interface BaseVectorQuery { kNearestNeighborsCount?: number; /** Vector Fields of type Collection(Edm.Single) to be included in the vector searched. */ fields?: SearchFieldArray; - /** When true, triggers an exhaustive k-nearest neighbor search across all vectors within the vector index. Useful for scenarios where exact matches are critical, such as determining ground truth values. */ + /** + * When true, triggers an exhaustive k-nearest neighbor search across all vectors within the + * vector index. Useful for scenarios where exact matches are critical, such as determining ground + * truth values. + */ exhaustive?: boolean; + /** + * Oversampling factor. 
Minimum value is 1. It overrides the 'defaultOversampling' parameter + * configured in the index definition. It can be set only when 'rerankWithOriginalVectors' is + * true. This parameter is only permitted when a compression method is used on the underlying + * vector field. + */ + oversampling?: number; } /** The query parameters to use for vector search when a raw vector value is provided. */ -export interface RawVectorQuery extends BaseVectorQuery { +export interface VectorizedQuery extends BaseVectorQuery { /** Polymorphic discriminator, which specifies the different types this object can be */ kind: "vector"; /** The vector representation of a search query. */ - vector?: number[]; + vector: number[]; } /** The query parameters to use for vector search when a text value that needs to be vectorized is provided. */ @@ -223,179 +232,7 @@ export interface VectorizableTextQuery extends BaseVector /** * Parameters for filtering, sorting, faceting, paging, and other search query behaviors. */ -export interface SearchRequest { - /** - * A value that specifies whether to fetch the total count of results. Default is false. Setting - * this value to true may have a performance impact. Note that the count returned is an - * approximation. - */ - includeTotalCount?: boolean; - /** - * The list of facet expressions to apply to the search query. Each facet expression contains a - * field name, optionally followed by a comma-separated list of name:value pairs. - */ - facets?: string[]; - /** - * The OData $filter expression to apply to the search query. - */ - filter?: string; - /** - * The comma-separated list of field names to use for hit highlights. Only searchable fields can - * be used for hit highlighting. - */ - highlightFields?: string; - /** - * A string tag that is appended to hit highlights. Must be set with highlightPreTag. Default is - * </em>. - */ - highlightPostTag?: string; - /** - * A string tag that is prepended to hit highlights. 
Must be set with highlightPostTag. Default - * is <em>. - */ - highlightPreTag?: string; - /** - * A number between 0 and 100 indicating the percentage of the index that must be covered by a - * search query in order for the query to be reported as a success. This parameter can be useful - * for ensuring search availability even for services with only one replica. The default is 100. - */ - minimumCoverage?: number; - /** - * The comma-separated list of OData $orderby expressions by which to sort the results. Each - * expression can be either a field name or a call to either the geo.distance() or the - * search.score() functions. Each expression can be followed by asc to indicate ascending, or - * desc to indicate descending. The default is ascending order. Ties will be broken by the match - * scores of documents. If no $orderby is specified, the default sort order is descending by - * document match score. There can be at most 32 $orderby clauses. - */ - orderBy?: string; - /** - * A value that specifies the syntax of the search query. The default is 'simple'. Use 'full' if - * your query uses the Lucene query syntax. Possible values include: 'Simple', 'Full' - */ - queryType?: QueryType; - /** - * A value that specifies whether we want to calculate scoring statistics (such as document - * frequency) globally for more consistent scoring, or locally, for lower latency. The default is - * 'local'. Use 'global' to aggregate scoring statistics globally before scoring. Using global - * scoring statistics can increase latency of search queries. Possible values include: 'Local', - * 'Global' - */ - scoringStatistics?: ScoringStatistics; - /** - * A value to be used to create a sticky session, which can help getting more consistent results. - * As long as the same sessionId is used, a best-effort attempt will be made to target the same - * replica set. 
Be wary that reusing the same sessionID values repeatedly can interfere with the - * load balancing of the requests across replicas and adversely affect the performance of the - * search service. The value used as sessionId cannot start with a '_' character. - */ - sessionId?: string; - /** - * The list of parameter values to be used in scoring functions (for example, - * referencePointParameter) using the format name-values. For example, if the scoring profile - * defines a function with a parameter called 'mylocation' the parameter string would be - * "mylocation--122.2,44.8" (without the quotes). - */ - scoringParameters?: string[]; - /** - * The name of a scoring profile to evaluate match scores for matching documents in order to sort - * the results. - */ - scoringProfile?: string; - /** - * Allows setting a separate search query that will be solely used for semantic reranking, - * semantic captions and semantic answers. Is useful for scenarios where there is a need to use - * different queries between the base retrieval and ranking phase, and the L2 semantic phase. - */ - semanticQuery?: string; - /** - * The name of a semantic configuration that will be used when processing documents for queries of - * type semantic. - */ - semanticConfiguration?: string; - /** - * Allows the user to choose whether a semantic call should fail completely, or to return partial - * results (default). - */ - semanticErrorHandlingMode?: SemanticErrorHandlingMode; - /** - * Allows the user to set an upper bound on the amount of time it takes for semantic enrichment - * to finish processing before the request fails. - */ - semanticMaxWaitInMilliseconds?: number; - /** - * Enables a debugging tool that can be used to further explore your Semantic search results. - */ - debugMode?: QueryDebugMode; - /** - * A full-text search query expression; Use "*" or omit this parameter to match all documents. 
- */ - searchText?: string; - /** - * The comma-separated list of field names to which to scope the full-text search. When using - * fielded search (fieldName:searchExpression) in a full Lucene query, the field names of each - * fielded search expression take precedence over any field names listed in this parameter. - */ - searchFields?: string; - /** - * A value that specifies whether any or all of the search terms must be matched in order to - * count the document as a match. Possible values include: 'Any', 'All' - */ - searchMode?: SearchMode; - /** - * A value that specifies the language of the search query. - */ - queryLanguage?: QueryLanguage; - /** - * A value that specified the type of the speller to use to spell-correct individual search - * query terms. - */ - speller?: QuerySpellerType; - /** - * A value that specifies whether answers should be returned as part of the search response. - */ - answers?: QueryAnswerType; - /** - * The comma-separated list of fields to retrieve. If unspecified, all fields marked as - * retrievable in the schema are included. - */ - select?: string; - /** - * The number of search results to skip. This value cannot be greater than 100,000. If you need - * to scan documents in sequence, but cannot use skip due to this limitation, consider using - * orderby on a totally-ordered key and filter with a range query instead. - */ - skip?: number; - /** - * The number of search results to retrieve. This can be used in conjunction with $skip to - * implement client-side paging of search results. If results are truncated due to server-side - * paging, the response will include a continuation token that can be used to issue another - * Search request for the next page of results. - */ - top?: number; - /** - * A value that specifies whether captions should be returned as part of the search response. - */ - captions?: QueryCaptionType; - /** - * The comma-separated list of field names used for semantic search. 
- */ - semanticFields?: string; - /** - * The query parameters for vector, hybrid, and multi-vector search queries. - */ - vectorQueries?: VectorQuery[]; - /** - * Determines whether or not filters are applied before or after the vector search is performed. - * Default is 'preFilter'. - */ - vectorFilterMode?: VectorFilterMode; -} - -/** - * Parameters for filtering, sorting, faceting, paging, and other search query behaviors. - */ -export interface SearchRequestOptions< +export interface BaseSearchRequestOptions< TModel extends object, TFields extends SelectFields = SelectFields, > { @@ -461,31 +298,6 @@ export interface SearchRequestOptions< * the results. */ scoringProfile?: string; - /** - * Allows setting a separate search query that will be solely used for semantic reranking, - * semantic captions and semantic answers. Is useful for scenarios where there is a need to use - * different queries between the base retrieval and ranking phase, and the L2 semantic phase. - */ - semanticQuery?: string; - /** - * The name of a semantic configuration that will be used when processing documents for queries of - * type semantic. - */ - semanticConfiguration?: string; - /** - * Allows the user to choose whether a semantic call should fail completely, or to return - * partial results (default). - */ - semanticErrorHandlingMode?: SemanticErrorHandlingMode; - /** - * Allows the user to set an upper bound on the amount of time it takes for semantic enrichment to finish - * processing before the request fails. - */ - semanticMaxWaitInMilliseconds?: number; - /** - * Enables a debugging tool that can be used to further explore your search results. - */ - debugMode?: QueryDebugMode; /** * The comma-separated list of field names to which to scope the full-text search. 
When using * fielded search (fieldName:searchExpression) in a full Lucene query, the field names of each @@ -500,11 +312,6 @@ export interface SearchRequestOptions< * Improve search recall by spell-correcting individual search query terms. */ speller?: Speller; - /** - * This parameter is only valid if the query type is 'semantic'. If set, the query returns answers - * extracted from key passages in the highest ranked documents. - */ - answers?: Answers | AnswersOptions; /** * A value that specifies whether any or all of the search terms must be matched in order to * count the document as a match. Possible values include: 'any', 'all' @@ -543,27 +350,29 @@ export interface SearchRequestOptions< */ top?: number; /** - * This parameter is only valid if the query type is 'semantic'. If set, the query returns captions - * extracted from key passages in the highest ranked documents. When Captions is set to 'extractive', - * highlighting is enabled by default, and can be configured by appending the pipe character '|' - * followed by the 'highlight-true'/'highlight-false' option, such as 'extractive|highlight-true'. Defaults to 'None'. - */ - captions?: Captions; - /** - * The list of field names used for semantic search. - */ - semanticFields?: string[]; - /** - * The query parameters for vector and hybrid search queries. - */ - vectorQueries?: VectorQuery[]; - /** - * Determines whether or not filters are applied before or after the vector search is performed. - * Default is 'preFilter'. + * Defines options for vector search queries */ - vectorFilterMode?: VectorFilterMode; + vectorSearchOptions?: VectorSearchOptions; } +/** + * Parameters for filtering, sorting, faceting, paging, and other search query behaviors. 
+ */ +export type SearchRequestOptions< + TModel extends object, + TFields extends SelectFields = SelectFields, +> = BaseSearchRequestOptions & SearchRequestQueryTypeOptions; + +export type SearchRequestQueryTypeOptions = + | { + queryType: "semantic"; + /** + * Defines options for semantic search queries + */ + semanticSearchOptions: SemanticSearchOptions; + } + | { queryType?: "simple" | "full" }; + /** * Contains a document found by a search query, plus associated metadata. */ @@ -591,7 +400,7 @@ export type SearchResult< * Captions are the most representative passages from the document relatively to the search query. They are often used as document summary. Captions are only returned for queries of type 'semantic'. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly captions?: CaptionResult[]; + readonly captions?: QueryCaptionResult[]; document: NarrowedModel; @@ -631,17 +440,17 @@ export interface SearchDocumentsResultBase { * not specified or set to 'none'. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly answers?: AnswerResult[]; + readonly answers?: QueryAnswerResult[]; /** * Reason that a partial response was returned for a semantic search request. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly semanticPartialResponseReason?: SemanticPartialResponseReason; + readonly semanticErrorReason?: SemanticErrorReason; /** * Type of partial response that was returned for a semantic search request. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly semanticPartialResponseType?: SemanticPartialResponseType; + readonly semanticSearchResultsType?: SemanticSearchResultsType; } /** @@ -830,13 +639,13 @@ export interface AutocompleteRequest { /** * Represents an index action that operates on a document. 
*/ -export type IndexDocumentsAction = { +export type IndexDocumentsAction = { /** * The operation to perform on a document in an indexing batch. Possible values include: * 'upload', 'merge', 'mergeOrUpload', 'delete' */ __actionType: IndexActionType; -} & Partial; +} & Partial; // END manually modified generated interfaces @@ -1090,83 +899,98 @@ export interface SemanticDebugInfo { } /** - * This parameter is only valid if the query type is 'semantic'. If set, the query returns answers - * extracted from key passages in the highest ranked documents. The number of answers returned can - * be configured by appending the pipe character '|' followed by the 'count-\' option - * after the answers parameter value, such as 'extractive|count-3'. Default count is 1. The - * confidence threshold can be configured by appending the pipe character '|' followed by the - * 'threshold-\' option after the answers parameter value, such as - * 'extractive|threshold-0.9'. Default threshold is 0.7. + * Extracts answer candidates from the contents of the documents returned in response to a query + * expressed as a question in natural language. */ -export type Answers = string; +export interface ExtractiveQueryAnswer { + answerType: "extractive"; + /** + * The number of answers returned. Default count is 1 + */ + count?: number; + /** + * The confidence threshold. Default threshold is 0.7 + */ + threshold?: number; +} /** * A value that specifies whether answers should be returned as part of the search response. * This parameter is only valid if the query type is 'semantic'. If set to `extractive`, the query * returns answers extracted from key passages in the highest ranked documents. */ -export type AnswersOptions = - | { - /** - * Extracts answer candidates from the contents of the documents returned in response to a - * query expressed as a question in natural language. - */ - answers: "extractive"; - /** - * The number of answers returned. 
Default count is 1 - */ - count?: number; - /** - * The confidence threshold. Default threshold is 0.7 - */ - threshold?: number; - } - | { - /** - * Do not return answers for the query. - */ - answers: "none"; - }; - -/** - * maxWaitExceeded: If 'semanticMaxWaitInMilliseconds' was set and the semantic processing duration - * exceeded that value. Only the base results were returned. - * - * capacityOverloaded: The request was throttled. Only the base results were returned. - * - * transient: At least one step of the semantic process failed. - */ -export type SemanticPartialResponseReason = "maxWaitExceeded" | "capacityOverloaded" | "transient"; +export type QueryAnswer = ExtractiveQueryAnswer; -/** - * baseResults: Results without any semantic enrichment or reranking. - * - * rerankedResults: Results have been reranked with the reranker model and will include semantic - * captions. They will not include any answers, answers highlights or caption highlights. - */ -export type SemanticPartialResponseType = "baseResults" | "rerankedResults"; +/** Extracts captions from the matching documents that contain passages relevant to the search query. */ +export interface ExtractiveQueryCaption { + captionType: "extractive"; + highlight?: boolean; +} /** - * disabled: No query debugging information will be returned. - * - * semantic: Allows the user to further explore their Semantic search results. + * A value that specifies whether captions should be returned as part of the search response. + * This parameter is only valid if the query type is 'semantic'. If set, the query returns captions + * extracted from key passages in the highest ranked documents. When Captions is 'extractive', + * highlighting is enabled by default. Defaults to 'none'. */ -export type QueryDebugMode = "disabled" | "semantic"; +export type QueryCaption = ExtractiveQueryCaption; /** - * partial: If the semantic processing fails, partial results still return. 
The definition of - * partial results depends on what semantic step failed and what was the reason for failure. - * - * fail: If there is an exception during the semantic processing step, the query will fail and - * return the appropriate HTTP code depending on the error. + * Defines options for semantic search queries */ -export type SemanticErrorHandlingMode = "partial" | "fail"; +export interface SemanticSearchOptions { + /** + * The name of a semantic configuration that will be used when processing documents for queries of + * type semantic. + */ + configurationName?: string; + /** + * Allows the user to choose whether a semantic call should fail completely, or to return partial + * results (default). + */ + errorMode?: SemanticErrorMode; + /** + * Allows the user to set an upper bound on the amount of time it takes for semantic enrichment + * to finish processing before the request fails. + */ + maxWaitInMilliseconds?: number; + /** + * If set, the query returns answers extracted from key passages in the highest ranked documents. + */ + answers?: QueryAnswer; + /** + * If set, the query returns captions extracted from key passages in the highest ranked + * documents. When Captions is set to 'extractive', highlighting is enabled by default. Defaults + * to 'None'. + */ + captions?: QueryCaption; + /** + * Allows setting a separate search query that will be solely used for semantic reranking, + * semantic captions and semantic answers. Is useful for scenarios where there is a need to use + * different queries between the base retrieval and ranking phase, and the L2 semantic phase. + */ + semanticQuery?: string; + /** + * The list of field names used for semantic search. + */ + semanticFields?: string[]; + /** + * Enables a debugging tool that can be used to further explore your search results. + */ + debugMode?: QueryDebugMode; +} /** - * used: The field was fully used for semantic enrichment. - * - * unused: The field was not used for semantic enrichment. 
- * - * partial: The field was partially used for semantic enrichment. + * Defines options for vector search queries */ -export type SemanticFieldState = "used" | "unused" | "partial"; +export interface VectorSearchOptions { + /** + * The query parameters for vector, hybrid, and multi-vector search queries. + */ + queries: VectorQuery[]; + /** + * Determines whether or not filters are applied before or after the vector search is performed. + * Default is 'preFilter'. + */ + filterMode?: VectorFilterMode; +} diff --git a/sdk/search/search-documents/src/odataMetadataPolicy.ts b/sdk/search/search-documents/src/odataMetadataPolicy.ts index c3854981db6c..c6b873ee83c1 100644 --- a/sdk/search/search-documents/src/odataMetadataPolicy.ts +++ b/sdk/search/search-documents/src/odataMetadataPolicy.ts @@ -10,7 +10,7 @@ import { const AcceptHeaderName = "Accept"; -export type MetadataLevel = "none" | "minimal"; +type MetadataLevel = "none" | "minimal"; const odataMetadataPolicy = "OdataMetadataPolicy"; /** diff --git a/sdk/search/search-documents/src/searchClient.ts b/sdk/search/search-documents/src/searchClient.ts index 6a724e9c87a8..a10bbcf7a139 100644 --- a/sdk/search/search-documents/src/searchClient.ts +++ b/sdk/search/search-documents/src/searchClient.ts @@ -3,29 +3,26 @@ /// +import { isTokenCredential, KeyCredential, TokenCredential } from "@azure/core-auth"; import { InternalClientPipelineOptions } from "@azure/core-client"; -import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; -import { SearchClient as GeneratedClient } from "./generated/data/searchClient"; -import { KeyCredential, TokenCredential, isTokenCredential } from "@azure/core-auth"; -import { createSearchApiKeyCredentialPolicy } from "./searchApiKeyCredentialPolicy"; -import { logger } from "./logger"; +import { ExtendedCommonClientOptions } from "@azure/core-http-compat"; +import { bearerTokenAuthenticationPolicy, Pipeline } from "@azure/core-rest-pipeline"; +import { decode, encode } 
from "./base64"; import { AutocompleteRequest, AutocompleteResult, IndexDocumentsResult, - KnownSemanticPartialResponseReason, - KnownSemanticPartialResponseType, - SuggestRequest, + QueryAnswerType as BaseAnswers, + QueryCaptionType as BaseCaptions, SearchRequest as GeneratedSearchRequest, - Answers, - QueryAnswerType, - VectorQueryUnion as GeneratedVectorQuery, - VectorQuery as GeneratedBaseVectorQuery, - RawVectorQuery as GeneratedRawVectorQuery, + SuggestRequest, VectorizableTextQuery as GeneratedVectorizableTextQuery, + VectorizedQuery as GeneratedVectorizedQuery, + VectorQueryUnion as GeneratedVectorQuery, } from "./generated/data/models"; -import { createSpan } from "./tracing"; -import { deserialize, serialize } from "./serialization"; +import { SearchClient as GeneratedClient } from "./generated/data/searchClient"; +import { SemanticErrorReason, SemanticSearchResultsType } from "./generatedStringLiteralUnions"; +import { IndexDocumentsBatch } from "./indexDocumentsBatch"; import { AutocompleteOptions, CountDocumentsOptions, @@ -35,32 +32,32 @@ import { ListSearchResultsPageSettings, MergeDocumentsOptions, MergeOrUploadDocumentsOptions, + NarrowedModel, + QueryAnswer, + QueryCaption, SearchDocumentsPageResult, SearchDocumentsResult, + SearchFieldArray, SearchIterator, SearchOptions, - SearchRequest, - SelectFields, SearchResult, + SelectArray, + SelectFields, SuggestDocumentsResult, SuggestOptions, UploadDocumentsOptions, - NarrowedModel, - SelectArray, - SearchFieldArray, - AnswersOptions, - BaseVectorQuery, - RawVectorQuery, VectorizableTextQuery, + VectorizedQuery, VectorQuery, } from "./indexModels"; +import { logger } from "./logger"; import { createOdataMetadataPolicy } from "./odataMetadataPolicy"; -import { IndexDocumentsBatch } from "./indexDocumentsBatch"; -import { decode, encode } from "./base64"; -import * as utils from "./serviceUtils"; -import { IndexDocumentsClient } from "./searchIndexingBufferedSender"; -import { 
ExtendedCommonClientOptions } from "@azure/core-http-compat"; +import { createSearchApiKeyCredentialPolicy } from "./searchApiKeyCredentialPolicy"; import { KnownSearchAudience } from "./searchAudience"; +import { IndexDocumentsClient } from "./searchIndexingBufferedSender"; +import { deserialize, serialize } from "./serialization"; +import * as utils from "./serviceUtils"; +import { createSpan } from "./tracing"; /** * Client options used to configure Cognitive Search API requests. @@ -116,12 +113,16 @@ export class SearchClient implements IndexDocumentsClient public readonly indexName: string; /** - * @internal * @hidden * A reference to the auto-generated SearchClient */ private readonly client: GeneratedClient; + /** + * A reference to the internal HTTP pipeline for use with raw requests + */ + public readonly pipeline: Pipeline; + /** * Creates an instance of SearchClient. * @@ -195,6 +196,7 @@ export class SearchClient implements IndexDocumentsClient this.serviceVersion, internalClientPipelineOptions, ); + this.pipeline = this.client.pipeline; if (isTokenCredential(credential)) { const scope: string = options.audience @@ -315,21 +317,32 @@ export class SearchClient implements IndexDocumentsClient private async searchDocuments>( searchText?: string, options: SearchOptions = {}, - nextPageParameters: SearchRequest = {}, + nextPageParameters: GeneratedSearchRequest = {}, ): Promise> { const { + includeTotalCount, + orderBy, searchFields, - semanticFields, select, - orderBy, - includeTotalCount, - vectorQueries, + vectorSearchOptions, + semanticSearchOptions, + ...restOptions + } = options as typeof options & { queryType: "semantic" }; + + const { + semanticFields, + configurationName, + errorMode, answers, - semanticErrorHandlingMode, + captions, debugMode, - ...restOptions - } = options; + ...restSemanticOptions + } = semanticSearchOptions ?? {}; + const { queries, filterMode, ...restVectorOptions } = vectorSearchOptions ?? 
{}; + const fullOptions: GeneratedSearchRequest = { + ...restSemanticOptions, + ...restVectorOptions, ...restOptions, ...nextPageParameters, searchFields: this.convertSearchFields(searchFields), @@ -337,10 +350,13 @@ export class SearchClient implements IndexDocumentsClient select: this.convertSelect(select) || "*", orderBy: this.convertOrderBy(orderBy), includeTotalResultCount: includeTotalCount, - vectorQueries: vectorQueries?.map(this.convertVectorQuery.bind(this)), - answers: this.convertAnswers(answers), - semanticErrorHandling: semanticErrorHandlingMode, + vectorQueries: queries?.map(this.convertVectorQuery.bind(this)), + answers: this.convertQueryAnswers(answers), + captions: this.convertQueryCaptions(captions), + semanticErrorHandling: errorMode, + semanticConfigurationName: configurationName, debug: debugMode, + vectorFilterMode: filterMode, }; const { span, updatedOptions } = createSpan("SearchClient-searchDocuments", options); @@ -358,10 +374,13 @@ export class SearchClient implements IndexDocumentsClient results, nextLink, nextPageParameters: resultNextPageParameters, - semanticPartialResponseReason, - semanticPartialResponseType, + semanticPartialResponseReason: semanticErrorReason, + semanticPartialResponseType: semanticSearchResultsType, ...restResult - } = result; + } = result as typeof result & { + semanticPartialResponseReason: SemanticErrorReason | undefined; + semanticPartialResponseType: SemanticSearchResultsType | undefined; + }; const modifiedResults = utils.generatedSearchResultToPublicSearchResult( results, @@ -370,16 +389,9 @@ export class SearchClient implements IndexDocumentsClient const converted: SearchDocumentsPageResult = { ...restResult, results: modifiedResults, - semanticPartialResponseReason: - semanticPartialResponseReason as `${KnownSemanticPartialResponseReason}`, - semanticPartialResponseType: - semanticPartialResponseType as `${KnownSemanticPartialResponseType}`, - continuationToken: this.encodeContinuationToken( - nextLink, 
- resultNextPageParameters - ? utils.generatedSearchRequestToPublicSearchRequest(resultNextPageParameters) - : resultNextPageParameters, - ), + semanticErrorReason, + semanticSearchResultsType, + continuationToken: this.encodeContinuationToken(nextLink, resultNextPageParameters), }; return deserialize>(converted); @@ -605,7 +617,7 @@ export class SearchClient implements IndexDocumentsClient try { const result = await this.client.documents.get(key, { ...updatedOptions, - selectedFields: updatedOptions.selectedFields as string[], + selectedFields: updatedOptions.selectedFields as string[] | undefined, }); return deserialize>(result); } catch (e: any) { @@ -798,7 +810,7 @@ export class SearchClient implements IndexDocumentsClient private encodeContinuationToken( nextLink: string | undefined, - nextPageParameters: SearchRequest | undefined, + nextPageParameters: GeneratedSearchRequest | undefined, ): string | undefined { if (!nextLink || !nextPageParameters) { return undefined; @@ -813,7 +825,7 @@ export class SearchClient implements IndexDocumentsClient private decodeContinuationToken( token?: string, - ): { nextPageParameters: SearchRequest; nextLink: string } | undefined { + ): { nextPageParameters: GeneratedSearchRequest; nextLink: string } | undefined { if (!token) { return undefined; } @@ -824,7 +836,7 @@ export class SearchClient implements IndexDocumentsClient const result: { apiVersion: string; nextLink: string; - nextPageParameters: SearchRequest; + nextPageParameters: GeneratedSearchRequest; } = JSON.parse(decodedToken); if (result.apiVersion !== this.apiVersion) { @@ -877,16 +889,13 @@ export class SearchClient implements IndexDocumentsClient return orderBy; } - private convertAnswers(answers?: Answers | AnswersOptions): QueryAnswerType | undefined { - if (!answers || typeof answers === "string") { + private convertQueryAnswers(answers?: QueryAnswer): BaseAnswers | undefined { + if (!answers) { return answers; } - if (answers.answers === "none") { - return 
answers.answers; - } const config = []; - const { answers: output, count, threshold } = answers; + const { answerType: output, count, threshold } = answers; if (count) { config.push(`count-${count}`); @@ -903,12 +912,30 @@ export class SearchClient implements IndexDocumentsClient return output; } + private convertQueryCaptions(captions?: QueryCaption): BaseCaptions | undefined { + if (!captions) { + return captions; + } + + const config = []; + const { captionType: output, highlight } = captions; + + if (highlight !== undefined) { + config.push(`highlight-${highlight}`); + } + + if (config.length) { + return output + `|${config.join(",")}`; + } + + return output; + } + private convertVectorQuery(): undefined; - private convertVectorQuery(vectorQuery: RawVectorQuery): GeneratedRawVectorQuery; + private convertVectorQuery(vectorQuery: VectorizedQuery): GeneratedVectorizedQuery; private convertVectorQuery( vectorQuery: VectorizableTextQuery, ): GeneratedVectorizableTextQuery; - private convertVectorQuery(vectorQuery: BaseVectorQuery): GeneratedBaseVectorQuery; private convertVectorQuery(vectorQuery: VectorQuery): GeneratedVectorQuery; private convertVectorQuery(vectorQuery?: VectorQuery): GeneratedVectorQuery | undefined { if (!vectorQuery) { diff --git a/sdk/search/search-documents/src/searchIndexClient.ts b/sdk/search/search-documents/src/searchIndexClient.ts index 4b19a6793b25..229c672a65f0 100644 --- a/sdk/search/search-documents/src/searchIndexClient.ts +++ b/sdk/search/search-documents/src/searchIndexClient.ts @@ -3,13 +3,17 @@ /// -import { KeyCredential, TokenCredential, isTokenCredential } from "@azure/core-auth"; +import { isTokenCredential, KeyCredential, TokenCredential } from "@azure/core-auth"; import { InternalClientPipelineOptions } from "@azure/core-client"; -import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; +import { ExtendedCommonClientOptions } from "@azure/core-http-compat"; +import { bearerTokenAuthenticationPolicy, 
Pipeline } from "@azure/core-rest-pipeline"; import { AnalyzeResult } from "./generated/service/models"; import { SearchServiceClient as GeneratedClient } from "./generated/service/searchServiceClient"; import { logger } from "./logger"; +import { createOdataMetadataPolicy } from "./odataMetadataPolicy"; import { createSearchApiKeyCredentialPolicy } from "./searchApiKeyCredentialPolicy"; +import { KnownSearchAudience } from "./searchAudience"; +import { SearchClient, SearchClientOptions as GetSearchClientOptions } from "./searchClient"; import { AliasIterator, AnalyzeTextOptions, @@ -40,10 +44,6 @@ import { } from "./serviceModels"; import * as utils from "./serviceUtils"; import { createSpan } from "./tracing"; -import { createOdataMetadataPolicy } from "./odataMetadataPolicy"; -import { SearchClientOptions as GetSearchClientOptions, SearchClient } from "./searchClient"; -import { ExtendedCommonClientOptions } from "@azure/core-http-compat"; -import { KnownSearchAudience } from "./searchAudience"; /** * Client options used to configure Cognitive Search API requests. @@ -91,12 +91,16 @@ export class SearchIndexClient { public readonly endpoint: string; /** - * @internal * @hidden * A reference to the auto-generated SearchServiceClient */ private readonly client: GeneratedClient; + /** + * A reference to the internal HTTP pipeline for use with raw requests + */ + public readonly pipeline: Pipeline; + /** * Used to authenticate requests to the service. 
*/ @@ -158,6 +162,7 @@ export class SearchIndexClient { this.serviceVersion, internalClientPipelineOptions, ); + this.pipeline = this.client.pipeline; if (isTokenCredential(credential)) { const scope: string = this.options.audience @@ -732,7 +737,15 @@ export class SearchIndexClient { * @param options - Additional arguments */ public async analyzeText(indexName: string, options: AnalyzeTextOptions): Promise { - const { abortSignal, requestOptions, tracingOptions, ...restOptions } = options; + const { + abortSignal, + requestOptions, + tracingOptions, + analyzerName: analyzer, + tokenizerName: tokenizer, + ...restOptions + } = options; + const operationOptions = { abortSignal, requestOptions, @@ -740,15 +753,11 @@ export class SearchIndexClient { }; const { span, updatedOptions } = createSpan("SearchIndexClient-analyzeText", operationOptions); + try { const result = await this.client.indexes.analyze( indexName, - { - ...restOptions, - analyzer: restOptions.analyzerName, - tokenizer: restOptions.tokenizerName, - normalizer: restOptions.normalizerName, - }, + { ...restOptions, analyzer, tokenizer }, updatedOptions, ); return result; diff --git a/sdk/search/search-documents/src/searchIndexerClient.ts b/sdk/search/search-documents/src/searchIndexerClient.ts index ecd8a5d2c641..12cae27c57f0 100644 --- a/sdk/search/search-documents/src/searchIndexerClient.ts +++ b/sdk/search/search-documents/src/searchIndexerClient.ts @@ -1,20 +1,23 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { KeyCredential, TokenCredential, isTokenCredential } from "@azure/core-auth"; +import { isTokenCredential, KeyCredential, TokenCredential } from "@azure/core-auth"; import { InternalClientPipelineOptions } from "@azure/core-client"; -import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; +import { ExtendedCommonClientOptions } from "@azure/core-http-compat"; +import { bearerTokenAuthenticationPolicy, Pipeline } from "@azure/core-rest-pipeline"; import { SearchIndexerStatus } from "./generated/service/models"; import { SearchServiceClient as GeneratedClient } from "./generated/service/searchServiceClient"; import { logger } from "./logger"; +import { createOdataMetadataPolicy } from "./odataMetadataPolicy"; import { createSearchApiKeyCredentialPolicy } from "./searchApiKeyCredentialPolicy"; +import { KnownSearchAudience } from "./searchAudience"; import { CreateDataSourceConnectionOptions, CreateIndexerOptions, - CreateOrUpdateSkillsetOptions, - CreateSkillsetOptions, CreateorUpdateDataSourceConnectionOptions, CreateorUpdateIndexerOptions, + CreateOrUpdateSkillsetOptions, + CreateSkillsetOptions, DeleteDataSourceConnectionOptions, DeleteIndexerOptions, DeleteSkillsetOptions, @@ -35,9 +38,6 @@ import { } from "./serviceModels"; import * as utils from "./serviceUtils"; import { createSpan } from "./tracing"; -import { createOdataMetadataPolicy } from "./odataMetadataPolicy"; -import { ExtendedCommonClientOptions } from "@azure/core-http-compat"; -import { KnownSearchAudience } from "./searchAudience"; /** * Client options used to configure Cognitive Search API requests. 
@@ -85,12 +85,16 @@ export class SearchIndexerClient { public readonly endpoint: string; /** - * @internal * @hidden * A reference to the auto-generated SearchServiceClient */ private readonly client: GeneratedClient; + /** + * A reference to the internal HTTP pipeline for use with raw requests + */ + public readonly pipeline: Pipeline; + /** * Creates an instance of SearchIndexerClient. * @@ -140,6 +144,7 @@ export class SearchIndexerClient { this.serviceVersion, internalClientPipelineOptions, ); + this.pipeline = this.client.pipeline; if (isTokenCredential(credential)) { const scope: string = options.audience @@ -469,17 +474,17 @@ export class SearchIndexerClient { "SearchIndexerClient-createOrUpdateIndexer", options, ); + + const { onlyIfUnchanged, ...restOptions } = updatedOptions; try { - const etag = options.onlyIfUnchanged ? indexer.etag : undefined; + const etag = onlyIfUnchanged ? indexer.etag : undefined; const result = await this.client.indexers.createOrUpdate( indexer.name, utils.publicSearchIndexerToGeneratedSearchIndexer(indexer), { - ...updatedOptions, + ...restOptions, ifMatch: etag, - skipIndexerResetRequirementForCache: options.skipIndexerResetRequirementForCache, - disableCacheReprocessingChangeDetection: options.disableCacheReprocessingChangeDetection, }, ); return utils.generatedSearchIndexerToPublicSearchIndexer(result); @@ -516,7 +521,6 @@ export class SearchIndexerClient { { ...updatedOptions, ifMatch: etag, - skipIndexerResetRequirementForCache: options.skipIndexerResetRequirementForCache, }, ); return utils.generatedDataSourceToPublicDataSource(result); @@ -553,8 +557,6 @@ export class SearchIndexerClient { { ...updatedOptions, ifMatch: etag, - skipIndexerResetRequirementForCache: options.skipIndexerResetRequirementForCache, - disableCacheReprocessingChangeDetection: options.disableCacheReprocessingChangeDetection, }, ); diff --git a/sdk/search/search-documents/src/searchIndexingBufferedSender.ts 
b/sdk/search/search-documents/src/searchIndexingBufferedSender.ts index 87f36a355085..9be346a46b09 100644 --- a/sdk/search/search-documents/src/searchIndexingBufferedSender.ts +++ b/sdk/search/search-documents/src/searchIndexingBufferedSender.ts @@ -1,6 +1,10 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +import { OperationOptions } from "@azure/core-client"; +import { RestError } from "@azure/core-rest-pipeline"; +import EventEmitter from "events"; +import { IndexDocumentsResult } from "./generated/data/models"; import { IndexDocumentsBatch } from "./indexDocumentsBatch"; import { IndexDocumentsAction, @@ -12,18 +16,13 @@ import { SearchIndexingBufferedSenderOptions, SearchIndexingBufferedSenderUploadDocumentsOptions, } from "./indexModels"; -import { IndexDocumentsResult } from "./generated/data/models"; -import { OperationOptions } from "@azure/core-client"; -import EventEmitter from "events"; +import { delay, getRandomIntegerInclusive } from "./serviceUtils"; import { createSpan } from "./tracing"; -import { delay } from "./serviceUtils"; -import { getRandomIntegerInclusive } from "./serviceUtils"; -import { RestError } from "@azure/core-rest-pipeline"; /** * Index Documents Client */ -export interface IndexDocumentsClient { +export interface IndexDocumentsClient { /** * Perform a set of index modifications (upload, merge, mergeOrUpload, delete) * for the given set of documents. @@ -32,7 +31,7 @@ export interface IndexDocumentsClient { * @param options - Additional options. */ indexDocuments( - batch: IndexDocumentsBatch, + batch: IndexDocumentsBatch, options: IndexDocumentsOptions, ): Promise; } @@ -49,14 +48,10 @@ export const DEFAULT_FLUSH_WINDOW: number = 60000; * Default number of times to retry. */ export const DEFAULT_RETRY_COUNT: number = 3; -/** - * Default retry delay. - */ -export const DEFAULT_RETRY_DELAY: number = 800; /** * Default Max Delay between retries. 
*/ -export const DEFAULT_MAX_RETRY_DELAY: number = 60000; +const DEFAULT_MAX_RETRY_DELAY: number = 60000; /** * Class used to perform buffered operations against a search index, diff --git a/sdk/search/search-documents/src/serviceModels.ts b/sdk/search/search-documents/src/serviceModels.ts index 4605d13741e0..853c7569afd9 100644 --- a/sdk/search/search-documents/src/serviceModels.ts +++ b/sdk/search/search-documents/src/serviceModels.ts @@ -6,6 +6,7 @@ import { AsciiFoldingTokenFilter, AzureMachineLearningSkill, BM25Similarity, + CharFilterName, CjkBigramTokenFilter, ClassicSimilarity, ClassicTokenizer, @@ -13,7 +14,7 @@ import { CommonGramTokenFilter, ConditionalSkill, CorsOptions, - CustomEntityLookupSkill, + CustomEntity, CustomNormalizer, DefaultCognitiveServicesAccount, DictionaryDecompounderTokenFilter, @@ -23,21 +24,19 @@ import { EdgeNGramTokenizer, ElisionTokenFilter, EntityLinkingSkill, - EntityRecognitionSkill, EntityRecognitionSkillV3, FieldMapping, FreshnessScoringFunction, HighWaterMarkChangeDetectionPolicy, - ImageAnalysisSkill, - IndexingParameters, IndexingSchedule, + IndexProjectionMode, KeepTokenFilter, - KeyPhraseExtractionSkill, KeywordMarkerTokenFilter, LanguageDetectionSkill, LengthTokenFilter, LexicalAnalyzerName, LexicalNormalizerName, + LexicalTokenizerName, LimitTokenFilter, LuceneStandardAnalyzer, MagnitudeScoringFunction, @@ -45,25 +44,23 @@ import { MergeSkill, MicrosoftLanguageStemmingTokenizer, MicrosoftLanguageTokenizer, + NativeBlobSoftDeleteDeletionDetectionPolicy, NGramTokenizer, - OcrSkill, - PIIDetectionSkill, PathHierarchyTokenizerV2 as PathHierarchyTokenizer, PatternCaptureTokenFilter, PatternReplaceCharFilter, PatternReplaceTokenFilter, PhoneticTokenFilter, - RegexFlags, + ScalarQuantizationCompressionConfiguration, ScoringFunctionAggregation, SearchAlias, SearchIndexerDataContainer, SearchIndexerDataNoneIdentity, - SearchIndexerDataSourceType, SearchIndexerDataUserAssignedIdentity, - Suggester as SearchSuggester, + 
SearchIndexerIndexProjectionSelector, + SearchIndexerKnowledgeStoreProjection, SearchIndexerSkill as BaseSearchIndexerSkill, - SemanticSettings, - SentimentSkill, + SemanticSearch, SentimentSkillV3, ServiceCounters, ServiceLimits, @@ -71,25 +68,47 @@ import { ShingleTokenFilter, SnowballTokenFilter, SoftDeleteColumnDeletionDetectionPolicy, - SplitSkill, SqlIntegratedChangeTrackingPolicy, StemmerOverrideTokenFilter, StemmerTokenFilter, StopAnalyzer, StopwordsTokenFilter, + Suggester as SearchSuggester, SynonymTokenFilter, TagScoringFunction, - TextTranslationSkill, TextWeights, + TokenFilterName, TruncateTokenFilter, UaxUrlEmailTokenizer, UniqueTokenFilter, - WordDelimiterTokenFilter, - SearchIndexerKnowledgeStoreProjection, - SearchIndexerIndexProjectionSelector, - NativeBlobSoftDeleteDeletionDetectionPolicy, VectorSearchProfile, + WordDelimiterTokenFilter, } from "./generated/service/models"; +import { + BlobIndexerDataToExtract, + BlobIndexerImageAction, + BlobIndexerParsingMode, + BlobIndexerPDFTextRotationAlgorithm, + CustomEntityLookupSkillLanguage, + EntityCategory, + EntityRecognitionSkillLanguage, + ImageAnalysisSkillLanguage, + ImageDetail, + IndexerExecutionEnvironment, + KeyPhraseExtractionSkillLanguage, + OcrSkillLanguage, + PIIDetectionSkillMaskingMode, + RegexFlags, + SearchIndexerDataSourceType, + SentimentSkillLanguage, + SplitSkillLanguage, + TextSplitMode, + TextTranslationSkillLanguage, + VectorSearchAlgorithmKind, + VectorSearchAlgorithmMetric, + VectorSearchVectorizerKind, + VisualFeature, +} from "./generatedStringLiteralUnions"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; @@ -167,7 +186,7 @@ export interface SearchIndexStatistics { * The amount of memory in bytes consumed by vectors in the index. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ - readonly vectorIndexSize?: number; + readonly vectorIndexSize: number; } /** @@ -424,31 +443,32 @@ export interface AnalyzeRequest { /** * The name of the analyzer to use to break the given text. If this parameter is not specified, * you must specify a tokenizer instead. The tokenizer and analyzer parameters are mutually - * exclusive. KnownAnalyzerNames is an enum containing known values. + * exclusive. {@link KnownAnalyzerNames} is an enum containing built-in analyzer names. * NOTE: Either analyzerName or tokenizerName is required in an AnalyzeRequest. */ - analyzerName?: string; + analyzerName?: LexicalAnalyzerName; /** * The name of the tokenizer to use to break the given text. If this parameter is not specified, * you must specify an analyzer instead. The tokenizer and analyzer parameters are mutually - * exclusive. KnownTokenizerNames is an enum containing known values. + * exclusive. {@link KnownTokenizerNames} is an enum containing built-in tokenizer names. * NOTE: Either analyzerName or tokenizerName is required in an AnalyzeRequest. */ - tokenizerName?: string; + tokenizerName?: LexicalTokenizerName; /** - * The name of the normalizer to use to normalize the given text. + * The name of the normalizer to use to normalize the given text. {@link KnownNormalizerNames} is + * an enum containing built-in analyzer names. */ normalizerName?: LexicalNormalizerName; /** * An optional list of token filters to use when breaking the given text. This parameter can only * be set when using the tokenizer parameter. */ - tokenFilters?: string[]; + tokenFilters?: TokenFilterName[]; /** * An optional list of character filters to use when breaking the given text. This parameter can * only be set when using the tokenizer parameter. 
*/ - charFilters?: string[]; + charFilters?: CharFilterName[]; } /** @@ -515,21 +535,21 @@ export interface CustomAnalyzer { name: string; /** * The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as - * breaking a sentence into words. KnownTokenizerNames is an enum containing known values. + * breaking a sentence into words. {@link KnownTokenizerNames} is an enum containing built-in tokenizer names. */ - tokenizerName: string; + tokenizerName: LexicalTokenizerName; /** * A list of token filters used to filter out or modify the tokens generated by a tokenizer. For * example, you can specify a lowercase filter that converts all characters to lowercase. The * filters are run in the order in which they are listed. */ - tokenFilters?: string[]; + tokenFilters?: TokenFilterName[]; /** * A list of character filters used to prepare input text before it is processed by the * tokenizer. For instance, they can replace certain characters or symbols. The filters are run * in the order in which they are listed. */ - charFilters?: string[]; + charFilters?: CharFilterName[]; } /** @@ -596,26 +616,26 @@ export interface WebApiSkill extends BaseSearchIndexerSkill { * Contains the possible cases for Skill. 
*/ export type SearchIndexerSkill = + | AzureMachineLearningSkill + | AzureOpenAIEmbeddingSkill | ConditionalSkill - | KeyPhraseExtractionSkill - | OcrSkill + | CustomEntityLookupSkill + | DocumentExtractionSkill + | EntityLinkingSkill + | EntityRecognitionSkill + | EntityRecognitionSkillV3 | ImageAnalysisSkill + | KeyPhraseExtractionSkill | LanguageDetectionSkill - | ShaperSkill | MergeSkill - | EntityRecognitionSkill - | SentimentSkill - | SplitSkill + | OcrSkill | PIIDetectionSkill - | EntityRecognitionSkillV3 - | EntityLinkingSkill + | SentimentSkill | SentimentSkillV3 - | CustomEntityLookupSkill + | ShaperSkill + | SplitSkill | TextTranslationSkill - | DocumentExtractionSkill - | WebApiSkill - | AzureMachineLearningSkill - | AzureOpenAIEmbeddingSkill; + | WebApiSkill; /** * Contains the possible cases for CognitiveServicesAccount. @@ -856,7 +876,8 @@ export type ScoringFunction = * Possible values include: 'Edm.String', 'Edm.Int32', 'Edm.Int64', 'Edm.Double', 'Edm.Boolean', * 'Edm.DateTimeOffset', 'Edm.GeographyPoint', 'Collection(Edm.String)', 'Collection(Edm.Int32)', * 'Collection(Edm.Int64)', 'Collection(Edm.Double)', 'Collection(Edm.Boolean)', - * 'Collection(Edm.DateTimeOffset)', 'Collection(Edm.GeographyPoint)', 'Collection(Edm.Single)' + * 'Collection(Edm.DateTimeOffset)', 'Collection(Edm.GeographyPoint)', 'Collection(Edm.Single)', + * 'Collection(Edm.Half)', 'Collection(Edm.Int16)', 'Collection(Edm.SByte)' * * NB: `Edm.Single` alone is not a valid data type. It must be used as part of a collection type. * @readonly @@ -876,7 +897,10 @@ export type SearchFieldDataType = | "Collection(Edm.Boolean)" | "Collection(Edm.DateTimeOffset)" | "Collection(Edm.GeographyPoint)" - | "Collection(Edm.Single)"; + | "Collection(Edm.Single)" + | "Collection(Edm.Half)" + | "Collection(Edm.Int16)" + | "Collection(Edm.SByte)"; /** * Defines values for ComplexDataType. 
@@ -917,14 +941,25 @@ export interface SimpleField { */ key?: boolean; /** - * A value indicating whether the field can be returned in a search result. You can enable this + * A value indicating whether the field can be returned in a search result. You can disable this * option if you want to use a field (for example, margin) as a filter, sorting, or scoring - * mechanism but do not want the field to be visible to the end user. This property must be false - * for key fields. This property can be changed on existing fields. - * Disabling this property does not cause any increase in index storage requirements. - * Default is false. + * mechanism but do not want the field to be visible to the end user. This property must be true + * for key fields. This property can be changed on existing fields. Enabling this property does + * not cause any increase in index storage requirements. Default is true for simple fields and + * false for vector fields. */ hidden?: boolean; + /** + * An immutable value indicating whether the field will be persisted separately on disk to be + * returned in a search result. You can disable this option if you don't plan to return the field + * contents in a search response to save on storage overhead. This can only be set during index + * creation and only for vector fields. This property cannot be changed for existing fields or set + * as false for new fields. If this property is set as false, the property `hidden` must be set as + * true. This property must be true or unset for key fields, for new fields, and for non-vector + * fields, and it must be null for complex fields. Disabling this property will reduce index + * storage requirements. The default is true for vector fields. + */ + stored?: boolean; /** * A value indicating whether the field is full-text searchable. This means it will undergo * analysis such as word-breaking during indexing. 
If you set a searchable field to a value like @@ -1004,7 +1039,7 @@ export interface SimpleField { * The name of the vector search algorithm configuration that specifies the algorithm and * optional parameters for searching the vector field. */ - vectorSearchProfile?: string; + vectorSearchProfileName?: string; } export function isComplexField(field: SearchField): field is ComplexField { @@ -1156,7 +1191,7 @@ export interface SearchIndex { /** * Defines parameters for a search index that influence semantic capabilities. */ - semanticSettings?: SemanticSettings; + semanticSearch?: SemanticSearch; /** * Contains configuration options related to vector search. */ @@ -2094,12 +2129,17 @@ export interface VectorSearch { algorithms?: VectorSearchAlgorithmConfiguration[]; /** Contains configuration options on how to vectorize text vector queries. */ vectorizers?: VectorSearchVectorizer[]; + /** + * Contains configuration options specific to the compression method used during indexing or + * querying. + */ + compressions?: VectorSearchCompressionConfiguration[]; } /** Contains configuration options specific to the algorithm used during indexing and/or querying. */ export type VectorSearchAlgorithmConfiguration = - | HnswVectorSearchAlgorithmConfiguration - | ExhaustiveKnnVectorSearchAlgorithmConfiguration; + | HnswAlgorithmConfiguration + | ExhaustiveKnnAlgorithmConfiguration; /** Contains configuration options specific to the algorithm used during indexing and/or querying. */ export interface BaseVectorSearchAlgorithmConfiguration { @@ -2113,7 +2153,7 @@ export interface BaseVectorSearchAlgorithmConfiguration { * Contains configuration options specific to the hnsw approximate nearest neighbors algorithm * used during indexing time. 
*/ -export type HnswVectorSearchAlgorithmConfiguration = BaseVectorSearchAlgorithmConfiguration & { +export type HnswAlgorithmConfiguration = BaseVectorSearchAlgorithmConfiguration & { /** * Polymorphic discriminator, which specifies the different types this object can be */ @@ -2155,13 +2195,12 @@ export interface HnswParameters { } /** Contains configuration options specific to the exhaustive KNN algorithm used during querying, which will perform brute-force search across the entire vector index. */ -export type ExhaustiveKnnVectorSearchAlgorithmConfiguration = - BaseVectorSearchAlgorithmConfiguration & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - kind: "exhaustiveKnn"; - /** Contains the parameters specific to exhaustive KNN algorithm. */ - parameters?: ExhaustiveKnnParameters; - }; +export type ExhaustiveKnnAlgorithmConfiguration = BaseVectorSearchAlgorithmConfiguration & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + kind: "exhaustiveKnn"; + /** Contains the parameters specific to exhaustive KNN algorithm. */ + parameters?: ExhaustiveKnnParameters; +}; /** Contains the parameters specific to exhaustive KNN algorithm. */ export interface ExhaustiveKnnParameters { @@ -2262,16 +2301,198 @@ export interface SearchIndexerKnowledgeStoreParameters { synthesizeGeneratedKeyName?: boolean; } -/** The similarity metric to use for vector comparisons. */ -export type VectorSearchAlgorithmMetric = "cosine" | "euclidean" | "dotProduct"; +/** A dictionary of indexer-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. */ +export interface IndexingParametersConfiguration { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** Represents the parsing mode for indexing from an Azure blob data source. 
*/ + parsingMode?: BlobIndexerParsingMode; + /** Comma-delimited list of filename extensions to ignore when processing from Azure blob storage. For example, you could exclude ".png, .mp4" to skip over those files during indexing. */ + excludedFileNameExtensions?: string; + /** Comma-delimited list of filename extensions to select when processing from Azure blob storage. For example, you could focus indexing on specific application files ".docx, .pptx, .msg" to specifically include those file types. */ + indexedFileNameExtensions?: string; + /** For Azure blobs, set to false if you want to continue indexing when an unsupported content type is encountered, and you don't know all the content types (file extensions) in advance. */ + failOnUnsupportedContentType?: boolean; + /** For Azure blobs, set to false if you want to continue indexing if a document fails indexing. */ + failOnUnprocessableDocument?: boolean; + /** For Azure blobs, set this property to true to still index storage metadata for blob content that is too large to process. Oversized blobs are treated as errors by default. For limits on blob size, see https://docs.microsoft.com/azure/search/search-limits-quotas-capacity. */ + indexStorageMetadataOnlyForOversizedDocuments?: boolean; + /** For CSV blobs, specifies a comma-delimited list of column headers, useful for mapping source fields to destination fields in an index. */ + delimitedTextHeaders?: string; + /** For CSV blobs, specifies the end-of-line single-character delimiter for CSV files where each line starts a new document (for example, "|"). */ + delimitedTextDelimiter?: string; + /** For CSV blobs, indicates that the first (non-blank) line of each blob contains headers. */ + firstLineContainsHeaders?: boolean; + /** For JSON arrays, given a structured or semi-structured document, you can specify a path to the array using this property. 
*/ + documentRoot?: string; + /** Specifies the data to extract from Azure blob storage and tells the indexer which data to extract from image content when "imageAction" is set to a value other than "none". This applies to embedded image content in a .PDF or other application, or image files such as .jpg and .png, in Azure blobs. */ + dataToExtract?: BlobIndexerDataToExtract; + /** Determines how to process embedded images and image files in Azure blob storage. Setting the "imageAction" configuration to any value other than "none" requires that a skillset also be attached to that indexer. */ + imageAction?: BlobIndexerImageAction; + /** If true, will create a path //document//file_data that is an object representing the original file data downloaded from your blob data source. This allows you to pass the original file data to a custom skill for processing within the enrichment pipeline, or to the Document Extraction skill. */ + allowSkillsetToReadFileData?: boolean; + /** Determines algorithm for text extraction from PDF files in Azure blob storage. */ + pdfTextRotationAlgorithm?: BlobIndexerPDFTextRotationAlgorithm; + /** Specifies the environment in which the indexer should execute. */ + executionEnvironment?: IndexerExecutionEnvironment; + /** Increases the timeout beyond the 5-minute default for Azure SQL database data sources, specified in the format "hh:mm:ss". */ + queryTimeout?: string; +} + +/** Represents parameters for indexer execution. */ +export interface IndexingParameters { + /** The number of items that are read from the data source and indexed as a single batch in order to improve performance. The default depends on the data source type. */ + batchSize?: number; + /** The maximum number of items that can fail indexing for indexer execution to still be considered successful. -1 means no limit. Default is 0. 
*/ + maxFailedItems?: number; + /** The maximum number of items in a single batch that can fail indexing for the batch to still be considered successful. -1 means no limit. Default is 0. */ + maxFailedItemsPerBatch?: number; + /** A dictionary of indexer-specific configuration properties. Each name is the name of a specific property. Each value must be of a primitive type. */ + configuration?: IndexingParametersConfiguration; +} + +/** A skill looks for text from a custom, user-defined list of words and phrases. */ +export interface CustomEntityLookupSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Text.CustomEntityLookupSkill"; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: CustomEntityLookupSkillLanguage; + /** Path to a JSON or CSV file containing all the target text to match against. This entity definition is read at the beginning of an indexer run. Any updates to this file during an indexer run will not take effect until subsequent runs. This config must be accessible over HTTPS. */ + entitiesDefinitionUri?: string; + /** The inline CustomEntity definition. */ + inlineEntitiesDefinition?: CustomEntity[]; + /** A global flag for CaseSensitive. If CaseSensitive is not set in CustomEntity, this value will be the default value. */ + globalDefaultCaseSensitive?: boolean; + /** A global flag for AccentSensitive. If AccentSensitive is not set in CustomEntity, this value will be the default value. */ + globalDefaultAccentSensitive?: boolean; + /** A global flag for FuzzyEditDistance. If FuzzyEditDistance is not set in CustomEntity, this value will be the default value. */ + globalDefaultFuzzyEditDistance?: number; +} + +/** + * Text analytics entity recognition. + * + * @deprecated This skill has been deprecated. 
+ */ +export interface EntityRecognitionSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Text.EntityRecognitionSkill"; + /** A list of entity categories that should be extracted. */ + categories?: EntityCategory[]; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: EntityRecognitionSkillLanguage; + /** Determines whether or not to include entities which are well known but don't conform to a pre-defined type. If this configuration is not set (default), set to null or set to false, entities which don't conform to one of the pre-defined types will not be surfaced. */ + includeTypelessEntities?: boolean; + /** A value between 0 and 1 that can be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */ + minimumPrecision?: number; +} + +/** A skill that analyzes image files. It extracts a rich set of visual features based on the image content. */ +export interface ImageAnalysisSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Vision.ImageAnalysisSkill"; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: ImageAnalysisSkillLanguage; + /** A list of visual features. */ + visualFeatures?: VisualFeature[]; + /** A string indicating which domain-specific details to return. */ + details?: ImageDetail[]; +} + +/** A skill that uses text analytics for key phrase extraction. */ +export interface KeyPhraseExtractionSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Text.KeyPhraseExtractionSkill"; + /** A value indicating which language code to use. 
Default is en. */ + defaultLanguageCode?: KeyPhraseExtractionSkillLanguage; + /** A number indicating how many key phrases to return. If absent, all identified key phrases will be returned. */ + maxKeyPhraseCount?: number; + /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ + modelVersion?: string; +} -export type VectorSearchAlgorithmKind = "hnsw" | "exhaustiveKnn"; +/** A skill that extracts text from image files. */ +export interface OcrSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Vision.OcrSkill"; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: OcrSkillLanguage; + /** A value indicating to turn orientation detection on or not. Default is false. */ + shouldDetectOrientation?: boolean; +} + +/** Using the Text Analytics API, extracts personal information from an input text and gives you the option of masking it. */ +export interface PIIDetectionSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Text.PIIDetectionSkill"; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: string; + /** A value between 0 and 1 that can be used to only include entities whose confidence score is greater than the value specified. If not set (default), or if explicitly set to null, all entities will be included. */ + minimumPrecision?: number; + /** A parameter that provides various ways to mask the personal information detected in the input text. Default is 'none'. */ + maskingMode?: PIIDetectionSkillMaskingMode; + /** The character used to mask the text if the maskingMode parameter is set to replace. Default is '*'. 
*/ + maskingCharacter?: string; + /** The version of the model to use when calling the Text Analytics service. It will default to the latest available when not specified. We recommend you do not specify this value unless absolutely necessary. */ + modelVersion?: string; + /** A list of PII entity categories that should be extracted and masked. */ + categories?: string[]; + /** If specified, will set the PII domain to include only a subset of the entity categories. Possible values include: 'phi', 'none'. Default is 'none'. */ + domain?: string; +} /** - * Defines behavior of the index projections in relation to the rest of the indexer. + * Text analytics positive-negative sentiment analysis, scored as a floating point value in a range of zero to 1. + * + * @deprecated This skill has been deprecated. */ -export type IndexProjectionMode = "skipIndexingParentDocuments" | "includeIndexingParentDocuments"; +export interface SentimentSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Text.SentimentSkill"; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: SentimentSkillLanguage; +} -export type VectorSearchVectorizerKind = "azureOpenAI" | "customWebApi"; +/** A skill to split a string into chunks of text. */ +export interface SplitSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Text.SplitSkill"; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: SplitSkillLanguage; + /** A value indicating which split mode to perform. */ + textSplitMode?: TextSplitMode; + /** The desired maximum page length. Default is 10000. */ + maxPageLength?: number; +} + +/** A skill to translate text from one language to another. 
*/ +export interface TextTranslationSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Text.TranslationSkill"; + /** The language code to translate documents into for documents that don't specify the to language explicitly. */ + defaultToLanguageCode: TextTranslationSkillLanguage; + /** The language code to translate documents from for documents that don't specify the from language explicitly. */ + defaultFromLanguageCode?: TextTranslationSkillLanguage; + /** The language code to translate documents from when neither the fromLanguageCode input nor the defaultFromLanguageCode parameter are provided, and the automatic language detection is unsuccessful. Default is en. */ + suggestedFrom?: TextTranslationSkillLanguage; +} + +/** A skill that analyzes image files. It extracts a rich set of visual features based on the image content. */ +export interface ImageAnalysisSkill extends BaseSearchIndexerSkill { + /** Polymorphic discriminator, which specifies the different types this object can be */ + odatatype: "#Microsoft.Skills.Vision.ImageAnalysisSkill"; + /** A value indicating which language code to use. Default is en. */ + defaultLanguageCode?: ImageAnalysisSkillLanguage; + /** A list of visual features. */ + visualFeatures?: VisualFeature[]; + /** A string indicating which domain-specific details to return. */ + details?: ImageDetail[]; +} + +/** + * Contains configuration options specific to the compression method used during indexing or + * querying. 
+ */ +export type VectorSearchCompressionConfiguration = ScalarQuantizationCompressionConfiguration; // END manually modified generated interfaces diff --git a/sdk/search/search-documents/src/serviceUtils.ts b/sdk/search/search-documents/src/serviceUtils.ts index ec47fabf2186..625479e538d0 100644 --- a/sdk/search/search-documents/src/serviceUtils.ts +++ b/sdk/search/search-documents/src/serviceUtils.ts @@ -1,80 +1,92 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +import { + SearchResult as GeneratedSearchResult, + SuggestDocumentsResult as GeneratedSuggestDocumentsResult, +} from "./generated/data/models"; import { AzureMachineLearningSkill, + AzureOpenAIVectorizer as GeneratedAzureOpenAIVectorizer, BM25Similarity, ClassicSimilarity, CognitiveServicesAccountKey, CognitiveServicesAccountUnion, ConditionalSkill, - CustomAnalyzer, - CustomEntityLookupSkill, + CustomAnalyzer as BaseCustomAnalyzer, + CustomVectorizer as GeneratedCustomVectorizer, DataChangeDetectionPolicyUnion, DataDeletionDetectionPolicyUnion, DefaultCognitiveServicesAccount, DocumentExtractionSkill, EntityLinkingSkill, - EntityRecognitionSkill, EntityRecognitionSkillV3, - PatternAnalyzer as GeneratedPatternAnalyzer, - SearchField as GeneratedSearchField, - SearchIndex as GeneratedSearchIndex, - SearchIndexer as GeneratedSearchIndexer, - SearchIndexerDataSource as GeneratedSearchIndexerDataSourceConnection, - SearchIndexerSkillset as GeneratedSearchIndexerSkillset, - SearchResourceEncryptionKey as GeneratedSearchResourceEncryptionKey, - SynonymMap as GeneratedSynonymMap, + ExhaustiveKnnAlgorithmConfiguration as GeneratedExhaustiveKnnAlgorithmConfiguration, HighWaterMarkChangeDetectionPolicy, - ImageAnalysisSkill, - KeyPhraseExtractionSkill, + HnswAlgorithmConfiguration as GeneratedHnswAlgorithmConfiguration, LanguageDetectionSkill, - LexicalAnalyzerName, LexicalAnalyzerUnion, - LexicalNormalizerName, LexicalTokenizerUnion, LuceneStandardAnalyzer, MergeSkill, - OcrSkill, 
- PIIDetectionSkill, + PatternAnalyzer as GeneratedPatternAnalyzer, PatternTokenizer, - RegexFlags, + SearchField as GeneratedSearchField, + SearchIndex as GeneratedSearchIndex, + SearchIndexer as GeneratedSearchIndexer, + SearchIndexerCache as GeneratedSearchIndexerCache, SearchIndexerDataIdentityUnion, SearchIndexerDataNoneIdentity, + SearchIndexerDataSource as GeneratedSearchIndexerDataSourceConnection, SearchIndexerDataUserAssignedIdentity, SearchIndexerKnowledgeStore as BaseSearchIndexerKnowledgeStore, + SearchIndexerSkillset as GeneratedSearchIndexerSkillset, SearchIndexerSkillUnion, - SentimentSkill, + SearchResourceEncryptionKey as GeneratedSearchResourceEncryptionKey, SentimentSkillV3, ShaperSkill, SimilarityUnion, SoftDeleteColumnDeletionDetectionPolicy, - SplitSkill, SqlIntegratedChangeTrackingPolicy, StopAnalyzer, - TextTranslationSkill, + SynonymMap as GeneratedSynonymMap, TokenFilterUnion, - SearchIndexerCache as GeneratedSearchIndexerCache, VectorSearch as GeneratedVectorSearch, - CustomVectorizer as GeneratedCustomVectorizer, - AzureOpenAIVectorizer as GeneratedAzureOpenAIVectorizer, - VectorSearchVectorizerUnion as GeneratedVectorSearchVectorizer, VectorSearchAlgorithmConfigurationUnion as GeneratedVectorSearchAlgorithmConfiguration, - HnswVectorSearchAlgorithmConfiguration as GeneratedHnswVectorSearchAlgorithmConfiguration, - ExhaustiveKnnVectorSearchAlgorithmConfiguration as GeneratedExhaustiveKnnVectorSearchAlgorithmConfiguration, + VectorSearchVectorizerUnion as GeneratedVectorSearchVectorizer, } from "./generated/service/models"; +import { + BlobIndexerDataToExtract, + BlobIndexerImageAction, + BlobIndexerParsingMode, + BlobIndexerPDFTextRotationAlgorithm, + IndexerExecutionEnvironment, + RegexFlags, + SearchIndexerDataSourceType, + VectorSearchAlgorithmMetric, +} from "./generatedStringLiteralUnions"; +import { SearchResult, SelectFields, SuggestDocumentsResult, SuggestResult } from "./indexModels"; import { AzureOpenAIVectorizer, CharFilter, 
CognitiveServicesAccount, ComplexField, + CustomEntityLookupSkill, CustomVectorizer, DataChangeDetectionPolicy, DataDeletionDetectionPolicy, + EntityRecognitionSkill, + ImageAnalysisSkill, + IndexingParameters, + IndexingParametersConfiguration, + isComplexField, + KeyPhraseExtractionSkill, LexicalAnalyzer, LexicalNormalizer, LexicalTokenizer, + OcrSkill, PatternAnalyzer, + PIIDetectionSkill, ScoringProfile, SearchField, SearchFieldDataType, @@ -83,39 +95,23 @@ import { SearchIndexerCache, SearchIndexerDataIdentity, SearchIndexerDataSourceConnection, + SearchIndexerIndexProjections, SearchIndexerKnowledgeStore, SearchIndexerSkill, SearchIndexerSkillset, SearchResourceEncryptionKey, + SentimentSkill, SimilarityAlgorithm, SimpleField, + SplitSkill, SynonymMap, + TextTranslationSkill, TokenFilter, VectorSearch, VectorSearchAlgorithmConfiguration, - VectorSearchAlgorithmMetric, VectorSearchVectorizer, WebApiSkill, - isComplexField, } from "./serviceModels"; -import { - QueryDebugMode, - SearchFieldArray, - SearchRequest, - SearchResult, - SelectFields, - SemanticErrorHandlingMode, - SuggestDocumentsResult, - SuggestResult, - VectorFilterMode, - VectorQuery, -} from "./indexModels"; -import { - SearchResult as GeneratedSearchResult, - SuggestDocumentsResult as GeneratedSuggestDocumentsResult, - SearchRequest as GeneratedSearchRequest, - VectorQueryUnion as GeneratedVectorQuery, -} from "./generated/data/models"; export function convertSkillsToPublic(skills: SearchIndexerSkillUnion[]): SearchIndexerSkill[] { if (!skills) { @@ -226,7 +222,7 @@ export function convertTokenFiltersToGenerated( return result; } -export function convertAnalyzersToGenerated( +function convertAnalyzersToGenerated( analyzers?: LexicalAnalyzer[], ): LexicalAnalyzerUnion[] | undefined { if (!analyzers) { @@ -257,7 +253,7 @@ export function convertAnalyzersToGenerated( return result; } -export function convertAnalyzersToPublic( +function convertAnalyzersToPublic( analyzers?: LexicalAnalyzerUnion[], 
): LexicalAnalyzer[] | undefined { if (!analyzers) { @@ -282,10 +278,7 @@ export function convertAnalyzersToPublic( } as PatternAnalyzer); break; case "#Microsoft.Azure.Search.CustomAnalyzer": - result.push({ - ...analyzer, - tokenizerName: (analyzer as CustomAnalyzer).tokenizerName, - } as CustomAnalyzer); + result.push(analyzer as BaseCustomAnalyzer); break; } } @@ -307,24 +300,21 @@ export function convertFieldsToPublic(fields: GeneratedSearchField[]): SearchFie return result; } else { const type: SearchFieldDataType = field.type as SearchFieldDataType; - const analyzerName: LexicalAnalyzerName | undefined = field.analyzer; - const searchAnalyzerName: LexicalAnalyzerName | undefined = field.searchAnalyzer; - const indexAnalyzerName: LexicalAnalyzerName | undefined = field.indexAnalyzer; const synonymMapNames: string[] | undefined = field.synonymMaps; - const normalizerName: LexicalNormalizerName | undefined = field.normalizer; - const { retrievable, ...restField } = field; + const { retrievable, analyzer, searchAnalyzer, indexAnalyzer, normalizer, ...restField } = + field; const hidden = typeof retrievable === "boolean" ? 
!retrievable : retrievable; const result: SimpleField = { ...restField, type, hidden, - analyzerName, - searchAnalyzerName, - indexAnalyzerName, + analyzerName: analyzer, + searchAnalyzerName: searchAnalyzer, + indexAnalyzerName: indexAnalyzer, + normalizerName: normalizer, synonymMapNames, - normalizerName, }; return result; } @@ -360,7 +350,7 @@ export function convertFieldsToGenerated(fields: SearchField[]): GeneratedSearch }); } -export function convertTokenizersToGenerated( +function convertTokenizersToGenerated( tokenizers?: LexicalTokenizer[], ): LexicalTokenizerUnion[] | undefined { if (!tokenizers) { @@ -381,7 +371,7 @@ export function convertTokenizersToGenerated( return result; } -export function convertTokenizersToPublic( +function convertTokenizersToPublic( tokenizers?: LexicalTokenizerUnion[], ): LexicalTokenizer[] | undefined { if (!tokenizers) { @@ -391,11 +381,11 @@ export function convertTokenizersToPublic( const result: LexicalTokenizer[] = []; for (const tokenizer of tokenizers) { if (tokenizer.odatatype === "#Microsoft.Azure.Search.PatternTokenizer") { + const patternTokenizer = tokenizer as PatternTokenizer; + const flags = patternTokenizer.flags?.split("|") as RegexFlags[] | undefined; result.push({ ...tokenizer, - flags: (tokenizer as PatternTokenizer).flags - ? 
((tokenizer as PatternTokenizer).flags!.split("|") as RegexFlags[]) - : undefined, + flags, }); } else { result.push(tokenizer); @@ -428,7 +418,7 @@ export function convertSimilarityToPublic( } } -export function convertEncryptionKeyToPublic( +function convertEncryptionKeyToPublic( encryptionKey?: GeneratedSearchResourceEncryptionKey, ): SearchResourceEncryptionKey | undefined { if (!encryptionKey) { @@ -450,7 +440,7 @@ export function convertEncryptionKeyToPublic( return result; } -export function convertEncryptionKeyToGenerated( +function convertEncryptionKeyToGenerated( encryptionKey?: SearchResourceEncryptionKey, ): GeneratedSearchResourceEncryptionKey | undefined { if (!encryptionKey) { @@ -490,7 +480,7 @@ export function generatedIndexToPublicIndex(generatedIndex: GeneratedSearchIndex scoringProfiles: generatedIndex.scoringProfiles as ScoringProfile[], fields: convertFieldsToPublic(generatedIndex.fields), similarity: convertSimilarityToPublic(generatedIndex.similarity), - semanticSettings: generatedIndex.semanticSettings, + semanticSearch: generatedIndex.semanticSearch, vectorSearch: generatedVectorSearchToPublicVectorSearch(generatedIndex.vectorSearch), }; } @@ -506,31 +496,32 @@ export function generatedVectorSearchVectorizerToPublicVectorizer( return generatedVectorizer; } - if (generatedVectorizer.kind === "azureOpenAI") { - const { azureOpenAIParameters } = generatedVectorizer as GeneratedAzureOpenAIVectorizer; - const authIdentity = convertSearchIndexerDataIdentityToPublic( - azureOpenAIParameters?.authIdentity, - ); - const vectorizer: AzureOpenAIVectorizer = { - ...(generatedVectorizer as GeneratedAzureOpenAIVectorizer), - azureOpenAIParameters: { ...azureOpenAIParameters, authIdentity }, - }; - return vectorizer; - } - - if (generatedVectorizer.kind === "customWebApi") { - const { customVectorizerParameters } = generatedVectorizer as GeneratedCustomVectorizer; - const authIdentity = convertSearchIndexerDataIdentityToPublic( - 
customVectorizerParameters?.authIdentity, - ); - const vectorizer: CustomVectorizer = { - ...(generatedVectorizer as GeneratedCustomVectorizer), - customVectorizerParameters: { ...customVectorizerParameters, authIdentity }, - }; - return vectorizer; + switch (generatedVectorizer.kind) { + case "azureOpenAI": { + const { azureOpenAIParameters } = generatedVectorizer as GeneratedAzureOpenAIVectorizer; + const authIdentity = convertSearchIndexerDataIdentityToPublic( + azureOpenAIParameters?.authIdentity, + ); + const vectorizer: AzureOpenAIVectorizer = { + ...(generatedVectorizer as GeneratedAzureOpenAIVectorizer), + azureOpenAIParameters: { ...azureOpenAIParameters, authIdentity }, + }; + return vectorizer; + } + case "customWebApi": { + const { customWebApiParameters } = generatedVectorizer as GeneratedCustomVectorizer; + const authIdentity = convertSearchIndexerDataIdentityToPublic( + customWebApiParameters?.authIdentity, + ); + const vectorizer: CustomVectorizer = { + ...(generatedVectorizer as GeneratedCustomVectorizer), + customVectorizerParameters: { ...customWebApiParameters, authIdentity }, + }; + return vectorizer; + } + default: + throw Error("Unsupported vectorizer"); } - - throw Error("Unsupported vectorizer"); } export function generatedVectorSearchAlgorithmConfigurationToPublicVectorSearchAlgorithmConfiguration(): undefined; @@ -546,8 +537,8 @@ export function generatedVectorSearchAlgorithmConfigurationToPublicVectorSearchA if (["hnsw", "exhaustiveKnn"].includes(generatedAlgorithmConfiguration.kind)) { const algorithmConfiguration = generatedAlgorithmConfiguration as - | GeneratedHnswVectorSearchAlgorithmConfiguration - | GeneratedExhaustiveKnnVectorSearchAlgorithmConfiguration; + | GeneratedHnswAlgorithmConfiguration + | GeneratedExhaustiveKnnAlgorithmConfiguration; const metric = algorithmConfiguration.parameters?.metric as VectorSearchAlgorithmMetric; return { ...algorithmConfiguration, @@ -580,18 +571,21 @@ export function 
generatedSearchResultToPublicSearchResult< >(results: GeneratedSearchResult[]): SearchResult[] { const returnValues: SearchResult[] = results.map>( (result) => { - const { _score, _highlights, rerankerScore, captions, documentDebugInfo, ...restProps } = - result; - const doc: { [key: string]: any } = { - ...restProps, - }; + const { + _score: score, + _highlights: highlights, + _rerankerScore: rerankerScore, + _captions: captions, + documentDebugInfo: documentDebugInfo, + ...restProps + } = result; const obj = { - score: _score, - highlights: _highlights, + score, + highlights, rerankerScore, captions, - document: doc, documentDebugInfo, + document: restProps, }; return obj as SearchResult; }, @@ -606,13 +600,9 @@ export function generatedSuggestDocumentsResultToPublicSuggestDocumentsResult< const results = searchDocumentsResult.results.map>((element) => { const { _text, ...restProps } = element; - const doc: { [key: string]: any } = { - ...restProps, - }; - const obj = { text: _text, - document: doc, + document: restProps, }; return obj as SuggestResult; @@ -643,16 +633,21 @@ export function publicIndexToGeneratedIndex(index: SearchIndex): GeneratedSearch export function generatedSkillsetToPublicSkillset( generatedSkillset: GeneratedSearchIndexerSkillset, ): SearchIndexerSkillset { + const { + skills, + cognitiveServicesAccount, + knowledgeStore, + encryptionKey, + indexProjections, + ...props + } = generatedSkillset; return { - name: generatedSkillset.name, - description: generatedSkillset.description, - skills: convertSkillsToPublic(generatedSkillset.skills), - cognitiveServicesAccount: convertCognitiveServicesAccountToPublic( - generatedSkillset.cognitiveServicesAccount, - ), - knowledgeStore: convertKnowledgeStoreToPublic(generatedSkillset.knowledgeStore), - etag: generatedSkillset.etag, - encryptionKey: convertEncryptionKeyToPublic(generatedSkillset.encryptionKey), + ...props, + skills: convertSkillsToPublic(skills), + cognitiveServicesAccount: 
convertCognitiveServicesAccountToPublic(cognitiveServicesAccount), + knowledgeStore: convertKnowledgeStoreToPublic(knowledgeStore), + encryptionKey: convertEncryptionKeyToPublic(encryptionKey), + indexProjections: indexProjections as SearchIndexerIndexProjections, }; } @@ -713,30 +708,38 @@ export function publicSearchIndexerToGeneratedSearchIndexer( export function generatedSearchIndexerToPublicSearchIndexer( indexer: GeneratedSearchIndexer, ): SearchIndexer { + const { + parsingMode, + dataToExtract, + imageAction, + pdfTextRotationAlgorithm, + executionEnvironment, + } = indexer.parameters?.configuration ?? {}; + + const configuration: IndexingParametersConfiguration | undefined = indexer.parameters + ?.configuration && { + ...indexer.parameters?.configuration, + parsingMode: parsingMode as BlobIndexerParsingMode | undefined, + dataToExtract: dataToExtract as BlobIndexerDataToExtract | undefined, + imageAction: imageAction as BlobIndexerImageAction | undefined, + pdfTextRotationAlgorithm: pdfTextRotationAlgorithm as + | BlobIndexerPDFTextRotationAlgorithm + | undefined, + executionEnvironment: executionEnvironment as IndexerExecutionEnvironment | undefined, + }; + const parameters: IndexingParameters = { + ...indexer.parameters, + configuration, + }; + return { ...indexer, + parameters, encryptionKey: convertEncryptionKeyToPublic(indexer.encryptionKey), cache: convertSearchIndexerCacheToPublic(indexer.cache), }; } -export function generatedSearchRequestToPublicSearchRequest( - request: GeneratedSearchRequest, -): SearchRequest { - const { semanticErrorHandling, debug, vectorQueries, vectorFilterMode, ...props } = request; - const publicRequest: SearchRequest = { - semanticErrorHandlingMode: semanticErrorHandling as SemanticErrorHandlingMode | undefined, - debugMode: debug as QueryDebugMode | undefined, - vectorFilterMode: vectorFilterMode as VectorFilterMode | undefined, - vectorQueries: vectorQueries - ?.map(convertVectorQueryToPublic) - .filter((v): v is 
VectorQuery => v !== undefined), - ...props, - }; - - return publicRequest; -} - export function publicDataSourceToGeneratedDataSource( dataSource: SearchIndexerDataSourceConnection, ): GeneratedSearchIndexerDataSourceConnection { @@ -762,7 +765,7 @@ export function generatedDataSourceToPublicDataSource( return { name: dataSource.name, description: dataSource.name, - type: dataSource.type, + type: dataSource.type as SearchIndexerDataSourceType, connectionString: dataSource.credentials.connectionString, container: dataSource.container, identity: convertSearchIndexerDataIdentityToPublic(dataSource.identity), @@ -818,20 +821,6 @@ export function convertDataDeletionDetectionPolicyToPublic( return dataDeletionDetectionPolicy as SoftDeleteColumnDeletionDetectionPolicy; } -function convertVectorQueryToPublic( - vector: GeneratedVectorQuery | undefined, -): VectorQuery | undefined { - if (!vector) { - return vector; - } - - const fields: SearchFieldArray | undefined = vector.fields?.split(",") as - | SearchFieldArray - | undefined; - - return { ...vector, fields }; -} - export function getRandomIntegerInclusive(min: number, max: number): number { // Make sure inputs are integers. 
min = Math.ceil(min); @@ -852,9 +841,9 @@ export function delay(timeInMs: number): Promise { return new Promise((resolve) => setTimeout(() => resolve(), timeInMs)); } -export const serviceVersions = ["2020-06-30", "2023-10-01-Preview"]; +export const serviceVersions = ["2023-11-01", "2024-03-01-Preview"]; -export const defaultServiceVersion = "2023-10-01-Preview"; +export const defaultServiceVersion = "2024-03-01-Preview"; function convertKnowledgeStoreToPublic( knowledgeStore: BaseSearchIndexerKnowledgeStore | undefined, @@ -869,7 +858,7 @@ function convertKnowledgeStoreToPublic( }; } -function convertSearchIndexerCacheToPublic( +export function convertSearchIndexerCacheToPublic( cache?: GeneratedSearchIndexerCache, ): SearchIndexerCache | undefined { if (!cache) { diff --git a/sdk/search/search-documents/src/synonymMapHelper.ts b/sdk/search/search-documents/src/synonymMapHelper.ts index 873eee754119..6e002f2a7fbc 100644 --- a/sdk/search/search-documents/src/synonymMapHelper.ts +++ b/sdk/search/search-documents/src/synonymMapHelper.ts @@ -1,9 +1,9 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { SynonymMap } from "./serviceModels"; -import { promisify } from "util"; import * as fs from "fs"; +import { promisify } from "util"; +import { SynonymMap } from "./serviceModels"; const readFileAsync = promisify(fs.readFile); /** diff --git a/sdk/search/search-documents/src/tracing.ts b/sdk/search/search-documents/src/tracing.ts index 4f40e2d0cf45..38bff4bae880 100644 --- a/sdk/search/search-documents/src/tracing.ts +++ b/sdk/search/search-documents/src/tracing.ts @@ -7,7 +7,7 @@ import { createTracingClient } from "@azure/core-tracing"; * Creates a tracing client using the global tracer. 
* @internal */ -export const tracingClient = createTracingClient({ +const tracingClient = createTracingClient({ namespace: "Microsoft.Search", packageName: "Azure.Search", }); diff --git a/sdk/search/search-documents/swagger/Data.md b/sdk/search/search-documents/swagger/Data.md index 693d413016d0..7185711271cb 100644 --- a/sdk/search/search-documents/swagger/Data.md +++ b/sdk/search/search-documents/swagger/Data.md @@ -10,13 +10,13 @@ generate-metadata: false license-header: MICROSOFT_MIT_NO_VERSION output-folder: ../ source-code-folder-path: ./src/generated/data -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/b62ddd0ffb844fbfb688a04546800d60645a18ef/specification/search/data-plane/Azure.Search/preview/2023-10-01-Preview/searchindex.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/a0151afd7cd14913fc86cb793bde49c71122eb1e/specification/search/data-plane/Azure.Search/preview/2024-03-01-Preview/searchindex.json add-credentials: false title: SearchClient use-extension: - "@autorest/typescript": "6.0.0-alpha.17.20220318.1" + "@autorest/typescript": "6.0.14" core-http-compat-mode: true -package-version: 12.0.0-beta.4 +package-version: 12.1.0-beta.1 disable-async-iterators: true api-version-parameter: choice v3: true @@ -79,7 +79,7 @@ modelerfour: Text: $DO_NOT_NORMALIZE$_text ``` -### Change score to \_score & highlights to \_highlights in SuggestResult +### Preserve underscore prefix in some result type properties ```yaml modelerfour: @@ -87,6 +87,8 @@ modelerfour: override: Score: $DO_NOT_NORMALIZE$_score Highlights: $DO_NOT_NORMALIZE$_highlights + RerankerScore: $DO_NOT_NORMALIZE$_rerankerScore + Captions: $DO_NOT_NORMALIZE$_captions ``` ### Mark score, key and text fields as required in AnswerResult Object @@ -99,7 +101,7 @@ directive: $.required = ['score', 'key', 'text']; ``` -### Rename Vector property `K` +### Renames ```yaml directive: @@ -108,9 +110,19 @@ directive: transform: $["x-ms-client-name"] = 
"KNearestNeighborsCount"; ``` -### Rename QueryResultDocumentSemanticFieldState +```yaml +directive: + - from: swagger-document + where: $.definitions.SearchRequest.properties.semanticConfiguration + transform: $["x-ms-client-name"] = "semanticConfigurationName"; +``` -Simplify `QueryResultDocumentSemanticFieldState` name by renaming it to `SemanticFieldState` +```yaml +directive: + - from: swagger-document + where: $.definitions.RawVectorQuery + transform: $["x-ms-client-name"] = "VectorizedQuery"; +``` ```yaml directive: @@ -118,3 +130,17 @@ directive: where: $.definitions.QueryResultDocumentSemanticFieldState transform: $["x-ms-enum"].name = "SemanticFieldState"; ``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.AnswerResult + transform: $["x-ms-client-name"] = "QueryAnswerResult"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.CaptionResult + transform: $["x-ms-client-name"] = "QueryCaptionResult"; +``` diff --git a/sdk/search/search-documents/swagger/Service.md b/sdk/search/search-documents/swagger/Service.md index f531d95e7938..f1c287e8f862 100644 --- a/sdk/search/search-documents/swagger/Service.md +++ b/sdk/search/search-documents/swagger/Service.md @@ -10,12 +10,12 @@ generate-metadata: false license-header: MICROSOFT_MIT_NO_VERSION output-folder: ../ source-code-folder-path: ./src/generated/service -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/b62ddd0ffb844fbfb688a04546800d60645a18ef/specification/search/data-plane/Azure.Search/preview/2023-10-01-Preview/searchservice.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/a0151afd7cd14913fc86cb793bde49c71122eb1e/specification/search/data-plane/Azure.Search/preview/2024-03-01-Preview/searchservice.json add-credentials: false use-extension: - "@autorest/typescript": "6.0.0-alpha.17.20220318.1" + "@autorest/typescript": "6.0.14" core-http-compat-mode: true -package-version: 12.0.0-beta.4 
+package-version: 12.1.0-beta.1 disable-async-iterators: true api-version-parameter: choice v3: true @@ -290,6 +290,83 @@ directive: $["x-ms-client-name"] = "name"; ``` +```yaml +directive: + - from: swagger-document + where: $.definitions.SearchField.properties.dimensions + transform: $["x-ms-client-name"] = "vectorSearchDimensions"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.HnswVectorSearchAlgorithmConfiguration + transform: $["x-ms-client-name"] = "HnswAlgorithmConfiguration"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.ExhaustiveKnnVectorSearchAlgorithmConfiguration + transform: $["x-ms-client-name"] = "ExhaustiveKnnAlgorithmConfiguration"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.PIIDetectionSkill.properties.piiCategories + transform: $["x-ms-client-name"] = "categories"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.SearchField.properties.vectorSearchProfile + transform: $["x-ms-client-name"] = "vectorSearchProfileName"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.SemanticSettings.defaultConfiguration + transform: $["x-ms-client-name"] = "defaultConfigurationName"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.SearchIndex.properties.semantic + transform: $["x-ms-client-name"] = "semanticSearch"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.SemanticSettings + transform: $["x-ms-client-name"] = "SemanticSearch"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.VectorSearchProfile.properties.algorithm + transform: $["x-ms-client-name"] = "algorithmConfigurationName"; +``` + +```yaml +directive: + - from: swagger-document + where: $.definitions.PIIDetectionSkill.properties.maskingCharacter + transform: $["x-ms-client-name"] = undefined; +``` + +```yaml +directive: + - from: swagger-document + where: 
$.definitions.VectorSearchCompressionConfiguration + transform: $["x-ms-client-name"] = "BaseVectorSearchCompressionConfiguration"; +``` + ### Deprecations ```yaml @@ -313,17 +390,6 @@ directive: transform: $.description += "\n\n@deprecated"; ``` -### Rename Dimensions - -To ensure alignment with `VectorSearchConfiguration` in intellisense and documentation, rename the `Dimensions` to `VectorSearchDimensions`. - -```yaml -directive: - - from: swagger-document - where: $.definitions.SearchField.properties.dimensions - transform: $["x-ms-client-name"] = "vectorSearchDimensions"; -``` - ### Add `arm-id` format for `AuthResourceId` Add `"format": "arm-id"` for `AuthResourceId` to generate as [Azure.Core.ResourceIdentifier] diff --git a/sdk/search/search-documents/test/compressionDisabled.ts b/sdk/search/search-documents/test/compressionDisabled.ts new file mode 100644 index 000000000000..dc8c1a022a78 --- /dev/null +++ b/sdk/search/search-documents/test/compressionDisabled.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +export const COMPRESSION_DISABLED = true; diff --git a/sdk/search/search-documents/test/internal/serialization.spec.ts b/sdk/search/search-documents/test/internal/serialization.spec.ts index 092973935e7b..d1411fb1ce7a 100644 --- a/sdk/search/search-documents/test/internal/serialization.spec.ts +++ b/sdk/search/search-documents/test/internal/serialization.spec.ts @@ -3,8 +3,8 @@ import { assert } from "chai"; import * as sinon from "sinon"; -import { deserialize, serialize } from "../../src/serialization"; import GeographyPoint from "../../src/geographyPoint"; +import { deserialize, serialize } from "../../src/serialization"; describe("serialization.serialize", function () { it("nested", function () { diff --git a/sdk/search/search-documents/test/internal/serviceUtils.spec.ts b/sdk/search/search-documents/test/internal/serviceUtils.spec.ts index 8040b7b79758..bda81ef1557b 100644 --- a/sdk/search/search-documents/test/internal/serviceUtils.spec.ts +++ b/sdk/search/search-documents/test/internal/serviceUtils.spec.ts @@ -2,10 +2,10 @@ // Licensed under the MIT license. 
import { assert } from "chai"; -import { convertFieldsToGenerated, convertFieldsToPublic } from "../../src/serviceUtils"; import { SearchField as GeneratedSearchField } from "../../src/generated/service/models/index"; -import { KnownLexicalAnalyzerName } from "../../src/index"; +import { KnownAnalyzerNames } from "../../src/index"; import { ComplexField, SearchField } from "../../src/serviceModels"; +import { convertFieldsToGenerated, convertFieldsToPublic } from "../../src/serviceUtils"; describe("serviceUtils", function () { it("convert generated fields to public fields", function () { @@ -19,10 +19,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, retrievable: false, - analyzer: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzer: KnownLexicalAnalyzerName.ArLucene, - normalizer: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzer: KnownLexicalAnalyzerName.CaLucene, + analyzer: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzer: KnownAnalyzerNames.ArLucene, + normalizer: KnownAnalyzerNames.BgLucene, + searchAnalyzer: KnownAnalyzerNames.CaLucene, synonymMaps: undefined, }, ]); @@ -36,10 +36,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, hidden: true, - analyzerName: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzerName: KnownLexicalAnalyzerName.ArLucene, - normalizerName: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzerName: KnownLexicalAnalyzerName.CaLucene, + analyzerName: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzerName: KnownAnalyzerNames.ArLucene, + normalizerName: KnownAnalyzerNames.BgLucene, + searchAnalyzerName: KnownAnalyzerNames.CaLucene, synonymMapNames: undefined, }); }); @@ -59,10 +59,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, retrievable: false, - analyzer: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzer: KnownLexicalAnalyzerName.ArLucene, - normalizer: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzer: 
KnownLexicalAnalyzerName.CaLucene, + analyzer: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzer: KnownAnalyzerNames.ArLucene, + normalizer: KnownAnalyzerNames.BgLucene, + searchAnalyzer: KnownAnalyzerNames.CaLucene, synonymMaps: undefined, }, ], @@ -83,10 +83,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, hidden: true, - analyzerName: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzerName: KnownLexicalAnalyzerName.ArLucene, - normalizerName: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzerName: KnownLexicalAnalyzerName.CaLucene, + analyzerName: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzerName: KnownAnalyzerNames.ArLucene, + normalizerName: KnownAnalyzerNames.BgLucene, + searchAnalyzerName: KnownAnalyzerNames.CaLucene, synonymMapNames: undefined, }); }); @@ -102,10 +102,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, hidden: true, - analyzerName: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzerName: KnownLexicalAnalyzerName.ArLucene, - normalizerName: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzerName: KnownLexicalAnalyzerName.CaLucene, + analyzerName: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzerName: KnownAnalyzerNames.ArLucene, + normalizerName: KnownAnalyzerNames.BgLucene, + searchAnalyzerName: KnownAnalyzerNames.CaLucene, synonymMapNames: undefined, }, ]); @@ -119,10 +119,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, retrievable: false, - analyzer: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzer: KnownLexicalAnalyzerName.ArLucene, - normalizer: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzer: KnownLexicalAnalyzerName.CaLucene, + analyzer: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzer: KnownAnalyzerNames.ArLucene, + normalizer: KnownAnalyzerNames.BgLucene, + searchAnalyzer: KnownAnalyzerNames.CaLucene, synonymMaps: undefined, }); }); @@ -142,10 +142,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, 
hidden: true, - analyzerName: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzerName: KnownLexicalAnalyzerName.ArLucene, - normalizerName: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzerName: KnownLexicalAnalyzerName.CaLucene, + analyzerName: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzerName: KnownAnalyzerNames.ArLucene, + normalizerName: KnownAnalyzerNames.BgLucene, + searchAnalyzerName: KnownAnalyzerNames.CaLucene, synonymMapNames: undefined, }, ], @@ -166,10 +166,10 @@ describe("serviceUtils", function () { filterable: true, facetable: true, retrievable: false, - analyzer: KnownLexicalAnalyzerName.ArMicrosoft, - indexAnalyzer: KnownLexicalAnalyzerName.ArLucene, - normalizer: KnownLexicalAnalyzerName.BgLucene, - searchAnalyzer: KnownLexicalAnalyzerName.CaLucene, + analyzer: KnownAnalyzerNames.ArMicrosoft, + indexAnalyzer: KnownAnalyzerNames.ArLucene, + normalizer: KnownAnalyzerNames.BgLucene, + searchAnalyzer: KnownAnalyzerNames.CaLucene, synonymMaps: undefined, }); }); diff --git a/sdk/search/search-documents/test/narrowedTypes.ts b/sdk/search/search-documents/test/narrowedTypes.ts index d30449b2f156..47f2e86a5db2 100644 --- a/sdk/search/search-documents/test/narrowedTypes.ts +++ b/sdk/search/search-documents/test/narrowedTypes.ts @@ -9,10 +9,10 @@ import { SearchClient, SelectFields } from "../src/index"; import { + NarrowedModel as GenericNarrowedModel, SearchFieldArray, SearchPick, SelectArray, - NarrowedModel as GenericNarrowedModel, SuggestNarrowedModel, } from "../src/indexModels"; @@ -246,7 +246,9 @@ function testNarrowedClient() { async () => { type VectorFields = NonNullable< NonNullable< - NonNullable[1]>["vectorQueries"] + NonNullable< + NonNullable[1]>["vectorSearchOptions"] + >["queries"] >[number]["fields"] >; const a: Equals = "pass"; @@ -379,7 +381,9 @@ function testWideClient() { async () => { type VectorFields = NonNullable< NonNullable< - NonNullable[1]>["vectorQueries"] + NonNullable< + NonNullable[1]>["vectorSearchOptions"] + 
>["queries"] >[number]["fields"] >; const a: Equals = "pass"; diff --git a/sdk/search/search-documents/test/public/generated/typeDefinitions.ts b/sdk/search/search-documents/test/public/generated/typeDefinitions.ts new file mode 100755 index 000000000000..046e933bd0e3 --- /dev/null +++ b/sdk/search/search-documents/test/public/generated/typeDefinitions.ts @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +/* eslint-disable no-unused-expressions */ +/* eslint-disable no-constant-condition */ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +/* eslint-disable @typescript-eslint/explicit-function-return-type */ +/* eslint-disable @typescript-eslint/no-unused-vars */ + +import { + KnownSemanticErrorMode, + KnownSemanticErrorReason, + KnownSemanticSearchResultsType, + KnownVectorFilterMode, + KnownVectorQueryKind, +} from "../../../src/generated/data"; + +import { + KnownBlobIndexerDataToExtract, + KnownBlobIndexerImageAction, + KnownBlobIndexerPDFTextRotationAlgorithm, + KnownBlobIndexerParsingMode, + KnownCustomEntityLookupSkillLanguage, + KnownEntityCategory, + KnownEntityRecognitionSkillLanguage, + KnownImageAnalysisSkillLanguage, + KnownImageDetail, + KnownIndexerExecutionEnvironment, + KnownKeyPhraseExtractionSkillLanguage, + KnownOcrSkillLanguage, + KnownPIIDetectionSkillMaskingMode, + KnownRegexFlags, + KnownSearchFieldDataType, + KnownSearchIndexerDataSourceType, + KnownSentimentSkillLanguage, + KnownSplitSkillLanguage, + KnownTextSplitMode, + KnownTextTranslationSkillLanguage, + KnownVectorSearchAlgorithmKind, + KnownVectorSearchAlgorithmMetric, + KnownVectorSearchVectorizerKind, + KnownVisualFeature, +} from "../../../src/generated/service"; + +import { + BlobIndexerDataToExtract, + BlobIndexerImageAction, + BlobIndexerPDFTextRotationAlgorithm, + BlobIndexerParsingMode, + CustomEntityLookupSkillLanguage, + EntityCategory, + EntityRecognitionSkillLanguage, + ImageAnalysisSkillLanguage, + ImageDetail, + 
IndexerExecutionEnvironment, + KeyPhraseExtractionSkillLanguage, + OcrSkillLanguage, + PIIDetectionSkillMaskingMode, + RegexFlags, + SearchFieldDataType, + SearchIndexerDataSourceType, + SemanticErrorMode, + SemanticErrorReason, + SemanticSearchResultsType, + SentimentSkillLanguage, + SplitSkillLanguage, + TextSplitMode, + TextTranslationSkillLanguage, + VectorFilterMode, + VectorQueryKind, + VectorSearchAlgorithmKind, + VectorSearchAlgorithmMetric, + VectorSearchVectorizerKind, + VisualFeature, +} from "../../../src/index"; + +type IsIdentical = + (() => T extends T1 ? true : false) extends () => T extends T2 ? true : false ? any : never; + +type ExpectBlobIndexerDataToExtract = `${KnownBlobIndexerDataToExtract}`; +type ExpectBlobIndexerImageAction = `${KnownBlobIndexerImageAction}`; +type ExpectBlobIndexerParsingMode = `${KnownBlobIndexerParsingMode}`; +type ExpectBlobIndexerPDFTextRotationAlgorithm = `${KnownBlobIndexerPDFTextRotationAlgorithm}`; +type ExpectCustomEntityLookupSkillLanguage = `${KnownCustomEntityLookupSkillLanguage}`; +type ExpectEntityCategory = `${KnownEntityCategory}`; +type ExpectEntityRecognitionSkillLanguage = `${KnownEntityRecognitionSkillLanguage}`; +type ExpectImageAnalysisSkillLanguage = `${KnownImageAnalysisSkillLanguage}`; +type ExpectImageDetail = `${KnownImageDetail}`; +type ExpectIndexerExecutionEnvironment = `${KnownIndexerExecutionEnvironment}`; +type ExpectKeyPhraseExtractionSkillLanguage = `${KnownKeyPhraseExtractionSkillLanguage}`; +type ExpectOcrSkillLanguage = `${KnownOcrSkillLanguage}`; +type ExpectPIIDetectionSkillMaskingMode = `${KnownPIIDetectionSkillMaskingMode}`; +type ExpectRegexFlags = `${KnownRegexFlags}`; +type ExpectSearchFieldDataType = Exclude< + `${KnownSearchFieldDataType}` | `Collection(${KnownSearchFieldDataType})`, + | "Edm.ComplexType" + | "Collection(Edm.ComplexType)" + | "Edm.Single" + | "Edm.Half" + | "Edm.Int16" + | "Edm.SByte" +>; +type ExpectSearchIndexerDataSourceType = 
`${KnownSearchIndexerDataSourceType}`; +type ExpectSemanticErrorMode = `${KnownSemanticErrorMode}`; +type ExpectSemanticErrorReason = `${KnownSemanticErrorReason}`; +type ExpectSemanticSearchResultsType = `${KnownSemanticSearchResultsType}`; +type ExpectSentimentSkillLanguage = `${KnownSentimentSkillLanguage}`; +type ExpectSplitSkillLanguage = `${KnownSplitSkillLanguage}`; +type ExpectTextSplitMode = `${KnownTextSplitMode}`; +type ExpectTextTranslationSkillLanguage = `${KnownTextTranslationSkillLanguage}`; +type ExpectVectorFilterMode = `${KnownVectorFilterMode}`; +type ExpectVectorQueryKind = `${KnownVectorQueryKind}`; +type ExpectVectorSearchAlgorithmKind = `${KnownVectorSearchAlgorithmKind}`; +type ExpectVectorSearchAlgorithmMetric = `${KnownVectorSearchAlgorithmMetric}`; +type ExpectVectorSearchVectorizerKind = `${KnownVectorSearchVectorizerKind}`; +type ExpectVisualFeature = `${KnownVisualFeature}`; + +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +function fun() { + const a: IsIdentical = "pass"; + const b: IsIdentical = "pass"; + const c: IsIdentical = "pass"; + const d: IsIdentical< + ExpectBlobIndexerPDFTextRotationAlgorithm, + BlobIndexerPDFTextRotationAlgorithm + > = "pass"; + const e: IsIdentical = + "pass"; + const f: IsIdentical = "pass"; + const g: IsIdentical = + "pass"; + const h: IsIdentical = "pass"; + const i: IsIdentical = "pass"; + const j: IsIdentical = "pass"; + const k: IsIdentical = + "pass"; + const l: IsIdentical = "pass"; + const m: IsIdentical = "pass"; + const n: IsIdentical = "pass"; + const o: IsIdentical = "pass"; + const p: IsIdentical = "pass"; + const q: IsIdentical = "pass"; + const r: IsIdentical = "pass"; + const s: IsIdentical = "pass"; + const t: IsIdentical = "pass"; + const u: IsIdentical = "pass"; + const v: IsIdentical = "pass"; + const w: IsIdentical = "pass"; + const x: IsIdentical = "pass"; + const y: IsIdentical = "pass"; + const z: IsIdentical = "pass"; + const aa: IsIdentical = 
"pass"; + const ab: IsIdentical = "pass"; + const ac: IsIdentical = "pass"; + return [a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z, aa, ab, ac]; +} diff --git a/sdk/search/search-documents/test/public/node/searchClient.spec.ts b/sdk/search/search-documents/test/public/node/searchClient.spec.ts index e783131e065d..997ff3dffe05 100644 --- a/sdk/search/search-documents/test/public/node/searchClient.spec.ts +++ b/sdk/search/search-documents/test/public/node/searchClient.spec.ts @@ -1,12 +1,12 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +import { env, isLiveMode, Recorder } from "@azure-tools/test-recorder"; import { assert } from "chai"; -import { Context } from "mocha"; -import { Suite } from "mocha"; -import { Recorder, env, isLiveMode } from "@azure-tools/test-recorder"; +import { Context, Suite } from "mocha"; -import { createClients } from "../utils/recordedClient"; +import { OpenAIClient } from "@azure/openai"; +import { versionsToTest } from "@azure/test-utils"; import { AutocompleteResult, AzureKeyCredential, @@ -17,15 +17,15 @@ import { SearchIndexClient, SelectFields, } from "../../../src"; -import { Hotel } from "../utils/interfaces"; -import { WAIT_TIME, createIndex, createRandomIndexName, populateIndex } from "../utils/setup"; -import { delay, serviceVersions } from "../../../src/serviceUtils"; -import { versionsToTest } from "@azure/test-utils"; import { SearchFieldArray, SelectArray } from "../../../src/indexModels"; -import { OpenAIClient } from "@azure/openai"; +import { delay, serviceVersions } from "../../../src/serviceUtils"; +import { COMPRESSION_DISABLED } from "../../compressionDisabled"; +import { Hotel } from "../utils/interfaces"; +import { createClients } from "../utils/recordedClient"; +import { createIndex, createRandomIndexName, populateIndex, WAIT_TIME } from "../utils/setup"; versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { - onVersions({ minVer: 
"2020-06-30" }).describe("SearchClient tests", function (this: Suite) { + onVersions({ minVer: "2023-11-01" }).describe("SearchClient tests", function (this: Suite) { let recorder: Recorder; let searchClient: SearchClient; let indexClient: SearchIndexClient; @@ -45,7 +45,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { } = await createClients(serviceVersion, recorder, TEST_INDEX_NAME)); await createIndex(indexClient, TEST_INDEX_NAME, serviceVersion); await delay(WAIT_TIME); - await populateIndex(searchClient, openAIClient, serviceVersion); + await populateIndex(searchClient, openAIClient); }); afterEach(async function () { @@ -80,6 +80,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { skip: 0, top: 5, includeTotalCount: true, + select: ["address/streetAddress"], }); assert.equal(searchResults.count, 6); }); @@ -379,7 +380,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { }); }); - onVersions({ minVer: "2023-10-01-Preview" }).describe( + onVersions({ minVer: "2024-03-01-Preview" }).describe( "SearchClient tests", function (this: Suite) { let recorder: Recorder; @@ -401,7 +402,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { } = await createClients(serviceVersion, recorder, TEST_INDEX_NAME)); await createIndex(indexClient, TEST_INDEX_NAME, serviceVersion); await delay(WAIT_TIME); - await populateIndex(searchClient, openAIClient, serviceVersion); + await populateIndex(searchClient, openAIClient); }); afterEach(async function () { @@ -430,7 +431,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { includeTotalCount: true, queryLanguage: KnownQueryLanguage.EnUs, queryType: "semantic", - semanticConfiguration: "semantic-configuration-name", + semanticSearchOptions: { configurationName: "semantic-configuration-name" }, }); assert.equal(searchResults.count, 1); }); @@ -439,9 +440,11 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) 
=> { const searchResults = await searchClient.search("luxury", { queryLanguage: KnownQueryLanguage.EnUs, queryType: "semantic", - semanticConfiguration: "semantic-configuration-name", - semanticErrorHandlingMode: "fail", - debugMode: "semantic", + semanticSearchOptions: { + configurationName: "semantic-configuration-name", + errorMode: "fail", + debugMode: "semantic", + }, }); for await (const result of searchResults.results) { assert.deepEqual( @@ -457,13 +460,13 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { keywordFields: [ { name: "tags", - state: "unused", + state: "used", }, ], rerankerInput: { content: "Best hotel in town if you like luxury hotels. They have an amazing infinity pool, a spa, and a really helpful concierge. The location is perfect -- right downtown, close to all the tourist attractions. We highly recommend this hotel.", - keywords: "", + keywords: "pool\r\nview\r\nwifi\r\nconcierge", title: "Fancy Stay", }, titleField: { @@ -482,8 +485,10 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { const searchResults = await searchClient.search("What are the most luxurious hotels?", { queryLanguage: KnownQueryLanguage.EnUs, queryType: "semantic", - semanticConfiguration: "semantic-configuration-name", - answers: { answers: "extractive", count: 3, threshold: 0.7 }, + semanticSearchOptions: { + configurationName: "semantic-configuration-name", + answers: { answerType: "extractive", count: 3, threshold: 0.7 }, + }, top: 3, select: ["hotelId"], }); @@ -492,15 +497,17 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { for await (const result of searchResults.results) { resultIds.push(result.document.hotelId); } - assert.deepEqual(["3", "9", "1"], resultIds); + assert.deepEqual(["1", "9", "3"], resultIds); }); it("search with semantic error handling", async function () { const searchResults = await searchClient.search("luxury", { queryLanguage: KnownQueryLanguage.EnUs, queryType: 
"semantic", - semanticConfiguration: "semantic-configuration-name", - semanticErrorHandlingMode: "partial", + semanticSearchOptions: { + configurationName: "semantic-configuration-name", + errorMode: "partial", + }, select: ["hotelId"], }); @@ -517,21 +524,23 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { this.skip(); } const embeddings = await openAIClient.getEmbeddings( - env.OPENAI_DEPLOYMENT_NAME ?? "deployment-name", + env.AZURE_OPENAI_DEPLOYMENT_NAME ?? "deployment-name", ["What are the most luxurious hotels?"], ); const embedding = embeddings.data[0].embedding; const searchResults = await searchClient.search("*", { - vectorQueries: [ - { - kind: "vector", - vector: embedding, - kNearestNeighborsCount: 3, - fields: ["vectorDescription"], - }, - ], + vectorSearchOptions: { + queries: [ + { + kind: "vector", + vector: embedding, + kNearestNeighborsCount: 3, + fields: ["vectorDescription"], + }, + ], + }, top: 3, select: ["hotelId"], }); @@ -549,27 +558,67 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { this.skip(); } const embeddings = await openAIClient.getEmbeddings( - env.OPENAI_DEPLOYMENT_NAME ?? "deployment-name", + env.AZURE_OPENAI_DEPLOYMENT_NAME ?? 
"deployment-name", ["What are the most luxurious hotels?"], ); const embedding = embeddings.data[0].embedding; const searchResults = await searchClient.search("*", { - vectorQueries: [ - { - kind: "vector", - vector: embedding, - kNearestNeighborsCount: 3, - fields: ["vectorDescription"], - }, - { - kind: "vector", - vector: embedding, - kNearestNeighborsCount: 3, - fields: ["vectorDescription"], - }, - ], + vectorSearchOptions: { + queries: [ + { + kind: "vector", + vector: embedding, + kNearestNeighborsCount: 3, + fields: ["vectorDescription"], + }, + { + kind: "vector", + vector: embedding, + kNearestNeighborsCount: 3, + fields: ["vectorDescription"], + }, + ], + }, + top: 3, + select: ["hotelId"], + }); + + const resultIds = []; + for await (const result of searchResults.results) { + resultIds.push(result.document.hotelId); + } + assert.deepEqual(["1", "3", "4"], resultIds); + }); + + it("oversampling compressed vectors", async function () { + // This live test is disabled due to temporary limitations with the new OpenAI service + if (isLiveMode()) { + this.skip(); + } + // Currently unable to create a compression resource + if (COMPRESSION_DISABLED) { + this.skip(); + } + const embeddings = await openAIClient.getEmbeddings( + env.AZURE_OPENAI_DEPLOYMENT_NAME ?? 
"deployment-name", + ["What are the most luxurious hotels?"], + ); + + const embedding = embeddings.data[0].embedding; + const searchResults = await searchClient.search("*", { + vectorSearchOptions: { + queries: [ + { + kind: "vector", + vector: embedding, + kNearestNeighborsCount: 3, + fields: ["compressedVectorDescription"], + oversampling: 2, + }, + ], + }, top: 3, select: ["hotelId"], }); @@ -585,7 +634,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { }); versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { - onVersions({ minVer: "2020-06-30" }).describe("SearchClient tests", function (this: Suite) { + onVersions({ minVer: "2023-11-01" }).describe("SearchClient tests", function (this: Suite) { const credential = new AzureKeyCredential("key"); describe("Passing serviceVersion", () => { @@ -607,8 +656,8 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { it("defaults to the current apiVersion", () => { const client = new SearchClient("", "", credential); - assert.equal("2023-10-01-Preview", client.serviceVersion); - assert.equal("2023-10-01-Preview", client.apiVersion); + assert.equal("2024-03-01-Preview", client.serviceVersion); + assert.equal("2024-03-01-Preview", client.apiVersion); }); }); }); diff --git a/sdk/search/search-documents/test/public/node/searchIndexClient.spec.ts b/sdk/search/search-documents/test/public/node/searchIndexClient.spec.ts index dd53050e556d..b2b1bb70d514 100644 --- a/sdk/search/search-documents/test/public/node/searchIndexClient.spec.ts +++ b/sdk/search/search-documents/test/public/node/searchIndexClient.spec.ts @@ -1,10 +1,10 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { Recorder, isLiveMode, env } from "@azure-tools/test-recorder"; -import { Context } from "mocha"; -import { Suite } from "mocha"; +import { env, isLiveMode, Recorder } from "@azure-tools/test-recorder"; +import { versionsToTest } from "@azure/test-utils"; import { assert } from "chai"; +import { Context, Suite } from "mocha"; import { AzureOpenAIVectorizer, SearchIndex, @@ -13,20 +13,19 @@ import { VectorSearchAlgorithmConfiguration, VectorSearchProfile, } from "../../../src"; +import { delay, serviceVersions } from "../../../src/serviceUtils"; import { Hotel } from "../utils/interfaces"; import { createClients } from "../utils/recordedClient"; import { - WAIT_TIME, createRandomIndexName, createSimpleIndex, createSynonymMaps, deleteSynonymMaps, + WAIT_TIME, } from "../utils/setup"; -import { delay, serviceVersions } from "../../../src/serviceUtils"; -import { versionsToTest } from "@azure/test-utils"; versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { - onVersions({ minVer: "2020-06-30" }).describe("SearchIndexClient", function (this: Suite) { + onVersions({ minVer: "2023-11-01" }).describe("SearchIndexClient", function (this: Suite) { let recorder: Recorder; let indexClient: SearchIndexClient; let TEST_INDEX_NAME: string; @@ -232,7 +231,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { }); }); }); - onVersions({ minVer: "2023-10-01-Preview" }).describe( + onVersions({ minVer: "2024-03-01-Preview" }).describe( "SearchIndexClient", function (this: Suite) { let recorder: Recorder; @@ -276,14 +275,14 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { kind: "azureOpenAI", name: "vectorizer", azureOpenAIParameters: { - apiKey: env.OPENAI_KEY, - deploymentId: env.OPENAI_DEPLOYMENT_NAME, - resourceUri: env.OPENAI_ENDPOINT, + apiKey: env.AZURE_OPENAI_KEY, + deploymentId: env.AZURE_OPENAI_DEPLOYMENT_NAME, + resourceUri: env.AZURE_OPENAI_ENDPOINT, }, }; const profile: VectorSearchProfile = { name: 
"profile", - algorithm: algorithm.name, + algorithmConfigurationName: algorithm.name, vectorizer: vectorizer.name, }; @@ -300,7 +299,7 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { name: "descriptionVector", vectorSearchDimensions: 1536, searchable: true, - vectorSearchProfile: profile.name, + vectorSearchProfileName: profile.name, }, ], vectorSearch: { @@ -309,8 +308,8 @@ versionsToTest(serviceVersions, {}, (serviceVersion, onVersions) => { profiles: [profile], }, }; - await indexClient.createOrUpdateIndex(index); try { + await indexClient.createOrUpdateIndex(index); index = await indexClient.getIndex(indexName); assert.deepEqual(index.vectorSearch?.algorithms?.[0].name, algorithm.name); assert.deepEqual(index.vectorSearch?.vectorizers?.[0].name, vectorizer.name); diff --git a/sdk/search/search-documents/test/public/typeDefinitions.ts b/sdk/search/search-documents/test/public/typeDefinitions.ts index febc839df32c..f9540063b7d7 100644 --- a/sdk/search/search-documents/test/public/typeDefinitions.ts +++ b/sdk/search/search-documents/test/public/typeDefinitions.ts @@ -8,71 +8,47 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ import { - KnownSearchFieldDataType, - KnownVectorSearchAlgorithmMetric, + KnownCharFilterName, + KnownLexicalAnalyzerName, + KnownLexicalTokenizerName, + KnownTokenFilterName, KnownVectorSearchAlgorithmKind, - KnownIndexProjectionMode, - KnownVectorSearchVectorizerKind, + KnownVectorSearchAlgorithmMetric, } from "../../src/generated/service"; + +import { KnownVectorFilterMode } from "../../src/generated/data"; + import { - KnownSemanticPartialResponseReason, - KnownSemanticPartialResponseType, - KnownQueryDebugMode, - KnownSemanticErrorHandling, - KnownSemanticFieldState, - KnownVectorQueryKind, - KnownVectorFilterMode, -} from "../../src/generated/data"; -import { - ComplexDataType, - SearchFieldDataType, - SemanticPartialResponseReason, - SemanticPartialResponseType, - QueryDebugMode, - 
SemanticErrorHandlingMode, - SemanticFieldState, - VectorSearchAlgorithmMetric, - VectorSearchAlgorithmKind, - IndexProjectionMode, - VectorSearchVectorizerKind, - VectorQueryKind, + KnownAnalyzerNames, + KnownCharFilterNames, + KnownTokenFilterNames, + KnownTokenizerNames, VectorFilterMode, + VectorSearchAlgorithmKind, + VectorSearchAlgorithmMetric, } from "../../src/index"; type IsIdentical = (() => T extends T1 ? true : false) extends () => T extends T2 ? true : false ? any : never; -type ExpectSearchFieldDataType = Exclude< - `${KnownSearchFieldDataType}` | `Collection(${KnownSearchFieldDataType})`, - ComplexDataType | "Edm.Single" ->; -type ExpectSemanticPartialResponseReason = `${KnownSemanticPartialResponseReason}`; -type ExpectSemanticPartialResponseType = `${KnownSemanticPartialResponseType}`; -type ExpectQueryDebugMode = `${KnownQueryDebugMode}`; -type ExpectSemanticErrorHandlingMode = `${KnownSemanticErrorHandling}`; -type ExpectSemanticFieldState = `${KnownSemanticFieldState}`; type ExpectVectorSearchAlgorithmMetric = `${KnownVectorSearchAlgorithmMetric}`; type ExpectVectorSearchAlgorithmKind = `${KnownVectorSearchAlgorithmKind}`; -type ExpectIndexProjectionMode = `${KnownIndexProjectionMode}`; -type ExpectVectorSearchVectorizerKind = `${KnownVectorSearchVectorizerKind}`; -type ExpectVectorQueryKind = `${KnownVectorQueryKind}`; type ExpectVectorFilterMode = `${KnownVectorFilterMode}`; +type ExpectKnownCharFilterNames = `${KnownCharFilterName}`; +type ExpectKnownAnalyzerNames = `${KnownLexicalAnalyzerName}`; +type ExpectKnownTokenizerNames = `${KnownLexicalTokenizerName}`; +type ExpectKnownTokenFilterNames = `${KnownTokenFilterName}`; // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore function fun() { - const a: IsIdentical = "pass"; - const b: IsIdentical = "pass"; - const c: IsIdentical = "pass"; - const d: IsIdentical = "pass"; - const e: IsIdentical = "pass"; - const f: IsIdentical = "pass"; - const g: IsIdentical = "pass"; - 
const h: IsIdentical = "pass"; - const i: IsIdentical = "pass"; - const j: IsIdentical = "pass"; - const k: IsIdentical = "pass"; - const l: IsIdentical = "pass"; + const a: IsIdentical = "pass"; + const b: IsIdentical = "pass"; + const c: IsIdentical = "pass"; + const d: IsIdentical = "pass"; + const e: IsIdentical = "pass"; + const f: IsIdentical = "pass"; + const g: IsIdentical = "pass"; - return [a, b, c, d, e, f, g, h, i, j, k, l]; + return [a, b, c, d, e, f, g]; } diff --git a/sdk/search/search-documents/test/public/utils/interfaces.ts b/sdk/search/search-documents/test/public/utils/interfaces.ts index 8cfe01cf2cb0..cbf59ad1d666 100644 --- a/sdk/search/search-documents/test/public/utils/interfaces.ts +++ b/sdk/search/search-documents/test/public/utils/interfaces.ts @@ -8,6 +8,7 @@ export interface Hotel { hotelName?: string | null; description?: string | null; vectorDescription?: number[] | null; + compressedVectorDescription?: number[] | null; descriptionFr?: string | null; category?: string | null; tags?: string[] | null; diff --git a/sdk/search/search-documents/test/public/utils/recordedClient.ts b/sdk/search/search-documents/test/public/utils/recordedClient.ts index d6df77d22847..f36302708e55 100644 --- a/sdk/search/search-documents/test/public/utils/recordedClient.ts +++ b/sdk/search/search-documents/test/public/utils/recordedClient.ts @@ -1,15 +1,21 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { Recorder, RecorderStartOptions, env } from "@azure-tools/test-recorder"; - +import { + assertEnvironmentVariable, + env, + Recorder, + RecorderStartOptions, +} from "@azure-tools/test-recorder"; +import { FindReplaceSanitizer } from "@azure-tools/test-recorder/types/src/utils/utils"; +import { isDefined } from "@azure/core-util"; +import { OpenAIClient } from "@azure/openai"; import { AzureKeyCredential, SearchClient, SearchIndexClient, SearchIndexerClient, } from "../../../src"; -import { OpenAIClient } from "@azure/openai"; export interface Clients { searchClient: SearchClient; @@ -19,58 +25,88 @@ export interface Clients { openAIClient: OpenAIClient; } -const envSetupForPlayback: { [k: string]: string } = { - SEARCH_API_ADMIN_KEY: "admin_key", - SEARCH_API_ADMIN_KEY_ALT: "admin_key_alt", - ENDPOINT: "https://endpoint", - OPENAI_DEPLOYMENT_NAME: "deployment-name", - OPENAI_ENDPOINT: "https://openai.endpoint", - OPENAI_KEY: "openai-key", -}; - -export const testEnv = new Proxy(envSetupForPlayback, { - get: (target, key: string) => { - return env[key] || target[key]; - }, -}); - -const generalSanitizers = []; - -if (env.ENDPOINT) { - generalSanitizers.push({ - regex: false, - value: "subdomain", - target: env.ENDPOINT.match(/:\/\/(.*).search.windows.net/)![1], - }); +interface Env { + SEARCH_API_ADMIN_KEY: string; + SEARCH_API_ADMIN_KEY_ALT: string; + ENDPOINT: string; + AZURE_OPENAI_DEPLOYMENT_NAME: string; + AZURE_OPENAI_ENDPOINT: string; + AZURE_OPENAI_KEY: string; } -if (env.OPENAI_ENDPOINT) { - generalSanitizers.push({ - regex: false, - value: "subdomain", - target: env.OPENAI_ENDPOINT.match(/:\/\/(.*).openai.azure.com/)![1], - }); +// modifies URIs in the environment to end in a trailing slash +const uriEnvVars = ["ENDPOINT", "AZURE_OPENAI_ENDPOINT"] as const; + +function fixEnvironment(): RecorderStartOptions { + const envSetupForPlayback = { + SEARCH_API_ADMIN_KEY: "admin_key", + SEARCH_API_ADMIN_KEY_ALT: "admin_key_alt", + ENDPOINT: 
"https://subdomain.search.windows.net/", + AZURE_OPENAI_DEPLOYMENT_NAME: "deployment-name", + AZURE_OPENAI_ENDPOINT: "https://subdomain.openai.azure.com/", + AZURE_OPENAI_KEY: "openai-key", + }; + + appendTrailingSlashesToEnvironment(envSetupForPlayback); + const generalSanitizers = getSubdomainSanitizers(); + + return { + envSetupForPlayback, + sanitizerOptions: { + generalSanitizers, + }, + }; +} + +function appendTrailingSlashesToEnvironment(envSetupForPlayback: Env): void { + for (const envBag of [env, envSetupForPlayback]) { + for (const name of uriEnvVars) { + const value = envBag[name]; + if (value) { + envBag[name] = value.endsWith("/") ? value : `${value}/`; + } + } + } } -const recorderOptions: RecorderStartOptions = { - envSetupForPlayback, - sanitizerOptions: { - generalSanitizers, - }, -}; +function getSubdomainSanitizers(): FindReplaceSanitizer[] { + const uriDomainMap: Pick = { + ENDPOINT: "search.windows.net", + AZURE_OPENAI_ENDPOINT: "openai.azure.com", + }; + + const subdomains = Object.entries(uriDomainMap) + .map(([name, domain]) => { + const uri = env[name]; + const subdomain = uri?.match(String.raw`\/\/(.*?)\.` + domain)?.[1]; + + return subdomain; + }) + .filter(isDefined); + + const generalSanitizers = subdomains.map((target) => { + return { + target, + value: "subdomain", + }; + }); + + return generalSanitizers; +} export async function createClients( serviceVersion: string, recorder: Recorder, indexName: string, ): Promise> { + const recorderOptions = fixEnvironment(); await recorder.start(recorderOptions); indexName = recorder.variable("TEST_INDEX_NAME", indexName); - const endPoint: string = env.ENDPOINT ?? "https://endpoint"; - const credential = new AzureKeyCredential(testEnv.SEARCH_API_ADMIN_KEY); - const openAIEndpoint = env.OPENAI_ENDPOINT ?? "https://openai.endpoint"; - const openAIKey = new AzureKeyCredential(env.OPENAI_KEY ?? 
"openai-key"); + const endPoint: string = assertEnvironmentVariable("ENDPOINT"); + const credential = new AzureKeyCredential(assertEnvironmentVariable("SEARCH_API_ADMIN_KEY")); + const openAIEndpoint = assertEnvironmentVariable("AZURE_OPENAI_ENDPOINT"); + const openAIKey = new AzureKeyCredential(assertEnvironmentVariable("AZURE_OPENAI_KEY")); const searchClient = new SearchClient( endPoint, indexName, diff --git a/sdk/search/search-documents/test/public/utils/setup.ts b/sdk/search/search-documents/test/public/utils/setup.ts index bd7f5580a900..49d3266b0356 100644 --- a/sdk/search/search-documents/test/public/utils/setup.ts +++ b/sdk/search/search-documents/test/public/utils/setup.ts @@ -1,6 +1,9 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +import { env, isLiveMode, isPlaybackMode } from "@azure-tools/test-recorder"; +import { OpenAIClient } from "@azure/openai"; +import { assert } from "chai"; import { GeographyPoint, KnownAnalyzerNames, @@ -9,11 +12,9 @@ import { SearchIndexClient, SearchIndexerClient, } from "../../../src"; -import { Hotel } from "./interfaces"; import { delay } from "../../../src/serviceUtils"; -import { assert } from "chai"; -import { env, isLiveMode, isPlaybackMode } from "@azure-tools/test-recorder"; -import { OpenAIClient } from "@azure/openai"; +import { COMPRESSION_DISABLED } from "../../compressionDisabled"; +import { Hotel } from "./interfaces"; export const WAIT_TIME = isPlaybackMode() ? 
0 : 4000; @@ -23,6 +24,12 @@ export async function createIndex( name: string, serviceVersion: string, ): Promise { + const algorithmConfigurationName = "algorithm-configuration-name"; + const vectorizerName = "vectorizer-name"; + const vectorSearchProfileName = "profile-name"; + const compressedVectorSearchProfileName = "compressed-profile-name"; + const compressionConfigurationName = "compression-configuration-name"; + const hotelIndex: SearchIndex = { name, fields: [ @@ -201,6 +208,23 @@ export async function createIndex( }, ], }, + { + type: "Collection(Edm.Single)", + name: "vectorDescription", + searchable: true, + vectorSearchDimensions: 1536, + hidden: true, + vectorSearchProfileName, + }, + { + type: "Collection(Edm.Half)", + name: "compressedVectorDescription", + searchable: true, + hidden: true, + vectorSearchDimensions: 1536, + vectorSearchProfileName: compressedVectorSearchProfileName, + stored: false, + }, ], suggesters: [ { @@ -230,57 +254,77 @@ export async function createIndex( // for browser tests allowedOrigins: ["*"], }, - }; - - if (serviceVersion.includes("Preview")) { - const algorithm = "algorithm-configuration"; - const vectorizer = "vectorizer"; - const profile = "profile"; - - hotelIndex.fields.push({ - type: "Collection(Edm.Single)", - name: "vectorDescription", - searchable: true, - vectorSearchDimensions: 1536, - hidden: true, - vectorSearchProfile: profile, - }); - - hotelIndex.vectorSearch = { + vectorSearch: { algorithms: [ { - name: algorithm, - kind: "exhaustiveKnn", + name: algorithmConfigurationName, + kind: "hnsw", parameters: { metric: "dotProduct", }, }, ], - vectorizers: [ + vectorizers: serviceVersion.includes("Preview") + ? 
[ + { + kind: "azureOpenAI", + name: vectorizerName, + azureOpenAIParameters: { + apiKey: env.AZURE_OPENAI_KEY, + deploymentId: env.AZURE_OPENAI_DEPLOYMENT_NAME, + resourceUri: env.AZURE_OPENAI_ENDPOINT, + }, + }, + ] + : undefined, + compressions: [ { - kind: "azureOpenAI", - name: vectorizer, - azureOpenAIParameters: { - apiKey: env.OPENAI_KEY, - deploymentId: env.OPENAI_DEPLOYMENT_NAME, - resourceUri: env.OPENAI_ENDPOINT, - }, + name: compressionConfigurationName, + kind: "scalarQuantization", + parameters: { quantizedDataType: "int8" }, + rerankWithOriginalVectors: true, }, ], - profiles: [{ name: profile, vectorizer, algorithm }], - }; - hotelIndex.semanticSettings = { + profiles: [ + { + name: vectorSearchProfileName, + vectorizer: serviceVersion.includes("Preview") ? vectorizerName : undefined, + algorithmConfigurationName, + }, + { + name: compressedVectorSearchProfileName, + vectorizer: serviceVersion.includes("Preview") ? vectorizerName : undefined, + algorithmConfigurationName, + compressionConfigurationName, + }, + ], + }, + semanticSearch: { configurations: [ { name: "semantic-configuration-name", prioritizedFields: { titleField: { name: "hotelName" }, - prioritizedContentFields: [{ name: "description" }], - prioritizedKeywordsFields: [{ name: "tags" }], + contentFields: [{ name: "description" }], + keywordsFields: [{ name: "tags" }], }, }, ], - }; + }, + }; + + // This feature isn't publically available yet + if (COMPRESSION_DISABLED) { + hotelIndex.fields = hotelIndex.fields.filter( + (field) => field.name !== "compressedVectorDescription", + ); + const vs = hotelIndex.vectorSearch; + if (vs) { + delete vs.compressions; + vs.profiles = vs.profiles?.filter( + (profile) => profile.name !== compressedVectorSearchProfileName, + ); + } } await client.createIndex(hotelIndex); @@ -290,7 +334,6 @@ export async function createIndex( export async function populateIndex( client: SearchClient, openAIClient: OpenAIClient, - serviceVersion: string, ): Promise { // 
test data from https://github.com/Azure/azure-sdk-for-net/blob/master/sdk/search/Azure.Search.Documents/tests/Utilities/SearchResources.Data.cs const testDocuments: Hotel[] = [ @@ -495,7 +538,7 @@ export async function populateIndex( }, ]; - if (serviceVersion.includes("Preview") && !isLiveMode()) { + if (!isLiveMode()) { await addVectorDescriptions(testDocuments, openAIClient); } @@ -514,29 +557,22 @@ async function addVectorDescriptions( documents: Hotel[], openAIClient: OpenAIClient, ): Promise { - const deploymentName = process.env.OPENAI_DEPLOYMENT_NAME ?? "deployment-name"; - - const descriptionMap: Map = documents.reduce((map, document, i) => { - map.set(i, document); - return map; - }, new Map()); + const deploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME ?? "deployment-name"; const descriptions = documents .filter(({ description }) => description) .map(({ description }) => description!); - // OpenAI only supports one description at a time at the moment - const embeddingsArray = await Promise.all( - descriptions.map((description) => openAIClient.getEmbeddings(deploymentName, [description])), - ); + const embeddingsArray = await openAIClient.getEmbeddings(deploymentName, descriptions); - embeddingsArray.forEach((embeddings, i) => - embeddings.data.forEach((embeddingItem) => { - const { embedding, index: j } = embeddingItem; - const document = descriptionMap.get(i + j)!; - document.vectorDescription = embedding; - }), - ); + embeddingsArray.data.forEach((embeddingItem) => { + const { embedding, index } = embeddingItem; + const document = documents[index]; + document.vectorDescription = embedding; + if (!COMPRESSION_DISABLED) { + document.compressedVectorDescription = embedding; + } + }); } // eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters diff --git a/sdk/search/search-documents/tsconfig.json b/sdk/search/search-documents/tsconfig.json index 2249af41759a..5b2c1fce7aa9 100644 --- a/sdk/search/search-documents/tsconfig.json 
+++ b/sdk/search/search-documents/tsconfig.json @@ -1,11 +1,11 @@ { "extends": "../../../tsconfig.package", "compilerOptions": { - "outDir": "./dist-esm", "declarationDir": "./types", + "outDir": "./dist-esm", "paths": { "@azure/search-documents": ["./src/index"] } }, - "include": ["src/**/*.ts", "test/**/*.ts", "samples-dev/**/*.ts"] + "include": ["samples-dev/**/*.ts", "src/**/*.ts", "test/**/*.ts"] }