From b6c5369f48e9e69b0a7d7c8a3056ee7704cb2e90 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Wed, 15 May 2024 05:59:32 +0000 Subject: [PATCH] CodeGen from PR 29107 in Azure/azure-rest-api-specs Merge f0ed73bcb0d7a61a58568075801c4422fc660735 into 9061674ac55af0ec23e12e4f9f505010ab428935 --- .../Azure.AI.Vision.Face.sln | 56 + sdk/vision/Azure.AI.Vision.Face/CHANGELOG.md | 11 + .../Directory.Build.props | 6 + sdk/vision/Azure.AI.Vision.Face/README.md | 86 + .../src/Azure.AI.Vision.Face.csproj | 20 + .../AIVisionFaceClientBuilderExtensions.cs | 118 + .../src/Generated/AIVisionFaceModelFactory.cs | 721 ++ .../Generated/AccessoryItem.Serialization.cs | 143 + .../src/Generated/AccessoryItem.cs | 78 + .../src/Generated/AccessoryType.cs | 54 + ...aceListFaceFromUrlRequest.Serialization.cs | 135 + .../AddFaceListFaceFromUrlRequest.cs | 75 + .../Generated/AddFaceResult.Serialization.cs | 135 + .../src/Generated/AddFaceResult.cs | 72 + ...aceListFaceFromUrlRequest.Serialization.cs | 135 + .../AddLargeFaceListFaceFromUrlRequest.cs | 75 + ...pPersonFaceFromUrlRequest.Serialization.cs | 135 + ...argePersonGroupPersonFaceFromUrlRequest.cs | 75 + ...dPersonFaceFromUrlRequest.Serialization.cs | 135 + .../Generated/AddPersonFaceFromUrlRequest.cs | 75 + ...pPersonFaceFromUrlRequest.Serialization.cs | 135 + .../AddPersonGroupPersonFaceFromUrlRequest.cs | 75 + ...AuditLivenessResponseInfo.Serialization.cs | 151 + .../Generated/AuditLivenessResponseInfo.cs | 87 + .../AuditRequestInfo.Serialization.cs | 183 + .../src/Generated/AuditRequestInfo.cs | 97 + .../AzureAIVisionFaceClientOptions.cs | 37 + .../src/Generated/BlurLevel.cs | 54 + .../Generated/BlurProperties.Serialization.cs | 143 + .../src/Generated/BlurProperties.cs | 78 + ...DynamicPersonGroupRequest.Serialization.cs | 146 + .../CreateDynamicPersonGroupRequest.cs | 79 + ...sonGroupWithPersonRequest.Serialization.cs | 164 + ...eateDynamicPersonGroupWithPersonRequest.cs | 87 + .../CreateFaceListRequest.Serialization.cs | 161 + 
.../src/Generated/CreateFaceListRequest.cs | 83 + ...reateLargeFaceListRequest.Serialization.cs | 161 + .../Generated/CreateLargeFaceListRequest.cs | 83 + ...ePersonGroupPersonRequest.Serialization.cs | 146 + .../CreateLargePersonGroupPersonRequest.cs | 79 + ...teLargePersonGroupRequest.Serialization.cs | 161 + .../CreateLargePersonGroupRequest.cs | 83 + ...ateLivenessSessionContent.Serialization.cs | 197 + .../Generated/CreateLivenessSessionContent.cs | 88 + ...eateLivenessSessionResult.Serialization.cs | 143 + .../Generated/CreateLivenessSessionResult.cs | 82 + ...sWithVerifySessionContent.Serialization.cs | 176 + .../CreateLivenessWithVerifySessionContent.cs | 83 + ...ssWithVerifySessionResult.Serialization.cs | 158 + .../CreateLivenessWithVerifySessionResult.cs | 86 + ...ePersonGroupPersonRequest.Serialization.cs | 146 + .../CreatePersonGroupPersonRequest.cs | 79 + .../CreatePersonGroupRequest.Serialization.cs | 161 + .../src/Generated/CreatePersonGroupRequest.cs | 83 + .../CreatePersonRequest.Serialization.cs | 146 + .../src/Generated/CreatePersonRequest.cs | 79 + .../CreatePersonResult.Serialization.cs | 135 + .../src/Generated/CreatePersonResult.cs | 72 + .../Docs/FaceAdministrationClient.xml | 6027 +++++++++ .../src/Generated/Docs/FaceClient.xml | 1481 +++ .../src/Generated/Docs/FaceSessionClient.xml | 1093 ++ .../DynamicPersonGroup.Serialization.cs | 157 + .../src/Generated/DynamicPersonGroup.cs | 83 + .../src/Generated/ExposureLevel.cs | 54 + .../ExposureProperties.Serialization.cs | 143 + .../src/Generated/ExposureProperties.cs | 78 + .../src/Generated/FaceAdministrationClient.cs | 10261 ++++++++++++++++ .../src/Generated/FaceAttributeType.cs | 84 + .../Generated/FaceAttributes.Serialization.cs | 346 + .../src/Generated/FaceAttributes.cs | 114 + .../src/Generated/FaceClient.cs | 2248 ++++ ...eCollectionTrainingResult.Serialization.cs | 176 + .../Generated/FaceCollectionTrainingResult.cs | 94 + .../src/Generated/FaceDetectionModel.cs | 54 + 
.../FaceDetectionResult.Serialization.cs | 201 + .../src/Generated/FaceDetectionResult.cs | 91 + .../FaceFindSimilarResult.Serialization.cs | 165 + .../src/Generated/FaceFindSimilarResult.cs | 80 + .../FaceGroupingResult.Serialization.cs | 185 + .../src/Generated/FaceGroupingResult.cs | 83 + ...ceIdentificationCandidate.Serialization.cs | 143 + .../Generated/FaceIdentificationCandidate.cs | 78 + .../FaceIdentificationResult.Serialization.cs | 153 + .../src/Generated/FaceIdentificationResult.cs | 82 + .../src/Generated/FaceImageType.cs | 54 + .../Generated/FaceLandmarks.Serialization.cs | 371 + .../src/Generated/FaceLandmarks.cs | 257 + .../src/Generated/FaceList.Serialization.cs | 203 + .../src/Generated/FaceList.cs | 92 + .../Generated/FaceListFace.Serialization.cs | 149 + .../src/Generated/FaceListFace.cs | 69 + .../Generated/FaceListItem.Serialization.cs | 169 + .../src/Generated/FaceListItem.cs | 90 + .../src/Generated/FaceLivenessDecision.cs | 54 + .../src/Generated/FaceOperationStatus.cs | 57 + .../src/Generated/FaceRecognitionModel.cs | 57 + .../Generated/FaceRectangle.Serialization.cs | 159 + .../src/Generated/FaceRectangle.cs | 90 + .../src/Generated/FaceSessionClient.cs | 1628 +++ .../src/Generated/FaceSessionStatus.cs | 54 + .../FaceVerificationResult.Serialization.cs | 143 + .../src/Generated/FaceVerificationResult.cs | 78 + .../src/Generated/FacialHair.Serialization.cs | 151 + .../src/Generated/FacialHair.cs | 84 + ...imilarFromFaceListRequest.Serialization.cs | 173 + .../FindSimilarFromFaceListRequest.cs | 89 + ...rFromLargeFaceListRequest.Serialization.cs | 173 + .../FindSimilarFromLargeFaceListRequest.cs | 89 + .../src/Generated/FindSimilarMatchMode.cs | 51 + .../FindSimilarRequest.Serialization.cs | 183 + .../src/Generated/FindSimilarRequest.cs | 90 + .../src/Generated/GlassesType.cs | 57 + .../Generated/GroupRequest.Serialization.cs | 145 + .../src/Generated/GroupRequest.cs | 76 + .../src/Generated/HairColor.Serialization.cs | 143 + 
.../src/Generated/HairColor.cs | 78 + .../src/Generated/HairColorType.cs | 69 + .../Generated/HairProperties.Serialization.cs | 161 + .../src/Generated/HairProperties.cs | 88 + .../src/Generated/HeadPose.Serialization.cs | 151 + .../src/Generated/HeadPose.cs | 84 + ...DynamicPersonGroupRequest.Serialization.cs | 183 + .../IdentifyFromDynamicPersonGroupRequest.cs | 91 + ...omLargePersonGroupRequest.Serialization.cs | 183 + .../IdentifyFromLargePersonGroupRequest.cs | 91 + ...romPersonDirectoryRequest.Serialization.cs | 193 + .../IdentifyFromPersonDirectoryRequest.cs | 91 + ...ifyFromPersonGroupRequest.Serialization.cs | 183 + .../IdentifyFromPersonGroupRequest.cs | 91 + .../src/Generated/Internal/Argument.cs | 129 + .../Internal/ChangeTrackingDictionary.cs | 167 + .../Generated/Internal/ChangeTrackingList.cs | 153 + .../Internal/ModelSerializationExtensions.cs | 398 + .../MultipartFormDataRequestContent.cs | 203 + .../src/Generated/Internal/Optional.cs | 51 + .../Internal/Utf8JsonRequestContent.cs | 55 + .../LandmarkCoordinate.Serialization.cs | 143 + .../src/Generated/LandmarkCoordinate.cs | 78 + .../Generated/LargeFaceList.Serialization.cs | 172 + .../src/Generated/LargeFaceList.cs | 87 + .../LargeFaceListFace.Serialization.cs | 149 + .../src/Generated/LargeFaceListFace.cs | 69 + .../LargePersonGroup.Serialization.cs | 172 + .../src/Generated/LargePersonGroup.cs | 87 + .../LargePersonGroupPerson.Serialization.cs | 182 + .../src/Generated/LargePersonGroupPerson.cs | 88 + ...argePersonGroupPersonFace.Serialization.cs | 149 + .../Generated/LargePersonGroupPersonFace.cs | 69 + .../Generated/ListFaceResult.Serialization.cs | 153 + .../src/Generated/ListFaceResult.cs | 82 + .../ListGroupReferenceResult.Serialization.cs | 145 + .../src/Generated/ListGroupReferenceResult.cs | 76 + .../ListPersonResult.Serialization.cs | 145 + .../src/Generated/ListPersonResult.cs | 76 + .../src/Generated/LivenessModel.cs | 57 + .../src/Generated/LivenessOperationMode.cs | 48 + 
.../LivenessOutputsTarget.Serialization.cs | 159 + .../src/Generated/LivenessOutputsTarget.cs | 94 + .../LivenessResponseBody.Serialization.cs | 181 + .../src/Generated/LivenessResponseBody.cs | 77 + .../LivenessSession.Serialization.cs | 227 + .../src/Generated/LivenessSession.cs | 104 + ...LivenessSessionAuditEntry.Serialization.cs | 200 + .../Generated/LivenessSessionAuditEntry.cs | 122 + .../LivenessSessionItem.Serialization.cs | 202 + .../src/Generated/LivenessSessionItem.cs | 94 + .../LivenessWithVerifyImage.Serialization.cs | 143 + .../src/Generated/LivenessWithVerifyImage.cs | 81 + ...LivenessWithVerifyOutputs.Serialization.cs | 151 + .../Generated/LivenessWithVerifyOutputs.cs | 87 + ...LivenessWithVerifySession.Serialization.cs | 227 + .../Generated/LivenessWithVerifySession.cs | 104 + .../Generated/MaskProperties.Serialization.cs | 143 + .../src/Generated/MaskProperties.cs | 78 + .../src/Generated/MaskType.cs | 57 + .../src/Generated/NoiseLevel.cs | 54 + .../NoiseProperties.Serialization.cs | 143 + .../src/Generated/NoiseProperties.cs | 78 + .../OcclusionProperties.Serialization.cs | 151 + .../src/Generated/OcclusionProperties.cs | 84 + .../PersonDirectoryFace.Serialization.cs | 149 + .../src/Generated/PersonDirectoryFace.cs | 69 + .../PersonDirectoryPerson.Serialization.cs | 157 + .../src/Generated/PersonDirectoryPerson.cs | 83 + .../Generated/PersonGroup.Serialization.cs | 172 + .../src/Generated/PersonGroup.cs | 87 + .../PersonGroupPerson.Serialization.cs | 182 + .../src/Generated/PersonGroupPerson.cs | 88 + .../PersonGroupPersonFace.Serialization.cs | 149 + .../src/Generated/PersonGroupPersonFace.cs | 69 + .../src/Generated/QualityForRecognition.cs | 54 + .../VerifyFaceToFaceRequest.Serialization.cs | 143 + .../src/Generated/VerifyFaceToFaceRequest.cs | 78 + ...omLargePersonGroupRequest.Serialization.cs | 151 + .../VerifyFromLargePersonGroupRequest.cs | 87 + ...romPersonDirectoryRequest.Serialization.cs | 143 + .../VerifyFromPersonDirectoryRequest.cs 
| 78 + ...ifyFromPersonGroupRequest.Serialization.cs | 151 + .../Generated/VerifyFromPersonGroupRequest.cs | 87 + .../src/Properties/AssemblyInfo.cs | 12 + .../tests/Azure.AI.Vision.Face.Tests.csproj | 20 + .../Samples_FaceAdministrationClient.cs | 6774 ++++++++++ .../Generated/Samples/Samples_FaceClient.cs | 1640 +++ .../Samples/Samples_FaceSessionClient.cs | 1204 ++ .../Azure.AI.Vision.Face/tsp-location.yaml | 5 + 205 files changed, 55752 insertions(+) create mode 100644 sdk/vision/Azure.AI.Vision.Face/Azure.AI.Vision.Face.sln create mode 100644 sdk/vision/Azure.AI.Vision.Face/CHANGELOG.md create mode 100644 sdk/vision/Azure.AI.Vision.Face/Directory.Build.props create mode 100644 sdk/vision/Azure.AI.Vision.Face/README.md create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Azure.AI.Vision.Face.csproj create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryType.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.cs create mode 100644 
sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurLevel.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.Serialization.cs create mode 100644 
sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.cs create mode 
100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceAdministrationClient.xml create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureLevel.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributeType.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceClient.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionModel.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.Serialization.cs create mode 100644 
sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceImageType.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLivenessDecision.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceRecognitionModel.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceRectangle.Serialization.cs 
create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceRectangle.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionStatus.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarMatchMode.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/GlassesType.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColorType.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.Serialization.cs create mode 100644 
sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Argument.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingDictionary.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingList.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ModelSerializationExtensions.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/MultipartFormDataRequestContent.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Optional.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Utf8JsonRequestContent.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.cs create mode 
100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.Serialization.cs create mode 100644 
sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskType.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseLevel.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.cs create mode 100644 
sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/QualityForRecognition.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.Serialization.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/src/Properties/AssemblyInfo.cs create mode 100644 
sdk/vision/Azure.AI.Vision.Face/tests/Azure.AI.Vision.Face.Tests.csproj create mode 100644 sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceAdministrationClient.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs create mode 100644 sdk/vision/Azure.AI.Vision.Face/tsp-location.yaml diff --git a/sdk/vision/Azure.AI.Vision.Face/Azure.AI.Vision.Face.sln b/sdk/vision/Azure.AI.Vision.Face/Azure.AI.Vision.Face.sln new file mode 100644 index 0000000000000..19a728a2d4d19 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/Azure.AI.Vision.Face.sln @@ -0,0 +1,56 @@ +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.29709.97 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.Core.TestFramework", "..\..\core\Azure.Core.TestFramework\src\Azure.Core.TestFramework.csproj", "{ECC730C1-4AEA-420C-916A-66B19B79E4DC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.AI.Vision.Face", "src\Azure.AI.Vision.Face.csproj", "{28FF4005-4467-4E36-92E7-DEA27DEB1519}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.AI.Vision.Face.Tests", "tests\Azure.AI.Vision.Face.Tests.csproj", "{1F1CD1D4-9932-4B73-99D8-C252A67D4B46}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B0C276D1-2930-4887-B29A-D1A33E7009A2}.Release|Any CPU.Build.0 = Release|Any 
CPU + {8E9A77AC-792A-4432-8320-ACFD46730401}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8E9A77AC-792A-4432-8320-ACFD46730401}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8E9A77AC-792A-4432-8320-ACFD46730401}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8E9A77AC-792A-4432-8320-ACFD46730401}.Release|Any CPU.Build.0 = Release|Any CPU + {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ECC730C1-4AEA-420C-916A-66B19B79E4DC}.Release|Any CPU.Build.0 = Release|Any CPU + {A4241C1F-A53D-474C-9E4E-075054407E74}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A4241C1F-A53D-474C-9E4E-075054407E74}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A4241C1F-A53D-474C-9E4E-075054407E74}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A4241C1F-A53D-474C-9E4E-075054407E74}.Release|Any CPU.Build.0 = Release|Any CPU + {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FA8BD3F1-8616-47B6-974C-7576CDF4717E}.Release|Any CPU.Build.0 = Release|Any CPU + {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {85677AD3-C214-42FA-AE6E-49B956CAC8DC}.Release|Any CPU.Build.0 = Release|Any CPU + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Debug|Any CPU.Build.0 = Debug|Any CPU + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Release|Any CPU.ActiveCfg = Release|Any CPU + {28FF4005-4467-4E36-92E7-DEA27DEB1519}.Release|Any CPU.Build.0 = Release|Any CPU + 
{1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1F1CD1D4-9932-4B73-99D8-C252A67D4B46}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {A97F4B90-2591-4689-B1F8-5F21FE6D6CAE} + EndGlobalSection +EndGlobal diff --git a/sdk/vision/Azure.AI.Vision.Face/CHANGELOG.md b/sdk/vision/Azure.AI.Vision.Face/CHANGELOG.md new file mode 100644 index 0000000000000..13dd08af78abe --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/CHANGELOG.md @@ -0,0 +1,11 @@ +# Release History + +## 1.0.0-beta.1 (Unreleased) + +### Features Added + +### Breaking Changes + +### Bugs Fixed + +### Other Changes diff --git a/sdk/vision/Azure.AI.Vision.Face/Directory.Build.props b/sdk/vision/Azure.AI.Vision.Face/Directory.Build.props new file mode 100644 index 0000000000000..63bd836ad44b7 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/Directory.Build.props @@ -0,0 +1,6 @@ + + + + diff --git a/sdk/vision/Azure.AI.Vision.Face/README.md b/sdk/vision/Azure.AI.Vision.Face/README.md new file mode 100644 index 0000000000000..c6a120aaf0f1a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/README.md @@ -0,0 +1,86 @@ +# Azure.AI.Vision.Face client library for .NET + +Azure.AI.Vision.Face is a managed service that helps developers get secret simply and securely. 
+ +Use the client library for to: + +* [Get secret](https://docs.microsoft.com/azure) + +[Source code][source_root] | [Package (NuGet)][package] | [API reference documentation][reference_docs] | [Product documentation][azconfig_docs] | [Samples][source_samples] + + [Source code](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/vision/Azure.AI.Vision.Face/src) | [Package (NuGet)](https://www.nuget.org/packages) | [API reference documentation](https://azure.github.io/azure-sdk-for-net) | [Product documentation](https://docs.microsoft.com/azure) + +## Getting started + +This section should include everything a developer needs to do to install and create their first client connection *very quickly*. + +### Install the package + +First, provide instruction for obtaining and installing the package or library. This section might include only a single line of code, like `dotnet add package package-name`, but should enable a developer to successfully install the package from NuGet, npm, or even cloning a GitHub repository. + +Install the client library for .NET with [NuGet](https://www.nuget.org/ ): + +```dotnetcli +dotnet add package Azure.AI.Vision.Face --prerelease +``` + +### Prerequisites + +Include a section after the install command that details any requirements that must be satisfied before a developer can [authenticate](#authenticate-the-client) and test all of the snippets in the [Examples](#examples) section. For example, for Cosmos DB: + +> You must have an [Azure subscription](https://azure.microsoft.com/free/dotnet/) and [Cosmos DB account](https://docs.microsoft.com/azure/cosmos-db/account-overview) (SQL API). In order to take advantage of the C# 8.0 syntax, it is recommended that you compile using the [.NET Core SDK](https://dotnet.microsoft.com/download) 3.0 or higher with a [language version](https://docs.microsoft.com/dotnet/csharp/language-reference/configure-language-version#override-a-default) of `latest`. 
It is also possible to compile with the .NET Core SDK 2.1.x using a language version of `preview`. + +### Authenticate the client + +If your library requires authentication for use, such as for Azure services, include instructions and example code needed for initializing and authenticating. + +For example, include details on obtaining an account key and endpoint URI, setting environment variables for each, and initializing the client object. + +## Key concepts + +The *Key concepts* section should describe the functionality of the main classes. Point out the most important and useful classes in the package (with links to their reference pages) and explain how those classes work together. Feel free to use bulleted lists, tables, code blocks, or even diagrams for clarity. + +Include the *Thread safety* and *Additional concepts* sections below at the end of your *Key concepts* section. You may remove or add links depending on what your library makes use of: + +### Thread safety + +We guarantee that all client instance methods are thread-safe and independent of each other ([guideline](https://azure.github.io/azure-sdk/dotnet_introduction.html#dotnet-service-methods-thread-safety)). This ensures that the recommendation of reusing client instances is always safe, even across threads. 
+ +### Additional concepts + +[Client options](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#configuring-service-clients-using-clientoptions) | +[Accessing the response](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#accessing-http-response-details-using-responset) | +[Long-running operations](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#consuming-long-running-operations-using-operationt) | +[Handling failures](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#reporting-errors-requestfailedexception) | +[Diagnostics](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/samples/Diagnostics.md) | +[Mocking](https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/README.md#mocking) | +[Client lifetime](https://devblogs.microsoft.com/azure-sdk/lifetime-management-and-thread-safety-guarantees-of-azure-sdk-net-clients/) + + +## Examples + +You can familiarize yourself with different APIs using [Samples](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/vision/Azure.AI.Vision.Face/samples). + +## Troubleshooting + +Describe common errors and exceptions, how to "unpack" them if necessary, and include guidance for graceful handling and recovery. + +Provide information to help developers avoid throttling or other service-enforced errors they might encounter. For example, provide guidance and examples for using retry or connection policies in the API. + +If the package or a related package supports it, include tips for logging or enabling instrumentation to help them debug their code. + +## Next steps + +* Provide a link to additional code examples, ideally to those sitting alongside the README in the package's `/samples` directory. +* If appropriate, point users to other packages that might be useful. 
+* If you think there's a good chance that developers might stumble across your package in error (because they're searching for specific functionality and mistakenly think the package provides that functionality), point them to the packages they might be looking for. + +## Contributing + +This is a template, but your SDK readme should include details on how to contribute code to the repo/package. + + +[style-guide-msft]: https://docs.microsoft.com/style-guide/capitalization +[style-guide-cloud]: https://aka.ms/azsdk/cloud-style-guide + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-net/sdk/vision/Azure.AI.Vision.Face/README.png) diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Azure.AI.Vision.Face.csproj b/sdk/vision/Azure.AI.Vision.Face/src/Azure.AI.Vision.Face.csproj new file mode 100644 index 0000000000000..860725ddbc6b6 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Azure.AI.Vision.Face.csproj @@ -0,0 +1,20 @@ + + + This is the Azure.AI.Vision.Face client library for developing .NET applications with rich experience. + Azure SDK Code Generation Azure.AI.Vision.Face for Azure Data Plane + 1.0.0-beta.1 + Azure.AI.Vision.Face + $(RequiredTargetFrameworks) + true + + + + + + + + + + + + diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs new file mode 100644 index 0000000000000..083db01dc516c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using Azure; +using Azure.AI.Vision.Face; +using Azure.Core.Extensions; + +namespace Microsoft.Extensions.Azure +{ + /// Extension methods to add , , to client builder. 
+ public static partial class AIVisionFaceClientBuilderExtensions + { + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + public static IAzureClientBuilder AddFaceClient(this TBuilder builder, Uri endpoint, AzureKeyCredential credential) + where TBuilder : IAzureClientFactoryBuilder + { + return builder.RegisterClientFactory((options) => new FaceClient(endpoint, credential, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + public static IAzureClientBuilder AddFaceClient(this TBuilder builder, Uri endpoint) + where TBuilder : IAzureClientFactoryBuilderWithCredential + { + return builder.RegisterClientFactory((options, cred) => new FaceClient(endpoint, cred, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, Uri endpoint, AzureKeyCredential credential) + where TBuilder : IAzureClientFactoryBuilder + { + return builder.RegisterClientFactory((options) => new FaceAdministrationClient(endpoint, credential, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). 
+ /// + public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, Uri endpoint) + where TBuilder : IAzureClientFactoryBuilderWithCredential + { + return builder.RegisterClientFactory((options, cred) => new FaceAdministrationClient(endpoint, cred, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + public static IAzureClientBuilder AddFaceSessionClient(this TBuilder builder, Uri endpoint, AzureKeyCredential credential) + where TBuilder : IAzureClientFactoryBuilder + { + return builder.RegisterClientFactory((options) => new FaceSessionClient(endpoint, credential, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + public static IAzureClientBuilder AddFaceSessionClient(this TBuilder builder, Uri endpoint) + where TBuilder : IAzureClientFactoryBuilderWithCredential + { + return builder.RegisterClientFactory((options, cred) => new FaceSessionClient(endpoint, cred, options)); + } + + /// Registers a instance. + /// The builder to register with. + /// The configuration values. + public static IAzureClientBuilder AddFaceClient(this TBuilder builder, TConfiguration configuration) + where TBuilder : IAzureClientFactoryBuilderWithConfiguration + { + return builder.RegisterClientFactory(configuration); + } + /// Registers a instance. + /// The builder to register with. + /// The configuration values. 
+ public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, TConfiguration configuration) + where TBuilder : IAzureClientFactoryBuilderWithConfiguration + { + return builder.RegisterClientFactory(configuration); + } + /// Registers a instance. + /// The builder to register with. + /// The configuration values. + public static IAzureClientBuilder AddFaceSessionClient(this TBuilder builder, TConfiguration configuration) + where TBuilder : IAzureClientFactoryBuilderWithConfiguration + { + return builder.RegisterClientFactory(configuration); + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs new file mode 100644 index 0000000000000..00a5c28b53883 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs @@ -0,0 +1,721 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Model factory for models. + public static partial class AIVisionFaceModelFactory + { + /// Initializes a new instance of . + /// Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true. + /// The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true. + /// A rectangle area for the face location on image. + /// An array of 27-point face landmarks pointing to the important positions of face components. To return this, it requires 'returnFaceLandmarks' parameter to be true. + /// Face attributes for detected face. + /// A new instance for mocking. + public static FaceDetectionResult FaceDetectionResult(Guid? faceId = null, FaceRecognitionModel? 
recognitionModel = null, FaceRectangle faceRectangle = null, FaceLandmarks faceLandmarks = null, FaceAttributes faceAttributes = null) + { + return new FaceDetectionResult( + faceId, + recognitionModel, + faceRectangle, + faceLandmarks, + faceAttributes, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The distance from the top edge if the image to the top edge of the rectangle, in pixels. + /// The distance from the left edge if the image to the left edge of the rectangle, in pixels. + /// The width of the rectangle, in pixels. + /// The height of the rectangle, in pixels. + /// A new instance for mocking. + public static FaceRectangle FaceRectangle(int top = default, int left = default, int width = default, int height = default) + { + return new FaceRectangle(top, left, width, height, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The coordinates of the left eye pupil. + /// The coordinates of the right eye pupil. + /// The coordinates of the nose tip. + /// The coordinates of the mouth left. + /// The coordinates of the mouth right. + /// The coordinates of the left eyebrow outer. + /// The coordinates of the left eyebrow inner. + /// The coordinates of the left eye outer. + /// The coordinates of the left eye top. + /// The coordinates of the left eye bottom. + /// The coordinates of the left eye inner. + /// The coordinates of the right eyebrow inner. + /// The coordinates of the right eyebrow outer. + /// The coordinates of the right eye inner. + /// The coordinates of the right eye top. + /// The coordinates of the right eye bottom. + /// The coordinates of the right eye outer. + /// The coordinates of the nose root left. + /// The coordinates of the nose root right. + /// The coordinates of the nose left alar top. + /// The coordinates of the nose right alar top. + /// The coordinates of the nose left alar out tip. + /// The coordinates of the nose right alar out tip. 
+ /// The coordinates of the upper lip top. + /// The coordinates of the upper lip bottom. + /// The coordinates of the under lip top. + /// The coordinates of the under lip bottom. + /// A new instance for mocking. + public static FaceLandmarks FaceLandmarks(LandmarkCoordinate pupilLeft = null, LandmarkCoordinate pupilRight = null, LandmarkCoordinate noseTip = null, LandmarkCoordinate mouthLeft = null, LandmarkCoordinate mouthRight = null, LandmarkCoordinate eyebrowLeftOuter = null, LandmarkCoordinate eyebrowLeftInner = null, LandmarkCoordinate eyeLeftOuter = null, LandmarkCoordinate eyeLeftTop = null, LandmarkCoordinate eyeLeftBottom = null, LandmarkCoordinate eyeLeftInner = null, LandmarkCoordinate eyebrowRightInner = null, LandmarkCoordinate eyebrowRightOuter = null, LandmarkCoordinate eyeRightInner = null, LandmarkCoordinate eyeRightTop = null, LandmarkCoordinate eyeRightBottom = null, LandmarkCoordinate eyeRightOuter = null, LandmarkCoordinate noseRootLeft = null, LandmarkCoordinate noseRootRight = null, LandmarkCoordinate noseLeftAlarTop = null, LandmarkCoordinate noseRightAlarTop = null, LandmarkCoordinate noseLeftAlarOutTip = null, LandmarkCoordinate noseRightAlarOutTip = null, LandmarkCoordinate upperLipTop = null, LandmarkCoordinate upperLipBottom = null, LandmarkCoordinate underLipTop = null, LandmarkCoordinate underLipBottom = null) + { + return new FaceLandmarks( + pupilLeft, + pupilRight, + noseTip, + mouthLeft, + mouthRight, + eyebrowLeftOuter, + eyebrowLeftInner, + eyeLeftOuter, + eyeLeftTop, + eyeLeftBottom, + eyeLeftInner, + eyebrowRightInner, + eyebrowRightOuter, + eyeRightInner, + eyeRightTop, + eyeRightBottom, + eyeRightOuter, + noseRootLeft, + noseRootRight, + noseLeftAlarTop, + noseRightAlarTop, + noseLeftAlarOutTip, + noseRightAlarOutTip, + upperLipTop, + upperLipBottom, + underLipTop, + underLipBottom, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The horizontal component, in pixels. 
+ /// The vertical component, in pixels. + /// A new instance for mocking. + public static LandmarkCoordinate LandmarkCoordinate(float x = default, float y = default) + { + return new LandmarkCoordinate(x, y, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Age in years. + /// Smile intensity, a number between [0,1]. + /// Properties describing facial hair attributes. + /// Glasses type if any of the face. + /// 3-D roll/yaw/pitch angles for face direction. + /// Properties describing hair attributes. + /// Properties describing occlusions on a given face. + /// Properties describing any accessories on a given face. + /// Properties describing any presence of blur within the image. + /// Properties describing exposure level of the image. + /// Properties describing noise level of the image. + /// Properties describing the presence of a mask on a given face. + /// Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. + /// A new instance for mocking. + public static FaceAttributes FaceAttributes(float? age = null, float? smile = null, FacialHair facialHair = null, GlassesType? glasses = null, HeadPose headPose = null, HairProperties hair = null, OcclusionProperties occlusion = null, IEnumerable accessories = null, BlurProperties blur = null, ExposureProperties exposure = null, NoiseProperties noise = null, MaskProperties mask = null, QualityForRecognition? qualityForRecognition = null) + { + accessories ??= new List(); + + return new FaceAttributes( + age, + smile, + facialHair, + glasses, + headPose, + hair, + occlusion, + accessories?.ToList(), + blur, + exposure, + noise, + mask, + qualityForRecognition, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. 
+ /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + /// A new instance for mocking. + public static FacialHair FacialHair(float moustache = default, float beard = default, float sideburns = default) + { + return new FacialHair(moustache, beard, sideburns, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Value of angles. + /// Value of angles. + /// Value of angles. + /// A new instance for mocking. + public static HeadPose HeadPose(float pitch = default, float roll = default, float yaw = default) + { + return new HeadPose(pitch, roll, yaw, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// A number describing confidence level of whether the person is bald. + /// A boolean value describing whether the hair is visible in the image. + /// An array of candidate colors and confidence level in the presence of each. + /// A new instance for mocking. + public static HairProperties HairProperties(float bald = default, bool invisible = default, IEnumerable hairColor = null) + { + hairColor ??= new List(); + + return new HairProperties(bald, invisible, hairColor?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Name of the hair color. + /// Confidence level of the color. Range between [0,1]. + /// A new instance for mocking. + public static HairColor HairColor(HairColorType color = default, float confidence = default) + { + return new HairColor(color, confidence, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// A boolean value indicating whether forehead is occluded. + /// A boolean value indicating whether eyes are occluded. + /// A boolean value indicating whether the mouth is occluded. + /// A new instance for mocking. 
+ public static OcclusionProperties OcclusionProperties(bool foreheadOccluded = default, bool eyeOccluded = default, bool mouthOccluded = default) + { + return new OcclusionProperties(foreheadOccluded, eyeOccluded, mouthOccluded, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Type of the accessory. + /// Confidence level of the accessory type. Range between [0,1]. + /// A new instance for mocking. + public static AccessoryItem AccessoryItem(AccessoryType type = default, float confidence = default) + { + return new AccessoryItem(type, confidence, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// An enum value indicating level of blurriness. + /// A number indicating level of blurriness ranging from 0 to 1. + /// A new instance for mocking. + public static BlurProperties BlurProperties(BlurLevel blurLevel = default, float value = default) + { + return new BlurProperties(blurLevel, value, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// An enum value indicating level of exposure. + /// A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. + /// A new instance for mocking. + public static ExposureProperties ExposureProperties(ExposureLevel exposureLevel = default, float value = default) + { + return new ExposureProperties(exposureLevel, value, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// An enum value indicating level of noise. + /// A number indicating level of noise level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level. + /// A new instance for mocking. 
+ public static NoiseProperties NoiseProperties(NoiseLevel noiseLevel = default, float value = default) + { + return new NoiseProperties(noiseLevel, value, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// A boolean value indicating whether nose and mouth are covered. + /// Type of the mask. + /// A new instance for mocking. + public static MaskProperties MaskProperties(bool noseAndMouthCovered = default, MaskType type = default) + { + return new MaskProperties(noseAndMouthCovered, type, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + /// faceId of candidate face when find by faceIds. faceId is created by "Detect" and will expire 24 hours after the detection call. + /// persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire. + /// A new instance for mocking. + public static FaceFindSimilarResult FaceFindSimilarResult(float confidence = default, Guid? faceId = null, Guid? persistedFaceId = null) + { + return new FaceFindSimilarResult(confidence, faceId, persistedFaceId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// A new instance for mocking. + public static FaceIdentificationResult FaceIdentificationResult(Guid faceId = default, IEnumerable candidates = null) + { + candidates ??= new List(); + + return new FaceIdentificationResult(faceId, candidates?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// personId of candidate person. + /// Confidence value of the candidate. 
The higher confidence, the more similar. Range between [0,1]. + /// A new instance for mocking. + public static FaceIdentificationCandidate FaceIdentificationCandidate(Guid personId = default, float confidence = default) + { + return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// True if the two faces belong to the same person or the face belongs to the person, otherwise false. + /// A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data. + /// A new instance for mocking. + public static FaceVerificationResult FaceVerificationResult(bool isIdentical = default, float confidence = default) + { + return new FaceVerificationResult(isIdentical, confidence, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// A partition of the original faces based on face similarity. Groups are ranked by number of faces. + /// Face ids array of faces that cannot find any similar faces from original faces. + /// A new instance for mocking. + public static FaceGroupingResult FaceGroupingResult(IEnumerable> groups = null, IEnumerable messyGroup = null) + { + groups ??= new List>(); + messyGroup ??= new List(); + + return new FaceGroupingResult(groups?.ToList(), messyGroup?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. 
+ /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Face ids of registered faces in the face list. + /// A new instance for mocking. + public static FaceList FaceList(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string faceListId = null, IEnumerable persistedFaces = null) + { + persistedFaces ??= new List(); + + return new FaceList( + name, + userData, + recognitionModel, + faceListId, + persistedFaces?.ToList(), + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// A new instance for mocking. + public static FaceListFace FaceListFace(Guid persistedFaceId = default, string userData = null) + { + return new FaceListFace(persistedFaceId, userData, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// A new instance for mocking. + public static FaceListItem FaceListItem(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string faceListId = null) + { + return new FaceListItem(name, userData, recognitionModel, faceListId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + /// A new instance for mocking. 
+        public static AddFaceResult AddFaceResult(Guid persistedFaceId = default)
+        {
+            return new AddFaceResult(persistedFaceId, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LargeFaceList"/>. </summary>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <param name="recognitionModel"> Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. </param>
+        /// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
+        /// <returns> A new <see cref="Face.LargeFaceList"/> instance for mocking. </returns>
+        public static LargeFaceList LargeFaceList(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largeFaceListId = null)
+        {
+            return new LargeFaceList(name, userData, recognitionModel, largeFaceListId, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.FaceCollectionTrainingResult"/>. </summary>
+        /// <param name="status"> Training status of the container. </param>
+        /// <param name="createdDateTime"> A combined UTC date and time string that describes the created time of the person group, large person group or large face list. </param>
+        /// <param name="lastActionDateTime"> A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. </param>
+        /// <param name="lastSuccessfulTrainingDateTime"> A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. </param>
+        /// <param name="message"> Show failure message when training failed (omitted when training succeed). </param>
+        /// <returns> A new <see cref="Face.FaceCollectionTrainingResult"/> instance for mocking. </returns>
+        public static FaceCollectionTrainingResult FaceCollectionTrainingResult(FaceOperationStatus status = default, DateTimeOffset createdDateTime = default, DateTimeOffset lastActionDateTime = default, DateTimeOffset lastSuccessfulTrainingDateTime = default, string message = null)
+        {
+            return new FaceCollectionTrainingResult(
+                status,
+                createdDateTime,
+                lastActionDateTime,
+                lastSuccessfulTrainingDateTime,
+                message,
+                serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LargeFaceListFace"/>. </summary>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="userData"> User-provided data attached to the face. The length limit is 1K. </param>
+        /// <returns> A new <see cref="Face.LargeFaceListFace"/> instance for mocking. </returns>
+        public static LargeFaceListFace LargeFaceListFace(Guid persistedFaceId = default, string userData = null)
+        {
+            return new LargeFaceListFace(persistedFaceId, userData, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.PersonGroup"/>. </summary>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <param name="recognitionModel"> Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. </param>
+        /// <param name="personGroupId"> ID of the container. </param>
+        /// <returns> A new <see cref="Face.PersonGroup"/> instance for mocking. </returns>
+        public static PersonGroup PersonGroup(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string personGroupId = null)
+        {
+            return new PersonGroup(name, userData, recognitionModel, personGroupId, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.CreatePersonResult"/>. </summary>
+        /// <param name="personId"> Person ID of the person. </param>
+        /// <returns> A new <see cref="Face.CreatePersonResult"/> instance for mocking. </returns>
+        public static CreatePersonResult CreatePersonResult(Guid personId = default)
+        {
+            return new CreatePersonResult(personId, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.PersonGroupPerson"/>. </summary>
+        /// <param name="personId"> ID of the person. </param>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <param name="persistedFaceIds"> Face ids of registered faces in the person. </param>
+        /// <returns> A new <see cref="Face.PersonGroupPerson"/> instance for mocking. </returns>
+        public static PersonGroupPerson PersonGroupPerson(Guid personId = default, string name = null, string userData = null, IEnumerable<Guid> persistedFaceIds = null)
+        {
+            persistedFaceIds ??= new List<Guid>();
+
+            return new PersonGroupPerson(personId, name, userData, persistedFaceIds?.ToList(), serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.PersonGroupPersonFace"/>. </summary>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="userData"> User-provided data attached to the face. The length limit is 1K. </param>
+        /// <returns> A new <see cref="Face.PersonGroupPersonFace"/> instance for mocking. </returns>
+        public static PersonGroupPersonFace PersonGroupPersonFace(Guid persistedFaceId = default, string userData = null)
+        {
+            return new PersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LargePersonGroup"/>. </summary>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <param name="recognitionModel"> Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. </param>
+        /// <param name="largePersonGroupId"> ID of the container. </param>
+        /// <returns> A new <see cref="Face.LargePersonGroup"/> instance for mocking. </returns>
+        public static LargePersonGroup LargePersonGroup(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largePersonGroupId = null)
+        {
+            return new LargePersonGroup(name, userData, recognitionModel, largePersonGroupId, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LargePersonGroupPerson"/>. </summary>
+        /// <param name="personId"> ID of the person. </param>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <param name="persistedFaceIds"> Face ids of registered faces in the person. </param>
+        /// <returns> A new <see cref="Face.LargePersonGroupPerson"/> instance for mocking. </returns>
+        public static LargePersonGroupPerson LargePersonGroupPerson(Guid personId = default, string name = null, string userData = null, IEnumerable<Guid> persistedFaceIds = null)
+        {
+            persistedFaceIds ??= new List<Guid>();
+
+            return new LargePersonGroupPerson(personId, name, userData, persistedFaceIds?.ToList(), serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LargePersonGroupPersonFace"/>. </summary>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="userData"> User-provided data attached to the face. The length limit is 1K. </param>
+        /// <returns> A new <see cref="Face.LargePersonGroupPersonFace"/> instance for mocking. </returns>
+        public static LargePersonGroupPersonFace LargePersonGroupPersonFace(Guid persistedFaceId = default, string userData = null)
+        {
+            return new LargePersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.PersonDirectoryPerson"/>. </summary>
+        /// <param name="personId"> Person ID of the person. </param>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <returns> A new <see cref="Face.PersonDirectoryPerson"/> instance for mocking. </returns>
+        public static PersonDirectoryPerson PersonDirectoryPerson(Guid personId = default, string name = null, string userData = null)
+        {
+            return new PersonDirectoryPerson(personId, name, userData, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.ListGroupReferenceResult"/>. </summary>
+        /// <param name="dynamicPersonGroupIds"> Array of PersonDirectory DynamicPersonGroup ids. </param>
+        /// <returns> A new <see cref="Face.ListGroupReferenceResult"/> instance for mocking. </returns>
+        public static ListGroupReferenceResult ListGroupReferenceResult(IEnumerable<string> dynamicPersonGroupIds = null)
+        {
+            dynamicPersonGroupIds ??= new List<string>();
+
+            return new ListGroupReferenceResult(dynamicPersonGroupIds?.ToList(), serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.PersonDirectoryFace"/>. </summary>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="userData"> User-provided data attached to the face. The length limit is 1K. </param>
+        /// <returns> A new <see cref="Face.PersonDirectoryFace"/> instance for mocking. </returns>
+        public static PersonDirectoryFace PersonDirectoryFace(Guid persistedFaceId = default, string userData = null)
+        {
+            return new PersonDirectoryFace(persistedFaceId, userData, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.ListFaceResult"/>. </summary>
+        /// <param name="personId"> Id of person. </param>
+        /// <param name="persistedFaceIds"> Array of persisted face ids. </param>
+        /// <returns> A new <see cref="Face.ListFaceResult"/> instance for mocking. </returns>
+        public static ListFaceResult ListFaceResult(Guid personId = default, IEnumerable<Guid> persistedFaceIds = null)
+        {
+            persistedFaceIds ??= new List<Guid>();
+
+            return new ListFaceResult(personId, persistedFaceIds?.ToList(), serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.DynamicPersonGroup"/>. </summary>
+        /// <param name="dynamicPersonGroupId"> ID of the dynamic person group. </param>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <returns> A new <see cref="Face.DynamicPersonGroup"/> instance for mocking. </returns>
+        public static DynamicPersonGroup DynamicPersonGroup(string dynamicPersonGroupId = null, string name = null, string userData = null)
+        {
+            return new DynamicPersonGroup(dynamicPersonGroupId, name, userData, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.ListPersonResult"/>. </summary>
+        /// <param name="personIds"> Array of PersonDirectory Person ids. </param>
+        /// <returns> A new <see cref="Face.ListPersonResult"/> instance for mocking. </returns>
+        public static ListPersonResult ListPersonResult(IEnumerable<Guid> personIds = null)
+        {
+            personIds ??= new List<Guid>();
+
+            return new ListPersonResult(personIds?.ToList(), serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.CreateLivenessSessionContent"/>. </summary>
+        /// <param name="livenessOperationMode"> Type of liveness mode the client should follow. </param>
+        /// <param name="sendResultsToClient"> Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. </param>
+        /// <param name="deviceCorrelationIdSetInClient"> Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. </param>
+        /// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
+        /// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
+        /// <returns> A new <see cref="Face.CreateLivenessSessionContent"/> instance for mocking. </returns>
+        public static CreateLivenessSessionContent CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null)
+        {
+            return new CreateLivenessSessionContent(
+                livenessOperationMode,
+                sendResultsToClient,
+                deviceCorrelationIdSetInClient,
+                deviceCorrelationId,
+                authTokenTimeToLiveInSeconds,
+                serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.CreateLivenessSessionResult"/>. </summary>
+        /// <param name="sessionId"> The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. </param>
+        /// <param name="authToken"> Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. </param>
+        /// <returns> A new <see cref="Face.CreateLivenessSessionResult"/> instance for mocking. </returns>
+        public static CreateLivenessSessionResult CreateLivenessSessionResult(string sessionId = null, string authToken = null)
+        {
+            return new CreateLivenessSessionResult(sessionId, authToken, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessSession"/>. </summary>
+        /// <param name="id"> The unique ID to reference this session. </param>
+        /// <param name="createdDateTime"> DateTime when this session was created. </param>
+        /// <param name="sessionStartDateTime"> DateTime when this session was started by the client. </param>
+        /// <param name="sessionExpired"> Whether or not the session is expired. </param>
+        /// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
+        /// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
+        /// <param name="status"> The current status of the session. </param>
+        /// <param name="result"> The latest session audit result only populated if status == 'ResultAvailable'. </param>
+        /// <returns> A new <see cref="Face.LivenessSession"/> instance for mocking. </returns>
+        public static LivenessSession LivenessSession(string id = null, DateTimeOffset createdDateTime = default, DateTimeOffset? sessionStartDateTime = null, bool sessionExpired = default, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null, FaceSessionStatus status = default, LivenessSessionAuditEntry result = null)
+        {
+            return new LivenessSession(
+                id,
+                createdDateTime,
+                sessionStartDateTime,
+                sessionExpired,
+                deviceCorrelationId,
+                authTokenTimeToLiveInSeconds,
+                status,
+                result,
+                serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessSessionAuditEntry"/>. </summary>
+        /// <param name="id"> The unique id to refer to this audit request. Use this id with the 'start' query parameter to continue on to the next page of audit results. </param>
+        /// <param name="sessionId"> The unique sessionId of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding session DELETE operation. </param>
+        /// <param name="requestId"> The unique requestId that is returned by the service to the client in the 'apim-request-id' header. </param>
+        /// <param name="clientRequestId"> The unique clientRequestId that is sent by the client in the 'client-request-id' header. </param>
+        /// <param name="receivedDateTime"> The UTC DateTime that the request was received. </param>
+        /// <param name="request"> The request of this entry. </param>
+        /// <param name="response"> The response of this entry. </param>
+        /// <param name="digest"> The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. </param>
+        /// <returns> A new <see cref="Face.LivenessSessionAuditEntry"/> instance for mocking. </returns>
+        public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = default, string sessionId = null, string requestId = null, string clientRequestId = null, DateTimeOffset receivedDateTime = default, AuditRequestInfo request = null, AuditLivenessResponseInfo response = null, string digest = null)
+        {
+            return new LivenessSessionAuditEntry(
+                id,
+                sessionId,
+                requestId,
+                clientRequestId,
+                receivedDateTime,
+                request,
+                response,
+                digest,
+                serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.AuditRequestInfo"/>. </summary>
+        /// <param name="url"> The relative URL and query of the liveness request. </param>
+        /// <param name="method"> The HTTP method of the request (i.e., GET, POST, DELETE). </param>
+        /// <param name="contentLength"> The length of the request body in bytes. </param>
+        /// <param name="contentType"> The content type of the request. </param>
+        /// <param name="userAgent"> The user agent used to submit the request. </param>
+        /// <returns> A new <see cref="Face.AuditRequestInfo"/> instance for mocking. </returns>
+        public static AuditRequestInfo AuditRequestInfo(string url = null, string method = null, long? contentLength = null, string contentType = null, string userAgent = null)
+        {
+            return new AuditRequestInfo(
+                url,
+                method,
+                contentLength,
+                contentType,
+                userAgent,
+                serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.AuditLivenessResponseInfo"/>. </summary>
+        /// <param name="body"> The response body. The schema of this field will depend on the request.url and request.method used by the client. </param>
+        /// <param name="statusCode"> The HTTP status code returned to the client. </param>
+        /// <param name="latencyInMilliseconds"> The server measured latency for this request in milliseconds. </param>
+        /// <returns> A new <see cref="Face.AuditLivenessResponseInfo"/> instance for mocking. </returns>
+        public static AuditLivenessResponseInfo AuditLivenessResponseInfo(LivenessResponseBody body = null, int statusCode = default, long latencyInMilliseconds = default)
+        {
+            return new AuditLivenessResponseInfo(body, statusCode, latencyInMilliseconds, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessResponseBody"/>. </summary>
+        /// <param name="livenessDecision"> The liveness classification for the target face. </param>
+        /// <param name="target"> Specific targets used for liveness classification. </param>
+        /// <param name="modelVersionUsed"> The model version used for liveness classification. </param>
+        /// <param name="verifyResult"> The face verification output. Only available when the request is liveness with verify. </param>
+        /// <param name="additionalProperties"> Additional Properties. </param>
+        /// <returns> A new <see cref="Face.LivenessResponseBody"/> instance for mocking. </returns>
+        public static LivenessResponseBody LivenessResponseBody(FaceLivenessDecision? livenessDecision = null, LivenessOutputsTarget target = null, LivenessModel? modelVersionUsed = null, LivenessWithVerifyOutputs verifyResult = null, IReadOnlyDictionary<string, BinaryData> additionalProperties = null)
+        {
+            additionalProperties ??= new Dictionary<string, BinaryData>();
+
+            return new LivenessResponseBody(livenessDecision, target, modelVersionUsed, verifyResult, additionalProperties);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessOutputsTarget"/>. </summary>
+        /// <param name="faceRectangle"> The face region where the liveness classification was made on. </param>
+        /// <param name="fileName"> The file name which contains the face rectangle where the liveness classification was made on. </param>
+        /// <param name="timeOffsetWithinFile"> The time offset within the file of the frame which contains the face rectangle where the liveness classification was made on. </param>
+        /// <param name="imageType"> The image type which contains the face rectangle where the liveness classification was made on. </param>
+        /// <returns> A new <see cref="Face.LivenessOutputsTarget"/> instance for mocking. </returns>
+        public static LivenessOutputsTarget LivenessOutputsTarget(FaceRectangle faceRectangle = null, string fileName = null, int timeOffsetWithinFile = default, FaceImageType imageType = default)
+        {
+            return new LivenessOutputsTarget(faceRectangle, fileName, timeOffsetWithinFile, imageType, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessWithVerifyOutputs"/>. </summary>
+        /// <param name="verifyImage"> The detail of face for verification. </param>
+        /// <param name="matchConfidence"> The target face liveness face and comparison image face verification confidence. </param>
+        /// <param name="isIdentical"> Whether the target liveness face and comparison image face match. </param>
+        /// <returns> A new <see cref="Face.LivenessWithVerifyOutputs"/> instance for mocking. </returns>
+        public static LivenessWithVerifyOutputs LivenessWithVerifyOutputs(LivenessWithVerifyImage verifyImage = null, float matchConfidence = default, bool isIdentical = default)
+        {
+            return new LivenessWithVerifyOutputs(verifyImage, matchConfidence, isIdentical, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessWithVerifyImage"/>. </summary>
+        /// <param name="faceRectangle"> The face region where the comparison image's classification was made. </param>
+        /// <param name="qualityForRecognition"> Quality of face image for recognition. </param>
+        /// <returns> A new <see cref="Face.LivenessWithVerifyImage"/> instance for mocking. </returns>
+        public static LivenessWithVerifyImage LivenessWithVerifyImage(FaceRectangle faceRectangle = null, QualityForRecognition qualityForRecognition = default)
+        {
+            return new LivenessWithVerifyImage(faceRectangle, qualityForRecognition, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessSessionItem"/>. </summary>
+        /// <param name="id"> The unique ID to reference this session. </param>
+        /// <param name="createdDateTime"> DateTime when this session was created. </param>
+        /// <param name="sessionStartDateTime"> DateTime when this session was started by the client. </param>
+        /// <param name="sessionExpired"> Whether or not the session is expired. </param>
+        /// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
+        /// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
+        /// <returns> A new <see cref="Face.LivenessSessionItem"/> instance for mocking. </returns>
+        public static LivenessSessionItem LivenessSessionItem(string id = null, DateTimeOffset createdDateTime = default, DateTimeOffset? sessionStartDateTime = null, bool sessionExpired = default, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null)
+        {
+            return new LivenessSessionItem(
+                id,
+                createdDateTime,
+                sessionStartDateTime,
+                sessionExpired,
+                deviceCorrelationId,
+                authTokenTimeToLiveInSeconds,
+                serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.CreateLivenessWithVerifySessionResult"/>. </summary>
+        /// <param name="sessionId"> The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. </param>
+        /// <param name="authToken"> Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. </param>
+        /// <param name="verifyImage"> The detail of face for verification. </param>
+        /// <returns> A new <see cref="Face.CreateLivenessWithVerifySessionResult"/> instance for mocking. </returns>
+        public static CreateLivenessWithVerifySessionResult CreateLivenessWithVerifySessionResult(string sessionId = null, string authToken = null, LivenessWithVerifyImage verifyImage = null)
+        {
+            return new CreateLivenessWithVerifySessionResult(sessionId, authToken, verifyImage, serializedAdditionalRawData: null);
+        }
+
+        /// <summary> Initializes a new instance of <see cref="Face.LivenessWithVerifySession"/>. </summary>
+        /// <param name="id"> The unique ID to reference this session. </param>
+        /// <param name="createdDateTime"> DateTime when this session was created. </param>
+        /// <param name="sessionStartDateTime"> DateTime when this session was started by the client. </param>
+        /// <param name="sessionExpired"> Whether or not the session is expired. </param>
+        /// <param name="deviceCorrelationId"> Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. </param>
+        /// <param name="authTokenTimeToLiveInSeconds"> Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. </param>
+        /// <param name="status"> The current status of the session. </param>
+        /// <param name="result"> The latest session audit result only populated if status == 'ResultAvailable'. </param>
+        /// <returns> A new <see cref="Face.LivenessWithVerifySession"/> instance for mocking. </returns>
+        public static LivenessWithVerifySession LivenessWithVerifySession(string id = null, DateTimeOffset createdDateTime = default, DateTimeOffset? sessionStartDateTime = null, bool sessionExpired = default, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null, FaceSessionStatus status = default, LivenessSessionAuditEntry result = null)
+        {
+            return new LivenessWithVerifySession(
+                id,
+                createdDateTime,
+                sessionStartDateTime,
+                sessionExpired,
+                deviceCorrelationId,
+                authTokenTimeToLiveInSeconds,
+                status,
+                result,
+                serializedAdditionalRawData: null);
+        }
+    }
+}
diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.Serialization.cs
new file mode 100644
index 0000000000000..f8f5f5b54de06
--- /dev/null
+++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.Serialization.cs
@@ -0,0 +1,143 @@
+// Copyright (c) Microsoft Corporation.
All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+    public partial class AccessoryItem : IUtf8JsonSerializable, IJsonModel<AccessoryItem>
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<AccessoryItem>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+        void IJsonModel<AccessoryItem>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AccessoryItem>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(AccessoryItem)} does not support writing '{format}' format.");
+            }
+
+            writer.WriteStartObject();
+            writer.WritePropertyName("type"u8);
+            writer.WriteStringValue(Type.ToString());
+            writer.WritePropertyName("confidence"u8);
+            writer.WriteNumberValue(Confidence);
+            if (options.Format != "W" && _serializedAdditionalRawData != null)
+            {
+                foreach (var item in _serializedAdditionalRawData)
+                {
+                    writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item.Value);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item.Value))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+            }
+            writer.WriteEndObject();
+        }
+
+        AccessoryItem IJsonModel<AccessoryItem>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AccessoryItem>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(AccessoryItem)} does not support reading '{format}' format.");
+            }
+
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeAccessoryItem(document.RootElement, options);
+        }
+
+        internal static AccessoryItem DeserializeAccessoryItem(JsonElement element, ModelReaderWriterOptions options = null)
+        {
+            options ??= ModelSerializationExtensions.WireOptions;
+
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            AccessoryType type = default;
+            float confidence = default;
+            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("type"u8))
+                {
+                    type = new AccessoryType(property.Value.GetString());
+                    continue;
+                }
+                if (property.NameEquals("confidence"u8))
+                {
+                    confidence = property.Value.GetSingle();
+                    continue;
+                }
+                if (options.Format != "W")
+                {
+                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+                }
+            }
+            serializedAdditionalRawData = rawDataDictionary;
+            return new AccessoryItem(type, confidence, serializedAdditionalRawData);
+        }
+
+        BinaryData IPersistableModel<AccessoryItem>.Write(ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AccessoryItem>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options);
+                default:
+                    throw new FormatException($"The model {nameof(AccessoryItem)} does not support writing '{options.Format}' format.");
+            }
+        }
+
+        AccessoryItem IPersistableModel<AccessoryItem>.Create(BinaryData data, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AccessoryItem>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    {
+                        using JsonDocument document = JsonDocument.Parse(data);
+                        return DeserializeAccessoryItem(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(AccessoryItem)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<AccessoryItem>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+        /// <summary> Deserializes the model from a raw response. </summary>
+        /// <param name="response"> The response to deserialize the model from. </param>
+        internal static AccessoryItem FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeAccessoryItem(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue<AccessoryItem>(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.cs
new file mode 100644
index 0000000000000..a1bde016a3890
--- /dev/null
+++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryItem.cs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> Accessory item and corresponding confidence level. </summary>
+    public partial class AccessoryItem
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="AccessoryItem"/>. </summary>
+        /// <param name="type"> Type of the accessory. </param>
+        /// <param name="confidence"> Confidence level of the accessory type. Range between [0,1]. </param>
+        internal AccessoryItem(AccessoryType type, float confidence)
+        {
+            Type = type;
+            Confidence = confidence;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="AccessoryItem"/>. </summary>
+        /// <param name="type"> Type of the accessory. </param>
+        /// <param name="confidence"> Confidence level of the accessory type. Range between [0,1]. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal AccessoryItem(AccessoryType type, float confidence, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            Type = type;
+            Confidence = confidence;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="AccessoryItem"/> for deserialization. </summary>
+        internal AccessoryItem()
+        {
+        }
+
+        /// <summary> Type of the accessory. </summary>
+        public AccessoryType Type { get; }
+        /// <summary> Confidence level of the accessory type. Range between [0,1]. </summary>
+        public float Confidence { get; }
+    }
+}
diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryType.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryType.cs
new file mode 100644
index 0000000000000..7419d6b0c4b74
--- /dev/null
+++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AccessoryType.cs
@@ -0,0 +1,54 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ComponentModel;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> Type of the accessory. </summary>
+    public readonly partial struct AccessoryType : IEquatable<AccessoryType>
+    {
+        private readonly string _value;
+
+        /// <summary> Initializes a new instance of <see cref="AccessoryType"/>. </summary>
+        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
+        public AccessoryType(string value)
+        {
+            _value = value ?? throw new ArgumentNullException(nameof(value));
+        }
+
+        private const string HeadwearValue = "headwear";
+        private const string GlassesValue = "glasses";
+        private const string MaskValue = "mask";
+
+        /// <summary> Head wear. </summary>
+        public static AccessoryType Headwear { get; } = new AccessoryType(HeadwearValue);
+        /// <summary> Glasses. </summary>
+        public static AccessoryType Glasses { get; } = new AccessoryType(GlassesValue);
+        /// <summary> Mask. </summary>
+        public static AccessoryType Mask { get; } = new AccessoryType(MaskValue);
+        /// <summary> Determines if two <see cref="AccessoryType"/> values are the same. </summary>
+        public static bool operator ==(AccessoryType left, AccessoryType right) => left.Equals(right);
+        /// <summary> Determines if two <see cref="AccessoryType"/> values are not the same. </summary>
+        public static bool operator !=(AccessoryType left, AccessoryType right) => !left.Equals(right);
+        /// <summary> Converts a string to a <see cref="AccessoryType"/>. </summary>
+        public static implicit operator AccessoryType(string value) => new AccessoryType(value);
+
+        /// <inheritdoc />
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public override bool Equals(object obj) => obj is AccessoryType other && Equals(other);
+        /// <inheritdoc />
+        public bool Equals(AccessoryType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);
+
+        /// <inheritdoc />
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public override int GetHashCode() => _value?.GetHashCode() ?? 0;
+        /// <inheritdoc />
+        public override string ToString() => _value;
+    }
+}
diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.Serialization.cs
new file mode 100644
index 0000000000000..625d96ef037a2
--- /dev/null
+++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.Serialization.cs
@@ -0,0 +1,135 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+    internal partial class AddFaceListFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel<AddFaceListFaceFromUrlRequest>
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<AddFaceListFaceFromUrlRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+        void IJsonModel<AddFaceListFaceFromUrlRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AddFaceListFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(AddFaceListFaceFromUrlRequest)} does not support writing '{format}' format.");
+            }
+
+            writer.WriteStartObject();
+            writer.WritePropertyName("url"u8);
+            writer.WriteStringValue(Url.AbsoluteUri);
+            if (options.Format != "W" && _serializedAdditionalRawData != null)
+            {
+                foreach (var item in _serializedAdditionalRawData)
+                {
+                    writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item.Value);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item.Value))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+            }
+            writer.WriteEndObject();
+        }
+
+        AddFaceListFaceFromUrlRequest IJsonModel<AddFaceListFaceFromUrlRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AddFaceListFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(AddFaceListFaceFromUrlRequest)} does not support reading '{format}' format.");
+            }
+
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeAddFaceListFaceFromUrlRequest(document.RootElement, options);
+        }
+
+        internal static AddFaceListFaceFromUrlRequest DeserializeAddFaceListFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null)
+        {
+            options ??= ModelSerializationExtensions.WireOptions;
+
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            Uri url = default;
+            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("url"u8))
+                {
+                    url = new Uri(property.Value.GetString());
+                    continue;
+                }
+                if (options.Format != "W")
+                {
+                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+                }
+            }
+            serializedAdditionalRawData = rawDataDictionary;
+            return new AddFaceListFaceFromUrlRequest(url, serializedAdditionalRawData);
+        }
+
+        BinaryData IPersistableModel<AddFaceListFaceFromUrlRequest>.Write(ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AddFaceListFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options);
+                default:
+                    throw new FormatException($"The model {nameof(AddFaceListFaceFromUrlRequest)} does not support writing '{options.Format}' format.");
+            }
+        }
+
+        AddFaceListFaceFromUrlRequest IPersistableModel<AddFaceListFaceFromUrlRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<AddFaceListFaceFromUrlRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    {
+                        using JsonDocument document = JsonDocument.Parse(data);
+                        return DeserializeAddFaceListFaceFromUrlRequest(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(AddFaceListFaceFromUrlRequest)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<AddFaceListFaceFromUrlRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+        /// <summary> Deserializes the model from a raw response. </summary>
+        /// <param name="response"> The response to deserialize the model from. </param>
+        internal static AddFaceListFaceFromUrlRequest FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeAddFaceListFaceFromUrlRequest(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue<AddFaceListFaceFromUrlRequest>(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.cs
new file mode 100644
index 0000000000000..2a3fe06c2b498
--- /dev/null
+++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceListFaceFromUrlRequest.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> The AddFaceListFaceFromUrlRequest. </summary>
+    internal partial class AddFaceListFaceFromUrlRequest
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="AddFaceListFaceFromUrlRequest"/>. </summary>
+        /// <param name="url"> URL of input image. </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="url"/> is null. </exception>
+        public AddFaceListFaceFromUrlRequest(Uri url)
+        {
+            Argument.AssertNotNull(url, nameof(url));
+
+            Url = url;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="AddFaceListFaceFromUrlRequest"/>. </summary>
+        /// <param name="url"> URL of input image. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal AddFaceListFaceFromUrlRequest(Uri url, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            Url = url;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="AddFaceListFaceFromUrlRequest"/> for deserialization. </summary>
+        internal AddFaceListFaceFromUrlRequest()
+        {
+        }
+
+        /// <summary> URL of input image. </summary>
+        public Uri Url { get; }
+    }
+}
diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs
new file mode 100644
index 0000000000000..716408b25a915
--- /dev/null
+++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs
@@ -0,0 +1,135 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class AddFaceResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddFaceResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddFaceResult(document.RootElement, options); + } + + internal static AddFaceResult DeserializeAddFaceResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddFaceResult(persistedFaceId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{options.Format}' format."); + } + } + + AddFaceResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddFaceResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddFaceResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddFaceResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs new file mode 100644 index 0000000000000..28ad972fc9d5e --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response body for adding face. + public partial class AddFaceResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + internal AddFaceResult(Guid persistedFaceId) + { + PersistedFaceId = persistedFaceId; + } + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + /// Keeps track of any properties unknown to the library. + internal AddFaceResult(Guid persistedFaceId, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddFaceResult() + { + } + + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + public Guid PersistedFaceId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.Serialization.cs new file mode 100644 index 0000000000000..86825c1c13c35 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddLargeFaceListFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddLargeFaceListFaceFromUrlRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddLargeFaceListFaceFromUrlRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddLargeFaceListFaceFromUrlRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddLargeFaceListFaceFromUrlRequest(document.RootElement, options); + } + + internal static AddLargeFaceListFaceFromUrlRequest DeserializeAddLargeFaceListFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddLargeFaceListFaceFromUrlRequest(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddLargeFaceListFaceFromUrlRequest)} does not support writing '{options.Format}' format."); + } + } + + AddLargeFaceListFaceFromUrlRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddLargeFaceListFaceFromUrlRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddLargeFaceListFaceFromUrlRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddLargeFaceListFaceFromUrlRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddLargeFaceListFaceFromUrlRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.cs new file mode 100644 index 0000000000000..0001ad78df44a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargeFaceListFaceFromUrlRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddLargeFaceListFaceFromUrlRequest. + internal partial class AddLargeFaceListFaceFromUrlRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + public AddLargeFaceListFaceFromUrlRequest(Uri url) + { + Argument.AssertNotNull(url, nameof(url)); + + Url = url; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddLargeFaceListFaceFromUrlRequest(Uri url, IDictionary serializedAdditionalRawData) + { + Url = url; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddLargeFaceListFaceFromUrlRequest() + { + } + + /// URL of input image. + public Uri Url { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.Serialization.cs new file mode 100644 index 0000000000000..c2ab6a7a5914f --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddLargePersonGroupPersonFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddLargePersonGroupPersonFaceFromUrlRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddLargePersonGroupPersonFaceFromUrlRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddLargePersonGroupPersonFaceFromUrlRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddLargePersonGroupPersonFaceFromUrlRequest(document.RootElement, options); + } + + internal static AddLargePersonGroupPersonFaceFromUrlRequest DeserializeAddLargePersonGroupPersonFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddLargePersonGroupPersonFaceFromUrlRequest(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddLargePersonGroupPersonFaceFromUrlRequest)} does not support writing '{options.Format}' format."); + } + } + + AddLargePersonGroupPersonFaceFromUrlRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddLargePersonGroupPersonFaceFromUrlRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddLargePersonGroupPersonFaceFromUrlRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddLargePersonGroupPersonFaceFromUrlRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddLargePersonGroupPersonFaceFromUrlRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.cs new file mode 100644 index 0000000000000..d5cbfe6669062 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddLargePersonGroupPersonFaceFromUrlRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddLargePersonGroupPersonFaceFromUrlRequest. + internal partial class AddLargePersonGroupPersonFaceFromUrlRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . 
+ /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + public AddLargePersonGroupPersonFaceFromUrlRequest(Uri url) + { + Argument.AssertNotNull(url, nameof(url)); + + Url = url; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddLargePersonGroupPersonFaceFromUrlRequest(Uri url, IDictionary serializedAdditionalRawData) + { + Url = url; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddLargePersonGroupPersonFaceFromUrlRequest() + { + } + + /// URL of input image. + public Uri Url { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.Serialization.cs new file mode 100644 index 0000000000000..5fd9aae7fb1e9 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddPersonFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddPersonFaceFromUrlRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddPersonFaceFromUrlRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddPersonFaceFromUrlRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddPersonFaceFromUrlRequest(document.RootElement, options); + } + + internal static AddPersonFaceFromUrlRequest DeserializeAddPersonFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddPersonFaceFromUrlRequest(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddPersonFaceFromUrlRequest)} does not support writing '{options.Format}' format."); + } + } + + AddPersonFaceFromUrlRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddPersonFaceFromUrlRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddPersonFaceFromUrlRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddPersonFaceFromUrlRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddPersonFaceFromUrlRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.cs new file mode 100644 index 0000000000000..244705ddc5725 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonFaceFromUrlRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddPersonFaceFromUrlRequest. + internal partial class AddPersonFaceFromUrlRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + public AddPersonFaceFromUrlRequest(Uri url) + { + Argument.AssertNotNull(url, nameof(url)); + + Url = url; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddPersonFaceFromUrlRequest(Uri url, IDictionary serializedAdditionalRawData) + { + Url = url; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddPersonFaceFromUrlRequest() + { + } + + /// URL of input image. + public Uri Url { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.Serialization.cs new file mode 100644 index 0000000000000..1dcb155b43d8a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddPersonGroupPersonFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddPersonGroupPersonFaceFromUrlRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddPersonGroupPersonFaceFromUrlRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddPersonGroupPersonFaceFromUrlRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddPersonGroupPersonFaceFromUrlRequest(document.RootElement, options); + } + + internal static AddPersonGroupPersonFaceFromUrlRequest DeserializeAddPersonGroupPersonFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddPersonGroupPersonFaceFromUrlRequest(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddPersonGroupPersonFaceFromUrlRequest)} does not support writing '{options.Format}' format."); + } + } + + AddPersonGroupPersonFaceFromUrlRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddPersonGroupPersonFaceFromUrlRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddPersonGroupPersonFaceFromUrlRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddPersonGroupPersonFaceFromUrlRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddPersonGroupPersonFaceFromUrlRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.cs new file mode 100644 index 0000000000000..7b2f2eb7fc9c4 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AddPersonGroupPersonFaceFromUrlRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddPersonGroupPersonFaceFromUrlRequest. + internal partial class AddPersonGroupPersonFaceFromUrlRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . 
+ /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + public AddPersonGroupPersonFaceFromUrlRequest(Uri url) + { + Argument.AssertNotNull(url, nameof(url)); + + Url = url; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddPersonGroupPersonFaceFromUrlRequest(Uri url, IDictionary serializedAdditionalRawData) + { + Url = url; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddPersonGroupPersonFaceFromUrlRequest() + { + } + + /// URL of input image. + public Uri Url { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.Serialization.cs new file mode 100644 index 0000000000000..1c321539b72ba --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class AuditLivenessResponseInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AuditLivenessResponseInfo)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("body"u8); + writer.WriteObjectValue(Body, options); + writer.WritePropertyName("statusCode"u8); + writer.WriteNumberValue(StatusCode); + writer.WritePropertyName("latencyInMilliseconds"u8); + writer.WriteNumberValue(LatencyInMilliseconds); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AuditLivenessResponseInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AuditLivenessResponseInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAuditLivenessResponseInfo(document.RootElement, options); + } + + internal static AuditLivenessResponseInfo DeserializeAuditLivenessResponseInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + LivenessResponseBody body = default; + int statusCode = default; + long latencyInMilliseconds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("body"u8)) + { + body = LivenessResponseBody.DeserializeLivenessResponseBody(property.Value, options); + continue; + } + if (property.NameEquals("statusCode"u8)) + { + statusCode = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("latencyInMilliseconds"u8)) + { + latencyInMilliseconds = property.Value.GetInt64(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AuditLivenessResponseInfo(body, statusCode, latencyInMilliseconds, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AuditLivenessResponseInfo)} does not support writing '{options.Format}' format."); + } + } + + AuditLivenessResponseInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAuditLivenessResponseInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AuditLivenessResponseInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AuditLivenessResponseInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAuditLivenessResponseInfo(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.cs new file mode 100644 index 0000000000000..3448b284f8e3c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditLivenessResponseInfo.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Audit entry for a response in the session. + public partial class AuditLivenessResponseInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The response body. The schema of this field will depend on the request.url and request.method used by the client. + /// The HTTP status code returned to the client. + /// The server measured latency for this request in milliseconds. + /// is null. + internal AuditLivenessResponseInfo(LivenessResponseBody body, int statusCode, long latencyInMilliseconds) + { + Argument.AssertNotNull(body, nameof(body)); + + Body = body; + StatusCode = statusCode; + LatencyInMilliseconds = latencyInMilliseconds; + } + + /// Initializes a new instance of . + /// The response body. The schema of this field will depend on the request.url and request.method used by the client. + /// The HTTP status code returned to the client. + /// The server measured latency for this request in milliseconds. + /// Keeps track of any properties unknown to the library. 
+ internal AuditLivenessResponseInfo(LivenessResponseBody body, int statusCode, long latencyInMilliseconds, IDictionary serializedAdditionalRawData) + { + Body = body; + StatusCode = statusCode; + LatencyInMilliseconds = latencyInMilliseconds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AuditLivenessResponseInfo() + { + } + + /// The response body. The schema of this field will depend on the request.url and request.method used by the client. + public LivenessResponseBody Body { get; } + /// The HTTP status code returned to the client. + public int StatusCode { get; } + /// The server measured latency for this request in milliseconds. + public long LatencyInMilliseconds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.Serialization.cs new file mode 100644 index 0000000000000..1990a1f1cb024 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class AuditRequestInfo : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AuditRequestInfo)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Url); + writer.WritePropertyName("method"u8); + writer.WriteStringValue(Method); + if (Optional.IsDefined(ContentLength)) + { + writer.WritePropertyName("contentLength"u8); + writer.WriteNumberValue(ContentLength.Value); + } + writer.WritePropertyName("contentType"u8); + writer.WriteStringValue(ContentType); + if (Optional.IsDefined(UserAgent)) + { + writer.WritePropertyName("userAgent"u8); + writer.WriteStringValue(UserAgent); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AuditRequestInfo IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AuditRequestInfo)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAuditRequestInfo(document.RootElement, options); + } + + internal static AuditRequestInfo DeserializeAuditRequestInfo(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string url = default; + string method = default; + long? 
contentLength = default; + string contentType = default; + string userAgent = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = property.Value.GetString(); + continue; + } + if (property.NameEquals("method"u8)) + { + method = property.Value.GetString(); + continue; + } + if (property.NameEquals("contentLength"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + contentLength = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("contentType"u8)) + { + contentType = property.Value.GetString(); + continue; + } + if (property.NameEquals("userAgent"u8)) + { + userAgent = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AuditRequestInfo( + url, + method, + contentLength, + contentType, + userAgent, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AuditRequestInfo)} does not support writing '{options.Format}' format."); + } + } + + AuditRequestInfo IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAuditRequestInfo(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AuditRequestInfo)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AuditRequestInfo FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAuditRequestInfo(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.cs new file mode 100644 index 0000000000000..39356d1eb8f8a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AuditRequestInfo.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Audit entry for a request in the session. + public partial class AuditRequestInfo + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The relative URL and query of the liveness request. + /// The HTTP method of the request (i.e., GET, POST, DELETE). + /// The content type of the request. + /// , or is null. + internal AuditRequestInfo(string url, string method, string contentType) + { + Argument.AssertNotNull(url, nameof(url)); + Argument.AssertNotNull(method, nameof(method)); + Argument.AssertNotNull(contentType, nameof(contentType)); + + Url = url; + Method = method; + ContentType = contentType; + } + + /// Initializes a new instance of . + /// The relative URL and query of the liveness request. + /// The HTTP method of the request (i.e., GET, POST, DELETE). + /// The length of the request body in bytes. + /// The content type of the request. + /// The user agent used to submit the request. + /// Keeps track of any properties unknown to the library. + internal AuditRequestInfo(string url, string method, long? contentLength, string contentType, string userAgent, IDictionary serializedAdditionalRawData) + { + Url = url; + Method = method; + ContentLength = contentLength; + ContentType = contentType; + UserAgent = userAgent; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AuditRequestInfo() + { + } + + /// The relative URL and query of the liveness request. + public string Url { get; } + /// The HTTP method of the request (i.e., GET, POST, DELETE). + public string Method { get; } + /// The length of the request body in bytes. + public long? 
ContentLength { get; } + /// The content type of the request. + public string ContentType { get; } + /// The user agent used to submit the request. + public string UserAgent { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs new file mode 100644 index 0000000000000..a8cf7ed227cc9 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + /// Client options for Azure.AI.Vision.Face library clients. + public partial class AzureAIVisionFaceClientOptions : ClientOptions + { + private const ServiceVersion LatestVersion = ServiceVersion.V1_1_Preview_1; + + /// The version of the service to use. + public enum ServiceVersion + { + /// Service version "v1.1-preview.1". + V1_1_Preview_1 = 1, + } + + internal string Version { get; } + + /// Initializes new instance of AzureAIVisionFaceClientOptions. + public AzureAIVisionFaceClientOptions(ServiceVersion version = LatestVersion) + { + Version = version switch + { + ServiceVersion.V1_1_Preview_1 => "v1.1-preview.1", + _ => throw new NotSupportedException() + }; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurLevel.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurLevel.cs new file mode 100644 index 0000000000000..bab59c1ac1810 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurLevel.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// Indicates level of blurriness. 
+ public readonly partial struct BlurLevel : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public BlurLevel(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string LowValue = "low"; + private const string MediumValue = "medium"; + private const string HighValue = "high"; + + /// Low blur level. + public static BlurLevel Low { get; } = new BlurLevel(LowValue); + /// Medium blur level. + public static BlurLevel Medium { get; } = new BlurLevel(MediumValue); + /// High blur level. + public static BlurLevel High { get; } = new BlurLevel(HighValue); + /// Determines if two values are the same. + public static bool operator ==(BlurLevel left, BlurLevel right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(BlurLevel left, BlurLevel right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator BlurLevel(string value) => new BlurLevel(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is BlurLevel other && Equals(other); + /// + public bool Equals(BlurLevel other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.Serialization.cs new file mode 100644 index 0000000000000..31d2641534f78 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class BlurProperties : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BlurProperties)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("blurLevel"u8); + writer.WriteStringValue(BlurLevel.ToString()); + writer.WritePropertyName("value"u8); + writer.WriteNumberValue(Value); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + BlurProperties IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(BlurProperties)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeBlurProperties(document.RootElement, options); + } + + internal static BlurProperties DeserializeBlurProperties(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + BlurLevel blurLevel = default; + float value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("blurLevel"u8)) + { + blurLevel = new BlurLevel(property.Value.GetString()); + continue; + } + if (property.NameEquals("value"u8)) + { + value = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new BlurProperties(blurLevel, value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(BlurProperties)} does not support writing '{options.Format}' format."); + } + } + + BlurProperties IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeBlurProperties(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(BlurProperties)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static BlurProperties FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeBlurProperties(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.cs new file mode 100644 index 0000000000000..91ce601ef2c55 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/BlurProperties.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Properties describing any presence of blur within the image. + public partial class BlurProperties + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// An enum value indicating level of blurriness. + /// A number indicating level of blurriness ranging from 0 to 1. + internal BlurProperties(BlurLevel blurLevel, float value) + { + BlurLevel = blurLevel; + Value = value; + } + + /// Initializes a new instance of . + /// An enum value indicating level of blurriness. + /// A number indicating level of blurriness ranging from 0 to 1. + /// Keeps track of any properties unknown to the library. + internal BlurProperties(BlurLevel blurLevel, float value, IDictionary serializedAdditionalRawData) + { + BlurLevel = blurLevel; + Value = value; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal BlurProperties() + { + } + + /// An enum value indicating level of blurriness. + public BlurLevel BlurLevel { get; } + /// A number indicating level of blurriness ranging from 0 to 1. + public float Value { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..1a5fe072271a7 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.Serialization.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateDynamicPersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateDynamicPersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateDynamicPersonGroupRequest(document.RootElement, options); + } + + internal static CreateDynamicPersonGroupRequest DeserializeCreateDynamicPersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateDynamicPersonGroupRequest(name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + CreateDynamicPersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateDynamicPersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateDynamicPersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateDynamicPersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.cs new file mode 100644 index 0000000000000..3ee3544ae2a0b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupRequest.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateDynamicPersonGroupRequest. + internal partial class CreateDynamicPersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreateDynamicPersonGroupRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal CreateDynamicPersonGroupRequest(string name, string userData, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateDynamicPersonGroupRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.Serialization.cs new file mode 100644 index 0000000000000..5540c5a796326 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.Serialization.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateDynamicPersonGroupWithPersonRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupWithPersonRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + writer.WritePropertyName("addPersonIds"u8); + writer.WriteStartArray(); + foreach (var item in AddPersonIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateDynamicPersonGroupWithPersonRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupWithPersonRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateDynamicPersonGroupWithPersonRequest(document.RootElement, options); + } + + internal static CreateDynamicPersonGroupWithPersonRequest DeserializeCreateDynamicPersonGroupWithPersonRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + IList addPersonIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("addPersonIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + addPersonIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateDynamicPersonGroupWithPersonRequest(name, userData, addPersonIds, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupWithPersonRequest)} does not support writing '{options.Format}' format."); + } + } + + CreateDynamicPersonGroupWithPersonRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateDynamicPersonGroupWithPersonRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateDynamicPersonGroupWithPersonRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateDynamicPersonGroupWithPersonRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateDynamicPersonGroupWithPersonRequest(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.cs new file mode 100644 index 0000000000000..362e8dd49de18 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateDynamicPersonGroupWithPersonRequest.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The CreateDynamicPersonGroupWithPersonRequest. + internal partial class CreateDynamicPersonGroupWithPersonRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Array of personIds created by Person Directory "Create Person" to be added. + /// or is null. 
+ public CreateDynamicPersonGroupWithPersonRequest(string name, IEnumerable addPersonIds) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(addPersonIds, nameof(addPersonIds)); + + Name = name; + AddPersonIds = addPersonIds.ToList(); + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Array of personIds created by Person Directory "Create Person" to be added. + /// Keeps track of any properties unknown to the library. + internal CreateDynamicPersonGroupWithPersonRequest(string name, string userData, IList addPersonIds, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + AddPersonIds = addPersonIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateDynamicPersonGroupWithPersonRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + /// Array of personIds created by Person Directory "Create Person" to be added. + public IList AddPersonIds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.Serialization.cs new file mode 100644 index 0000000000000..cec5552f6cc1c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.Serialization.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateFaceListRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateFaceListRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateFaceListRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateFaceListRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateFaceListRequest(document.RootElement, options); + } + + internal static CreateFaceListRequest DeserializeCreateFaceListRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateFaceListRequest(name, userData, recognitionModel, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateFaceListRequest)} does not support writing '{options.Format}' format."); + } + } + + CreateFaceListRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateFaceListRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateFaceListRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateFaceListRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateFaceListRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.cs new file mode 100644 index 0000000000000..32bd89ab1b427 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateFaceListRequest.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateFaceListRequest. + internal partial class CreateFaceListRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreateFaceListRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Keeps track of any properties unknown to the library. + internal CreateFaceListRequest(string name, string userData, FaceRecognitionModel? 
recognitionModel, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateFaceListRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + public FaceRecognitionModel? RecognitionModel { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.Serialization.cs new file mode 100644 index 0000000000000..8f823893675e4 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.Serialization.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateLargeFaceListRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLargeFaceListRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLargeFaceListRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLargeFaceListRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLargeFaceListRequest(document.RootElement, options); + } + + internal static CreateLargeFaceListRequest DeserializeCreateLargeFaceListRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLargeFaceListRequest(name, userData, recognitionModel, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateLargeFaceListRequest)} does not support writing '{options.Format}' format."); + } + } + + CreateLargeFaceListRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLargeFaceListRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLargeFaceListRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLargeFaceListRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLargeFaceListRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.cs new file mode 100644 index 0000000000000..463d8dee7eb00 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargeFaceListRequest.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateLargeFaceListRequest. + internal partial class CreateLargeFaceListRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreateLargeFaceListRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Keeps track of any properties unknown to the library. + internal CreateLargeFaceListRequest(string name, string userData, FaceRecognitionModel? 
recognitionModel, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLargeFaceListRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + public FaceRecognitionModel? RecognitionModel { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.Serialization.cs new file mode 100644 index 0000000000000..d9aeeef3dee8f --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.Serialization.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateLargePersonGroupPersonRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLargePersonGroupPersonRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLargePersonGroupPersonRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLargePersonGroupPersonRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLargePersonGroupPersonRequest(document.RootElement, options); + } + + internal static CreateLargePersonGroupPersonRequest DeserializeCreateLargePersonGroupPersonRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLargePersonGroupPersonRequest(name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateLargePersonGroupPersonRequest)} does not support writing '{options.Format}' format."); + } + } + + CreateLargePersonGroupPersonRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLargePersonGroupPersonRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLargePersonGroupPersonRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLargePersonGroupPersonRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLargePersonGroupPersonRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.cs new file mode 100644 index 0000000000000..8f80412922a7d --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupPersonRequest.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateLargePersonGroupPersonRequest. + internal partial class CreateLargePersonGroupPersonRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . 
+ /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreateLargePersonGroupPersonRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal CreateLargePersonGroupPersonRequest(string name, string userData, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLargePersonGroupPersonRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..2757e28c0f6aa --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.Serialization.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLargePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLargePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLargePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLargePersonGroupRequest(document.RootElement, options); + } + + internal static CreateLargePersonGroupRequest DeserializeCreateLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLargePersonGroupRequest(name, userData, recognitionModel, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateLargePersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + CreateLargePersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLargePersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLargePersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLargePersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLargePersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.cs new file mode 100644 index 0000000000000..ef8942ac73c4d --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLargePersonGroupRequest.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateLargePersonGroupRequest. + internal partial class CreateLargePersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreateLargePersonGroupRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Keeps track of any properties unknown to the library. + internal CreateLargePersonGroupRequest(string name, string userData, FaceRecognitionModel? 
recognitionModel, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLargePersonGroupRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + public FaceRecognitionModel? RecognitionModel { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs new file mode 100644 index 0000000000000..5890a4f74258b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class CreateLivenessSessionContent : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessSessionContent)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("livenessOperationMode"u8); + writer.WriteStringValue(LivenessOperationMode.ToString()); + if (Optional.IsDefined(SendResultsToClient)) + { + writer.WritePropertyName("sendResultsToClient"u8); + writer.WriteBooleanValue(SendResultsToClient.Value); + } + if (Optional.IsDefined(DeviceCorrelationIdSetInClient)) + { + writer.WritePropertyName("deviceCorrelationIdSetInClient"u8); + writer.WriteBooleanValue(DeviceCorrelationIdSetInClient.Value); + } + if (Optional.IsDefined(DeviceCorrelationId)) + { + writer.WritePropertyName("deviceCorrelationId"u8); + writer.WriteStringValue(DeviceCorrelationId); + } + if (Optional.IsDefined(AuthTokenTimeToLiveInSeconds)) + { + writer.WritePropertyName("authTokenTimeToLiveInSeconds"u8); + writer.WriteNumberValue(AuthTokenTimeToLiveInSeconds.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } 
+#endif + } + } + writer.WriteEndObject(); + } + + CreateLivenessSessionContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessSessionContent)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLivenessSessionContent(document.RootElement, options); + } + + internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionContent(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + LivenessOperationMode livenessOperationMode = default; + bool? sendResultsToClient = default; + bool? deviceCorrelationIdSetInClient = default; + string deviceCorrelationId = default; + int? 
authTokenTimeToLiveInSeconds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("livenessOperationMode"u8)) + { + livenessOperationMode = new LivenessOperationMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("sendResultsToClient"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sendResultsToClient = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("deviceCorrelationIdSetInClient"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + deviceCorrelationIdSetInClient = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("deviceCorrelationId"u8)) + { + deviceCorrelationId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authTokenTimeToLiveInSeconds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authTokenTimeToLiveInSeconds = property.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLivenessSessionContent( + livenessOperationMode, + sendResultsToClient, + deviceCorrelationIdSetInClient, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateLivenessSessionContent)} does not support writing '{options.Format}' format."); + } + } + + CreateLivenessSessionContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLivenessSessionContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLivenessSessionContent)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLivenessSessionContent FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLivenessSessionContent(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs new file mode 100644 index 0000000000000..eb27333cbe93f --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Request for creating liveness session. + public partial class CreateLivenessSessionContent + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Type of liveness mode the client should follow. + public CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode) + { + LivenessOperationMode = livenessOperationMode; + } + + /// Initializes a new instance of . + /// Type of liveness mode the client should follow. + /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. + /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. 
+ /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// Keeps track of any properties unknown to the library. + internal CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary serializedAdditionalRawData) + { + LivenessOperationMode = livenessOperationMode; + SendResultsToClient = sendResultsToClient; + DeviceCorrelationIdSetInClient = deviceCorrelationIdSetInClient; + DeviceCorrelationId = deviceCorrelationId; + AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLivenessSessionContent() + { + } + + /// Type of liveness mode the client should follow. + public LivenessOperationMode LivenessOperationMode { get; } + /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. + public bool? SendResultsToClient { get; set; } + /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + public bool? DeviceCorrelationIdSetInClient { get; set; } + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + public string DeviceCorrelationId { get; set; } + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + public int? 
AuthTokenTimeToLiveInSeconds { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.Serialization.cs new file mode 100644 index 0000000000000..3d9edb2101d45 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class CreateLivenessSessionResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessSessionResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("sessionId"u8); + writer.WriteStringValue(SessionId); + writer.WritePropertyName("authToken"u8); + writer.WriteStringValue(AuthToken); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLivenessSessionResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessSessionResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLivenessSessionResult(document.RootElement, options); + } + + internal static CreateLivenessSessionResult DeserializeCreateLivenessSessionResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string sessionId = default; + string authToken = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("sessionId"u8)) + { + sessionId = property.Value.GetString(); + continue; + } + if 
(property.NameEquals("authToken"u8)) + { + authToken = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLivenessSessionResult(sessionId, authToken, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateLivenessSessionResult)} does not support writing '{options.Format}' format."); + } + } + + CreateLivenessSessionResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLivenessSessionResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLivenessSessionResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLivenessSessionResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLivenessSessionResult(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.cs new file mode 100644 index 0000000000000..cf8e5b8e2c7fa --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionResult.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response of liveness session creation. + public partial class CreateLivenessSessionResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. + /// Bearer token to provide authentication for the Vision SDK running on a client application. 
This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. + /// or is null. + internal CreateLivenessSessionResult(string sessionId, string authToken) + { + Argument.AssertNotNull(sessionId, nameof(sessionId)); + Argument.AssertNotNull(authToken, nameof(authToken)); + + SessionId = sessionId; + AuthToken = authToken; + } + + /// Initializes a new instance of . + /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. + /// Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. + /// Keeps track of any properties unknown to the library. + internal CreateLivenessSessionResult(string sessionId, string authToken, IDictionary serializedAdditionalRawData) + { + SessionId = sessionId; + AuthToken = authToken; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLivenessSessionResult() + { + } + + /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. + public string SessionId { get; } + /// Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. 
+ public string AuthToken { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs new file mode 100644 index 0000000000000..6b713f68b180d --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.IO; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateLivenessWithVerifySessionContent : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionContent)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("Parameters"u8); + writer.WriteObjectValue(Parameters, options); + writer.WritePropertyName("VerifyImage"u8); +#if NET6_0_OR_GREATER + writer.WriteRawValue(global::System.BinaryData.FromStream(VerifyImage)); +#else + using (JsonDocument document = JsonDocument.Parse(BinaryData.FromStream(VerifyImage))) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLivenessWithVerifySessionContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionContent)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLivenessWithVerifySessionContent(document.RootElement, options); + } + + internal static CreateLivenessWithVerifySessionContent DeserializeCreateLivenessWithVerifySessionContent(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + CreateLivenessSessionContent parameters = default; + Stream verifyImage = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("Parameters"u8)) + { + parameters = CreateLivenessSessionContent.DeserializeCreateLivenessSessionContent(property.Value, options); + continue; + } + if (property.NameEquals("VerifyImage"u8)) + { + verifyImage = BinaryData.FromString(property.Value.GetRawText()).ToStream(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLivenessWithVerifySessionContent(parameters, verifyImage, serializedAdditionalRawData); + } + + private BinaryData SerializeMultipart(ModelReaderWriterOptions options) + { + using MultipartFormDataRequestContent content = ToMultipartRequestContent(); + using MemoryStream stream = new MemoryStream(); + content.WriteTo(stream); + if (stream.Position > int.MaxValue) + { + return BinaryData.FromStream(stream); + } + else + { + return new BinaryData(stream.GetBuffer().AsMemory(0, (int)stream.Position)); + } + } + + internal 
virtual MultipartFormDataRequestContent ToMultipartRequestContent() + { + MultipartFormDataRequestContent content = new MultipartFormDataRequestContent(); + content.Add(ModelReaderWriter.Write(Parameters, ModelSerializationExtensions.WireOptions), "Parameters"); + content.Add(VerifyImage, "VerifyImage", "VerifyImage", "application/octet-stream"); + return content; + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + case "MFD": + return SerializeMultipart(options); + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionContent)} does not support writing '{options.Format}' format."); + } + } + + CreateLivenessWithVerifySessionContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLivenessWithVerifySessionContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionContent)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "MFD"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLivenessWithVerifySessionContent FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLivenessWithVerifySessionContent(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs new file mode 100644 index 0000000000000..5d6d724fdbda9 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.IO; + +namespace Azure.AI.Vision.Face +{ + /// Request of liveness with verify session creation. + internal partial class CreateLivenessWithVerifySessionContent + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The parameters for creating session. + /// The image stream for verify. Content-Disposition header field for this part must have filename. + /// or is null. 
+ public CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage) + { + Argument.AssertNotNull(parameters, nameof(parameters)); + Argument.AssertNotNull(verifyImage, nameof(verifyImage)); + + Parameters = parameters; + VerifyImage = verifyImage; + } + + /// Initializes a new instance of . + /// The parameters for creating session. + /// The image stream for verify. Content-Disposition header field for this part must have filename. + /// Keeps track of any properties unknown to the library. + internal CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage, IDictionary serializedAdditionalRawData) + { + Parameters = parameters; + VerifyImage = verifyImage; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLivenessWithVerifySessionContent() + { + } + + /// The parameters for creating session. + public CreateLivenessSessionContent Parameters { get; } + /// The image stream for verify. Content-Disposition header field for this part must have filename. + public Stream VerifyImage { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.Serialization.cs new file mode 100644 index 0000000000000..282402586b8dd --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.Serialization.cs @@ -0,0 +1,158 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class CreateLivenessWithVerifySessionResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("sessionId"u8); + writer.WriteStringValue(SessionId); + writer.WritePropertyName("authToken"u8); + writer.WriteStringValue(AuthToken); + if (Optional.IsDefined(VerifyImage)) + { + writer.WritePropertyName("verifyImage"u8); + writer.WriteObjectValue(VerifyImage, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLivenessWithVerifySessionResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLivenessWithVerifySessionResult(document.RootElement, options); + } + + internal static CreateLivenessWithVerifySessionResult DeserializeCreateLivenessWithVerifySessionResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string sessionId = default; + string authToken = default; + LivenessWithVerifyImage verifyImage = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("sessionId"u8)) + { + sessionId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authToken"u8)) + { + authToken = property.Value.GetString(); + continue; + } + if (property.NameEquals("verifyImage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + verifyImage = LivenessWithVerifyImage.DeserializeLivenessWithVerifyImage(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLivenessWithVerifySessionResult(sessionId, authToken, verifyImage, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionResult)} does not support writing '{options.Format}' format."); + } + } + + CreateLivenessWithVerifySessionResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLivenessWithVerifySessionResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLivenessWithVerifySessionResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLivenessWithVerifySessionResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.cs new file mode 100644 index 0000000000000..b969512f54ce7 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionResult.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response of liveness session with verify creation with verify image provided. + public partial class CreateLivenessWithVerifySessionResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. + /// Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. + /// or is null. + internal CreateLivenessWithVerifySessionResult(string sessionId, string authToken) + { + Argument.AssertNotNull(sessionId, nameof(sessionId)); + Argument.AssertNotNull(authToken, nameof(authToken)); + + SessionId = sessionId; + AuthToken = authToken; + } + + /// Initializes a new instance of . + /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. 
+ /// Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. + /// The detail of face for verification. + /// Keeps track of any properties unknown to the library. + internal CreateLivenessWithVerifySessionResult(string sessionId, string authToken, LivenessWithVerifyImage verifyImage, IDictionary serializedAdditionalRawData) + { + SessionId = sessionId; + AuthToken = authToken; + VerifyImage = verifyImage; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLivenessWithVerifySessionResult() + { + } + + /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. + public string SessionId { get; } + /// Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. + public string AuthToken { get; } + /// The detail of face for verification. + public LivenessWithVerifyImage VerifyImage { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.Serialization.cs new file mode 100644 index 0000000000000..d1f511d54c56a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.Serialization.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreatePersonGroupPersonRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonGroupPersonRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonGroupPersonRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonGroupPersonRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonGroupPersonRequest(document.RootElement, options); + } + + internal static CreatePersonGroupPersonRequest DeserializeCreatePersonGroupPersonRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonGroupPersonRequest(name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonGroupPersonRequest)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonGroupPersonRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonGroupPersonRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonGroupPersonRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonGroupPersonRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonGroupPersonRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.cs new file mode 100644 index 0000000000000..4df984e3f8cdf --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupPersonRequest.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreatePersonGroupPersonRequest. + internal partial class CreatePersonGroupPersonRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreatePersonGroupPersonRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal CreatePersonGroupPersonRequest(string name, string userData, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreatePersonGroupPersonRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..38b2aa6701171 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.Serialization.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreatePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonGroupRequest(document.RootElement, options); + } + + internal static CreatePersonGroupRequest DeserializeCreatePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonGroupRequest(name, userData, recognitionModel, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.cs new file mode 100644 index 0000000000000..710ca5e22917c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonGroupRequest.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreatePersonGroupRequest. + internal partial class CreatePersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreatePersonGroupRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Keeps track of any properties unknown to the library. + internal CreatePersonGroupRequest(string name, string userData, FaceRecognitionModel? 
recognitionModel, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreatePersonGroupRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + public FaceRecognitionModel? RecognitionModel { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs new file mode 100644 index 0000000000000..682c4f08d863b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreatePersonRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonRequest(document.RootElement, options); + } + + internal static CreatePersonRequest DeserializeCreatePersonRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonRequest(name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs new file mode 100644 index 0000000000000..df91a9a3eda94 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreatePersonRequest. + internal partial class CreatePersonRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + public CreatePersonRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal CreatePersonRequest(string name, string userData, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreatePersonRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs new file mode 100644 index 0000000000000..e0afa23647b81 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class CreatePersonResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonResult(document.RootElement, options); + } + + internal static CreatePersonResult DeserializeCreatePersonResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonResult(personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs new file mode 100644 index 0000000000000..d2b993228dd66 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response of create person. + public partial class CreatePersonResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Person ID of the person. + internal CreatePersonResult(Guid personId) + { + PersonId = personId; + } + + /// Initializes a new instance of . + /// Person ID of the person. + /// Keeps track of any properties unknown to the library. + internal CreatePersonResult(Guid personId, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreatePersonResult() + { + } + + /// Person ID of the person. + public Guid PersonId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceAdministrationClient.xml b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceAdministrationClient.xml new file mode 100644 index 0000000000000..ebab9012416e6 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceAdministrationClient.xml @@ -0,0 +1,6027 @@ + + + + + +This sample shows how to call CreateFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateFaceListAsync("", ""); +]]> +This sample shows how to call CreateFaceListAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateFaceListAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreateFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateFaceList("", ""); +]]> +This sample shows how to call CreateFaceList with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateFaceList("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreateFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = await client.CreateFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateFaceListAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = await client.CreateFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreateFaceList. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = client.CreateFaceList("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateFaceList with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = client.CreateFaceList("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteFaceListAsync(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteFaceListAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteFaceListAsync(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteFaceList(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteFaceList with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteFaceList(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetFaceListAsync(""); +]]> +This sample shows how to call GetFaceListAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetFaceListAsync("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetFaceList(""); +]]> +This sample shows how to call GetFaceList with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetFaceList("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetFaceListAsync("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("faceListId").ToString()); +]]> +This sample shows how to call GetFaceListAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetFaceListAsync("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("faceListId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetFaceList and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetFaceList("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("faceListId").ToString()); +]]> +This sample shows how to call GetFaceList with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetFaceList("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("faceListId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call UpdateFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdateFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateFaceListAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdateFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateFaceList. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdateFaceList("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateFaceList with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdateFaceList("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFaceListsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetFaceListsAsync(); +]]> +This sample shows how to call GetFaceListsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetFaceListsAsync(returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetFaceLists. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetFaceLists(); +]]> +This sample shows how to call GetFaceLists with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetFaceLists(returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetFaceListsAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetFaceListsAsync(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("faceListId").ToString()); +]]> +This sample shows how to call GetFaceListsAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetFaceListsAsync(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("faceListId").ToString()); +]]> + + + +This sample shows how to call GetFaceLists and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetFaceLists(null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("faceListId").ToString()); +]]> +This sample shows how to call GetFaceLists with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetFaceLists(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("faceListId").ToString()); +]]> + + + +This sample shows how to call AddFaceListFaceFromUrlAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddFaceListFaceFromUrlAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddFaceListFaceFromUrl. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddFaceListFaceFromUrl("", new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddFaceListFaceFromUrl with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddFaceListFaceFromUrl("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddFaceListFaceFromUrlAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddFaceListFaceFromUrlAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddFaceListFaceFromUrlAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddFaceListFaceFromUrlAsync("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddFaceListFaceFromUrl and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddFaceListFaceFromUrl("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddFaceListFaceFromUrl with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddFaceListFaceFromUrl("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddFaceListFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddFaceListFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddFaceListFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddFaceListFace("", BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddFaceListFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddFaceListFace("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddFaceListFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddFaceListFaceAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddFaceListFaceAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddFaceListFaceAsync("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddFaceListFace and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddFaceListFace("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddFaceListFace with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddFaceListFace("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call DeleteFaceListFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteFaceListFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteFaceListFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteFaceListFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreateLargeFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateLargeFaceListAsync("", ""); +]]> +This sample shows how to call CreateLargeFaceListAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateLargeFaceListAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreateLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateLargeFaceList("", ""); +]]> +This sample shows how to call CreateLargeFaceList with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateLargeFaceList("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreateLargeFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = await client.CreateLargeFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateLargeFaceListAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = await client.CreateLargeFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreateLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = client.CreateLargeFaceList("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateLargeFaceList with all parameters and request content. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = client.CreateLargeFaceList("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLargeFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargeFaceListAsync(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargeFaceListAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargeFaceListAsync(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargeFaceList(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargeFaceList with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargeFaceList(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListAsync(""); +]]> +This sample shows how to call GetLargeFaceListAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListAsync("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceList(""); +]]> +This sample shows how to call GetLargeFaceList with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceList("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargeFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListAsync("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> +This sample shows how to call GetLargeFaceListAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListAsync("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceList and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceList("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> +This sample shows how to call GetLargeFaceList with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceList("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call UpdateLargeFaceListAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdateLargeFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargeFaceListAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdateLargeFaceListAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdateLargeFaceList("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargeFaceList with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdateLargeFaceList("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargeFaceListsAsync(); +]]> +This sample shows how to call GetLargeFaceListsAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargeFaceListsAsync(start: "", top: 1234, returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargeFaceLists. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargeFaceLists(); +]]> +This sample shows how to call GetLargeFaceLists with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargeFaceLists(start: "", top: 1234, returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargeFaceListsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListsAsync(null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> +This sample shows how to call GetLargeFaceListsAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListsAsync("", 1234, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceLists and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceLists(null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> +This sample shows how to call GetLargeFaceLists with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceLists("", 1234, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceListTrainingStatusAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListTrainingStatusAsync(""); +]]> +This sample shows how to call GetLargeFaceListTrainingStatusAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListTrainingStatusAsync(""); +]]> + + + +This sample shows how to call GetLargeFaceListTrainingStatus. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListTrainingStatus(""); +]]> +This sample shows how to call GetLargeFaceListTrainingStatus with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListTrainingStatus(""); +]]> + + + +This sample shows how to call GetLargeFaceListTrainingStatusAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListTrainingStatusAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> +This sample shows how to call GetLargeFaceListTrainingStatusAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListTrainingStatusAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +Console.WriteLine(result.GetProperty("message").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceListTrainingStatus and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListTrainingStatus("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> +This sample shows how to call GetLargeFaceListTrainingStatus with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListTrainingStatus("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +Console.WriteLine(result.GetProperty("message").ToString()); +]]> + + + +This sample shows how to call AddLargeFaceListFaceFromUrlAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargeFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddLargeFaceListFaceFromUrlAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargeFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargeFaceListFaceFromUrl. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargeFaceListFaceFromUrl("", new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddLargeFaceListFaceFromUrl with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargeFaceListFaceFromUrl("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargeFaceListFaceFromUrlAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddLargeFaceListFaceFromUrlAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargeFaceListFaceFromUrlAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddLargeFaceListFaceFromUrlAsync("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddLargeFaceListFaceFromUrl and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddLargeFaceListFaceFromUrl("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargeFaceListFaceFromUrl with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddLargeFaceListFaceFromUrl("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddLargeFaceListFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargeFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddLargeFaceListFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargeFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargeFaceListFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargeFaceListFace("", BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddLargeFaceListFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargeFaceListFace("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargeFaceListFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddLargeFaceListFaceAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargeFaceListFaceAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddLargeFaceListFaceAsync("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddLargeFaceListFace and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddLargeFaceListFace("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargeFaceListFace with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddLargeFaceListFace("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call DeleteLargeFaceListFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargeFaceListFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLargeFaceListFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargeFaceListFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetLargeFaceListFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetLargeFaceListFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetLargeFaceListFace with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetLargeFaceListFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetLargeFaceListFaceAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceListFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetLargeFaceListFace with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call UpdateLargeFaceListFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdateLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargeFaceListFaceAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = await client.UpdateLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateLargeFaceListFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdateLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargeFaceListFace with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = client.UpdateLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListFacesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargeFaceListFacesAsync(""); +]]> +This sample shows how to call GetLargeFaceListFacesAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargeFaceListFacesAsync("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLargeFaceListFaces. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargeFaceListFaces(""); +]]> +This sample shows how to call GetLargeFaceListFaces with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargeFaceListFaces("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLargeFaceListFacesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListFacesAsync("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetLargeFaceListFacesAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargeFaceListFacesAsync("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceListFaces and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListFaces("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetLargeFaceListFaces with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargeFaceListFaces("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call CreatePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreatePersonGroupAsync("", ""); +]]> +This sample shows how to call CreatePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreatePersonGroupAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreatePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreatePersonGroup("", ""); +]]> +This sample shows how to call CreatePersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreatePersonGroup("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreatePersonGroupAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = await client.CreatePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreatePersonGroupAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = await client.CreatePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreatePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = client.CreatePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreatePersonGroup with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = client.CreatePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeletePersonGroupAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeletePersonGroupAsync(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeletePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeletePersonGroupAsync(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeletePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeletePersonGroup(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeletePersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeletePersonGroup(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupAsync(""); +]]> +This sample shows how to call GetPersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupAsync("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetPersonGroup. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroup(""); +]]> +This sample shows how to call GetPersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroup("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetPersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupAsync("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("personGroupId").ToString()); +]]> +This sample shows how to call GetPersonGroupAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupAsync("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("personGroupId").ToString()); +]]> + + + +This sample shows how to call GetPersonGroup and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroup("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("personGroupId").ToString()); +]]> +This sample shows how to call GetPersonGroup with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroup("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("personGroupId").ToString()); +]]> + + + +This sample shows how to call UpdatePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdatePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonGroupAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdatePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdatePersonGroup. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdatePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonGroup with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdatePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonGroupsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetPersonGroupsAsync(); +]]> +This sample shows how to call GetPersonGroupsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetPersonGroupsAsync(start: "", top: 1234, returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetPersonGroups. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetPersonGroups(); +]]> +This sample shows how to call GetPersonGroups with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetPersonGroups(start: "", top: 1234, returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetPersonGroupsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupsAsync(null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); +]]> +This sample shows how to call GetPersonGroupsAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupsAsync("", 1234, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); +]]> + + + +This sample shows how to call GetPersonGroups and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroups(null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); +]]> +This sample shows how to call GetPersonGroups with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroups("", 1234, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); +]]> + + + +This sample shows how to call GetPersonGroupTrainingStatusAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupTrainingStatusAsync(""); +]]> +This sample shows how to call GetPersonGroupTrainingStatusAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupTrainingStatusAsync(""); +]]> + + + +This sample shows how to call GetPersonGroupTrainingStatus. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupTrainingStatus(""); +]]> +This sample shows how to call GetPersonGroupTrainingStatus with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupTrainingStatus(""); +]]> + + + +This sample shows how to call GetPersonGroupTrainingStatusAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupTrainingStatusAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> +This sample shows how to call GetPersonGroupTrainingStatusAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupTrainingStatusAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +Console.WriteLine(result.GetProperty("message").ToString()); +]]> + + + +This sample shows how to call GetPersonGroupTrainingStatus and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupTrainingStatus("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> +This sample shows how to call GetPersonGroupTrainingStatus with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupTrainingStatus("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +Console.WriteLine(result.GetProperty("message").ToString()); +]]> + + + +This sample shows how to call CreatePersonGroupPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreatePersonGroupPersonAsync("", ""); +]]> +This sample shows how to call CreatePersonGroupPersonAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreatePersonGroupPersonAsync("", "", userData: ""); +]]> + + + +This sample shows how to call CreatePersonGroupPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreatePersonGroupPerson("", ""); +]]> +This sample shows how to call CreatePersonGroupPerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreatePersonGroupPerson("", "", userData: ""); +]]> + + + +This sample shows how to call CreatePersonGroupPersonAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = await client.CreatePersonGroupPersonAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> +This sample shows how to call CreatePersonGroupPersonAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.CreatePersonGroupPersonAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call CreatePersonGroupPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = client.CreatePersonGroupPerson("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> +This sample shows how to call CreatePersonGroupPerson with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.CreatePersonGroupPerson("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call DeletePersonGroupPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeletePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeletePersonGroupPersonAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeletePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeletePersonGroupPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeletePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeletePersonGroupPerson with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeletePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonGroupPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPersonGroupPersonAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPersonGroupPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPersonGroupPerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPersonGroupPersonAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetPersonGroupPersonAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call GetPersonGroupPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetPersonGroupPerson with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call UpdatePersonGroupPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdatePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonGroupPersonAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdatePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdatePersonGroupPerson. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdatePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonGroupPerson with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdatePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonGroupPersonsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetPersonGroupPersonsAsync(""); +]]> +This sample shows how to call GetPersonGroupPersonsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetPersonGroupPersonsAsync("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetPersonGroupPersons. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetPersonGroupPersons(""); +]]> +This sample shows how to call GetPersonGroupPersons with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetPersonGroupPersons("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetPersonGroupPersonsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonsAsync("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetPersonGroupPersonsAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonsAsync("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call GetPersonGroupPersons and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPersons("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetPersonGroupPersons with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPersons("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call AddPersonGroupPersonFaceFromUrlAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddPersonGroupPersonFaceFromUrlAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddPersonGroupPersonFaceFromUrl. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddPersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddPersonGroupPersonFaceFromUrl with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddPersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddPersonGroupPersonFaceFromUrlAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonGroupPersonFaceFromUrlAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddPersonGroupPersonFaceFromUrl and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddPersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonGroupPersonFaceFromUrl with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddPersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddPersonGroupPersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddPersonGroupPersonFaceAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddPersonGroupPersonFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddPersonGroupPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddPersonGroupPersonFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonGroupPersonFaceAsync with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddPersonGroupPersonFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonGroupPersonFace with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call DeletePersonGroupPersonFaceAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeletePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeletePersonGroupPersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeletePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeletePersonGroupPersonFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeletePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeletePersonGroupPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeletePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonGroupPersonFaceAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPersonGroupPersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPersonGroupPersonFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPersonGroupPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPersonGroupPersonFaceAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetPersonGroupPersonFaceAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetPersonGroupPersonFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetPersonGroupPersonFace with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call UpdatePersonGroupPersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdatePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonGroupPersonFaceAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = await client.UpdatePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdatePersonGroupPersonFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdatePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonGroupPersonFace with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = client.UpdatePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreateLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateLargePersonGroupAsync("", ""); +]]> +This sample shows how to call CreateLargePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateLargePersonGroupAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreateLargePersonGroup. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateLargePersonGroup("", ""); +]]> +This sample shows how to call CreateLargePersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateLargePersonGroup("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call CreateLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = await client.CreateLargePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateLargePersonGroupAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = await client.CreateLargePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreateLargePersonGroup. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = client.CreateLargePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateLargePersonGroup with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + recognitionModel = "recognition_01", +}); +Response response = client.CreateLargePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargePersonGroupAsync(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargePersonGroupAsync(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargePersonGroup(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargePersonGroup with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargePersonGroup(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupAsync(""); +]]> +This sample shows how to call GetLargePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupAsync("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroup(""); +]]> +This sample shows how to call GetLargePersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroup("", returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargePersonGroupAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupAsync("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> +This sample shows how to call GetLargePersonGroupAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupAsync("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroup("", null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> +This sample shows how to call GetLargePersonGroup with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroup("", true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("recognitionModel").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call UpdateLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdateLargePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargePersonGroupAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdateLargePersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdateLargePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargePersonGroup with all parameters and request content. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdateLargePersonGroup("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargePersonGroupsAsync(); +]]> +This sample shows how to call GetLargePersonGroupsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargePersonGroupsAsync(start: "", top: 1234, returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargePersonGroups. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargePersonGroups(); +]]> +This sample shows how to call GetLargePersonGroups with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargePersonGroups(start: "", top: 1234, returnRecognitionModel: true); +]]> + + + +This sample shows how to call GetLargePersonGroupsAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupsAsync(null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> +This sample shows how to call GetLargePersonGroupsAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupsAsync("", 1234, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroups and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroups(null, null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> +This sample shows how to call GetLargePersonGroups with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroups("", 1234, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroupTrainingStatusAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupTrainingStatusAsync(""); +]]> +This sample shows how to call GetLargePersonGroupTrainingStatusAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupTrainingStatusAsync(""); +]]> + + + +This sample shows how to call GetLargePersonGroupTrainingStatus. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupTrainingStatus(""); +]]> +This sample shows how to call GetLargePersonGroupTrainingStatus with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupTrainingStatus(""); +]]> + + + +This sample shows how to call GetLargePersonGroupTrainingStatusAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupTrainingStatusAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> +This sample shows how to call GetLargePersonGroupTrainingStatusAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupTrainingStatusAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +Console.WriteLine(result.GetProperty("message").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroupTrainingStatus and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupTrainingStatus("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> +This sample shows how to call GetLargePersonGroupTrainingStatus with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupTrainingStatus("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +Console.WriteLine(result.GetProperty("message").ToString()); +]]> + + + +This sample shows how to call CreateLargePersonGroupPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateLargePersonGroupPersonAsync("", ""); +]]> +This sample shows how to call CreateLargePersonGroupPersonAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateLargePersonGroupPersonAsync("", "", userData: ""); +]]> + + + +This sample shows how to call CreateLargePersonGroupPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateLargePersonGroupPerson("", ""); +]]> +This sample shows how to call CreateLargePersonGroupPerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateLargePersonGroupPerson("", "", userData: ""); +]]> + + + +This sample shows how to call CreateLargePersonGroupPersonAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = await client.CreateLargePersonGroupPersonAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> +This sample shows how to call CreateLargePersonGroupPersonAsync with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.CreateLargePersonGroupPersonAsync("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call CreateLargePersonGroupPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = client.CreateLargePersonGroupPerson("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> +This sample shows how to call CreateLargePersonGroupPerson with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.CreateLargePersonGroupPerson("", content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call DeleteLargePersonGroupPersonAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargePersonGroupPersonAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLargePersonGroupPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargePersonGroupPerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetLargePersonGroupPersonAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetLargePersonGroupPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetLargePersonGroupPerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetLargePersonGroupPersonAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroupPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetLargePersonGroupPerson with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call UpdateLargePersonGroupPersonAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdateLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargePersonGroupPersonAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdateLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateLargePersonGroupPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdateLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargePersonGroupPerson with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdateLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonsAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargePersonGroupPersonsAsync(""); +]]> +This sample shows how to call GetLargePersonGroupPersonsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetLargePersonGroupPersonsAsync("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLargePersonGroupPersons. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargePersonGroupPersons(""); +]]> +This sample shows how to call GetLargePersonGroupPersons with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetLargePersonGroupPersons("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonsAsync("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetLargePersonGroupPersonsAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonsAsync("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroupPersons and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPersons("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetLargePersonGroupPersons with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPersons("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFaceFromUrlAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddLargePersonGroupPersonFaceFromUrlAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFaceFromUrl. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000")); +]]> +This sample shows how to call AddLargePersonGroupPersonFaceFromUrl with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFaceFromUrlAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargePersonGroupPersonFaceFromUrlAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFaceFromUrl and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargePersonGroupPersonFaceFromUrl with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddLargePersonGroupPersonFaceAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); +]]> +This sample shows how to call AddLargePersonGroupPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFaceAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargePersonGroupPersonFaceAsync with all parameters and request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call AddLargePersonGroupPersonFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddLargePersonGroupPersonFace with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call DeleteLargePersonGroupPersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargePersonGroupPersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.DeleteLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLargePersonGroupPersonFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLargePersonGroupPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.DeleteLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetLargePersonGroupPersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetLargePersonGroupPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetLargePersonGroupPersonFaceAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroupPersonFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetLargePersonGroupPersonFace with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call UpdateLargePersonGroupPersonFaceAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdateLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargePersonGroupPersonFaceAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = await client.UpdateLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateLargePersonGroupPersonFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdateLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateLargePersonGroupPersonFace with all parameters and request content. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = client.UpdateLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPersonAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPersonAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetPersonAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetPerson with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call UpdatePersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdatePersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdatePersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdatePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdatePerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePerson with all parameters and request content. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdatePerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetPersonsAsync(); +]]> +This sample shows how to call GetPersonsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetPersonsAsync(start: "", top: 1234); +]]> + + + +This sample shows how to call GetPersons. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetPersons(); +]]> +This sample shows how to call GetPersons with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetPersons(start: "", top: 1234); +]]> + + + +This sample shows how to call GetPersonsAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonsAsync(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetPersonsAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonsAsync("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetPersons and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersons(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetPersons with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersons("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetDynamicPersonGroupReferencesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetDynamicPersonGroupReferencesAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), start: "", top: 1234); +]]> + + + +This sample shows how to call GetDynamicPersonGroupReferences. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetDynamicPersonGroupReferences with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), start: "", top: 1234); +]]> + + + +This sample shows how to call GetDynamicPersonGroupReferencesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); +]]> +This sample shows how to call GetDynamicPersonGroupReferencesAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); +]]> + + + +This sample shows how to call GetDynamicPersonGroupReferences and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); +]]> +This sample shows how to call GetDynamicPersonGroupReferences with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); +]]> + + + +This sample shows how to call GetPersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPersonFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call GetPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call GetPersonFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetPersonFaceAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetPersonFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call GetPersonFace with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call UpdatePersonFaceAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdatePersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonFaceAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = await client.UpdatePersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdatePersonFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdatePersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdatePersonFace with all parameters and request content. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userData = "", +}); +Response response = client.UpdatePersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonFacesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); +]]> +This sample shows how to call GetPersonFacesAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call GetPersonFaces. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); +]]> +This sample shows how to call GetPersonFaces with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); +]]> + + + +This sample shows how to call GetPersonFacesAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> +This sample shows how to call GetPersonFacesAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call GetPersonFaces and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> +This sample shows how to call GetPersonFaces with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); +]]> + + + +This sample shows how to call CreateDynamicPersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateDynamicPersonGroupAsync("", ""); +]]> +This sample shows how to call CreateDynamicPersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.CreateDynamicPersonGroupAsync("", "", userData: ""); +]]> + + + +This sample shows how to call CreateDynamicPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateDynamicPersonGroup("", ""); +]]> +This sample shows how to call CreateDynamicPersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.CreateDynamicPersonGroup("", "", userData: ""); +]]> + + + +This sample shows how to call CreateDynamicPersonGroupAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = await client.CreateDynamicPersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateDynamicPersonGroupAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.CreateDynamicPersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call CreateDynamicPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Response response = client.CreateDynamicPersonGroup("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call CreateDynamicPersonGroup with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.CreateDynamicPersonGroup("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetDynamicPersonGroupAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupAsync(""); +]]> +This sample shows how to call GetDynamicPersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupAsync(""); +]]> + + + +This sample shows how to call GetDynamicPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroup(""); +]]> +This sample shows how to call GetDynamicPersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroup(""); +]]> + + + +This sample shows how to call GetDynamicPersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetDynamicPersonGroupAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetDynamicPersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroup("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call GetDynamicPersonGroup with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroup("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call UpdateDynamicPersonGroupAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.UpdateDynamicPersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateDynamicPersonGroupAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = await client.UpdateDynamicPersonGroupAsync("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateDynamicPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.UpdateDynamicPersonGroup("", content); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call UpdateDynamicPersonGroup with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Response response = client.UpdateDynamicPersonGroup("", content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetDynamicPersonGroupsAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetDynamicPersonGroupsAsync(); +]]> +This sample shows how to call GetDynamicPersonGroupsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = await client.GetDynamicPersonGroupsAsync(start: "", top: 1234); +]]> + + + +This sample shows how to call GetDynamicPersonGroups. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetDynamicPersonGroups(); +]]> +This sample shows how to call GetDynamicPersonGroups with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response> response = client.GetDynamicPersonGroups(start: "", top: 1234); +]]> + + + +This sample shows how to call GetDynamicPersonGroupsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupsAsync(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetDynamicPersonGroupsAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupsAsync("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetDynamicPersonGroups and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroups(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> +This sample shows how to call GetDynamicPersonGroups with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroups("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call GetDynamicPersonGroupPersonsAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupPersonsAsync(""); +]]> +This sample shows how to call GetDynamicPersonGroupPersonsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupPersonsAsync("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetDynamicPersonGroupPersons. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupPersons(""); +]]> +This sample shows how to call GetDynamicPersonGroupPersons with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupPersons("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetDynamicPersonGroupPersonsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupPersonsAsync("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personIds")[0].ToString()); +]]> +This sample shows how to call GetDynamicPersonGroupPersonsAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = await client.GetDynamicPersonGroupPersonsAsync("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personIds")[0].ToString()); +]]> + + + +This sample shows how to call GetDynamicPersonGroupPersons and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupPersons("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personIds")[0].ToString()); +]]> +This sample shows how to call GetDynamicPersonGroupPersons with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Response response = client.GetDynamicPersonGroupPersons("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personIds")[0].ToString()); +]]> + + + +This sample shows how to call TrainLargeFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.TrainLargeFaceListAsync(WaitUntil.Completed, ""); +]]> +This sample shows how to call TrainLargeFaceListAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.TrainLargeFaceListAsync(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call TrainLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.TrainLargeFaceList(WaitUntil.Completed, ""); +]]> +This sample shows how to call TrainLargeFaceList with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.TrainLargeFaceList(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call TrainPersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.TrainPersonGroupAsync(WaitUntil.Completed, ""); +]]> +This sample shows how to call TrainPersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.TrainPersonGroupAsync(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call TrainPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.TrainPersonGroup(WaitUntil.Completed, ""); +]]> +This sample shows how to call TrainPersonGroup with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.TrainPersonGroup(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call TrainLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.TrainLargePersonGroupAsync(WaitUntil.Completed, ""); +]]> +This sample shows how to call TrainLargePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.TrainLargePersonGroupAsync(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call TrainLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.TrainLargePersonGroup(WaitUntil.Completed, ""); +]]> +This sample shows how to call TrainLargePersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.TrainLargePersonGroup(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call CreatePersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.CreatePersonAsync(WaitUntil.Completed, ""); +PersonDirectoryPerson responseData = operation.Value; +]]> +This sample shows how to call CreatePersonAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.CreatePersonAsync(WaitUntil.Completed, "", userData: ""); +PersonDirectoryPerson responseData = operation.Value; +]]> + + + +This sample shows how to call CreatePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.CreatePerson(WaitUntil.Completed, ""); +PersonDirectoryPerson responseData = operation.Value; +]]> +This sample shows how to call CreatePerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.CreatePerson(WaitUntil.Completed, "", userData: ""); +PersonDirectoryPerson responseData = operation.Value; +]]> + + + +This sample shows how to call CreatePersonAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Operation operation = await client.CreatePersonAsync(WaitUntil.Completed, content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call CreatePersonAsync with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Operation operation = await client.CreatePersonAsync(WaitUntil.Completed, content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call CreatePerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", +}); +Operation operation = client.CreatePerson(WaitUntil.Completed, content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call CreatePerson with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", +}); +Operation operation = client.CreatePerson(WaitUntil.Completed, content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call DeletePersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeletePersonAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call DeletePersonAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeletePersonAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call DeletePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeletePerson(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call DeletePerson with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeletePerson(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call AddPersonFaceFromUrlAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000")); +PersonDirectoryFace responseData = operation.Value; +]]> +This sample shows how to call AddPersonFaceFromUrlAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +PersonDirectoryFace responseData = operation.Value; +]]> + + + +This sample shows how to call AddPersonFaceFromUrl. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000")); +PersonDirectoryFace responseData = operation.Value; +]]> +This sample shows how to call AddPersonFaceFromUrl with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +PersonDirectoryFace responseData = operation.Value; +]]> + + + +This sample shows how to call AddPersonFaceFromUrlAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonFaceFromUrlAsync with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call AddPersonFaceFromUrl and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonFaceFromUrl with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + url = "http://localhost:3000", +}); +Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call AddPersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object())); +PersonDirectoryFace responseData = operation.Value; +]]> +This sample shows how to call AddPersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +PersonDirectoryFace responseData = operation.Value; +]]> + + + +This sample shows how to call AddPersonFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object())); +PersonDirectoryFace responseData = operation.Value; +]]> +This sample shows how to call AddPersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); +PersonDirectoryFace responseData = operation.Value; +]]> + + + +This sample shows how to call AddPersonFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonFaceAsync with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call AddPersonFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> +This sample shows how to call AddPersonFace with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call DeletePersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call DeletePersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call DeletePersonFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call DeletePersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call DeletePersonFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call DeletePersonFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call DeletePersonFace. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call DeletePersonFace with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call CreateDynamicPersonGroupWithPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +DynamicPersonGroup responseData = operation.Value; +]]> +This sample shows how to call CreateDynamicPersonGroupWithPersonAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, userData: ""); +DynamicPersonGroup responseData = operation.Value; +]]> + + + +This sample shows how to call CreateDynamicPersonGroupWithPerson. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +DynamicPersonGroup responseData = operation.Value; +]]> +This sample shows how to call CreateDynamicPersonGroupWithPerson with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, userData: ""); +DynamicPersonGroup responseData = operation.Value; +]]> + + + +This sample shows how to call CreateDynamicPersonGroupWithPersonAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + addPersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call CreateDynamicPersonGroupWithPersonAsync with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + addPersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call CreateDynamicPersonGroupWithPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + addPersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> +This sample shows how to call CreateDynamicPersonGroupWithPerson with all parameters and request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + addPersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", content); +BinaryData responseData = operation.Value; + +JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; +Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("userData").ToString()); +]]> + + + +This sample shows how to call DeleteDynamicPersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeleteDynamicPersonGroupAsync(WaitUntil.Completed, ""); +]]> +This sample shows how to call DeleteDynamicPersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = await client.DeleteDynamicPersonGroupAsync(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call DeleteDynamicPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeleteDynamicPersonGroup(WaitUntil.Completed, ""); +]]> +This sample shows how to call DeleteDynamicPersonGroup with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +Operation operation = client.DeleteDynamicPersonGroup(WaitUntil.Completed, ""); +]]> + + + +This sample shows how to call UpdateDynamicPersonGroupWithPersonChangesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Operation operation = await client.UpdateDynamicPersonGroupWithPersonChangesAsync(WaitUntil.Completed, "", content); +]]> +This sample shows how to call UpdateDynamicPersonGroupWithPersonChangesAsync with all parameters and request content. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + addPersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + removePersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Operation operation = await client.UpdateDynamicPersonGroupWithPersonChangesAsync(WaitUntil.Completed, "", content); +]]> + + + +This sample shows how to call UpdateDynamicPersonGroupWithPersonChanges. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Operation operation = client.UpdateDynamicPersonGroupWithPersonChanges(WaitUntil.Completed, "", content); +]]> +This sample shows how to call UpdateDynamicPersonGroupWithPersonChanges with all parameters and request content. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + name = "", + userData = "", + addPersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + removePersonIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Operation operation = client.UpdateDynamicPersonGroupWithPersonChanges(WaitUntil.Completed, "", content); +]]> + + + \ No newline at end of file diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml new file mode 100644 index 0000000000000..98eecb23c6496 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml @@ -0,0 +1,1481 @@ + + + + + +This sample shows how to call FindSimilarAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> +This sample shows how to call FindSimilarAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); +]]> + + + +This sample shows how to call FindSimilar. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilar(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> +This sample shows how to call FindSimilar with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilar(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); +]]> + + + +This sample shows how to call FindSimilarAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = await client.FindSimilarAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call FindSimilarAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = await client.FindSimilarAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call FindSimilar and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = client.FindSimilar(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call FindSimilar with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = client.FindSimilar(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call FindSimilarFromFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarFromFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); +]]> +This sample shows how to call FindSimilarFromFaceListAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarFromFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); +]]> + + + +This sample shows how to call FindSimilarFromFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilarFromFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); +]]> +This sample shows how to call FindSimilarFromFaceList with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilarFromFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); +]]> + + + +This sample shows how to call FindSimilarFromFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceListId = "", +}); +Response response = await client.FindSimilarFromFaceListAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call FindSimilarFromFaceListAsync with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceListId = "", +}); +Response response = await client.FindSimilarFromFaceListAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call FindSimilarFromFaceList and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceListId = "", +}); +Response response = client.FindSimilarFromFaceList(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call FindSimilarFromFaceList with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceListId = "", +}); +Response response = client.FindSimilarFromFaceList(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); +]]> +This sample shows how to call FindSimilarFromLargeFaceListAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); +]]> +This sample shows how to call FindSimilarFromLargeFaceList with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largeFaceListId = "", +}); +Response response = await client.FindSimilarFromLargeFaceListAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call FindSimilarFromLargeFaceListAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + largeFaceListId = "", +}); +Response response = await client.FindSimilarFromLargeFaceListAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceList and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largeFaceListId = "", +}); +Response response = client.FindSimilarFromLargeFaceList(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call FindSimilarFromLargeFaceList with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + largeFaceListId = "", +}); +Response response = client.FindSimilarFromLargeFaceList(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call IdentifyFromPersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); +]]> +This sample shows how to call IdentifyFromPersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); +]]> +This sample shows how to call IdentifyFromPersonGroup with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromPersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", +}); +Response response = await client.IdentifyFromPersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromPersonGroupAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = await client.IdentifyFromPersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromPersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", +}); +Response response = client.IdentifyFromPersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromPersonGroup with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = client.IdentifyFromPersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); +]]> +This sample shows how to call IdentifyFromLargePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); +]]> +This sample shows how to call IdentifyFromLargePersonGroup with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", +}); +Response response = await client.IdentifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromLargePersonGroupAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = await client.IdentifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", +}); +Response response = client.IdentifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromLargePersonGroup with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = client.IdentifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromPersonDirectoryAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromPersonDirectoryAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> +This sample shows how to call IdentifyFromPersonDirectoryAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromPersonDirectoryAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromPersonDirectory. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromPersonDirectory(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> +This sample shows how to call IdentifyFromPersonDirectory with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromPersonDirectory(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromPersonDirectoryAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = await client.IdentifyFromPersonDirectoryAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromPersonDirectoryAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = await client.IdentifyFromPersonDirectoryAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromPersonDirectory and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = client.IdentifyFromPersonDirectory(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromPersonDirectory with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = client.IdentifyFromPersonDirectory(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromDynamicPersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromDynamicPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); +]]> +This sample shows how to call IdentifyFromDynamicPersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromDynamicPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromDynamicPersonGroup. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromDynamicPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); +]]> +This sample shows how to call IdentifyFromDynamicPersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromDynamicPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); +]]> + + + +This sample shows how to call IdentifyFromDynamicPersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", +}); +Response response = await client.IdentifyFromDynamicPersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromDynamicPersonGroupAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = await client.IdentifyFromDynamicPersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromDynamicPersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", +}); +Response response = client.IdentifyFromDynamicPersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> +This sample shows how to call IdentifyFromDynamicPersonGroup with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, +}); +Response response = client.IdentifyFromDynamicPersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFaceToFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFaceToFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFaceToFaceAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFaceToFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFaceToFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFaceToFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFaceToFace with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFaceToFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFaceToFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFaceToFaceAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFaceToFaceAsync with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFaceToFaceAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFaceToFace and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFaceToFace(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFaceToFace with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFaceToFace(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromPersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromPersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFromPersonGroupAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromPersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFromPersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromPersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFromPersonGroup with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromPersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFromPersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFromPersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFromPersonGroupAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFromPersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromPersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFromPersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFromPersonGroup with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFromPersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFromLargePersonGroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromLargePersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFromLargePersonGroup with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromLargePersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFromLargePersonGroupAsync with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroup and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFromLargePersonGroup with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromPersonDirectoryAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromPersonDirectoryAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFromPersonDirectoryAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromPersonDirectoryAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFromPersonDirectory. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromPersonDirectory(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> +This sample shows how to call VerifyFromPersonDirectory with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromPersonDirectory(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); +]]> + + + +This sample shows how to call VerifyFromPersonDirectoryAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFromPersonDirectoryAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFromPersonDirectoryAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = await client.VerifyFromPersonDirectoryAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromPersonDirectory and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFromPersonDirectory(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> +This sample shows how to call VerifyFromPersonDirectory with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", +}); +Response response = client.VerifyFromPersonDirectory(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call GroupAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.GroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> +This sample shows how to call GroupAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.GroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> + + + +This sample shows how to call Group. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.Group(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> +This sample shows how to call Group with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.Group(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); +]]> + + + +This sample shows how to call GroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = await client.GroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); +Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); +]]> +This sample shows how to call GroupAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = await client.GroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); +Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); +]]> + + + +This sample shows how to call Group and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = client.Group(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); +Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); +]]> +This sample shows how to call Group with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, +}); +Response response = client.Group(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); +Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); +]]> + + + \ No newline at end of file diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml new file mode 100644 index 0000000000000..caebbcff8c2ff --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml @@ -0,0 +1,1093 @@ + + + + + +This sample shows how to call CreateLivenessSessionAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive); +Response response = await client.CreateLivenessSessionAsync(createLivenessSessionContent); +]]> +This sample shows how to call CreateLivenessSessionAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive) +{ + SendResultsToClient = true, + DeviceCorrelationIdSetInClient = true, + DeviceCorrelationId = "", + AuthTokenTimeToLiveInSeconds = 1234, +}; +Response response = await client.CreateLivenessSessionAsync(createLivenessSessionContent); +]]> + + + +This sample shows how to call CreateLivenessSession. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive); +Response response = client.CreateLivenessSession(createLivenessSessionContent); +]]> +This sample shows how to call CreateLivenessSession with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive) +{ + SendResultsToClient = true, + DeviceCorrelationIdSetInClient = true, + DeviceCorrelationId = "", + AuthTokenTimeToLiveInSeconds = 1234, +}; +Response response = client.CreateLivenessSession(createLivenessSessionContent); +]]> + + + +This sample shows how to call CreateLivenessSessionAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + livenessOperationMode = "Passive", +}); +Response response = await client.CreateLivenessSessionAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("authToken").ToString()); +]]> +This sample shows how to call CreateLivenessSessionAsync with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + livenessOperationMode = "Passive", + sendResultsToClient = true, + deviceCorrelationIdSetInClient = true, + deviceCorrelationId = "", + authTokenTimeToLiveInSeconds = 1234, +}); +Response response = await client.CreateLivenessSessionAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("authToken").ToString()); +]]> + + + +This sample shows how to call CreateLivenessSession and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + livenessOperationMode = "Passive", +}); +Response response = client.CreateLivenessSession(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("authToken").ToString()); +]]> +This sample shows how to call CreateLivenessSession with all request content and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + livenessOperationMode = "Passive", + sendResultsToClient = true, + deviceCorrelationIdSetInClient = true, + deviceCorrelationId = "", + authTokenTimeToLiveInSeconds = 1234, +}); +Response response = client.CreateLivenessSession(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("authToken").ToString()); +]]> + + + +This sample shows how to call DeleteLivenessSessionAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.DeleteLivenessSessionAsync(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLivenessSessionAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.DeleteLivenessSessionAsync(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLivenessSession. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.DeleteLivenessSession(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLivenessSession with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.DeleteLivenessSession(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLivenessSessionResultAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionResultAsync(""); +]]> +This sample shows how to call GetLivenessSessionResultAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionResultAsync(""); +]]> + + + +This sample shows how to call GetLivenessSessionResult. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessionResult(""); +]]> +This sample shows how to call GetLivenessSessionResult with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessionResult(""); +]]> + + + +This sample shows how to call GetLivenessSessionResultAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionResultAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +]]> +This sample shows how to call GetLivenessSessionResultAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionResultAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call GetLivenessSessionResult and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessionResult("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +]]> +This sample shows how to call GetLivenessSessionResult with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessionResult("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call GetLivenessSessionsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessSessionsAsync(); +]]> +This sample shows how to call GetLivenessSessionsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessSessionsAsync(start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessSessions. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessSessions(); +]]> +This sample shows how to call GetLivenessSessions with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessSessions(start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessSessionsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionsAsync(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +]]> +This sample shows how to call GetLivenessSessionsAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionsAsync("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); +]]> + + + +This sample shows how to call GetLivenessSessions and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessions(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +]]> +This sample shows how to call GetLivenessSessions with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessions("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); +]]> + + + +This sample shows how to call GetLivenessSessionAuditEntriesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessSessionAuditEntriesAsync(""); +]]> +This sample shows how to call GetLivenessSessionAuditEntriesAsync with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessSessionAuditEntriesAsync("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessSessionAuditEntries. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessSessionAuditEntries(""); +]]> +This sample shows how to call GetLivenessSessionAuditEntries with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessSessionAuditEntries("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessSessionAuditEntriesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionAuditEntriesAsync("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); 
+Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> +This sample shows how to call GetLivenessSessionAuditEntriesAsync with all parameters and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessSessionAuditEntriesAsync("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); 
+Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); 
+Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call GetLivenessSessionAuditEntries and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessionAuditEntries("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> +This sample shows how to call GetLivenessSessionAuditEntries with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessSessionAuditEntries("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); 
+Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call DeleteLivenessWithVerifySessionAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.DeleteLivenessWithVerifySessionAsync(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLivenessWithVerifySessionAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.DeleteLivenessWithVerifySessionAsync(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteLivenessWithVerifySession. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.DeleteLivenessWithVerifySession(""); + +Console.WriteLine(response.Status); +]]> +This sample shows how to call DeleteLivenessWithVerifySession with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.DeleteLivenessWithVerifySession(""); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionResultAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionResultAsync(""); +]]> +This sample shows how to call GetLivenessWithVerifySessionResultAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionResultAsync(""); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionResult. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessionResult(""); +]]> +This sample shows how to call GetLivenessWithVerifySessionResult with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessionResult(""); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionResultAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionResultAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +]]> +This sample shows how to call GetLivenessWithVerifySessionResultAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionResultAsync("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionResult and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessionResult("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +]]> +This sample shows how to call GetLivenessWithVerifySessionResult with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessionResult("", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result.GetProperty("sessionExpired").ToString()); +Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); 
+Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessWithVerifySessionsAsync(); +]]> +This sample shows how to call GetLivenessWithVerifySessionsAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessWithVerifySessionsAsync(start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessions. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessWithVerifySessions(); +]]> +This sample shows how to call GetLivenessWithVerifySessions with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessWithVerifySessions(start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionsAsync(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +]]> +This sample shows how to call GetLivenessWithVerifySessionsAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionsAsync("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessions and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessions(null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +]]> +This sample shows how to call GetLivenessWithVerifySessions with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessions("", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); +Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); +Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionAuditEntriesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessWithVerifySessionAuditEntriesAsync(""); +]]> +This sample shows how to call GetLivenessWithVerifySessionAuditEntriesAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionAuditEntries. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessWithVerifySessionAuditEntries(""); +]]> +This sample shows how to call GetLivenessWithVerifySessionAuditEntries with all parameters. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.GetLivenessWithVerifySessionAuditEntries("", start: "", top: 1234); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionAuditEntriesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> +This sample shows how to call GetLivenessWithVerifySessionAuditEntriesAsync with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); 
+Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call GetLivenessWithVerifySessionAuditEntries and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessionAuditEntries("", null, null, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> +This sample shows how to call GetLivenessWithVerifySessionAuditEntries with all parameters and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetLivenessWithVerifySessionAuditEntries("", "", 1234, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("id").ToString()); +Console.WriteLine(result[0].GetProperty("sessionId").ToString()); +Console.WriteLine(result[0].GetProperty("requestId").ToString()); +Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); +Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); +Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); 
+Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); +Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); +Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> + + + \ No newline at end of file diff --git 
a/sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.Serialization.cs new file mode 100644 index 0000000000000..104549679d2f6 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.Serialization.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class DynamicPersonGroup : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DynamicPersonGroup)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("dynamicPersonGroupId"u8); + writer.WriteStringValue(DynamicPersonGroupId); + } + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + 
DynamicPersonGroup IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DynamicPersonGroup)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDynamicPersonGroup(document.RootElement, options); + } + + internal static DynamicPersonGroup DeserializeDynamicPersonGroup(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string dynamicPersonGroupId = default; + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("dynamicPersonGroupId"u8)) + { + dynamicPersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DynamicPersonGroup(dynamicPersonGroupId, name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DynamicPersonGroup)} does not support writing '{options.Format}' format."); + } + } + + DynamicPersonGroup IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeDynamicPersonGroup(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DynamicPersonGroup)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DynamicPersonGroup FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeDynamicPersonGroup(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.cs new file mode 100644 index 0000000000000..c8ed75d42b567 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/DynamicPersonGroup.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// A container that references Person Directory "Create Person". + public partial class DynamicPersonGroup + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal DynamicPersonGroup(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// ID of the dynamic person group. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal DynamicPersonGroup(string dynamicPersonGroupId, string name, string userData, IDictionary serializedAdditionalRawData) + { + DynamicPersonGroupId = dynamicPersonGroupId; + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal DynamicPersonGroup() + { + } + + /// ID of the dynamic person group. + public string DynamicPersonGroupId { get; } + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. 
Length should not exceed 16K. + public string UserData { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureLevel.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureLevel.cs new file mode 100644 index 0000000000000..0feeaebbfef74 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureLevel.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// Indicates level of exposure. + public readonly partial struct ExposureLevel : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public ExposureLevel(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string UnderExposureValue = "underExposure"; + private const string GoodExposureValue = "goodExposure"; + private const string OverExposureValue = "overExposure"; + + /// Low exposure level. + public static ExposureLevel UnderExposure { get; } = new ExposureLevel(UnderExposureValue); + /// Good exposure level. + public static ExposureLevel GoodExposure { get; } = new ExposureLevel(GoodExposureValue); + /// High exposure level. + public static ExposureLevel OverExposure { get; } = new ExposureLevel(OverExposureValue); + /// Determines if two values are the same. + public static bool operator ==(ExposureLevel left, ExposureLevel right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(ExposureLevel left, ExposureLevel right) => !left.Equals(right); + /// Converts a string to a . 
+ public static implicit operator ExposureLevel(string value) => new ExposureLevel(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is ExposureLevel other && Equals(other); + /// + public bool Equals(ExposureLevel other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.Serialization.cs new file mode 100644 index 0000000000000..bf051597e0f8c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class ExposureProperties : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ExposureProperties)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("exposureLevel"u8); + writer.WriteStringValue(ExposureLevel.ToString()); + writer.WritePropertyName("value"u8); + writer.WriteNumberValue(Value); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + ExposureProperties IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ExposureProperties)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeExposureProperties(document.RootElement, options); + } + + internal static ExposureProperties DeserializeExposureProperties(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + ExposureLevel exposureLevel = default; + float value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("exposureLevel"u8)) + { + exposureLevel = new ExposureLevel(property.Value.GetString()); + continue; + } + if 
(property.NameEquals("value"u8)) + { + value = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ExposureProperties(exposureLevel, value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ExposureProperties)} does not support writing '{options.Format}' format."); + } + } + + ExposureProperties IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeExposureProperties(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ExposureProperties)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ExposureProperties FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeExposureProperties(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.cs new file mode 100644 index 0000000000000..4ead377b0eac5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ExposureProperties.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Properties describing exposure level of the image. + public partial class ExposureProperties + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// An enum value indicating level of exposure. + /// A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. + internal ExposureProperties(ExposureLevel exposureLevel, float value) + { + ExposureLevel = exposureLevel; + Value = value; + } + + /// Initializes a new instance of . 
+ /// An enum value indicating level of exposure. + /// A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. + /// Keeps track of any properties unknown to the library. + internal ExposureProperties(ExposureLevel exposureLevel, float value, IDictionary serializedAdditionalRawData) + { + ExposureLevel = exposureLevel; + Value = value; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ExposureProperties() + { + } + + /// An enum value indicating level of exposure. + public ExposureLevel ExposureLevel { get; } + /// A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. + public float Value { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs new file mode 100644 index 0000000000000..5e042830b3e2a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs @@ -0,0 +1,10261 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated client. + /// The FaceAdministration service client. 
+ public partial class FaceAdministrationClient + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of FaceAdministrationClient for mocking. + protected FaceAdministrationClient() + { + } + + /// Initializes a new instance of FaceAdministrationClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. + public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions()) + { + } + + /// Initializes a new instance of FaceAdministrationClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. + public FaceAdministrationClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions()) + { + } + + /// Initializes a new instance of FaceAdministrationClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). 
+ /// + /// A credential used to authenticate to an Azure Service. + /// The options for configuring the client. + /// or is null. + public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential, AzureAIVisionFaceClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new AzureAIVisionFaceClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _keyCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// Initializes a new instance of FaceAdministrationClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// The options for configuring the client. + /// or is null. + public FaceAdministrationClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new AzureAIVisionFaceClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _tokenCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// Create an empty Face List with user-specified faceListId, name, an optional userData and recognitionModel. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// User defined name, maximum length is 128. 
+ /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Up to 64 Face Lists are allowed in one subscription. + /// + /// Face List is a list of faces, up to 1,000 faces, and used by "Find Similar From Face List". + /// + /// After creation, user should use "Add Face List Face" to import the faces. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Face List" is called. + /// + /// "Find Similar" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and "Identify". + /// + /// Please consider Large Face List when the face number is large. It can support up to 1,000,000 faces. + /// + /// + public virtual async Task CreateFaceListAsync(string faceListId, string name, string userData = null, FaceRecognitionModel? 
recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(name, nameof(name)); + + CreateFaceListRequest createFaceListRequest = new CreateFaceListRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateFaceListAsync(faceListId, createFaceListRequest.ToRequestContent(), context).ConfigureAwait(false); + return response; + } + + /// Create an empty Face List with user-specified faceListId, name, an optional userData and recognitionModel. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Up to 64 Face Lists are allowed in one subscription. + /// + /// Face List is a list of faces, up to 1,000 faces, and used by "Find Similar From Face List". + /// + /// After creation, user should use "Add Face List Face" to import the faces. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Face List" is called. + /// + /// "Find Similar" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. 
But if the actual use is to identify person, please use Person Group / Large Person Group and "Identify". + /// + /// Please consider Large Face List when the face number is large. It can support up to 1,000,000 faces. + /// + /// + public virtual Response CreateFaceList(string faceListId, string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(name, nameof(name)); + + CreateFaceListRequest createFaceListRequest = new CreateFaceListRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateFaceList(faceListId, createFaceListRequest.ToRequestContent(), context); + return response; + } + + /// + /// [Protocol Method] Create an empty Face List with user-specified faceListId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task CreateFaceListAsync(string faceListId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateFaceListRequest(faceListId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create an empty Face List with user-specified faceListId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response CreateFaceList(string faceListId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateFaceListRequest(faceListId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a specified Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task DeleteFaceListAsync(string faceListId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceListRequest(faceListId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a specified Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeleteFaceList(string faceListId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceListRequest(faceListId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve a Face List's faceListId, name, userData, recognitionModel and faces in the Face List. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Return 'recognitionModel' or not. 
The default value is false. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetFaceListAsync(string faceListId, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetFaceListAsync(faceListId, returnRecognitionModel, context).ConfigureAwait(false); + return Response.FromValue(FaceList.FromResponse(response), response); + } + + /// Retrieve a Face List's faceListId, name, userData, recognitionModel and faces in the Face List. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetFaceList(string faceListId, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetFaceList(faceListId, returnRecognitionModel, context); + return Response.FromValue(FaceList.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve a Face List's faceListId, name, userData, recognitionModel and faces in the Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Return 'recognitionModel' or not. 
The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetFaceListAsync(string faceListId, bool? returnRecognitionModel, RequestContext context) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceListRequest(faceListId, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve a Face List's faceListId, name, userData, recognitionModel and faces in the Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetFaceList(string faceListId, bool? 
returnRecognitionModel, RequestContext context) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceListRequest(faceListId, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update information of a Face List, including name and userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdateFaceListAsync(string faceListId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceListRequest(faceListId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update information of a Face List, including name and userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response UpdateFaceList(string faceListId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceListRequest(faceListId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// List Face Lists' faceListId, name, userData and recognitionModel. + /// + /// To get face information inside Face List use "Get Face List". + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual async Task>> GetFaceListsAsync(bool? 
returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetFaceListsAsync(returnRecognitionModel, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceListItem.DeserializeFaceListItem(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// List Face Lists' faceListId, name, userData and recognitionModel. + /// + /// To get face information inside Face List use "Get Face List". + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual Response> GetFaceLists(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetFaceLists(returnRecognitionModel, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceListItem.DeserializeFaceListItem(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List Face Lists' faceListId, name, userData and recognitionModel. + /// + /// To get face information inside Face List use "Get Face List". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. 
The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetFaceListsAsync(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetFaceLists"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceListsRequest(returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List Face Lists' faceListId, name, userData and recognitionModel. + /// + /// To get face information inside Face List use "Get Face List". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetFaceLists(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetFaceLists"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceListsRequest(returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a specified Face List, up to 1,000 faces. 
+ /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Face List Face" or "Delete Face List" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. 
+ /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual async Task> AddFaceListFaceFromUrlAsync(string faceListId, Uri url, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(url, nameof(url)); + + AddFaceListFaceFromUrlRequest addFaceListFaceFromUrlRequest = new AddFaceListFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceListFaceFromUrlAsync(faceListId, addFaceListFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a specified Face List, up to 1,000 faces. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. 
Only the extracted face feature(s) will be stored on server until "Delete Face List Face" or "Delete Face List" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual Response AddFaceListFaceFromUrl(string faceListId, Uri url, IEnumerable targetFace = null, FaceDetectionModel? 
detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(url, nameof(url)); + + AddFaceListFaceFromUrlRequest addFaceListFaceFromUrlRequest = new AddFaceListFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceListFaceFromUrl(faceListId, addFaceListFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a specified Face List, up to 1,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task AddFaceListFaceFromUrlAsync(string faceListId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddFaceListFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceListFaceFromUrlRequest(faceListId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a specified Face List, up to 1,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. 
+ /// The response returned from the service. + /// + public virtual Response AddFaceListFaceFromUrl(string faceListId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddFaceListFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceListFaceFromUrlRequest(faceListId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a specified Face List, up to 1,000 faces. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Face List Face" or "Delete Face List" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual async Task> AddFaceListFaceAsync(string faceListId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceListFaceAsync(faceListId, content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a specified Face List, up to 1,000 faces. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The image to be analyzed. 
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Face List Face" or "Delete Face List" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual Response AddFaceListFace(string faceListId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceListFace(faceListId, content, targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a specified Face List, up to 1,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. 
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task AddFaceListFaceAsync(string faceListId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddFaceListFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceListFaceRequest(faceListId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a specified Face List, up to 1,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. 
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response AddFaceListFace(string faceListId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddFaceListFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceListFaceRequest(faceListId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a Face List by specified faceListId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
/// <returns>The response returned from the service.</returns>
public virtual async Task<Response> DeleteFaceListFaceAsync(string faceListId, Guid persistedFaceId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteFaceListFaceRequest(faceListId, persistedFaceId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

// The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
/// <summary>
/// [Protocol Method] Delete a face from a Face List by specified faceListId and persistedFaceId.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="faceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="persistedFaceId">Face ID of the face.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="faceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="faceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual Response DeleteFaceListFace(string faceListId, Guid persistedFaceId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(faceListId, nameof(faceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteFaceListFaceRequest(faceListId, persistedFaceId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="name">User defined name, maximum length is 128.</param>
/// <param name="userData">Optional user defined data. Length should not exceed 16K.</param>
/// <param name="recognitionModel">The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="name"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <remarks>
/// Large Face List is a list of faces, up to 1,000,000 faces, and used by "Find Similar From Large Face List".
///
/// After creation, user should use Add Large Face List Face to import the faces and Train Large Face List to make it ready for "Find Similar". No image will be stored. Only the extracted face feature(s) will be stored on server until Delete Large Face List is called.
///
/// "Find Similar" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and "Identify".
///
/// > [!NOTE]
/// >
/// > * Free-tier subscription quota: 64 Large Face Lists.
/// > * S0-tier subscription quota: 1,000,000 Large Face Lists.
/// </remarks>
public virtual async Task<Response> CreateLargeFaceListAsync(string largeFaceListId, string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(name, nameof(name));

    CreateLargeFaceListRequest createLargeFaceListRequest = new CreateLargeFaceListRequest(name, userData, recognitionModel, null);
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await CreateLargeFaceListAsync(largeFaceListId, createLargeFaceListRequest.ToRequestContent(), context).ConfigureAwait(false);
    return response;
}

/// <summary> Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="name">User defined name, maximum length is 128.</param>
/// <param name="userData">Optional user defined data. Length should not exceed 16K.</param>
/// <param name="recognitionModel">The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="name"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <remarks>
/// Large Face List is a list of faces, up to 1,000,000 faces, and used by "Find Similar From Large Face List".
///
/// After creation, user should use Add Large Face List Face to import the faces and Train Large Face List to make it ready for "Find Similar". No image will be stored. Only the extracted face feature(s) will be stored on server until Delete Large Face List is called.
///
/// "Find Similar" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and "Identify".
///
/// > [!NOTE]
/// >
/// > * Free-tier subscription quota: 64 Large Face Lists.
/// > * S0-tier subscription quota: 1,000,000 Large Face Lists.
/// </remarks>
public virtual Response CreateLargeFaceList(string largeFaceListId, string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(name, nameof(name));

    CreateLargeFaceListRequest createLargeFaceListRequest = new CreateLargeFaceListRequest(name, userData, recognitionModel, null);
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = CreateLargeFaceList(largeFaceListId, createLargeFaceListRequest.ToRequestContent(), context);
    return response;
}

/// <summary>
/// [Protocol Method] Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="content">The content to send as the body of the request.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="content"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual async Task<Response> CreateLargeFaceListAsync(string largeFaceListId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateCreateLargeFaceListRequest(largeFaceListId, content, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="content">The content to send as the body of the request.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="content"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual Response CreateLargeFaceList(string largeFaceListId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateCreateLargeFaceListRequest(largeFaceListId, content, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

// The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
/// <summary>
/// [Protocol Method] Delete a specified Large Face List.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual async Task<Response> DeleteLargeFaceListAsync(string largeFaceListId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteLargeFaceListRequest(largeFaceListId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

// The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
/// <summary>
/// [Protocol Method] Delete a specified Large Face List.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual Response DeleteLargeFaceList(string largeFaceListId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteLargeFaceListRequest(largeFaceListId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Retrieve a Large Face List's largeFaceListId, name, userData and recognitionModel. </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
public virtual async Task<Response<LargeFaceList>> GetLargeFaceListAsync(string largeFaceListId, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLargeFaceListAsync(largeFaceListId, returnRecognitionModel, context).ConfigureAwait(false);
    return Response.FromValue(LargeFaceList.FromResponse(response), response);
}

/// <summary> Retrieve a Large Face List's largeFaceListId, name, userData and recognitionModel. </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
public virtual Response<LargeFaceList> GetLargeFaceList(string largeFaceListId, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLargeFaceList(largeFaceListId, returnRecognitionModel, context);
    return Response.FromValue(LargeFaceList.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Retrieve a Large Face List's largeFaceListId, name, userData and recognitionModel.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual async Task<Response> GetLargeFaceListAsync(string largeFaceListId, bool? returnRecognitionModel, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListRequest(largeFaceListId, returnRecognitionModel, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Retrieve a Large Face List's largeFaceListId, name, userData and recognitionModel.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual Response GetLargeFaceList(string largeFaceListId, bool? returnRecognitionModel, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListRequest(largeFaceListId, returnRecognitionModel, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Update information of a Large Face List, including name and userData.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="content">The content to send as the body of the request.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="content"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual async Task<Response> UpdateLargeFaceListAsync(string largeFaceListId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdateLargeFaceListRequest(largeFaceListId, content, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Update information of a Large Face List, including name and userData.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="content">The content to send as the body of the request.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="content"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual Response UpdateLargeFaceList(string largeFaceListId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargeFaceList");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdateLargeFaceListRequest(largeFaceListId, content, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. </summary>
/// <param name="start">List resources greater than the "start". It contains no more than 64 characters. Default is empty.</param>
/// <param name="top">The number of items to list, ranging in [1, 1000]. Default is 1000.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <remarks>
/// To get face information inside largeFaceList use "Get Large Face List Face".
///
/// Large Face Lists are stored in alphabetical order of largeFaceListId.
/// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item.
/// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call.
///
/// > [!TIP]
/// >
/// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5".
/// > * "start=&amp;top=" will return all 5 items.
/// > * "start=&amp;top=2" will return "itemId1", "itemId2".
/// > * "start=itemId2&amp;top=3" will return "itemId3", "itemId4", "itemId5".
/// </remarks>
public virtual async Task<Response<IReadOnlyList<LargeFaceList>>> GetLargeFaceListsAsync(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLargeFaceListsAsync(start, top, returnRecognitionModel, context).ConfigureAwait(false);
    IReadOnlyList<LargeFaceList> value = default;
    using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
    List<LargeFaceList> array = new List<LargeFaceList>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LargeFaceList.DeserializeLargeFaceList(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary> List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. </summary>
/// <param name="start">List resources greater than the "start". It contains no more than 64 characters. Default is empty.</param>
/// <param name="top">The number of items to list, ranging in [1, 1000]. Default is 1000.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <remarks>
/// To get face information inside largeFaceList use "Get Large Face List Face".
///
/// Large Face Lists are stored in alphabetical order of largeFaceListId.
/// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item.
/// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call.
///
/// > [!TIP]
/// >
/// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5".
/// > * "start=&amp;top=" will return all 5 items.
/// > * "start=&amp;top=2" will return "itemId1", "itemId2".
/// > * "start=itemId2&amp;top=3" will return "itemId3", "itemId4", "itemId5".
/// </remarks>
public virtual Response<IReadOnlyList<LargeFaceList>> GetLargeFaceLists(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLargeFaceLists(start, top, returnRecognitionModel, context);
    IReadOnlyList<LargeFaceList> value = default;
    using var document = JsonDocument.Parse(response.ContentStream);
    List<LargeFaceList> array = new List<LargeFaceList>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LargeFaceList.DeserializeLargeFaceList(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary>
/// [Protocol Method] List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="start">List resources greater than the "start". It contains no more than 64 characters. Default is empty.</param>
/// <param name="top">The number of items to list, ranging in [1, 1000]. Default is 1000.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual async Task<Response> GetLargeFaceListsAsync(string start, int? top, bool? returnRecognitionModel, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceLists");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListsRequest(start, top, returnRecognitionModel, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="start">List resources greater than the "start". It contains no more than 64 characters. Default is empty.</param>
/// <param name="top">The number of items to list, ranging in [1, 1000]. Default is 1000.</param>
/// <param name="returnRecognitionModel">Return 'recognitionModel' or not. The default value is false.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual Response GetLargeFaceLists(string start, int? top, bool? returnRecognitionModel, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceLists");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListsRequest(start, top, returnRecognitionModel, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// To check the Large Face List training status completed or still ongoing. Large Face List training is an asynchronous operation triggered by "Train Large Face List".
/// Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
public virtual async Task<Response<FaceCollectionTrainingResult>> GetLargeFaceListTrainingStatusAsync(string largeFaceListId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLargeFaceListTrainingStatusAsync(largeFaceListId, context).ConfigureAwait(false);
    return Response.FromValue(FaceCollectionTrainingResult.FromResponse(response), response);
}

/// <summary>
/// To check the Large Face List training status completed or still ongoing. Large Face List training is an asynchronous operation triggered by "Train Large Face List".
/// Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
public virtual Response<FaceCollectionTrainingResult> GetLargeFaceListTrainingStatus(string largeFaceListId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLargeFaceListTrainingStatus(largeFaceListId, context);
    return Response.FromValue(FaceCollectionTrainingResult.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] To check the Large Face List training status completed or still ongoing. Large Face List training is an asynchronous operation triggered by "Train Large Face List".
/// Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual async Task<Response> GetLargeFaceListTrainingStatusAsync(string largeFaceListId, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceListTrainingStatus");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListTrainingStatusRequest(largeFaceListId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] To check the Large Face List training status completed or still ongoing. Large Face List training is an asynchronous operation triggered by "Train Large Face List".
/// Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
public virtual Response GetLargeFaceListTrainingStatus(string largeFaceListId, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceListTrainingStatus");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListTrainingStatusRequest(largeFaceListId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Add a face to a specified Large Face List, up to 1,000,000 faces. </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="url">URL of input image.</param>
/// <param name="targetFace">A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.</param>
/// <param name="detectionModel">The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.</param>
/// <param name="userData">User-provided data attached to the face. The size limit is 1K.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="url"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <remarks>
/// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Face List Face" or "Delete Large Face List" is called.
///
/// Note that persistedFaceId is different from faceId generated by "Detect".
/// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
/// * Each person entry can hold up to 248 faces.
/// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
/// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully.
/// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.
/// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
/// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model
///
/// > [!NOTE]
/// >
/// > * Free-tier subscription quota: 1,000 faces per Large Face List.
/// > * S0-tier subscription quota: 1,000,000 faces per Large Face List.
/// </remarks>
public virtual async Task<Response<AddFaceResult>> AddLargeFaceListFaceFromUrlAsync(string largeFaceListId, Uri url, IEnumerable<int> targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(url, nameof(url));

    AddLargeFaceListFaceFromUrlRequest addLargeFaceListFaceFromUrlRequest = new AddLargeFaceListFaceFromUrlRequest(url, null);
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await AddLargeFaceListFaceFromUrlAsync(largeFaceListId, addLargeFaceListFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false);
    return Response.FromValue(AddFaceResult.FromResponse(response), response);
}

/// <summary> Add a face to a specified Large Face List, up to 1,000,000 faces. </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="url">URL of input image.</param>
/// <param name="targetFace">A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.</param>
/// <param name="detectionModel">The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.</param>
/// <param name="userData">User-provided data attached to the face. The size limit is 1K.</param>
/// <param name="cancellationToken">The cancellation token to use.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="url"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <remarks>
/// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Face List Face" or "Delete Large Face List" is called.
///
/// Note that persistedFaceId is different from faceId generated by "Detect".
/// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
/// * Each person entry can hold up to 248 faces.
/// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
/// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully.
/// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.
/// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
/// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model
///
/// > [!NOTE]
/// >
/// > * Free-tier subscription quota: 1,000 faces per Large Face List.
/// > * S0-tier subscription quota: 1,000,000 faces per Large Face List.
/// </remarks>
public virtual Response<AddFaceResult> AddLargeFaceListFaceFromUrl(string largeFaceListId, Uri url, IEnumerable<int> targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(url, nameof(url));

    AddLargeFaceListFaceFromUrlRequest addLargeFaceListFaceFromUrlRequest = new AddLargeFaceListFaceFromUrlRequest(url, null);
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = AddLargeFaceListFaceFromUrl(largeFaceListId, addLargeFaceListFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context);
    return Response.FromValue(AddFaceResult.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="largeFaceListId">Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.</param>
/// <param name="content">The content to send as the body of the request.</param>
/// <param name="targetFace">A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.</param>
/// <param name="detectionModel">The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03".</param>
/// <param name="userData">User-provided data attached to the face. The size limit is 1K.</param>
/// <param name="context">The request context, which can override default behaviors of the client pipeline on a per-call basis.</param>
/// <exception cref="ArgumentNullException"><paramref name="largeFaceListId"/> or <paramref name="content"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty.</exception>
/// <exception cref="RequestFailedException">Service returned a non-success status code.</exception>
/// <returns>The response returned from the service.</returns>
+ /// + public virtual async Task AddLargeFaceListFaceFromUrlAsync(string largeFaceListId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargeFaceListFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddLargeFaceListFaceFromUrlRequest(largeFaceListId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. 
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response AddLargeFaceListFaceFromUrl(string largeFaceListId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargeFaceListFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddLargeFaceListFaceFromUrlRequest(largeFaceListId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Face List Face" or "Delete Large Face List" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". 
/// <summary> Add a face to a specified Large Face List, up to 1,000,000 faces. </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="imageContent"> The image to be analyzed. </param>
/// <param name="targetFace"> A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. </param>
/// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. </param>
/// <param name="userData"> User-provided data attached to the face. The size limit is 1K. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> or <paramref name="imageContent"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <remarks>
/// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Face List Face" or "Delete Large Face List" is called. Note that persistedFaceId is different from faceId generated by "Detect".
///
/// > [!NOTE]
/// > * Free-tier subscription quota: 1,000 faces per Large Face List.
/// > * S0-tier subscription quota: 1,000,000 faces per Large Face List.
/// </remarks>
public virtual async Task<Response<AddFaceResult>> AddLargeFaceListFaceAsync(string largeFaceListId, BinaryData imageContent, IEnumerable<int> targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(imageContent, nameof(imageContent));

    using RequestContent content = imageContent;
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await AddLargeFaceListFaceAsync(largeFaceListId, content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false);
    return Response.FromValue(AddFaceResult.FromResponse(response), response);
}

/// <summary> Add a face to a specified Large Face List, up to 1,000,000 faces. </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="imageContent"> The image to be analyzed. </param>
/// <param name="targetFace"> A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. </param>
/// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. </param>
/// <param name="userData"> User-provided data attached to the face. The size limit is 1K. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> or <paramref name="imageContent"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <remarks>
/// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Face List Face" or "Delete Large Face List" is called. Note that persistedFaceId is different from faceId generated by "Detect".
/// </remarks>
public virtual Response<AddFaceResult> AddLargeFaceListFace(string largeFaceListId, BinaryData imageContent, IEnumerable<int> targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(imageContent, nameof(imageContent));

    using RequestContent content = imageContent;
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = AddLargeFaceListFace(largeFaceListId, content, targetFace, detectionModel?.ToString(), userData, context);
    return Response.FromValue(AddFaceResult.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// <item><description> Please try the simpler convenience overload with strongly typed models first. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="targetFace"> A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. </param>
/// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". </param>
/// <param name="userData"> User-provided data attached to the face. The size limit is 1K. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> or <paramref name="content"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> AddLargeFaceListFaceAsync(string largeFaceListId, RequestContent content, IEnumerable<int> targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateAddLargeFaceListFaceRequest(largeFaceListId, content, targetFace, detectionModel, userData, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// <item><description> Please try the simpler convenience overload with strongly typed models first. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="targetFace"> A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. </param>
/// <param name="detectionModel"> The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". </param>
/// <param name="userData"> User-provided data attached to the face. The size limit is 1K. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> or <paramref name="content"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response AddLargeFaceListFace(string largeFaceListId, RequestContent content, IEnumerable<int> targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateAddLargeFaceListFaceRequest(largeFaceListId, content, targetFace, detectionModel, userData, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
// The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
/// <summary>
/// [Protocol Method] Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> DeleteLargeFaceListFaceAsync(string largeFaceListId, Guid persistedFaceId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteLargeFaceListFaceRequest(largeFaceListId, persistedFaceId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

// The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
/// <summary>
/// [Protocol Method] Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response DeleteLargeFaceListFace(string largeFaceListId, Guid persistedFaceId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteLargeFaceListFaceRequest(largeFaceListId, persistedFaceId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Retrieve persisted face in Large Face List by largeFaceListId and persistedFaceId. </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual async Task<Response<LargeFaceListFace>> GetLargeFaceListFaceAsync(string largeFaceListId, Guid persistedFaceId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLargeFaceListFaceAsync(largeFaceListId, persistedFaceId, context).ConfigureAwait(false);
    return Response.FromValue(LargeFaceListFace.FromResponse(response), response);
}

/// <summary> Retrieve persisted face in Large Face List by largeFaceListId and persistedFaceId. </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual Response<LargeFaceListFace> GetLargeFaceListFace(string largeFaceListId, Guid persistedFaceId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLargeFaceListFace(largeFaceListId, persistedFaceId, context);
    return Response.FromValue(LargeFaceListFace.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Retrieve persisted face in Large Face List by largeFaceListId and persistedFaceId.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// <item><description> Please try the simpler convenience overload with strongly typed models first. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetLargeFaceListFaceAsync(string largeFaceListId, Guid persistedFaceId, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListFaceRequest(largeFaceListId, persistedFaceId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Retrieve persisted face in Large Face List by largeFaceListId and persistedFaceId.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// <item><description> Please try the simpler convenience overload with strongly typed models first. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetLargeFaceListFace(string largeFaceListId, Guid persistedFaceId, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListFaceRequest(largeFaceListId, persistedFaceId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// [Protocol Method] Update a specified face's userData field in a Large Face List by its persistedFaceId.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> or <paramref name="content"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> UpdateLargeFaceListFaceAsync(string largeFaceListId, Guid persistedFaceId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdateLargeFaceListFaceRequest(largeFaceListId, persistedFaceId, content, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Update a specified face's userData field in a Large Face List by its persistedFaceId.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> or <paramref name="content"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response UpdateLargeFaceListFace(string largeFaceListId, Guid persistedFaceId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargeFaceListFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdateLargeFaceListFaceRequest(largeFaceListId, persistedFaceId, content, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary> List faces' persistedFaceId and userData in a specified Large Face List. </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <remarks>
/// Faces are stored in alphabetical order of persistedFaceId created in "Add Large Face List Face".
/// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item.
/// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call.
/// </remarks>
public virtual async Task<Response<IReadOnlyList<LargeFaceListFace>>> GetLargeFaceListFacesAsync(string largeFaceListId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLargeFaceListFacesAsync(largeFaceListId, start, top, context).ConfigureAwait(false);
    IReadOnlyList<LargeFaceListFace> value = default;
    using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
    List<LargeFaceListFace> array = new List<LargeFaceListFace>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LargeFaceListFace.DeserializeLargeFaceListFace(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary> List faces' persistedFaceId and userData in a specified Large Face List. </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <remarks>
/// Faces are stored in alphabetical order of persistedFaceId created in "Add Large Face List Face".
/// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item.
/// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call.
/// </remarks>
public virtual Response<IReadOnlyList<LargeFaceListFace>> GetLargeFaceListFaces(string largeFaceListId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLargeFaceListFaces(largeFaceListId, start, top, context);
    IReadOnlyList<LargeFaceListFace> value = default;
    using var document = JsonDocument.Parse(response.ContentStream);
    List<LargeFaceListFace> array = new List<LargeFaceListFace>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LargeFaceListFace.DeserializeLargeFaceListFace(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary>
/// [Protocol Method] List faces' persistedFaceId and userData in a specified Large Face List.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// <item><description> Please try the simpler convenience overload with strongly typed models first. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetLargeFaceListFacesAsync(string largeFaceListId, string start, int? top, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceListFaces");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListFacesRequest(largeFaceListId, start, top, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] List faces' persistedFaceId and userData in a specified Large Face List.
/// <list type="bullet">
/// <item><description> This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. </description></item>
/// <item><description> Please try the simpler convenience overload with strongly typed models first. </description></item>
/// </list>
/// </summary>
/// <param name="largeFaceListId"> Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="largeFaceListId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetLargeFaceListFaces(string largeFaceListId, string start, int? top, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargeFaceListFaces");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLargeFaceListFacesRequest(largeFaceListId, start, top, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
New faces that are added to an existing Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Person Group can't be updated to features extracted by another version of recognition model. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: 1,000 Person Groups. Each holds up to 1,000 persons. + /// > * S0-tier subscription quota: 1,000,000 Person Groups. Each holds up to 10,000 persons. + /// > * to handle larger scale face identification problem, please consider using Large Person Group. + /// + /// + public virtual async Task CreatePersonGroupAsync(string personGroupId, string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonGroupRequest createPersonGroupRequest = new CreatePersonGroupRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreatePersonGroupAsync(personGroupId, createPersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + return response; + } + + /// Create a new Person Group with specified personGroupId, name, user-provided userData and recognitionModel. + /// ID of the container. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. 
+ /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// A Person Group is a container holding the uploaded person data, including face recognition features. + /// + /// After creation, use "Create Person Group Person" to add persons into the group, and then call "Train Person Group" to get this group ready for "Identify From Person Group". + /// + /// No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until "Delete Person Group Person" or "Delete Person Group" is called. + /// + /// 'recognitionModel' should be specified to associate with this Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Person Group can't be updated to features extracted by another version of recognition model. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: 1,000 Person Groups. Each holds up to 1,000 persons. + /// > * S0-tier subscription quota: 1,000,000 Person Groups. Each holds up to 10,000 persons. + /// > * to handle larger scale face identification problem, please consider using Large Person Group. + /// + /// + public virtual Response CreatePersonGroup(string personGroupId, string name, string userData = null, FaceRecognitionModel? 
recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonGroupRequest createPersonGroupRequest = new CreatePersonGroupRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreatePersonGroup(personGroupId, createPersonGroupRequest.ToRequestContent(), context); + return response; + } + + /// + /// [Protocol Method] Create a new Person Group with specified personGroupId, name, user-provided userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task CreatePersonGroupAsync(string personGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreatePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonGroupRequest(personGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new Person Group with specified personGroupId, name, user-provided userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response CreatePersonGroup(string personGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreatePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonGroupRequest(personGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing Person Group with specified personGroupId. Persisted data in this Person Group will be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task DeletePersonGroupAsync(string personGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonGroupRequest(personGroupId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing Person Group with specified personGroupId. Persisted data in this Person Group will be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeletePersonGroup(string personGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonGroupRequest(personGroupId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve Person Group name, userData and recognitionModel. To get person information under this personGroup, use "Get Person Group Persons". + /// ID of the container. 
+ /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetPersonGroupAsync(string personGroupId, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonGroupAsync(personGroupId, returnRecognitionModel, context).ConfigureAwait(false); + return Response.FromValue(PersonGroup.FromResponse(response), response); + } + + /// Retrieve Person Group name, userData and recognitionModel. To get person information under this personGroup, use "Get Person Group Persons". + /// ID of the container. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetPersonGroup(string personGroupId, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersonGroup(personGroupId, returnRecognitionModel, context); + return Response.FromValue(PersonGroup.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve Person Group name, userData and recognitionModel. To get person information under this personGroup, use "Get Person Group Persons". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. 
+ /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonGroupAsync(string personGroupId, bool? returnRecognitionModel, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupRequest(personGroupId, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve Person Group name, userData and recognitionModel. To get person information under this personGroup, use "Get Person Group Persons". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPersonGroup(string personGroupId, bool? 
returnRecognitionModel, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupRequest(personGroupId, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update an existing Person Group's name and userData. The properties keep unchanged if they are not in request body. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdatePersonGroupAsync(string personGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonGroupRequest(personGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update an existing Person Group's name and userData. The properties keep unchanged if they are not in request body. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response UpdatePersonGroup(string personGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonGroupRequest(personGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List Person Groups' personGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + /// Person Groups are stored in alphabetical order of personGroupId. + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. + /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual async Task>> GetPersonGroupsAsync(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonGroupsAsync(start, top, returnRecognitionModel, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(PersonGroup.DeserializePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List Person Groups' personGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + /// Person Groups are stored in alphabetical order of personGroupId. + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. 
+ /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual Response> GetPersonGroups(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersonGroups(start, top, returnRecognitionModel, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(PersonGroup.DeserializePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List Person Groups' personGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. 
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonGroupsAsync(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupsRequest(start, top, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List Person Groups' personGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPersonGroups(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupsRequest(start, top, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// To check Person Group training status completed or still ongoing. 
Person Group training is an asynchronous operation triggered by "Train Person Group" API. + /// ID of the container. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetPersonGroupTrainingStatusAsync(string personGroupId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonGroupTrainingStatusAsync(personGroupId, context).ConfigureAwait(false); + return Response.FromValue(FaceCollectionTrainingResult.FromResponse(response), response); + } + + /// To check Person Group training status completed or still ongoing. Person Group training is an asynchronous operation triggered by "Train Person Group" API. + /// ID of the container. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetPersonGroupTrainingStatus(string personGroupId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersonGroupTrainingStatus(personGroupId, context); + return Response.FromValue(FaceCollectionTrainingResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] To check Person Group training status completed or still ongoing. Person Group training is an asynchronous operation triggered by "Train Person Group" API. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonGroupTrainingStatusAsync(string personGroupId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupTrainingStatusRequest(personGroupId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] To check Person Group training status completed or still ongoing. Person Group training is an asynchronous operation triggered by "Train Person Group" API. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response GetPersonGroupTrainingStatus(string personGroupId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupTrainingStatusRequest(personGroupId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a new person in a specified Person Group. To add face to this person, please call "Add Person Group Person Face". + /// ID of the container. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: + /// > * 1,000 persons in all Person Groups. + /// > * S0-tier subscription quota: + /// > * 10,000 persons per Person Group. + /// > * 1,000,000 Person Groups. + /// > * 100,000,000 persons in all Person Groups. 
+ /// + /// + public virtual async Task> CreatePersonGroupPersonAsync(string personGroupId, string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonGroupPersonRequest createPersonGroupPersonRequest = new CreatePersonGroupPersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreatePersonGroupPersonAsync(personGroupId, createPersonGroupPersonRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(CreatePersonResult.FromResponse(response), response); + } + + /// Create a new person in a specified Person Group. To add face to this person, please call "Add Person Group Person Face". + /// ID of the container. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: + /// > * 1,000 persons in all Person Groups. + /// > * S0-tier subscription quota: + /// > * 10,000 persons per Person Group. + /// > * 1,000,000 Person Groups. + /// > * 100,000,000 persons in all Person Groups. 
+ /// + /// + public virtual Response CreatePersonGroupPerson(string personGroupId, string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonGroupPersonRequest createPersonGroupPersonRequest = new CreatePersonGroupPersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreatePersonGroupPerson(personGroupId, createPersonGroupPersonRequest.ToRequestContent(), context); + return Response.FromValue(CreatePersonResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Create a new person in a specified Person Group. To add face to this person, please call "Add Person Group Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task CreatePersonGroupPersonAsync(string personGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreatePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonGroupPersonRequest(personGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new person in a specified Person Group. To add face to this person, please call "Add Person Group Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response CreatePersonGroupPerson(string personGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreatePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonGroupPersonRequest(personGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing person from a Person Group. The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task DeletePersonGroupPersonAsync(string personGroupId, Guid personId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonGroupPersonRequest(personGroupId, personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing person from a Person Group. The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response DeletePersonGroupPerson(string personGroupId, Guid personId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonGroupPersonRequest(personGroupId, personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). + /// ID of the container. + /// ID of the person. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetPersonGroupPersonAsync(string personGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonGroupPersonAsync(personGroupId, personId, context).ConfigureAwait(false); + return Response.FromValue(PersonGroupPerson.FromResponse(response), response); + } + + /// Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). + /// ID of the container. + /// ID of the person. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual Response GetPersonGroupPerson(string personGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersonGroupPerson(personGroupId, personId, context); + return Response.FromValue(PersonGroupPerson.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonGroupPersonAsync(string personGroupId, Guid personId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupPersonRequest(personGroupId, personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPersonGroupPerson(string personGroupId, Guid personId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupPersonRequest(personGroupId, personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update name or userData of a person. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task UpdatePersonGroupPersonAsync(string personGroupId, Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonGroupPersonRequest(personGroupId, personId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update name or userData of a person. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response UpdatePersonGroupPerson(string personGroupId, Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonGroupPersonRequest(personGroupId, personId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all persons' information in the specified Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Persons are stored in alphabetical order of personId created in "Create Person Group Person". + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. + /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". 
+ /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual async Task>> GetPersonGroupPersonsAsync(string personGroupId, string start = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonGroupPersonsAsync(personGroupId, start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(PersonGroupPerson.DeserializePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all persons' information in the specified Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Persons are stored in alphabetical order of personId created in "Create Person Group Person". + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. + /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual Response> GetPersonGroupPersons(string personGroupId, string start = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersonGroupPersons(personGroupId, start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(PersonGroupPerson.DeserializePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all persons' information in the specified Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonGroupPersonsAsync(string personGroupId, string start, int? top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupPersonsRequest(personGroupId, start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all persons' information in the specified Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPersonGroupPersons(string personGroupId, string start, int? 
top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupPersonsRequest(personGroupId, start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Person Group Person Face", "Delete Person Group Person" or "Delete Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. 
+ /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual async Task> AddPersonGroupPersonFaceFromUrlAsync(string personGroupId, Guid personId, Uri url, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(url, nameof(url)); + + AddPersonGroupPersonFaceFromUrlRequest addPersonGroupPersonFaceFromUrlRequest = new AddPersonGroupPersonFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddPersonGroupPersonFaceFromUrlAsync(personGroupId, personId, addPersonGroupPersonFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. 
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Person Group Person Face", "Delete Person Group Person" or "Delete Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual Response AddPersonGroupPersonFaceFromUrl(string personGroupId, Guid personId, Uri url, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(url, nameof(url)); + + AddPersonGroupPersonFaceFromUrlRequest addPersonGroupPersonFaceFromUrlRequest = new AddPersonGroupPersonFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddPersonGroupPersonFaceFromUrl(personGroupId, personId, addPersonGroupPersonFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task AddPersonGroupPersonFaceFromUrlAsync(string personGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonGroupPersonFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonGroupPersonFaceFromUrlRequest(personGroupId, personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". 
+ /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response AddPersonGroupPersonFaceFromUrl(string personGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonGroupPersonFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonGroupPersonFaceFromUrlRequest(personGroupId, personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. 
It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Person Group Person Face", "Delete Person Group Person" or "Delete Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual async Task> AddPersonGroupPersonFaceAsync(string personGroupId, Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? 
detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddPersonGroupPersonFaceAsync(personGroupId, personId, content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Person Group Person Face", "Delete Person Group Person" or "Delete Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. 
+ /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual Response AddPersonGroupPersonFace(string personGroupId, Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddPersonGroupPersonFace(personGroupId, personId, content, targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task AddPersonGroupPersonFaceAsync(string personGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonGroupPersonFaceRequest(personGroupId, personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Person Group for face identification or verification. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response AddPersonGroupPersonFace(string personGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonGroupPersonFaceRequest(personGroupId, personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Person Group by specified personGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task DeletePersonGroupPersonFaceAsync(string personGroupId, Guid personId, Guid persistedFaceId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonGroupPersonFaceRequest(personGroupId, personId, persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Person Group by specified personGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response DeletePersonGroupPersonFace(string personGroupId, Guid personId, Guid persistedFaceId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonGroupPersonFaceRequest(personGroupId, personId, persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve person face information. The persisted person face is specified by its personGroupId, personId and persistedFaceId. + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetPersonGroupPersonFaceAsync(string personGroupId, Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonGroupPersonFaceAsync(personGroupId, personId, persistedFaceId, context).ConfigureAwait(false); + return Response.FromValue(PersonGroupPersonFace.FromResponse(response), response); + } + + /// Retrieve person face information. The persisted person face is specified by its personGroupId, personId and persistedFaceId. + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual Response GetPersonGroupPersonFace(string personGroupId, Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersonGroupPersonFace(personGroupId, personId, persistedFaceId, context); + return Response.FromValue(PersonGroupPersonFace.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve person face information. The persisted person face is specified by its personGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonGroupPersonFaceAsync(string personGroupId, Guid personId, Guid persistedFaceId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupPersonFaceRequest(personGroupId, personId, persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve person face information. 
The persisted person face is specified by its personGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPersonGroupPersonFace(string personGroupId, Guid personId, Guid persistedFaceId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonGroupPersonFaceRequest(personGroupId, personId, persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update a person persisted face's userData field. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task UpdatePersonGroupPersonFaceAsync(string personGroupId, Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonGroupPersonFaceRequest(personGroupId, personId, persistedFaceId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update a person persisted face's userData field. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response UpdatePersonGroupPersonFace(string personGroupId, Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonGroupPersonFaceRequest(personGroupId, personId, persistedFaceId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// ID of the container. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// A Large Person Group is a container holding the uploaded person data, including the face recognition features. It can hold up to 1,000,000 entities. + /// + /// After creation, use "Create Large Person Group Person" to add person into the group, and call "Train Large Person Group" to get this group ready for "Identify From Large Person Group". + /// + /// No image will be stored. 
Only the person's extracted face feature(s) and userData will be stored on server until "Delete Large Person Group Person" or "Delete Large Person Group" is called. + /// + /// 'recognitionModel' should be specified to associate with this Large Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Large Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Large Person Group can't be updated to features extracted by another version of recognition model. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: 1,000 Large Person Groups. + /// > * S0-tier subscription quota: 1,000,000 Large Person Groups. + /// + /// + public virtual async Task CreateLargePersonGroupAsync(string largePersonGroupId, string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreateLargePersonGroupRequest createLargePersonGroupRequest = new CreateLargePersonGroupRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateLargePersonGroupAsync(largePersonGroupId, createLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + return response; + } + + /// Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// ID of the container. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. 
Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// A Large Person Group is a container holding the uploaded person data, including the face recognition features. It can hold up to 1,000,000 entities. + /// + /// After creation, use "Create Large Person Group Person" to add person into the group, and call "Train Large Person Group" to get this group ready for "Identify From Large Person Group". + /// + /// No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until "Delete Large Person Group Person" or "Delete Large Person Group" is called. + /// + /// 'recognitionModel' should be specified to associate with this Large Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model is needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Large Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Large Person Group can't be updated to features extracted by another version of recognition model. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: 1,000 Large Person Groups. + /// > * S0-tier subscription quota: 1,000,000 Large Person Groups. + /// + /// + public virtual Response CreateLargePersonGroup(string largePersonGroupId, string name, string userData = null, FaceRecognitionModel?
recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreateLargePersonGroupRequest createLargePersonGroupRequest = new CreateLargePersonGroupRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateLargePersonGroup(largePersonGroupId, createLargePersonGroupRequest.ToRequestContent(), context); + return response; + } + + /// + /// [Protocol Method] Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task CreateLargePersonGroupAsync(string largePersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateLargePersonGroupRequest(largePersonGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response CreateLargePersonGroup(string largePersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateLargePersonGroupRequest(largePersonGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing Large Person Group with specified largePersonGroupId. Persisted data in this Large Person Group will be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service.
+ /// + public virtual async Task DeleteLargePersonGroupAsync(string largePersonGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLargePersonGroupRequest(largePersonGroupId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing Large Person Group with specified largePersonGroupId. Persisted data in this Large Person Group will be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeleteLargePersonGroup(string largePersonGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLargePersonGroupRequest(largePersonGroupId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve the information of a Large Person Group, including its name, userData and recognitionModel.
This API returns Large Person Group information only, use "Get Large Person Group Persons" instead to retrieve person information under the Large Person Group. + /// ID of the container. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetLargePersonGroupAsync(string largePersonGroupId, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupAsync(largePersonGroupId, returnRecognitionModel, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroup.FromResponse(response), response); + } + + /// Retrieve the information of a Large Person Group, including its name, userData and recognitionModel. This API returns Large Person Group information only, use "Get Large Person Group Persons" instead to retrieve person information under the Large Person Group. + /// ID of the container. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetLargePersonGroup(string largePersonGroupId, bool? 
returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroup(largePersonGroupId, returnRecognitionModel, context); + return Response.FromValue(LargePersonGroup.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve the information of a Large Person Group, including its name, userData and recognitionModel. This API returns Large Person Group information only, use "Get Large Person Group Persons" instead to retrieve person information under the Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupAsync(string largePersonGroupId, bool? 
returnRecognitionModel, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupRequest(largePersonGroupId, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve the information of a Large Person Group, including its name, userData and recognitionModel. This API returns Large Person Group information only, use "Get Large Person Group Persons" instead to retrieve person information under the Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroup(string largePersonGroupId, bool? 
returnRecognitionModel, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupRequest(largePersonGroupId, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update an existing Large Person Group's name and userData. The properties keep unchanged if they are not in request body. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdateLargePersonGroupAsync(string largePersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateLargePersonGroupRequest(largePersonGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update an existing Large Person Group's name and userData. 
The properties keep unchanged if they are not in request body. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response UpdateLargePersonGroup(string largePersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateLargePersonGroupRequest(largePersonGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + /// Large Person Groups are stored in alphabetical order of largePersonGroupId. + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. 
+ /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual async Task>> GetLargePersonGroupsAsync(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupsAsync(start, top, returnRecognitionModel, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroup.DeserializeLargePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + /// Large Person Groups are stored in alphabetical order of largePersonGroupId. + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. 
Setting "start" to an empty value indicates that entries should be returned starting from the first item. + /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual Response> GetLargePersonGroups(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroups(start, top, returnRecognitionModel, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroup.DeserializeLargePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. 
The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupsAsync(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupsRequest(start, top, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroups(string start, int? top, bool? 
returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupsRequest(start, top, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// ID of the container. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Training time depends on the number of person entries, and their faces in a Large Person Group. It could be in seconds, or up to half an hour for 1,000,000 persons. + /// + public virtual async Task> GetLargePersonGroupTrainingStatusAsync(string largePersonGroupId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupTrainingStatusAsync(largePersonGroupId, context).ConfigureAwait(false); + return Response.FromValue(FaceCollectionTrainingResult.FromResponse(response), response); + } + + /// To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// ID of the container. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Training time depends on the number of person entries, and their faces in a Large Person Group. It could be in seconds, or up to half an hour for 1,000,000 persons. 
+ /// + public virtual Response GetLargePersonGroupTrainingStatus(string largePersonGroupId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroupTrainingStatus(largePersonGroupId, context); + return Response.FromValue(FaceCollectionTrainingResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupTrainingStatusAsync(string largePersonGroupId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupTrainingStatusRequest(largePersonGroupId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] To check Large Person Group training status completed or still ongoing. 
Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroupTrainingStatus(string largePersonGroupId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupTrainingStatusRequest(largePersonGroupId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a new person in a specified Large Person Group. To add face to this person, please call "Add Large Person Group Person Face". + /// ID of the container. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: + /// > * 1,000 persons in all Large Person Groups. + /// > * S0-tier subscription quota: + /// > * 1,000,000 persons per Large Person Group. + /// > * 1,000,000 Large Person Groups. + /// > * 1,000,000,000 persons in all Large Person Groups. 
+ /// + /// + public virtual async Task> CreateLargePersonGroupPersonAsync(string largePersonGroupId, string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreateLargePersonGroupPersonRequest createLargePersonGroupPersonRequest = new CreateLargePersonGroupPersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateLargePersonGroupPersonAsync(largePersonGroupId, createLargePersonGroupPersonRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(CreatePersonResult.FromResponse(response), response); + } + + /// Create a new person in a specified Large Person Group. To add face to this person, please call "Add Large Person Group Person Face". + /// ID of the container. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Free-tier subscription quota: + /// > * 1,000 persons in all Large Person Groups. + /// > * S0-tier subscription quota: + /// > * 1,000,000 persons per Large Person Group. + /// > * 1,000,000 Large Person Groups. + /// > * 1,000,000,000 persons in all Large Person Groups. 
+ /// + /// + public virtual Response CreateLargePersonGroupPerson(string largePersonGroupId, string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreateLargePersonGroupPersonRequest createLargePersonGroupPersonRequest = new CreateLargePersonGroupPersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateLargePersonGroupPerson(largePersonGroupId, createLargePersonGroupPersonRequest.ToRequestContent(), context); + return Response.FromValue(CreatePersonResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Create a new person in a specified Large Person Group. To add face to this person, please call "Add Large Person Group Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task CreateLargePersonGroupPersonAsync(string largePersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateLargePersonGroupPersonRequest(largePersonGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new person in a specified Large Person Group. To add face to this person, please call "Add Large Person Group Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response CreateLargePersonGroupPerson(string largePersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateLargePersonGroupPersonRequest(largePersonGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing person from a Large Person Group. The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task DeleteLargePersonGroupPersonAsync(string largePersonGroupId, Guid personId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLargePersonGroupPersonRequest(largePersonGroupId, personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing person from a Large Person Group. The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response DeleteLargePersonGroupPerson(string largePersonGroupId, Guid personId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLargePersonGroupPersonRequest(largePersonGroupId, personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). + /// ID of the container. + /// ID of the person. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetLargePersonGroupPersonAsync(string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupPersonAsync(largePersonGroupId, personId, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroupPerson.FromResponse(response), response); + } + + /// Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). + /// ID of the container. + /// ID of the person. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual Response GetLargePersonGroupPerson(string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroupPerson(largePersonGroupId, personId, context); + return Response.FromValue(LargePersonGroupPerson.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupPersonAsync(string largePersonGroupId, Guid personId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupPersonRequest(largePersonGroupId, personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s). 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroupPerson(string largePersonGroupId, Guid personId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupPersonRequest(largePersonGroupId, personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update name or userData of a person. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task UpdateLargePersonGroupPersonAsync(string largePersonGroupId, Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateLargePersonGroupPersonRequest(largePersonGroupId, personId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update name or userData of a person. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response UpdateLargePersonGroupPerson(string largePersonGroupId, Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargePersonGroupPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateLargePersonGroupPersonRequest(largePersonGroupId, personId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Persons are stored in alphabetical order of personId created in "Create Large Person Group Person". + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. + /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. 
+ /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual async Task>> GetLargePersonGroupPersonsAsync(string largePersonGroupId, string start = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupPersonsAsync(largePersonGroupId, start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroupPerson.DeserializeLargePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Persons are stored in alphabetical order of personId created in "Create Large Person Group Person". + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. 
+ /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual Response> GetLargePersonGroupPersons(string largePersonGroupId, string start = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroupPersons(largePersonGroupId, start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroupPerson.DeserializeLargePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. 
Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupPersonsAsync(string largePersonGroupId, string start, int? top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupPersonsRequest(largePersonGroupId, start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroupPersons(string largePersonGroupId, string start, int? 
top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupPersonsRequest(largePersonGroupId, start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Person Group Person Face", "Delete Large Person Group Person" or "Delete Large Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. 
The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual async Task> AddLargePersonGroupPersonFaceFromUrlAsync(string largePersonGroupId, Guid personId, Uri url, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(url, nameof(url)); + + AddLargePersonGroupPersonFaceFromUrlRequest addLargePersonGroupPersonFaceFromUrlRequest = new AddLargePersonGroupPersonFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddLargePersonGroupPersonFaceFromUrlAsync(largePersonGroupId, personId, addLargePersonGroupPersonFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// URL of input image. 
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Person Group Person Face", "Delete Large Person Group Person" or "Delete Large Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual Response AddLargePersonGroupPersonFaceFromUrl(string largePersonGroupId, Guid personId, Uri url, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(url, nameof(url)); + + AddLargePersonGroupPersonFaceFromUrlRequest addLargePersonGroupPersonFaceFromUrlRequest = new AddLargePersonGroupPersonFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddLargePersonGroupPersonFaceFromUrl(largePersonGroupId, personId, addLargePersonGroupPersonFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". 
+ /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task AddLargePersonGroupPersonFaceFromUrlAsync(string largePersonGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargePersonGroupPersonFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddLargePersonGroupPersonFaceFromUrlRequest(largePersonGroupId, personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. 
The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response AddLargePersonGroupPersonFaceFromUrl(string largePersonGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargePersonGroupPersonFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddLargePersonGroupPersonFaceFromUrlRequest(largePersonGroupId, personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Person Group Person Face", "Delete Large Person Group Person" or "Delete Large Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual async Task> AddLargePersonGroupPersonFaceAsync(string largePersonGroupId, Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? 
detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddLargePersonGroupPersonFaceAsync(largePersonGroupId, personId, content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the container. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until "Delete Large Person Group Person Face", "Delete Large Person Group Person" or "Delete Large Person Group" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// + /// + public virtual Response AddLargePersonGroupPersonFace(string largePersonGroupId, Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddLargePersonGroupPersonFace(largePersonGroupId, personId, content, targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task AddLargePersonGroupPersonFaceAsync(string largePersonGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. 
+ /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response AddLargePersonGroupPersonFace(string largePersonGroupId, Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddLargePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task DeleteLargePersonGroupPersonFaceAsync(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response DeleteLargePersonGroupPersonFace(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteLargePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve person face information. The persisted person face is specified by its largePersonGroupId, personId and persistedFaceId. + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetLargePersonGroupPersonFaceAsync(string largePersonGroupId, Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupPersonFaceAsync(largePersonGroupId, personId, persistedFaceId, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroupPersonFace.FromResponse(response), response); + } + + /// Retrieve person face information. The persisted person face is specified by its largePersonGroupId, personId and persistedFaceId. + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual Response GetLargePersonGroupPersonFace(string largePersonGroupId, Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroupPersonFace(largePersonGroupId, personId, persistedFaceId, context); + return Response.FromValue(LargePersonGroupPersonFace.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve person face information. The persisted person face is specified by its largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task GetLargePersonGroupPersonFaceAsync(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve person face information. The persisted person face is specified by its largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response GetLargePersonGroupPersonFace(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetLargePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update a person persisted face's userData field. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task UpdateLargePersonGroupPersonFaceAsync(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargePersonGroupPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, persistedFaceId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update a person persisted face's userData field. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the container. + /// ID of the person. + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
public virtual Response UpdateLargePersonGroupPersonFace(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateLargePersonGroupPersonFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdateLargePersonGroupPersonFaceRequest(largePersonGroupId, personId, persistedFaceId, content, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Retrieve a person's name and userData from Person Directory. </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
public virtual async Task<Response<PersonDirectoryPerson>> GetPersonAsync(Guid personId, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetPersonAsync(personId, context).ConfigureAwait(false);
    return Response.FromValue(PersonDirectoryPerson.FromResponse(response), response);
}

/// <summary> Retrieve a person's name and userData from Person Directory. </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
public virtual Response<PersonDirectoryPerson> GetPerson(Guid personId, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetPerson(personId, context);
    return Response.FromValue(PersonDirectoryPerson.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Retrieve a person's name and userData from Person Directory.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetPersonAsync(Guid personId, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPerson");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonRequest(personId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Retrieve a person's name and userData from Person Directory.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetPerson(Guid personId, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPerson");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonRequest(personId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Update name or userData of a person.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> UpdatePersonAsync(Guid personId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePerson");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdatePersonRequest(personId, content, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Update name or userData of a person.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response UpdatePerson(Guid personId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePerson");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdatePersonRequest(personId, content, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> List all persons' information in Person Directory, including personId, name, and userData. </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <remarks>
/// Persons are stored in alphabetical order of personId created in Person Directory "Create Person".
/// "start" specifies an ID value from which returned entries will have larger IDs based on string comparison; an empty value
/// starts from the first item. "top" caps the number of entries returned, up to 1000 per call; to page further, set "start"
/// to the personId of the last entry returned by the current call.
/// For example, with items "itemId1" .. "itemId5": start=&amp;top= returns all 5; start=&amp;top=2 returns "itemId1","itemId2";
/// start=itemId2&amp;top=3 returns "itemId3","itemId4","itemId5".
/// </remarks>
public virtual async Task<Response<IReadOnlyList<PersonDirectoryPerson>>> GetPersonsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetPersonsAsync(start, top, context).ConfigureAwait(false);
    IReadOnlyList<PersonDirectoryPerson> value = default;
    using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
    List<PersonDirectoryPerson> array = new List<PersonDirectoryPerson>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(PersonDirectoryPerson.DeserializePersonDirectoryPerson(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary> List all persons' information in Person Directory, including personId, name, and userData. </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <remarks> See the async overload for paging semantics of "start" and "top". </remarks>
public virtual Response<IReadOnlyList<PersonDirectoryPerson>> GetPersons(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetPersons(start, top, context);
    IReadOnlyList<PersonDirectoryPerson> value = default;
    using var document = JsonDocument.Parse(response.ContentStream);
    List<PersonDirectoryPerson> array = new List<PersonDirectoryPerson>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(PersonDirectoryPerson.DeserializePersonDirectoryPerson(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary>
/// [Protocol Method] List all persons' information in Person Directory, including personId, name, and userData.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetPersonsAsync(string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersons");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonsRequest(start, top, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] List all persons' information in Person Directory, including personId, name, and userData.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetPersons(string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersons");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonsRequest(start, top, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> List all Dynamic Person Groups a person has been referenced by in Person Directory. </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <remarks>
/// Dynamic Person Groups are stored in alphabetical order of Dynamic Person Group ID created in Person Directory "Create Dynamic Person Group".
/// "start" specifies an ID value from which returned entries will have larger IDs based on string comparison; an empty value
/// starts from the first item. "top" caps the number of entries returned, up to 1000 per call; to page further, set "start"
/// to the ID of the last entry returned by the current call.
/// For example, with items "itemId1" .. "itemId5": start=&amp;top= returns all 5; start=&amp;top=2 returns "itemId1","itemId2";
/// start=itemId2&amp;top=3 returns "itemId3","itemId4","itemId5".
/// </remarks>
public virtual async Task<Response<ListGroupReferenceResult>> GetDynamicPersonGroupReferencesAsync(Guid personId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetDynamicPersonGroupReferencesAsync(personId, start, top, context).ConfigureAwait(false);
    return Response.FromValue(ListGroupReferenceResult.FromResponse(response), response);
}

/// <summary> List all Dynamic Person Groups a person has been referenced by in Person Directory. </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <remarks> See the async overload for paging semantics of "start" and "top". </remarks>
public virtual Response<ListGroupReferenceResult> GetDynamicPersonGroupReferences(Guid personId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetDynamicPersonGroupReferences(personId, start, top, context);
    return Response.FromValue(ListGroupReferenceResult.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] List all Dynamic Person Groups a person has been referenced by in Person Directory.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetDynamicPersonGroupReferencesAsync(Guid personId, string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroupReferences");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetDynamicPersonGroupReferencesRequest(personId, start, top, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] List all Dynamic Person Groups a person has been referenced by in Person Directory.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetDynamicPersonGroupReferences(Guid personId, string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroupReferences");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetDynamicPersonGroupReferencesRequest(personId, start, top, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Retrieve person face information. The persisted person face is specified by its personId, recognitionModel, and persistedFaceId. </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
public virtual async Task<Response<PersonDirectoryFace>> GetPersonFaceAsync(Guid personId, FaceRecognitionModel recognitionModel, Guid persistedFaceId, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetPersonFaceAsync(personId, recognitionModel.ToString(), persistedFaceId, context).ConfigureAwait(false);
    return Response.FromValue(PersonDirectoryFace.FromResponse(response), response);
}

/// <summary> Retrieve person face information. The persisted person face is specified by its personId, recognitionModel, and persistedFaceId. </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
public virtual Response<PersonDirectoryFace> GetPersonFace(Guid personId, FaceRecognitionModel recognitionModel, Guid persistedFaceId, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetPersonFace(personId, recognitionModel.ToString(), persistedFaceId, context);
    return Response.FromValue(PersonDirectoryFace.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Retrieve person face information. The persisted person face is specified by its personId, recognitionModel, and persistedFaceId.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="recognitionModel"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="recognitionModel"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetPersonFaceAsync(Guid personId, string recognitionModel, Guid persistedFaceId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonFaceRequest(personId, recognitionModel, persistedFaceId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Retrieve person face information. The persisted person face is specified by its personId, recognitionModel, and persistedFaceId.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="recognitionModel"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="recognitionModel"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetPersonFace(Guid personId, string recognitionModel, Guid persistedFaceId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonFaceRequest(personId, recognitionModel, persistedFaceId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Update a persisted face's userData field of a person.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="recognitionModel"/> or <paramref name="content"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="recognitionModel"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> UpdatePersonFaceAsync(Guid personId, string recognitionModel, Guid persistedFaceId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdatePersonFaceRequest(personId, recognitionModel, persistedFaceId, content, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Update a persisted face's userData field of a person.
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". </param>
/// <param name="persistedFaceId"> Face ID of the face. </param>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="recognitionModel"/> or <paramref name="content"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="recognitionModel"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response UpdatePersonFace(Guid personId, string recognitionModel, Guid persistedFaceId, RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel));
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdatePersonFace");
    scope.Start();
    try
    {
        using HttpMessage message = CreateUpdatePersonFaceRequest(personId, recognitionModel, persistedFaceId, content, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Retrieve a person's persistedFaceIds representing the registered person face feature(s). </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
public virtual async Task<Response<ListFaceResult>> GetPersonFacesAsync(Guid personId, FaceRecognitionModel recognitionModel, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetPersonFacesAsync(personId, recognitionModel.ToString(), context).ConfigureAwait(false);
    return Response.FromValue(ListFaceResult.FromResponse(response), response);
}

/// <summary> Retrieve a person's persistedFaceIds representing the registered person face feature(s). </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
public virtual Response<ListFaceResult> GetPersonFaces(Guid personId, FaceRecognitionModel recognitionModel, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetPersonFaces(personId, recognitionModel.ToString(), context);
    return Response.FromValue(ListFaceResult.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Retrieve a person's persistedFaceIds representing the registered person face feature(s).
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="recognitionModel"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="recognitionModel"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetPersonFacesAsync(Guid personId, string recognitionModel, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonFaces");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonFacesRequest(personId, recognitionModel, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Retrieve a person's persistedFaceIds representing the registered person face feature(s).
/// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
/// Please try the simpler convenience overload with strongly typed models first.
/// </summary>
/// <param name="personId"> Person ID of the person. </param>
/// <param name="recognitionModel"> The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="recognitionModel"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="recognitionModel"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetPersonFaces(Guid personId, string recognitionModel, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel));

    using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetPersonFaces");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetPersonFacesRequest(personId, recognitionModel, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData. </summary>
/// <param name="dynamicPersonGroupId"> ID of the dynamic person group. </param>
/// <param name="name"> User defined name, maximum length is 128. </param>
/// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="dynamicPersonGroupId"/> or <paramref name="name"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="dynamicPersonGroupId"/> is an empty string, and was expected to be non-empty. </exception>
/// <remarks>
/// A Dynamic Person Group is a container that references Person Directory "Create Person". After creation, use Person Directory
/// "Update Dynamic Person Group" to add/remove persons to/from the Dynamic Person Group.
/// Dynamic Person Group and user data will be stored on server until Person Directory "Delete Dynamic Person Group" is called.
/// Use "Identify From Dynamic Person Group" with the dynamicPersonGroupId parameter to identify against persons.
/// No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until
/// Person Directory "Delete Person" or "Delete Person Face" is called.
/// 'recognitionModel' does not need to be specified with Dynamic Person Groups. Dynamic Person Groups are references to
/// Person Directory "Create Person" and therefore work with most all 'recognitionModels'. The faceIds provided during
/// "Identify" determine the 'recognitionModel' used.
/// </remarks>
public virtual async Task<Response> CreateDynamicPersonGroupAsync(string dynamicPersonGroupId, string name, string userData = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId));
    Argument.AssertNotNull(name, nameof(name));

    CreateDynamicPersonGroupRequest createDynamicPersonGroupRequest = new CreateDynamicPersonGroupRequest(name, userData, null);
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await CreateDynamicPersonGroupAsync(dynamicPersonGroupId, createDynamicPersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false);
    return response;
}

/// <summary> Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData. </summary>
/// <param name="dynamicPersonGroupId"> ID of the dynamic person group. </param>
/// <param name="name"> User defined name, maximum length is 128. </param>
/// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="dynamicPersonGroupId"/> or <paramref name="name"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="dynamicPersonGroupId"/> is an empty string, and was expected to be non-empty. </exception>
/// <remarks> See the async overload for the full description of Dynamic Person Group semantics. </remarks>
+ /// + /// Dynamic Person Group and user data will be stored on server until Person Directory "Delete Dynamic Person Group" is called. Use "Identify From Dynamic Person Group" with the dynamicPersonGroupId parameter to identify against persons. + /// + /// No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until Person Directory "Delete Person" or "Delete Person Face" is called. + /// + /// 'recognitionModel' does not need to be specified with Dynamic Person Groups. Dynamic Person Groups are references to Person Directory "Create Person" and therefore work with most all 'recognitionModels'. The faceId's provided during "Identify" determine the 'recognitionModel' used. + /// + /// + public virtual Response CreateDynamicPersonGroup(string dynamicPersonGroupId, string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(name, nameof(name)); + + CreateDynamicPersonGroupRequest createDynamicPersonGroupRequest = new CreateDynamicPersonGroupRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateDynamicPersonGroup(dynamicPersonGroupId, createDynamicPersonGroupRequest.ToRequestContent(), context); + return response; + } + + /// + /// [Protocol Method] Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// The content to send as the body of the request. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateDynamicPersonGroupAsync(string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateDynamicPersonGroupRequest(dynamicPersonGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response CreateDynamicPersonGroup(string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateDynamicPersonGroupRequest(dynamicPersonGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Retrieve the information of a Dynamic Person Group, including its name and userData. + /// ID of the dynamic person group. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// This API returns Dynamic Person Group information only, use Person Directory "Get Dynamic Person Group Persons" instead to retrieve person information under the Dynamic Person Group. + /// + public virtual async Task> GetDynamicPersonGroupAsync(string dynamicPersonGroupId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetDynamicPersonGroupAsync(dynamicPersonGroupId, context).ConfigureAwait(false); + return Response.FromValue(DynamicPersonGroup.FromResponse(response), response); + } + + /// Retrieve the information of a Dynamic Person Group, including its name and userData. + /// ID of the dynamic person group. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// This API returns Dynamic Person Group information only, use Person Directory "Get Dynamic Person Group Persons" instead to retrieve person information under the Dynamic Person Group. 
+ /// + public virtual Response GetDynamicPersonGroup(string dynamicPersonGroupId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetDynamicPersonGroup(dynamicPersonGroupId, context); + return Response.FromValue(DynamicPersonGroup.FromResponse(response), response); + } + + /// + /// [Protocol Method] Retrieve the information of a Dynamic Person Group, including its name and userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetDynamicPersonGroupAsync(string dynamicPersonGroupId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDynamicPersonGroupRequest(dynamicPersonGroupId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Retrieve the information of a Dynamic Person Group, including its name and userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetDynamicPersonGroup(string dynamicPersonGroupId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDynamicPersonGroupRequest(dynamicPersonGroupId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update the name or userData of an existing Dynamic Person Group, and manage its members by adding or removing persons. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task UpdateDynamicPersonGroupAsync(string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateDynamicPersonGroupRequest(dynamicPersonGroupId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update the name or userData of an existing Dynamic Person Group, and manage its members by adding or removing persons. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response UpdateDynamicPersonGroup(string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateDynamicPersonGroupRequest(dynamicPersonGroupId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all existing Dynamic Person Groups by dynamicPersonGroupId along with name and userData. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// + /// Dynamic Person Groups are stored in alphabetical order of dynamicPersonGroupId. + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. + /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual async Task>> GetDynamicPersonGroupsAsync(string start = null, int? 
top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetDynamicPersonGroupsAsync(start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(DynamicPersonGroup.DeserializeDynamicPersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all existing Dynamic Person Groups by dynamicPersonGroupId along with name and userData. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// + /// Dynamic Person Groups are stored in alphabetical order of dynamicPersonGroupId. + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. + /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual Response> GetDynamicPersonGroups(string start = null, int? 
top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetDynamicPersonGroups(start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(DynamicPersonGroup.DeserializeDynamicPersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all existing Dynamic Person Groups by dynamicPersonGroupId along with name and userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetDynamicPersonGroupsAsync(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDynamicPersonGroupsRequest(start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all existing Dynamic Person Groups by dynamicPersonGroupId along with name and userData. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetDynamicPersonGroups(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDynamicPersonGroupsRequest(start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all persons in the specified Dynamic Person Group. + /// ID of the dynamic person group. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Persons are stored in alphabetical order of personId created in Person Directory "Create Person". + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. 
+ /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual async Task> GetDynamicPersonGroupPersonsAsync(string dynamicPersonGroupId, string start = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetDynamicPersonGroupPersonsAsync(dynamicPersonGroupId, start, top, context).ConfigureAwait(false); + return Response.FromValue(ListPersonResult.FromResponse(response), response); + } + + /// List all persons in the specified Dynamic Person Group. + /// ID of the dynamic person group. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + /// Persons are stored in alphabetical order of personId created in Person Directory "Create Person". + /// > + /// * + /// * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. 
+ /// * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + /// + /// > [!TIP] + /// > + /// > * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". + /// > * "start=&top=" will return all 5 items. + /// > * "start=&top=2" will return "itemId1", "itemId2". + /// > * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". + /// + /// + public virtual Response GetDynamicPersonGroupPersons(string dynamicPersonGroupId, string start = null, int? top = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetDynamicPersonGroupPersons(dynamicPersonGroupId, start, top, context); + return Response.FromValue(ListPersonResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] List all persons in the specified Dynamic Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task GetDynamicPersonGroupPersonsAsync(string dynamicPersonGroupId, string start, int? top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroupPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDynamicPersonGroupPersonsRequest(dynamicPersonGroupId, start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all persons in the specified Dynamic Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the dynamic person group. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetDynamicPersonGroupPersons(string dynamicPersonGroupId, string start, int? 
top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.GetDynamicPersonGroupPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetDynamicPersonGroupPersonsRequest(dynamicPersonGroupId, start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Face List training task. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual async Task TrainLargeFaceListAsync(WaitUntil waitUntil, string largeFaceListId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.TrainLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainLargeFaceListRequest(largeFaceListId, context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.TrainLargeFaceList", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Face List training task. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual Operation TrainLargeFaceList(WaitUntil waitUntil, string largeFaceListId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.TrainLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainLargeFaceListRequest(largeFaceListId, context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.TrainLargeFaceList", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Person Group training task. Training is a crucial step that only a trained Person Group can be used by "Identify From Person Group". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual async Task TrainPersonGroupAsync(WaitUntil waitUntil, string personGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.TrainPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainPersonGroupRequest(personGroupId, context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.TrainPersonGroup", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Person Group training task. Training is a crucial step that only a trained Person Group can be used by "Identify From Person Group". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual Operation TrainPersonGroup(WaitUntil waitUntil, string personGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(personGroupId, nameof(personGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.TrainPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainPersonGroupRequest(personGroupId, context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.TrainPersonGroup", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Person Group training task. Training is a crucial step that only a trained Large Person Group can be used by "Identify From Large Person Group". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual async Task TrainLargePersonGroupAsync(WaitUntil waitUntil, string largePersonGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.TrainLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainLargePersonGroupRequest(largePersonGroupId, context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.TrainLargePersonGroup", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Person Group training task. Training is a crucial step that only a trained Large Person Group can be used by "Identify From Large Person Group". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the container. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual Operation TrainLargePersonGroup(WaitUntil waitUntil, string largePersonGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.TrainLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainLargePersonGroupRequest(largePersonGroupId, context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.TrainLargePersonGroup", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new person in a Person Directory. To add face to this person, please call Person Directory "Add Person Face". + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// is null. + /// + public virtual async Task> CreatePersonAsync(WaitUntil waitUntil, string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonRequest createPersonRequest = new CreatePersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = await CreatePersonAsync(waitUntil, createPersonRequest.ToRequestContent(), context).ConfigureAwait(false); + return ProtocolOperationHelpers.Convert(response, PersonDirectoryPerson.FromResponse, ClientDiagnostics, "FaceAdministrationClient.CreatePerson"); + } + + /// Creates a new person in a Person Directory. 
To add face to this person, please call Person Directory "Add Person Face". + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// is null. + /// + public virtual Operation CreatePerson(WaitUntil waitUntil, string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonRequest createPersonRequest = new CreatePersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = CreatePerson(waitUntil, createPersonRequest.ToRequestContent(), context); + return ProtocolOperationHelpers.Convert(response, PersonDirectoryPerson.FromResponse, ClientDiagnostics, "FaceAdministrationClient.CreatePerson"); + } + + /// + /// [Protocol Method] Creates a new person in a Person Directory. To add face to this person, please call Person Directory "Add Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. 
+ /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task> CreatePersonAsync(WaitUntil waitUntil, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonRequest(content, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.CreatePerson", OperationFinalStateVia.Location, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new person in a Person Directory. To add face to this person, please call Person Directory "Add Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual Operation CreatePerson(WaitUntil waitUntil, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonRequest(content, context); + return ProtocolOperationHelpers.ProcessMessage(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.CreatePerson", OperationFinalStateVia.Location, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing person from Person Directory. The persistedFaceId(s), userData, person name and face feature(s) in the person entry will all be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual async Task DeletePersonAsync(WaitUntil waitUntil, Guid personId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonRequest(personId, context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.DeletePerson", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete an existing person from Person Directory. The persistedFaceId(s), userData, person name and face feature(s) in the person entry will all be deleted. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual Operation DeletePerson(WaitUntil waitUntil, Guid personId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonRequest(personId, context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.DeletePerson", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person (see Person Directory "Create Person") for face identification or verification. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until Person Directory "Delete Person Face" or "Delete Person" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". 
+ /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// * + /// * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + /// * This is a long running operation. Use Response Header "Operation-Location" to determine when the AddFace operation has successfully propagated for future requests to "Identify". For further information about Operation-Locations see "Get Face Operation Status". + /// + /// + public virtual async Task> AddPersonFaceFromUrlAsync(WaitUntil waitUntil, Guid personId, FaceRecognitionModel recognitionModel, Uri url, IEnumerable targetFace = null, FaceDetectionModel? 
detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(url, nameof(url)); + + AddPersonFaceFromUrlRequest addPersonFaceFromUrlRequest = new AddPersonFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = await AddPersonFaceFromUrlAsync(waitUntil, personId, recognitionModel.ToString(), addPersonFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return ProtocolOperationHelpers.Convert(response, PersonDirectoryFace.FromResponse, ClientDiagnostics, "FaceAdministrationClient.AddPersonFaceFromUrl"); + } + + /// Add a face to a person (see Person Directory "Create Person") for face identification or verification. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until Person Directory "Delete Person Face" or "Delete Person" is called. 
+ /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// * + /// * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + /// * This is a long running operation. Use Response Header "Operation-Location" to determine when the AddFace operation has successfully propagated for future requests to "Identify". For further information about Operation-Locations see "Get Face Operation Status". + /// + /// + public virtual Operation AddPersonFaceFromUrl(WaitUntil waitUntil, Guid personId, FaceRecognitionModel recognitionModel, Uri url, IEnumerable targetFace = null, FaceDetectionModel? 
detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(url, nameof(url)); + + AddPersonFaceFromUrlRequest addPersonFaceFromUrlRequest = new AddPersonFaceFromUrlRequest(url, null); + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = AddPersonFaceFromUrl(waitUntil, personId, recognitionModel.ToString(), addPersonFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return ProtocolOperationHelpers.Convert(response, PersonDirectoryFace.FromResponse, ClientDiagnostics, "FaceAdministrationClient.AddPersonFaceFromUrl"); + } + + /// + /// [Protocol Method] Add a face to a person (see Person Directory "Create Person") for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". 
+ /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task> AddPersonFaceFromUrlAsync(WaitUntil waitUntil, Guid personId, string recognitionModel, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonFaceFromUrlRequest(personId, recognitionModel, content, targetFace, detectionModel, userData, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.AddPersonFaceFromUrl", OperationFinalStateVia.Location, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person (see Person Directory "Create Person") for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. 
+ /// Person ID of the person. + /// The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation AddPersonFaceFromUrl(WaitUntil waitUntil, Guid personId, string recognitionModel, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonFaceFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonFaceFromUrlRequest(personId, recognitionModel, content, targetFace, detectionModel, userData, context); + return ProtocolOperationHelpers.ProcessMessage(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.AddPersonFaceFromUrl", OperationFinalStateVia.Location, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person (see Person Directory "Create Person") for face 
identification or verification. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until Person Directory "Delete Person Face" or "Delete Person" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. 
+ /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// * + /// * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + /// * This is a long running operation. Use Response Header "Operation-Location" to determine when the AddFace operation has successfully propagated for future requests to "Identify". For further information about Operation-Locations see "Get Face Operation Status". + /// + /// + public virtual async Task> AddPersonFaceAsync(WaitUntil waitUntil, Guid personId, FaceRecognitionModel recognitionModel, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = await AddPersonFaceAsync(waitUntil, personId, recognitionModel.ToString(), content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return ProtocolOperationHelpers.Convert(response, PersonDirectoryFace.FromResponse, ClientDiagnostics, "FaceAdministrationClient.AddPersonFace"); + } + + /// Add a face to a person (see Person Directory "Create Person") for face identification or verification. 
+ /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// + /// To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until Person Directory "Delete Person Face" or "Delete Person" is called. + /// + /// Note that persistedFaceId is different from faceId generated by "Detect". + /// > + /// * + /// * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// * Each person entry can hold up to 248 faces. + /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + /// * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from "Detect", there's no guarantee to detect and add the face successfully. + /// * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. 
+ /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model + /// * + /// * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + /// * This is a long running operation. Use Response Header "Operation-Location" to determine when the AddFace operation has successfully propagated for future requests to "Identify". For further information about Operation-Locations see "Get Face Operation Status". + /// + /// + public virtual Operation AddPersonFace(WaitUntil waitUntil, Guid personId, FaceRecognitionModel recognitionModel, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = AddPersonFace(waitUntil, personId, recognitionModel.ToString(), content, targetFace, detectionModel?.ToString(), userData, context); + return ProtocolOperationHelpers.Convert(response, PersonDirectoryFace.FromResponse, ClientDiagnostics, "FaceAdministrationClient.AddPersonFace"); + } + + /// + /// [Protocol Method] Add a face to a person (see Person Directory "Create Person") for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual async Task> AddPersonFaceAsync(WaitUntil waitUntil, Guid personId, string recognitionModel, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonFaceRequest(personId, recognitionModel, content, targetFace, detectionModel, userData, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.AddPersonFace", OperationFinalStateVia.Location, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person (see Person Directory "Create Person") for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. 
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation AddPersonFace(WaitUntil waitUntil, Guid personId, string recognitionModel, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.AddPersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateAddPersonFaceRequest(personId, recognitionModel, content, targetFace, detectionModel, userData, context); + return ProtocolOperationHelpers.ProcessMessage(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.AddPersonFace", OperationFinalStateVia.Location, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Delete a face from a person in Person Directory by specified personId and persistedFaceId. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. + /// Face ID of the face. 
+ /// The cancellation token to use. + /// Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + /// + public virtual async Task DeletePersonFaceAsync(WaitUntil waitUntil, Guid personId, FaceRecognitionModel recognitionModel, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + return await DeletePersonFaceAsync(waitUntil, personId, recognitionModel.ToString(), persistedFaceId, context).ConfigureAwait(false); + } + + /// Delete a face from a person in Person Directory by specified personId and persistedFaceId. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. + /// Face ID of the face. + /// The cancellation token to use. + /// Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + /// + public virtual Operation DeletePersonFace(WaitUntil waitUntil, Guid personId, FaceRecognitionModel recognitionModel, Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + return DeletePersonFace(waitUntil, personId, recognitionModel.ToString(), persistedFaceId, context); + } + + /// + /// [Protocol Method] Delete a face from a person in Person Directory by specified personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task DeletePersonFaceAsync(WaitUntil waitUntil, Guid personId, string recognitionModel, Guid persistedFaceId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonFaceRequest(personId, recognitionModel, persistedFaceId, context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.DeletePersonFace", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Delete a face from a person in Person Directory by specified personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// Person ID of the person. + /// The 'recognitionModel' associated with faces. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation DeletePersonFace(WaitUntil waitUntil, Guid personId, string recognitionModel, Guid persistedFaceId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(recognitionModel, nameof(recognitionModel)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeletePersonFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonFaceRequest(personId, recognitionModel, persistedFaceId, context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.DeletePersonFace", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. 
For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. + /// User defined name, maximum length is 128. + /// Array of personIds created by Person Directory "Create Person" to be added. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// A Dynamic Person Group is a container that references Person Directory "Create Person". After creation, use Person Directory "Update Dynamic Person Group" to add/remove persons to/from the Dynamic Person Group. + /// + /// Dynamic Person Group and user data will be stored on server until Person Directory "Delete Dynamic Person Group" is called. Use "Identify From Dynamic Person Group" with the dynamicPersonGroupId parameter to identify against persons. + /// + /// No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until Person Directory "Delete Person" or "Delete Person Face" is called. + /// + /// 'recognitionModel' does not need to be specified with Dynamic Person Groups. Dynamic Person Groups are references to Person Directory "Create Person" and therefore work with most all 'recognitionModels'. The faceId's provided during "Identify" determine the 'recognitionModel' used. 
+ /// + /// + public virtual async Task> CreateDynamicPersonGroupWithPersonAsync(WaitUntil waitUntil, string dynamicPersonGroupId, string name, IEnumerable addPersonIds, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(addPersonIds, nameof(addPersonIds)); + + CreateDynamicPersonGroupWithPersonRequest createDynamicPersonGroupWithPersonRequest = new CreateDynamicPersonGroupWithPersonRequest(name, userData, addPersonIds.ToList(), null); + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = await CreateDynamicPersonGroupWithPersonAsync(waitUntil, dynamicPersonGroupId, createDynamicPersonGroupWithPersonRequest.ToRequestContent(), context).ConfigureAwait(false); + return ProtocolOperationHelpers.Convert(response, DynamicPersonGroup.FromResponse, ClientDiagnostics, "FaceAdministrationClient.CreateDynamicPersonGroupWithPerson"); + } + + /// Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData. + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. + /// User defined name, maximum length is 128. + /// Array of personIds created by Person Directory "Create Person" to be added. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// , or is null. + /// is an empty string, and was expected to be non-empty. + /// + /// A Dynamic Person Group is a container that references Person Directory "Create Person". 
After creation, use Person Directory "Update Dynamic Person Group" to add/remove persons to/from the Dynamic Person Group. + /// + /// Dynamic Person Group and user data will be stored on server until Person Directory "Delete Dynamic Person Group" is called. Use "Identify From Dynamic Person Group" with the dynamicPersonGroupId parameter to identify against persons. + /// + /// No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until Person Directory "Delete Person" or "Delete Person Face" is called. + /// + /// 'recognitionModel' does not need to be specified with Dynamic Person Groups. Dynamic Person Groups are references to Person Directory "Create Person" and therefore work with most all 'recognitionModels'. The faceId's provided during "Identify" determine the 'recognitionModel' used. + /// + /// + public virtual Operation CreateDynamicPersonGroupWithPerson(WaitUntil waitUntil, string dynamicPersonGroupId, string name, IEnumerable addPersonIds, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(addPersonIds, nameof(addPersonIds)); + + CreateDynamicPersonGroupWithPersonRequest createDynamicPersonGroupWithPersonRequest = new CreateDynamicPersonGroupWithPersonRequest(name, userData, addPersonIds.ToList(), null); + RequestContext context = FromCancellationToken(cancellationToken); + Operation response = CreateDynamicPersonGroupWithPerson(waitUntil, dynamicPersonGroupId, createDynamicPersonGroupWithPersonRequest.ToRequestContent(), context); + return ProtocolOperationHelpers.Convert(response, DynamicPersonGroup.FromResponse, ClientDiagnostics, "FaceAdministrationClient.CreateDynamicPersonGroupWithPerson"); + } + + /// + /// [Protocol Method] Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and 
user-provided userData. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task> CreateDynamicPersonGroupWithPersonAsync(WaitUntil waitUntil, string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateDynamicPersonGroupWithPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateDynamicPersonGroupWithPersonRequest(dynamicPersonGroupId, content, context); + return await ProtocolOperationHelpers.ProcessMessageAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.CreateDynamicPersonGroupWithPerson", OperationFinalStateVia.OriginalUri, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation CreateDynamicPersonGroupWithPerson(WaitUntil waitUntil, string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.CreateDynamicPersonGroupWithPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateDynamicPersonGroupWithPersonRequest(dynamicPersonGroupId, content, context); + return ProtocolOperationHelpers.ProcessMessage(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.CreateDynamicPersonGroupWithPerson", OperationFinalStateVia.OriginalUri, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes an existing Dynamic Person Group with specified 
dynamicPersonGroupId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task DeleteDynamicPersonGroupAsync(WaitUntil waitUntil, string dynamicPersonGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteDynamicPersonGroupRequest(dynamicPersonGroupId, context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.DeleteDynamicPersonGroup", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Deletes an existing Dynamic Person Group with specified dynamicPersonGroupId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation DeleteDynamicPersonGroup(WaitUntil waitUntil, string dynamicPersonGroupId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.DeleteDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteDynamicPersonGroupRequest(dynamicPersonGroupId, context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.DeleteDynamicPersonGroup", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update the name or userData of an existing Dynamic Person Group, and manage its members by adding or removing persons. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. 
+ /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task UpdateDynamicPersonGroupWithPersonChangesAsync(WaitUntil waitUntil, string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateDynamicPersonGroupWithPersonChanges"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateDynamicPersonGroupWithPersonChangesRequest(dynamicPersonGroupId, content, context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.UpdateDynamicPersonGroupWithPersonChanges", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Update the name or userData of an existing Dynamic Person Group, and manage its members by adding or removing persons. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// ID of the dynamic person group. + /// The content to send as the body of the request. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// or is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation UpdateDynamicPersonGroupWithPersonChanges(WaitUntil waitUntil, string dynamicPersonGroupId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceAdministrationClient.UpdateDynamicPersonGroupWithPersonChanges"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateDynamicPersonGroupWithPersonChangesRequest(dynamicPersonGroupId, content, context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "FaceAdministrationClient.UpdateDynamicPersonGroupWithPersonChanges", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateFaceListRequest(string faceListId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/facelists/", false); + uri.AppendPath(faceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteFaceListRequest(string faceListId, RequestContext context) + { + var message = 
_pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/facelists/", false); + uri.AppendPath(faceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetFaceListRequest(string faceListId, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/facelists/", false); + uri.AppendPath(faceListId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateFaceListRequest(string faceListId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/facelists/", false); + uri.AppendPath(faceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetFaceListsRequest(bool? 
returnRecognitionModel, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Get;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/facelists", false);
            if (returnRecognitionModel != null)
            {
                uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            return message;
        }

        /// <summary> Builds the POST message that adds a face to a face list from an image URL (JSON body). </summary>
        internal HttpMessage CreateAddFaceListFaceFromUrlRequest(string faceListId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Post;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/facelists/", false);
            uri.AppendPath(faceListId, true);
            uri.AppendPath("/persistedfaces", false);
            // An undefined ChangeTrackingList means the caller never set the value; omit the query parameter.
            // NOTE(review): generic type argument on IEnumerable/ChangeTrackingList appears stripped in this patch — confirm against codegen output.
            if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined))
            {
                uri.AppendQueryDelimited("targetFace", targetFace, ",", true);
            }
            if (detectionModel != null)
            {
                uri.AppendQuery("detectionModel", detectionModel, true);
            }
            if (userData != null)
            {
                uri.AppendQuery("userData", userData, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            request.Headers.Add("Content-Type", "application/json");
            request.Content = content;
            return message;
        }

        /// <summary> Builds the POST message that adds a face to a face list from binary image data (octet-stream body). </summary>
        internal HttpMessage CreateAddFaceListFaceRequest(string faceListId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Post;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/facelists/", false);
            uri.AppendPath(faceListId, true);
            uri.AppendPath("/persistedfaces", false);
            if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined))
            {
                uri.AppendQueryDelimited("targetFace", targetFace, ",", true);
            }
            if (detectionModel != null)
            {
                uri.AppendQuery("detectionModel", detectionModel, true);
            }
            if (userData != null)
            {
                uri.AppendQuery("userData", userData, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            // Normalized from lowercase "content-type" to the canonical casing used by every sibling
            // builder in this file (HTTP field names are case-insensitive, so this is cosmetic only).
            request.Headers.Add("Content-Type", "application/octet-stream");
            request.Content = content;
            return message;
        }

        /// <summary> Builds the DELETE message that removes a persisted face from a face list. </summary>
        internal HttpMessage CreateDeleteFaceListFaceRequest(string faceListId, Guid persistedFaceId, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Delete;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/facelists/", false);
            uri.AppendPath(faceListId, true);
            uri.AppendPath("/persistedfaces/", false);
            uri.AppendPath(persistedFaceId, true);
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            return message;
        }

        /// <summary> Builds the PUT message that creates a large face list (JSON body). </summary>
        internal HttpMessage CreateCreateLargeFaceListRequest(string largeFaceListId, RequestContent content, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Put;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/largefacelists/",
false); + uri.AppendPath(largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteLargeFaceListRequest(string largeFaceListId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargeFaceListRequest(string largeFaceListId, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(largeFaceListId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateLargeFaceListRequest(string largeFaceListId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + 
uri.AppendPath(largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargeFaceListsRequest(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargeFaceListTrainingStatusRequest(string largeFaceListId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(largeFaceListId, true); + uri.AppendPath("/training", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateTrainLargeFaceListRequest(string largeFaceListId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + 
uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/largefacelists/", false);
            uri.AppendPath(largeFaceListId, true);
            uri.AppendPath("/train", false);
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            return message;
        }

        /// <summary> Builds the POST message that adds a face to a large face list from an image URL (JSON body). </summary>
        internal HttpMessage CreateAddLargeFaceListFaceFromUrlRequest(string largeFaceListId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Post;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/largefacelists/", false);
            uri.AppendPath(largeFaceListId, true);
            uri.AppendPath("/persistedfaces", false);
            // An undefined ChangeTrackingList means the caller never set the value; omit the query parameter.
            if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined))
            {
                uri.AppendQueryDelimited("targetFace", targetFace, ",", true);
            }
            if (detectionModel != null)
            {
                uri.AppendQuery("detectionModel", detectionModel, true);
            }
            if (userData != null)
            {
                uri.AppendQuery("userData", userData, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            request.Headers.Add("Content-Type", "application/json");
            request.Content = content;
            return message;
        }

        /// <summary> Builds the POST message that adds a face to a large face list from binary image data (octet-stream body). </summary>
        internal HttpMessage CreateAddLargeFaceListFaceRequest(string largeFaceListId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Post;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/largefacelists/", false);
            uri.AppendPath(largeFaceListId, true);
            uri.AppendPath("/persistedfaces", false);
            if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined))
            {
                uri.AppendQueryDelimited("targetFace", targetFace, ",", true);
            }
            if (detectionModel != null)
            {
                uri.AppendQuery("detectionModel", detectionModel, true);
            }
            if (userData != null)
            {
                uri.AppendQuery("userData", userData, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            // Normalized from lowercase "content-type" to the canonical casing used by every sibling
            // builder in this file (HTTP field names are case-insensitive, so this is cosmetic only).
            request.Headers.Add("Content-Type", "application/octet-stream");
            request.Content = content;
            return message;
        }

        /// <summary> Builds the DELETE message that removes a persisted face from a large face list. </summary>
        internal HttpMessage CreateDeleteLargeFaceListFaceRequest(string largeFaceListId, Guid persistedFaceId, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Delete;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/largefacelists/", false);
            uri.AppendPath(largeFaceListId, true);
            uri.AppendPath("/persistedfaces/", false);
            uri.AppendPath(persistedFaceId, true);
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            return message;
        }

        /// <summary> Builds the GET message that retrieves a persisted face of a large face list. </summary>
        internal HttpMessage CreateGetLargeFaceListFaceRequest(string largeFaceListId, Guid persistedFaceId, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Get;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/largefacelists/", false);
            uri.AppendPath(largeFaceListId, true);
            uri.AppendPath("/persistedfaces/", false);
            uri.AppendPath(persistedFaceId, true);
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            return
message; + } + + internal HttpMessage CreateUpdateLargeFaceListFaceRequest(string largeFaceListId, Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(largeFaceListId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargeFaceListFacesRequest(string largeFaceListId, string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(largeFaceListId, true); + uri.AppendPath("/persistedfaces", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreatePersonGroupRequest(string personGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + 
uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeletePersonGroupRequest(string personGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetPersonGroupRequest(string personGroupId, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdatePersonGroupRequest(string personGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + 
uri.AppendPath(personGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetPersonGroupsRequest(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetPersonGroupTrainingStatusRequest(string personGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/training", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateTrainPersonGroupRequest(string personGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); 
+ uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/train", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreatePersonGroupPersonRequest(string personGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/persons", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeletePersonGroupPersonRequest(string personGroupId, Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetPersonGroupPersonRequest(string personGroupId, Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + 
uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdatePersonGroupPersonRequest(string personGroupId, Guid personId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetPersonGroupPersonsRequest(string personGroupId, string start, int? 
top, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Get;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/persongroups/", false);
            uri.AppendPath(personGroupId, true);
            uri.AppendPath("/persons", false);
            if (start != null)
            {
                uri.AppendQuery("start", start, true);
            }
            if (top != null)
            {
                uri.AppendQuery("top", top.Value, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            return message;
        }

        /// <summary> Builds the POST message that adds a face to a person group person from an image URL (JSON body). </summary>
        internal HttpMessage CreateAddPersonGroupPersonFaceFromUrlRequest(string personGroupId, Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Post;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/persongroups/", false);
            uri.AppendPath(personGroupId, true);
            uri.AppendPath("/persons/", false);
            uri.AppendPath(personId, true);
            uri.AppendPath("/persistedfaces", false);
            // An undefined ChangeTrackingList means the caller never set the value; omit the query parameter.
            if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined))
            {
                uri.AppendQueryDelimited("targetFace", targetFace, ",", true);
            }
            if (detectionModel != null)
            {
                uri.AppendQuery("detectionModel", detectionModel, true);
            }
            if (userData != null)
            {
                uri.AppendQuery("userData", userData, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            request.Headers.Add("Content-Type", "application/json");
            request.Content = content;
            return message;
        }

        /// <summary> Builds the POST message that adds a face to a person group person from binary image data (octet-stream body). </summary>
        internal HttpMessage CreateAddPersonGroupPersonFaceRequest(string personGroupId, Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Post;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/persongroups/", false);
            uri.AppendPath(personGroupId, true);
            uri.AppendPath("/persons/", false);
            uri.AppendPath(personId, true);
            uri.AppendPath("/persistedfaces", false);
            if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined))
            {
                uri.AppendQueryDelimited("targetFace", targetFace, ",", true);
            }
            if (detectionModel != null)
            {
                uri.AppendQuery("detectionModel", detectionModel, true);
            }
            if (userData != null)
            {
                uri.AppendQuery("userData", userData, true);
            }
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            // Normalized from lowercase "content-type" to the canonical casing used by every sibling
            // builder in this file (HTTP field names are case-insensitive, so this is cosmetic only).
            request.Headers.Add("Content-Type", "application/octet-stream");
            request.Content = content;
            return message;
        }

        /// <summary> Builds the DELETE message that removes a persisted face from a person group person. </summary>
        internal HttpMessage CreateDeletePersonGroupPersonFaceRequest(string personGroupId, Guid personId, Guid persistedFaceId, RequestContext context)
        {
            var message = _pipeline.CreateMessage(context, ResponseClassifier200);
            var request = message.Request;
            request.Method = RequestMethod.Delete;
            var uri = new RawRequestUriBuilder();
            uri.Reset(_endpoint);
            uri.AppendRaw("/face/", false);
            uri.AppendRaw(_apiVersion, true);
            uri.AppendPath("/persongroups/", false);
            uri.AppendPath(personGroupId, true);
            uri.AppendPath("/persons/", false);
            uri.AppendPath(personId, true);
            uri.AppendPath("/persistedfaces/", false);
            uri.AppendPath(persistedFaceId, true);
            request.Uri = uri;
            request.Headers.Add("Accept", "application/json");
            return message;
        }

        internal HttpMessage
CreateGetPersonGroupPersonFaceRequest(string personGroupId, Guid personId, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdatePersonGroupPersonFaceRequest(string personGroupId, Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persongroups/", false); + uri.AppendPath(personGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateCreateLargePersonGroupRequest(string largePersonGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", 
false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteLargePersonGroupRequest(string largePersonGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargePersonGroupRequest(string largePersonGroupId, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateLargePersonGroupRequest(string largePersonGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + 
uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargePersonGroupsRequest(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargePersonGroupTrainingStatusRequest(string largePersonGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/training", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateTrainLargePersonGroupRequest(string largePersonGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var 
request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/train", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateLargePersonGroupPersonRequest(string largePersonGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteLargePersonGroupPersonRequest(string largePersonGroupId, Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargePersonGroupPersonRequest(string largePersonGroupId, Guid personId, RequestContext context) + { + var message = 
_pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateLargePersonGroupPersonRequest(string largePersonGroupId, Guid personId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargePersonGroupPersonsRequest(string largePersonGroupId, string start, int? 
top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAddLargePersonGroupPersonFaceFromUrlRequest(string largePersonGroupId, Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage 
CreateAddLargePersonGroupPersonFaceRequest(string largePersonGroupId, Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("content-type", "application/octet-stream"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteLargePersonGroupPersonFaceRequest(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", 
"application/json"); + return message; + } + + internal HttpMessage CreateGetLargePersonGroupPersonFaceRequest(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateLargePersonGroupPersonFaceRequest(string largePersonGroupId, Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateCreatePersonRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new 
RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeletePersonRequest(Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetPersonRequest(Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdatePersonRequest(Guid personId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + 
request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetPersonsRequest(string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetDynamicPersonGroupReferencesRequest(Guid personId, string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/dynamicPersonGroupReferences", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAddPersonFaceFromUrlRequest(Guid personId, string recognitionModel, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); 
+ uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/recognitionModels/", false); + uri.AppendPath(recognitionModel, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateAddPersonFaceRequest(Guid personId, string recognitionModel, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/recognitionModels/", false); + uri.AppendPath(recognitionModel, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("content-type", "application/octet-stream"); + 
request.Content = content; + return message; + } + + internal HttpMessage CreateDeletePersonFaceRequest(Guid personId, string recognitionModel, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/recognitionModels/", false); + uri.AppendPath(recognitionModel, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetPersonFaceRequest(Guid personId, string recognitionModel, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/recognitionModels/", false); + uri.AppendPath(recognitionModel, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdatePersonFaceRequest(Guid personId, string recognitionModel, Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + 
uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/recognitionModels/", false); + uri.AppendPath(recognitionModel, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetPersonFacesRequest(Guid personId, string recognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/recognitionModels/", false); + uri.AppendPath(recognitionModel, true); + uri.AppendPath("/persistedfaces", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateDynamicPersonGroupWithPersonRequest(string dynamicPersonGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups/", false); + uri.AppendPath(dynamicPersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateCreateDynamicPersonGroupRequest(string dynamicPersonGroupId, RequestContent content, 
RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups/", false); + uri.AppendPath(dynamicPersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteDynamicPersonGroupRequest(string dynamicPersonGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups/", false); + uri.AppendPath(dynamicPersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetDynamicPersonGroupRequest(string dynamicPersonGroupId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups/", false); + uri.AppendPath(dynamicPersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateDynamicPersonGroupWithPersonChangesRequest(string dynamicPersonGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); 
+ var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups/", false); + uri.AppendPath(dynamicPersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateUpdateDynamicPersonGroupRequest(string dynamicPersonGroupId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups/", false); + uri.AppendPath(dynamicPersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetDynamicPersonGroupsRequest(string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetDynamicPersonGroupPersonsRequest(string dynamicPersonGroupId, string start, int? 
top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/dynamicpersongroups/", false); + uri.AppendPath(dynamicPersonGroupId, true); + uri.AppendPath("/persons", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier202; + private static ResponseClassifier ResponseClassifier202 => _responseClassifier202 ??= new StatusCodeClassifier(stackalloc ushort[] { 202 }); + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributeType.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributeType.cs new file mode 100644 index 0000000000000..f2a696b7e9737 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributeType.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// Available options for detect face with attribute. + public readonly partial struct FaceAttributeType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceAttributeType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string HeadPoseValue = "headPose"; + private const string GlassesValue = "glasses"; + private const string OcclusionValue = "occlusion"; + private const string AccessoriesValue = "accessories"; + private const string BlurValue = "blur"; + private const string ExposureValue = "exposure"; + private const string NoiseValue = "noise"; + private const string MaskValue = "mask"; + private const string QualityForRecognitionValue = "qualityForRecognition"; + private const string AgeValue = "age"; + private const string SmileValue = "smile"; + private const string FacialHairValue = "facialHair"; + private const string HairValue = "hair"; + + /// 3-D roll/yaw/pitch angles for face direction. + public static FaceAttributeType HeadPose { get; } = new FaceAttributeType(HeadPoseValue); + /// Glasses type. Values include 'NoGlasses', 'ReadingGlasses', 'Sunglasses', 'SwimmingGoggles'. + public static FaceAttributeType Glasses { get; } = new FaceAttributeType(GlassesValue); + /// Whether each facial area is occluded, including forehead, eyes and mouth. + public static FaceAttributeType Occlusion { get; } = new FaceAttributeType(OcclusionValue); + /// Accessories around face, including 'headwear', 'glasses' and 'mask'. Empty array means no accessories detected. Note this is after a face is detected. Large mask could result in no face to be detected. + public static FaceAttributeType Accessories { get; } = new FaceAttributeType(AccessoriesValue); + /// Face is blurry or not. Level returns 'Low', 'Medium' or 'High'. 
Value returns a number between [0,1], the larger the blurrier. + public static FaceAttributeType Blur { get; } = new FaceAttributeType(BlurValue); + /// Face exposure level. Level returns 'GoodExposure', 'OverExposure' or 'UnderExposure'. + public static FaceAttributeType Exposure { get; } = new FaceAttributeType(ExposureValue); + /// Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier. + public static FaceAttributeType Noise { get; } = new FaceAttributeType(NoiseValue); + /// Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered. + public static FaceAttributeType Mask { get; } = new FaceAttributeType(MaskValue); + /// The overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. The value is an informal rating of low, medium, or high. Only 'high' quality images are recommended for person enrollment and quality at or above 'medium' is recommended for identification scenarios. The attribute is only available when using any combinations of detection models detection_01 or detection_03, and recognition models recognition_03 or recognition_04. + public static FaceAttributeType QualityForRecognition { get; } = new FaceAttributeType(QualityForRecognitionValue); + /// Age in years. + public static FaceAttributeType Age { get; } = new FaceAttributeType(AgeValue); + /// Smile intensity, a number between [0,1]. + public static FaceAttributeType Smile { get; } = new FaceAttributeType(SmileValue); + /// Properties describing facial hair attributes. + public static FaceAttributeType FacialHair { get; } = new FaceAttributeType(FacialHairValue); + /// Properties describing hair attributes. 
+ public static FaceAttributeType Hair { get; } = new FaceAttributeType(HairValue); + /// Determines if two values are the same. + public static bool operator ==(FaceAttributeType left, FaceAttributeType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceAttributeType left, FaceAttributeType right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator FaceAttributeType(string value) => new FaceAttributeType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceAttributeType other && Equals(other); + /// + public bool Equals(FaceAttributeType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.Serialization.cs new file mode 100644 index 0000000000000..be4288ee3749c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.Serialization.cs @@ -0,0 +1,346 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceAttributes : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceAttributes)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (Optional.IsDefined(Age)) + { + writer.WritePropertyName("age"u8); + writer.WriteNumberValue(Age.Value); + } + if (Optional.IsDefined(Smile)) + { + writer.WritePropertyName("smile"u8); + writer.WriteNumberValue(Smile.Value); + } + if (Optional.IsDefined(FacialHair)) + { + writer.WritePropertyName("facialHair"u8); + writer.WriteObjectValue(FacialHair, options); + } + if (Optional.IsDefined(Glasses)) + { + writer.WritePropertyName("glasses"u8); + writer.WriteStringValue(Glasses.Value.ToString()); + } + if (Optional.IsDefined(HeadPose)) + { + writer.WritePropertyName("headPose"u8); + writer.WriteObjectValue(HeadPose, options); + } + if (Optional.IsDefined(Hair)) + { + writer.WritePropertyName("hair"u8); + writer.WriteObjectValue(Hair, options); + } + if (Optional.IsDefined(Occlusion)) + { + writer.WritePropertyName("occlusion"u8); + writer.WriteObjectValue(Occlusion, options); + } + if (Optional.IsCollectionDefined(Accessories)) + { + writer.WritePropertyName("accessories"u8); + writer.WriteStartArray(); + foreach (var item in Accessories) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Blur)) + { + writer.WritePropertyName("blur"u8); + writer.WriteObjectValue(Blur, options); + } + if (Optional.IsDefined(Exposure)) + { + writer.WritePropertyName("exposure"u8); + writer.WriteObjectValue(Exposure, options); + } + if (Optional.IsDefined(Noise)) + { + writer.WritePropertyName("noise"u8); + writer.WriteObjectValue(Noise, options); + } + if (Optional.IsDefined(Mask)) + { + writer.WritePropertyName("mask"u8); + writer.WriteObjectValue(Mask, options); + } + if (Optional.IsDefined(QualityForRecognition)) + { + writer.WritePropertyName("qualityForRecognition"u8); + 
writer.WriteStringValue(QualityForRecognition.Value.ToString()); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceAttributes IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceAttributes)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceAttributes(document.RootElement, options); + } + + internal static FaceAttributes DeserializeFaceAttributes(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + float? age = default; + float? smile = default; + FacialHair facialHair = default; + GlassesType? glasses = default; + HeadPose headPose = default; + HairProperties hair = default; + OcclusionProperties occlusion = default; + IReadOnlyList accessories = default; + BlurProperties blur = default; + ExposureProperties exposure = default; + NoiseProperties noise = default; + MaskProperties mask = default; + QualityForRecognition? 
qualityForRecognition = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("age"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + age = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("smile"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + smile = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("facialHair"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + facialHair = FacialHair.DeserializeFacialHair(property.Value, options); + continue; + } + if (property.NameEquals("glasses"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + glasses = new GlassesType(property.Value.GetString()); + continue; + } + if (property.NameEquals("headPose"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + headPose = HeadPose.DeserializeHeadPose(property.Value, options); + continue; + } + if (property.NameEquals("hair"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + hair = HairProperties.DeserializeHairProperties(property.Value, options); + continue; + } + if (property.NameEquals("occlusion"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + occlusion = OcclusionProperties.DeserializeOcclusionProperties(property.Value, options); + continue; + } + if (property.NameEquals("accessories"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(AccessoryItem.DeserializeAccessoryItem(item, options)); + } + accessories = array; + continue; + } + if (property.NameEquals("blur"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + 
continue; + } + blur = BlurProperties.DeserializeBlurProperties(property.Value, options); + continue; + } + if (property.NameEquals("exposure"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + exposure = ExposureProperties.DeserializeExposureProperties(property.Value, options); + continue; + } + if (property.NameEquals("noise"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + noise = NoiseProperties.DeserializeNoiseProperties(property.Value, options); + continue; + } + if (property.NameEquals("mask"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + mask = MaskProperties.DeserializeMaskProperties(property.Value, options); + continue; + } + if (property.NameEquals("qualityForRecognition"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + qualityForRecognition = new QualityForRecognition(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceAttributes( + age, + smile, + facialHair, + glasses, + headPose, + hair, + occlusion, + accessories ?? new ChangeTrackingList(), + blur, + exposure, + noise, + mask, + qualityForRecognition, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceAttributes)} does not support writing '{options.Format}' format."); + } + } + + FaceAttributes IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceAttributes(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceAttributes)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceAttributes FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceAttributes(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.cs new file mode 100644 index 0000000000000..277164079470e --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceAttributes.cs @@ -0,0 +1,114 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face attributes for the detected face. + public partial class FaceAttributes + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal FaceAttributes() + { + Accessories = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Age in years. + /// Smile intensity, a number between [0,1]. + /// Properties describing facial hair attributes. + /// Glasses type if any of the face. + /// 3-D roll/yaw/pitch angles for face direction. + /// Properties describing hair attributes. + /// Properties describing occlusions on a given face. + /// Properties describing any accessories on a given face. + /// Properties describing any presence of blur within the image. + /// Properties describing exposure level of the image. + /// Properties describing noise level of the image. + /// Properties describing the presence of a mask on a given face. + /// Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. + /// Keeps track of any properties unknown to the library. + internal FaceAttributes(float? age, float? smile, FacialHair facialHair, GlassesType? glasses, HeadPose headPose, HairProperties hair, OcclusionProperties occlusion, IReadOnlyList accessories, BlurProperties blur, ExposureProperties exposure, NoiseProperties noise, MaskProperties mask, QualityForRecognition? 
qualityForRecognition, IDictionary serializedAdditionalRawData) + { + Age = age; + Smile = smile; + FacialHair = facialHair; + Glasses = glasses; + HeadPose = headPose; + Hair = hair; + Occlusion = occlusion; + Accessories = accessories; + Blur = blur; + Exposure = exposure; + Noise = noise; + Mask = mask; + QualityForRecognition = qualityForRecognition; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Age in years. + public float? Age { get; } + /// Smile intensity, a number between [0,1]. + public float? Smile { get; } + /// Properties describing facial hair attributes. + public FacialHair FacialHair { get; } + /// Glasses type if any of the face. + public GlassesType? Glasses { get; } + /// 3-D roll/yaw/pitch angles for face direction. + public HeadPose HeadPose { get; } + /// Properties describing hair attributes. + public HairProperties Hair { get; } + /// Properties describing occlusions on a given face. + public OcclusionProperties Occlusion { get; } + /// Properties describing any accessories on a given face. + public IReadOnlyList Accessories { get; } + /// Properties describing any presence of blur within the image. + public BlurProperties Blur { get; } + /// Properties describing exposure level of the image. + public ExposureProperties Exposure { get; } + /// Properties describing noise level of the image. + public NoiseProperties Noise { get; } + /// Properties describing the presence of a mask on a given face. + public MaskProperties Mask { get; } + /// Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. + public QualityForRecognition? 
QualityForRecognition { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceClient.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceClient.cs new file mode 100644 index 0000000000000..3abcf46f50d60 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceClient.cs @@ -0,0 +1,2248 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated client. + /// The Face service client. + public partial class FaceClient + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of FaceClient for mocking. + protected FaceClient() + { + } + + /// Initializes a new instance of FaceClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. 
+ public FaceClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions()) + { + } + + /// Initializes a new instance of FaceClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. + public FaceClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions()) + { + } + + /// Initializes a new instance of FaceClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// The options for configuring the client. + /// or is null. + public FaceClient(Uri endpoint, AzureKeyCredential credential, AzureAIVisionFaceClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new AzureAIVisionFaceClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _keyCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// Initializes a new instance of FaceClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// The options for configuring the client. + /// or is null. 
+ public FaceClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new AzureAIVisionFaceClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _tokenCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. 
Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task DetectFromUrlAsync(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.DetectFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectFromUrlRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. 
Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual Response DetectFromUrl(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? 
faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.DetectFromUrl"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectFromUrlRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. 
+ /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task DetectAsync(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.Detect"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 
Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual Response Detect(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? 
faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.Detect"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Given query face's faceId, to search the similar-looking faces from a faceId array. A faceId array contains the faces created by Detect. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An array of candidate faceIds. All of them are created by "Detect" and the faceIds will expire 24 hours after the detection call. The number of faceIds is limited to 1000. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// The cancellation token to use. + /// is null. + /// + /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. + /// + /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. 
+ /// + /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array. + /// + /// + public virtual async Task>> FindSimilarAsync(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + + FindSimilarRequest findSimilarRequest = new FindSimilarRequest(faceId, maxNumOfCandidatesReturned, mode, faceIds.ToList(), null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await FindSimilarAsync(findSimilarRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// Given query face's faceId, to search the similar-looking faces from a faceId array. A faceId array contains the faces created by Detect. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An array of candidate faceIds. All of them are created by "Detect" and the faceIds will expire 24 hours after the detection call. The number of faceIds is limited to 1000. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// The cancellation token to use. + /// is null. 
+ /// + /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. + /// + /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. + /// + /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array. + /// + /// + public virtual Response> FindSimilar(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + + FindSimilarRequest findSimilarRequest = new FindSimilarRequest(faceId, maxNumOfCandidatesReturned, mode, faceIds.ToList(), null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = FindSimilar(findSimilarRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a faceId array. A faceId array contains the faces created by Detect. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task FindSimilarAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilar"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a faceId array. A faceId array contains the faces created by Detect. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response FindSimilar(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilar"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Given query face's faceId, to search the similar-looking faces from a Face List. A 'faceListId' is created by Create Face List. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Face List, created in "Create Face List". Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// The cancellation token to use. + /// is null. + /// + /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. + /// + /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. 
+ /// + /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Face List. + /// + /// + public virtual async Task>> FindSimilarFromFaceListAsync(Guid faceId, string faceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceListId, nameof(faceListId)); + + FindSimilarFromFaceListRequest findSimilarFromFaceListRequest = new FindSimilarFromFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, faceListId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await FindSimilarFromFaceListAsync(findSimilarFromFaceListRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// Given query face's faceId, to search the similar-looking faces from a Face List. A 'faceListId' is created by Create Face List. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Face List, created in "Create Face List". Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// The cancellation token to use. + /// is null. 
+ /// + /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. + /// + /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. + /// + /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Face List. + /// + /// + public virtual Response> FindSimilarFromFaceList(Guid faceId, string faceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceListId, nameof(faceListId)); + + FindSimilarFromFaceListRequest findSimilarFromFaceListRequest = new FindSimilarFromFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, faceListId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = FindSimilarFromFaceList(findSimilarFromFaceListRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a Face List. A 'faceListId' is created by Create Face List. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task FindSimilarFromFaceListAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromFaceListRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a Face List. A 'faceListId' is created by Create Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response FindSimilarFromFaceList(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromFaceListRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// The cancellation token to use. + /// is null. + /// + /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. + /// + /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. 
+ /// + /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Large Face List. + /// + /// + public virtual async Task>> FindSimilarFromLargeFaceListAsync(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId)); + + FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await FindSimilarFromLargeFaceListAsync(findSimilarFromLargeFaceListRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. 
+ /// The cancellation token to use. + /// is null. + /// + /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. + /// + /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. + /// + /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Large Face List. + /// + /// + public virtual Response> FindSimilarFromLargeFaceList(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId)); + + FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = FindSimilarFromLargeFaceList(findSimilarFromLargeFaceListRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a Large Face List. 
A 'largeFaceListId' is created by Create Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task FindSimilarFromLargeFaceListAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response FindSimilarFromLargeFaceList(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// personGroupId of the target Person Group, created by "Create Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Group (given by personGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Person Group should be trained to make it ready for identification. See more in "Train Person Group". + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. 
+ /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * Try "Find Similar" when you need to find similar faces from a Face List/Large Face List instead of a Person Group. + /// > * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group. + /// + /// + public virtual async Task>> IdentifyFromPersonGroupAsync(IEnumerable faceIds, string personGroupId, int? maxNumOfCandidatesReturned = null, float? confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(personGroupId, nameof(personGroupId)); + + IdentifyFromPersonGroupRequest identifyFromPersonGroupRequest = new IdentifyFromPersonGroupRequest(faceIds.ToList(), personGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await IdentifyFromPersonGroupAsync(identifyFromPersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Person Group. + /// Array of query faces faceIds, created by the "Detect". 
Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// personGroupId of the target Person Group, created by "Create Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Group (given by personGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Person Group should be trained to make it ready for identification. See more in "Train Person Group". + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * Try "Find Similar" when you need to find similar faces from a Face List/Large Face List instead of a Person Group. + /// > * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group. 
+ /// + /// + public virtual Response> IdentifyFromPersonGroup(IEnumerable faceIds, string personGroupId, int? maxNumOfCandidatesReturned = null, float? confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(personGroupId, nameof(personGroupId)); + + IdentifyFromPersonGroupRequest identifyFromPersonGroupRequest = new IdentifyFromPersonGroupRequest(faceIds.ToList(), personGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = IdentifyFromPersonGroup(identifyFromPersonGroupRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task IdentifyFromPersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromPersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response IdentifyFromPersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromPersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. 
+ /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Large Person Group (given by largePersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Large Person Group should be trained to make it ready for identification. See more in "Train Large Person Group". + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * Try "Find Similar" when you need to find similar faces from a Face List/Large Face List instead of a Person Group/Large Person Group. + /// > * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group or Large Person Group. 
+ /// + /// + public virtual async Task>> IdentifyFromLargePersonGroupAsync(IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await IdentifyFromLargePersonGroupAsync(identifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Large Person Group (given by largePersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Large Person Group should be trained to make it ready for identification. See more in "Train Large Person Group". + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * Try "Find Similar" when you need to find similar faces from a Face List/Large Face List instead of a Person Group/Large Person Group. + /// > * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group or Large Person Group. + /// + /// + public virtual Response> IdentifyFromLargePersonGroup(IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? 
confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = IdentifyFromLargePersonGroup(identifyFromLargePersonGroupRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task IdentifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response IdentifyFromLargePersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a person directory personIds array. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. 
The valid number of faceIds is between [1, 10]. + /// Array of personIds created in Person Directory "Create Person". The valid number of personIds is between [1,30]. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Directory Persons (given by personIds), and return candidate person(s) for that face ranked by similarity confidence. + /// Passing personIds with an array with one element "*" can perform the operation over entire person directory. + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces. + /// + /// + public virtual async Task>> IdentifyFromPersonDirectoryAsync(IEnumerable faceIds, IEnumerable personIds, int? maxNumOfCandidatesReturned = null, float? 
confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(personIds, nameof(personIds)); + + IdentifyFromPersonDirectoryRequest identifyFromPersonDirectoryRequest = new IdentifyFromPersonDirectoryRequest(faceIds.ToList(), personIds.ToList(), maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await IdentifyFromPersonDirectoryAsync(identifyFromPersonDirectoryRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a person directory personIds array. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// Array of personIds created in Person Directory "Create Person". The valid number of personIds is between [1,30]. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. 
+ /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Directory Persons (given by personIds), and return candidate person(s) for that face ranked by similarity confidence. + /// Passing personIds with an array with one element "*" can perform the operation over entire person directory. + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces. + /// + /// + public virtual Response> IdentifyFromPersonDirectory(IEnumerable faceIds, IEnumerable personIds, int? maxNumOfCandidatesReturned = null, float? 
confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(personIds, nameof(personIds)); + + IdentifyFromPersonDirectoryRequest identifyFromPersonDirectoryRequest = new IdentifyFromPersonDirectoryRequest(faceIds.ToList(), personIds.ToList(), maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = IdentifyFromPersonDirectory(identifyFromPersonDirectoryRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a person directory personIds array. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task IdentifyFromPersonDirectoryAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromPersonDirectory"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromPersonDirectoryRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a person directory personIds array. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response IdentifyFromPersonDirectory(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromPersonDirectory"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromPersonDirectoryRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Dynamic Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. 
The valid number of faceIds is between [1, 10]. + /// DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Dynamic Person Group (given by dynamicPersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces. + /// + /// + public virtual async Task>> IdentifyFromDynamicPersonGroupAsync(IEnumerable faceIds, string dynamicPersonGroupId, int? maxNumOfCandidatesReturned = null, float? 
confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + IdentifyFromDynamicPersonGroupRequest identifyFromDynamicPersonGroupRequest = new IdentifyFromDynamicPersonGroupRequest(faceIds.ToList(), dynamicPersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await IdentifyFromDynamicPersonGroupAsync(identifyFromDynamicPersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Dynamic Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. 
+ /// + /// For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Dynamic Person Group (given by dynamicPersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. + /// > [!NOTE] + /// > + /// > * + /// > * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. + /// > * Each person could have more than one face, but no more than 248 faces. + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. + /// > * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces. + /// + /// + public virtual Response> IdentifyFromDynamicPersonGroup(IEnumerable faceIds, string dynamicPersonGroupId, int? maxNumOfCandidatesReturned = null, float? 
confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + IdentifyFromDynamicPersonGroupRequest identifyFromDynamicPersonGroupRequest = new IdentifyFromDynamicPersonGroupRequest(faceIds.ToList(), dynamicPersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = IdentifyFromDynamicPersonGroup(identifyFromDynamicPersonGroupRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Dynamic Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task IdentifyFromDynamicPersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromDynamicPersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Dynamic Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response IdentifyFromDynamicPersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromDynamicPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromDynamicPersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Verify whether two faces belong to a same person. + /// The faceId of one face, come from "Detect". + /// The faceId of another face, come from "Detect". + /// The cancellation token to use. 
+ /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The 'recognitionModel' associated with the both faces should be the same. + /// + /// + public virtual async Task> VerifyFaceToFaceAsync(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default) + { + VerifyFaceToFaceRequest verifyFaceToFaceRequest = new VerifyFaceToFaceRequest(faceId1, faceId2, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await VerifyFaceToFaceAsync(verifyFaceToFaceRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// Verify whether two faces belong to a same person. + /// The faceId of one face, come from "Detect". + /// The faceId of another face, come from "Detect". + /// The cancellation token to use. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The 'recognitionModel' associated with the both faces should be the same. 
+ /// + /// + public virtual Response VerifyFaceToFace(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default) + { + VerifyFaceToFaceRequest verifyFaceToFaceRequest = new VerifyFaceToFaceRequest(faceId1, faceId2, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = VerifyFaceToFace(verifyFaceToFaceRequest.ToRequestContent(), context); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Verify whether two faces belong to a same person. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task VerifyFaceToFaceAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFaceToFace"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFaceToFaceRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Verify whether two faces belong to a same person. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. 
+ /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response VerifyFaceToFace(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFaceToFace"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFaceToFaceRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Verify whether a face belongs to a person in a Person Group. + /// The faceId of the face, come from "Detect". + /// Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in "Create Person Group". + /// Specify a certain person in Person Group. + /// The cancellation token to use. + /// is null. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Person Group. 
+ /// + /// + public virtual async Task> VerifyFromPersonGroupAsync(Guid faceId, string personGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(personGroupId, nameof(personGroupId)); + + VerifyFromPersonGroupRequest verifyFromPersonGroupRequest = new VerifyFromPersonGroupRequest(faceId, personGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await VerifyFromPersonGroupAsync(verifyFromPersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// Verify whether a face belongs to a person in a Person Group. + /// The faceId of the face, come from "Detect". + /// Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in "Create Person Group". + /// Specify a certain person in Person Group. + /// The cancellation token to use. + /// is null. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Person Group. 
+ /// + /// + public virtual Response VerifyFromPersonGroup(Guid faceId, string personGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(personGroupId, nameof(personGroupId)); + + VerifyFromPersonGroupRequest verifyFromPersonGroupRequest = new VerifyFromPersonGroupRequest(faceId, personGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = VerifyFromPersonGroup(verifyFromPersonGroupRequest.ToRequestContent(), context); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task VerifyFromPersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromPersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Person Group. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response VerifyFromPersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromPersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromPersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Verify whether a face belongs to a person in a Large Person Group. + /// The faceId of the face, come from "Detect". + /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// The cancellation token to use. + /// is null. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Large Person Group. 
+ /// + /// + public virtual async Task> VerifyFromLargePersonGroupAsync(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await VerifyFromLargePersonGroupAsync(verifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// Verify whether a face belongs to a person in a Large Person Group. + /// The faceId of the face, come from "Detect". + /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// The cancellation token to use. + /// is null. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Large Person Group. 
+ /// + /// + public virtual Response VerifyFromLargePersonGroup(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = VerifyFromLargePersonGroup(verifyFromLargePersonGroupRequest.ToRequestContent(), context); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task VerifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response VerifyFromLargePersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Verify whether a face belongs to a person in Person Directory. + /// The faceId of the face, come from "Detect". + /// Specify a certain person in PersonDirectory Person. + /// The cancellation token to use. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The Verify operation can only match faces obtained with the same recognition model, that is associated with the query face. 
+ /// + /// + public virtual async Task> VerifyFromPersonDirectoryAsync(Guid faceId, Guid personId, CancellationToken cancellationToken = default) + { + VerifyFromPersonDirectoryRequest verifyFromPersonDirectoryRequest = new VerifyFromPersonDirectoryRequest(faceId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await VerifyFromPersonDirectoryAsync(verifyFromPersonDirectoryRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// Verify whether a face belongs to a person in Person Directory. + /// The faceId of the face, come from "Detect". + /// Specify a certain person in PersonDirectory Person. + /// The cancellation token to use. + /// + /// > [!NOTE] + /// > + /// > * + /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. + /// > * For the scenarios that are sensitive to accuracy please make your own judgment. + /// > * The Verify operation can only match faces obtained with the same recognition model, that is associated with the query face. + /// + /// + public virtual Response VerifyFromPersonDirectory(Guid faceId, Guid personId, CancellationToken cancellationToken = default) + { + VerifyFromPersonDirectoryRequest verifyFromPersonDirectoryRequest = new VerifyFromPersonDirectoryRequest(faceId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = VerifyFromPersonDirectory(verifyFromPersonDirectoryRequest.ToRequestContent(), context); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in Person Directory. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task VerifyFromPersonDirectoryAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromPersonDirectory"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromPersonDirectoryRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in Person Directory. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response VerifyFromPersonDirectory(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromPersonDirectory"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromPersonDirectoryRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Divide candidate faces into groups based on face similarity. + /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces. + /// The cancellation token to use. + /// is null. + /// + /// > + /// * + /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result. + /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts. + /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces. + /// * The 'recognitionModel' associated with the query faces' faceIds should be the same. 
+ /// + /// + public virtual async Task> GroupAsync(IEnumerable faceIds, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + + GroupRequest groupRequest = new GroupRequest(faceIds.ToList(), null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GroupAsync(groupRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(FaceGroupingResult.FromResponse(response), response); + } + + /// Divide candidate faces into groups based on face similarity. + /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces. + /// The cancellation token to use. + /// is null. + /// + /// > + /// * + /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result. + /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts. + /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces. + /// * The 'recognitionModel' associated with the query faces' faceIds should be the same. 
+ /// + /// + public virtual Response Group(IEnumerable faceIds, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + + GroupRequest groupRequest = new GroupRequest(faceIds.ToList(), null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Group(groupRequest.ToRequestContent(), context); + return Response.FromValue(FaceGroupingResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Divide candidate faces into groups based on face similarity. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.Group"); + scope.Start(); + try + { + using HttpMessage message = CreateGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Divide candidate faces into groups based on face similarity. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. 
+ /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Group(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.Group"); + scope.Start(); + try + { + using HttpMessage message = CreateGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateDetectFromUrlRequest(RequestContent content, string detectionModel, string recognitionModel, bool? returnFaceId, IEnumerable returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? faceIdTimeToLive, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detect", false); + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (recognitionModel != null) + { + uri.AppendQuery("recognitionModel", recognitionModel, true); + } + if (returnFaceId != null) + { + uri.AppendQuery("returnFaceId", returnFaceId.Value, true); + } + if (returnFaceAttributes != null && !(returnFaceAttributes is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("returnFaceAttributes", returnFaceAttributes, ",", true); + } + if (returnFaceLandmarks != null) + { + uri.AppendQuery("returnFaceLandmarks", returnFaceLandmarks.Value, true); + } + if 
(returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + if (faceIdTimeToLive != null) + { + uri.AppendQuery("faceIdTimeToLive", faceIdTimeToLive.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("content-type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDetectRequest(RequestContent content, string detectionModel, string recognitionModel, bool? returnFaceId, IEnumerable returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? faceIdTimeToLive, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detect", false); + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (recognitionModel != null) + { + uri.AppendQuery("recognitionModel", recognitionModel, true); + } + if (returnFaceId != null) + { + uri.AppendQuery("returnFaceId", returnFaceId.Value, true); + } + if (returnFaceAttributes != null && !(returnFaceAttributes is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("returnFaceAttributes", returnFaceAttributes, ",", true); + } + if (returnFaceLandmarks != null) + { + uri.AppendQuery("returnFaceLandmarks", returnFaceLandmarks.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + if (faceIdTimeToLive != null) + { + uri.AppendQuery("faceIdTimeToLive", faceIdTimeToLive.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + 
request.Headers.Add("content-type", "application/octet-stream"); + request.Content = content; + return message; + } + + internal HttpMessage CreateFindSimilarRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/findsimilars", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateFindSimilarFromFaceListRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/findsimilars", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateFindSimilarFromLargeFaceListRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/findsimilars", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage 
CreateIdentifyFromPersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/identify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateIdentifyFromLargePersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/identify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateIdentifyFromPersonDirectoryRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/identify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateIdentifyFromDynamicPersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, 
ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/identify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateVerifyFaceToFaceRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/verify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateVerifyFromPersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/verify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateVerifyFromLargePersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + 
uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/verify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateVerifyFromPersonDirectoryRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/verify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/group", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new 
StatusCodeClassifier(stackalloc ushort[] { 200 }); + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.Serialization.cs new file mode 100644 index 0000000000000..6da0a7d61cc81 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceCollectionTrainingResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceCollectionTrainingResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + writer.WritePropertyName("createdDateTime"u8); + writer.WriteStringValue(CreatedDateTime, "O"); + writer.WritePropertyName("lastActionDateTime"u8); + writer.WriteStringValue(LastActionDateTime, "O"); + writer.WritePropertyName("lastSuccessfulTrainingDateTime"u8); + writer.WriteStringValue(LastSuccessfulTrainingDateTime, "O"); + if (Optional.IsDefined(Message)) + { + writer.WritePropertyName("message"u8); + writer.WriteStringValue(Message); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceCollectionTrainingResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceCollectionTrainingResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceCollectionTrainingResult(document.RootElement, options); + } + + internal static FaceCollectionTrainingResult DeserializeFaceCollectionTrainingResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + FaceOperationStatus status = default; + DateTimeOffset createdDateTime = default; + DateTimeOffset lastActionDateTime = default; + DateTimeOffset lastSuccessfulTrainingDateTime = default; + string message = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("status"u8)) + { + status = new FaceOperationStatus(property.Value.GetString()); + continue; + } + if (property.NameEquals("createdDateTime"u8)) + { + createdDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("lastActionDateTime"u8)) + { + lastActionDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("lastSuccessfulTrainingDateTime"u8)) + { + lastSuccessfulTrainingDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("message"u8)) + { + message = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceCollectionTrainingResult( + status, + createdDateTime, + lastActionDateTime, + lastSuccessfulTrainingDateTime, 
+ message, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceCollectionTrainingResult)} does not support writing '{options.Format}' format."); + } + } + + FaceCollectionTrainingResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceCollectionTrainingResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceCollectionTrainingResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceCollectionTrainingResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceCollectionTrainingResult(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.cs new file mode 100644 index 0000000000000..c6b5bfc37c08b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceCollectionTrainingResult.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Training result of a container. + public partial class FaceCollectionTrainingResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Training status of the container. + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. 
+ /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. + internal FaceCollectionTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime) + { + Status = status; + CreatedDateTime = createdDateTime; + LastActionDateTime = lastActionDateTime; + LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime; + } + + /// Initializes a new instance of . + /// Training status of the container. + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. + /// Show failure message when training failed (omitted when training succeed). + /// Keeps track of any properties unknown to the library. + internal FaceCollectionTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime, string message, IDictionary serializedAdditionalRawData) + { + Status = status; + CreatedDateTime = createdDateTime; + LastActionDateTime = lastActionDateTime; + LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime; + Message = message; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
+ internal FaceCollectionTrainingResult() + { + } + + /// Training status of the container. + public FaceOperationStatus Status { get; } + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + public DateTimeOffset CreatedDateTime { get; } + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + public DateTimeOffset LastActionDateTime { get; } + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. + public DateTimeOffset LastSuccessfulTrainingDateTime { get; } + /// Show failure message when training failed (omitted when training succeed). + public string Message { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionModel.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionModel.cs new file mode 100644 index 0000000000000..e018bb8bb956d --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionModel.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The detection model for the face. + public readonly partial struct FaceDetectionModel : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceDetectionModel(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string Detection01Value = "detection_01"; + private const string Detection02Value = "detection_02"; + private const string Detection03Value = "detection_03"; + + /// The default detection model. 
Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected. + public static FaceDetectionModel Detection01 { get; } = new FaceDetectionModel(Detection01Value); + /// Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces. + public static FaceDetectionModel Detection02 { get; } = new FaceDetectionModel(Detection02Value); + /// Detection model released in 2021 February with improved accuracy especially on small faces. + public static FaceDetectionModel Detection03 { get; } = new FaceDetectionModel(Detection03Value); + /// Determines if two values are the same. + public static bool operator ==(FaceDetectionModel left, FaceDetectionModel right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceDetectionModel left, FaceDetectionModel right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator FaceDetectionModel(string value) => new FaceDetectionModel(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceDetectionModel other && Equals(other); + /// + public bool Equals(FaceDetectionModel other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 
0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.Serialization.cs new file mode 100644 index 0000000000000..e82f7d84db5ec --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.Serialization.cs @@ -0,0 +1,201 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceDetectionResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceDetectionResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (Optional.IsDefined(FaceId)) + { + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId.Value); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + writer.WritePropertyName("faceRectangle"u8); + writer.WriteObjectValue(FaceRectangle, options); + if (Optional.IsDefined(FaceLandmarks)) + { + writer.WritePropertyName("faceLandmarks"u8); + writer.WriteObjectValue(FaceLandmarks, options); + } + if (Optional.IsDefined(FaceAttributes)) + { + writer.WritePropertyName("faceAttributes"u8); + writer.WriteObjectValue(FaceAttributes, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceDetectionResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceDetectionResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceDetectionResult(document.RootElement, options); + } + + internal static FaceDetectionResult DeserializeFaceDetectionResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid? faceId = default; + FaceRecognitionModel? recognitionModel = default; + FaceRectangle faceRectangle = default; + FaceLandmarks faceLandmarks = default; + FaceAttributes faceAttributes = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("faceRectangle"u8)) + { + faceRectangle = FaceRectangle.DeserializeFaceRectangle(property.Value, options); + continue; + } + if (property.NameEquals("faceLandmarks"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + faceLandmarks = FaceLandmarks.DeserializeFaceLandmarks(property.Value, options); + continue; + } + if (property.NameEquals("faceAttributes"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + faceAttributes = FaceAttributes.DeserializeFaceAttributes(property.Value, options); + continue; + } + if 
(options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceDetectionResult( + faceId, + recognitionModel, + faceRectangle, + faceLandmarks, + faceAttributes, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceDetectionResult)} does not support writing '{options.Format}' format."); + } + } + + FaceDetectionResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceDetectionResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceDetectionResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceDetectionResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceDetectionResult(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.cs new file mode 100644 index 0000000000000..d44aa42a3f727 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceDetectionResult.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response for detect API. + public partial class FaceDetectionResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// A rectangle area for the face location on image. + /// is null. + internal FaceDetectionResult(FaceRectangle faceRectangle) + { + Argument.AssertNotNull(faceRectangle, nameof(faceRectangle)); + + FaceRectangle = faceRectangle; + } + + /// Initializes a new instance of . + /// Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. 
To return this, it requires 'returnFaceId' parameter to be true. + /// The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true. + /// A rectangle area for the face location on image. + /// An array of 27-point face landmarks pointing to the important positions of face components. To return this, it requires 'returnFaceLandmarks' parameter to be true. + /// Face attributes for detected face. + /// Keeps track of any properties unknown to the library. + internal FaceDetectionResult(Guid? faceId, FaceRecognitionModel? recognitionModel, FaceRectangle faceRectangle, FaceLandmarks faceLandmarks, FaceAttributes faceAttributes, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + RecognitionModel = recognitionModel; + FaceRectangle = faceRectangle; + FaceLandmarks = faceLandmarks; + FaceAttributes = faceAttributes; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceDetectionResult() + { + } + + /// Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true. + public Guid? FaceId { get; } + /// The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true. + public FaceRecognitionModel? RecognitionModel { get; } + /// A rectangle area for the face location on image. + public FaceRectangle FaceRectangle { get; } + /// An array of 27-point face landmarks pointing to the important positions of face components. To return this, it requires 'returnFaceLandmarks' parameter to be true. + public FaceLandmarks FaceLandmarks { get; } + /// Face attributes for detected face. 
+ public FaceAttributes FaceAttributes { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.Serialization.cs new file mode 100644 index 0000000000000..63770be442013 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.Serialization.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceFindSimilarResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceFindSimilarResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence); + if (Optional.IsDefined(FaceId)) + { + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId.Value); + } + if (Optional.IsDefined(PersistedFaceId)) + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceFindSimilarResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceFindSimilarResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceFindSimilarResult(document.RootElement, options); + } + + internal static FaceFindSimilarResult DeserializeFaceFindSimilarResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + float confidence = default; + Guid? faceId = default; + Guid? 
persistedFaceId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("confidence"u8)) + { + confidence = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("faceId"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("persistedFaceId"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceFindSimilarResult(confidence, faceId, persistedFaceId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceFindSimilarResult)} does not support writing '{options.Format}' format."); + } + } + + FaceFindSimilarResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceFindSimilarResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceFindSimilarResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceFindSimilarResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceFindSimilarResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.cs new file mode 100644 index 0000000000000..511fce4ecd5c1 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceFindSimilarResult.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response body for find similar face operation. + public partial class FaceFindSimilarResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + internal FaceFindSimilarResult(float confidence) + { + Confidence = confidence; + } + + /// Initializes a new instance of . + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + /// faceId of candidate face when find by faceIds. faceId is created by "Detect" and will expire 24 hours after the detection call. + /// persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire. + /// Keeps track of any properties unknown to the library. + internal FaceFindSimilarResult(float confidence, Guid? faceId, Guid? persistedFaceId, IDictionary serializedAdditionalRawData) + { + Confidence = confidence; + FaceId = faceId; + PersistedFaceId = persistedFaceId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceFindSimilarResult() + { + } + + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + public float Confidence { get; } + /// faceId of candidate face when find by faceIds. faceId is created by "Detect" and will expire 24 hours after the detection call. + public Guid? 
FaceId { get; } + /// persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire. + public Guid? PersistedFaceId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.Serialization.cs new file mode 100644 index 0000000000000..01b73022d3ea6 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.Serialization.cs @@ -0,0 +1,185 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceGroupingResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceGroupingResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("groups"u8); + writer.WriteStartArray(); + foreach (var item in Groups) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } + writer.WriteStartArray(); + foreach (var item0 in item) + { + writer.WriteStringValue(item0); + } + writer.WriteEndArray(); + } + writer.WriteEndArray(); + writer.WritePropertyName("messyGroup"u8); + writer.WriteStartArray(); + foreach (var item in MessyGroup) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceGroupingResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceGroupingResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceGroupingResult(document.RootElement, options); + } + + internal static FaceGroupingResult DeserializeFaceGroupingResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList> groups = default; + IReadOnlyList messyGroup = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("groups"u8)) + { + List> array = new List>(); + foreach (var item in property.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + List array0 = new List(); + foreach (var item0 in item.EnumerateArray()) + { + array0.Add(item0.GetGuid()); + } + array.Add(array0); + } + } + groups = array; + continue; + } + if (property.NameEquals("messyGroup"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + messyGroup = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceGroupingResult(groups, messyGroup, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceGroupingResult)} does not support writing '{options.Format}' format."); + } + } + + FaceGroupingResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceGroupingResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceGroupingResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceGroupingResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceGroupingResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.cs new file mode 100644 index 0000000000000..b309c74e3c30c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceGroupingResult.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Response body for group face operation. + public partial class FaceGroupingResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// A partition of the original faces based on face similarity. Groups are ranked by number of faces. + /// Face ids array of faces that cannot find any similar faces from original faces. + /// or is null. + internal FaceGroupingResult(IEnumerable> groups, IEnumerable messyGroup) + { + Argument.AssertNotNull(groups, nameof(groups)); + Argument.AssertNotNull(messyGroup, nameof(messyGroup)); + + Groups = groups.ToList(); + MessyGroup = messyGroup.ToList(); + } + + /// Initializes a new instance of . + /// A partition of the original faces based on face similarity. Groups are ranked by number of faces. + /// Face ids array of faces that cannot find any similar faces from original faces. + /// Keeps track of any properties unknown to the library. 
+ internal FaceGroupingResult(IReadOnlyList> groups, IReadOnlyList messyGroup, IDictionary serializedAdditionalRawData) + { + Groups = groups; + MessyGroup = messyGroup; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceGroupingResult() + { + } + + /// A partition of the original faces based on face similarity. Groups are ranked by number of faces. + public IReadOnlyList> Groups { get; } + /// Face ids array of faces that cannot find any similar faces from original faces. + public IReadOnlyList MessyGroup { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs new file mode 100644 index 0000000000000..3d0dcf5633df6 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceIdentificationCandidate : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceIdentificationCandidate IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceIdentificationCandidate(document.RootElement, options); + } + + internal static FaceIdentificationCandidate DeserializeFaceIdentificationCandidate(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + float confidence = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if 
(property.NameEquals("confidence"u8)) + { + confidence = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{options.Format}' format."); + } + } + + FaceIdentificationCandidate IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceIdentificationCandidate(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceIdentificationCandidate FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceIdentificationCandidate(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs new file mode 100644 index 0000000000000..b4fcaba1400ba --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Candidate for identify call. + public partial class FaceIdentificationCandidate + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// personId of candidate person. + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + internal FaceIdentificationCandidate(Guid personId, float confidence) + { + PersonId = personId; + Confidence = confidence; + } + + /// Initializes a new instance of . + /// personId of candidate person. 
+ /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + /// Keeps track of any properties unknown to the library. + internal FaceIdentificationCandidate(Guid personId, float confidence, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + Confidence = confidence; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceIdentificationCandidate() + { + } + + /// personId of candidate person. + public Guid PersonId { get; } + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + public float Confidence { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs new file mode 100644 index 0000000000000..edee386538228 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceIdentificationResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("candidates"u8); + writer.WriteStartArray(); + foreach (var item in Candidates) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceIdentificationResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceIdentificationResult(document.RootElement, options); + } + + internal static FaceIdentificationResult DeserializeFaceIdentificationResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + IReadOnlyList candidates = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("candidates"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(FaceIdentificationCandidate.DeserializeFaceIdentificationCandidate(item, options)); + } + candidates = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceIdentificationResult(faceId, candidates, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{options.Format}' format."); + } + } + + FaceIdentificationResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceIdentificationResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceIdentificationResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceIdentificationResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs new file mode 100644 index 0000000000000..1489f50611452 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Identify result. + public partial class FaceIdentificationResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// is null. + internal FaceIdentificationResult(Guid faceId, IEnumerable candidates) + { + Argument.AssertNotNull(candidates, nameof(candidates)); + + FaceId = faceId; + Candidates = candidates.ToList(); + } + + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// Keeps track of any properties unknown to the library. 
+ internal FaceIdentificationResult(Guid faceId, IReadOnlyList candidates, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + Candidates = candidates; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceIdentificationResult() + { + } + + /// faceId of the query face. + public Guid FaceId { get; } + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + public IReadOnlyList Candidates { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceImageType.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceImageType.cs new file mode 100644 index 0000000000000..09e5ff17c27dd --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceImageType.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The type of image. + public readonly partial struct FaceImageType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceImageType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string ColorValue = "Color"; + private const string InfraredValue = "Infrared"; + private const string DepthValue = "Depth"; + + /// Color image. + public static FaceImageType Color { get; } = new FaceImageType(ColorValue); + /// Infrared image. + public static FaceImageType Infrared { get; } = new FaceImageType(InfraredValue); + /// Depth image. + public static FaceImageType Depth { get; } = new FaceImageType(DepthValue); + /// Determines if two values are the same. 
+ public static bool operator ==(FaceImageType left, FaceImageType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceImageType left, FaceImageType right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator FaceImageType(string value) => new FaceImageType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceImageType other && Equals(other); + /// + public bool Equals(FaceImageType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.Serialization.cs new file mode 100644 index 0000000000000..dc8450fcf2b9a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.Serialization.cs @@ -0,0 +1,371 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceLandmarks : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceLandmarks)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("pupilLeft"u8); + writer.WriteObjectValue(PupilLeft, options); + writer.WritePropertyName("pupilRight"u8); + writer.WriteObjectValue(PupilRight, options); + writer.WritePropertyName("noseTip"u8); + writer.WriteObjectValue(NoseTip, options); + writer.WritePropertyName("mouthLeft"u8); + writer.WriteObjectValue(MouthLeft, options); + writer.WritePropertyName("mouthRight"u8); + writer.WriteObjectValue(MouthRight, options); + writer.WritePropertyName("eyebrowLeftOuter"u8); + writer.WriteObjectValue(EyebrowLeftOuter, options); + writer.WritePropertyName("eyebrowLeftInner"u8); + writer.WriteObjectValue(EyebrowLeftInner, options); + writer.WritePropertyName("eyeLeftOuter"u8); + writer.WriteObjectValue(EyeLeftOuter, options); + writer.WritePropertyName("eyeLeftTop"u8); + writer.WriteObjectValue(EyeLeftTop, options); + writer.WritePropertyName("eyeLeftBottom"u8); + writer.WriteObjectValue(EyeLeftBottom, options); + writer.WritePropertyName("eyeLeftInner"u8); + writer.WriteObjectValue(EyeLeftInner, options); + writer.WritePropertyName("eyebrowRightInner"u8); + writer.WriteObjectValue(EyebrowRightInner, options); + writer.WritePropertyName("eyebrowRightOuter"u8); + writer.WriteObjectValue(EyebrowRightOuter, options); + writer.WritePropertyName("eyeRightInner"u8); + writer.WriteObjectValue(EyeRightInner, options); + writer.WritePropertyName("eyeRightTop"u8); + writer.WriteObjectValue(EyeRightTop, options); + writer.WritePropertyName("eyeRightBottom"u8); + writer.WriteObjectValue(EyeRightBottom, options); + writer.WritePropertyName("eyeRightOuter"u8); + writer.WriteObjectValue(EyeRightOuter, options); + writer.WritePropertyName("noseRootLeft"u8); + writer.WriteObjectValue(NoseRootLeft, options); + 
writer.WritePropertyName("noseRootRight"u8); + writer.WriteObjectValue(NoseRootRight, options); + writer.WritePropertyName("noseLeftAlarTop"u8); + writer.WriteObjectValue(NoseLeftAlarTop, options); + writer.WritePropertyName("noseRightAlarTop"u8); + writer.WriteObjectValue(NoseRightAlarTop, options); + writer.WritePropertyName("noseLeftAlarOutTip"u8); + writer.WriteObjectValue(NoseLeftAlarOutTip, options); + writer.WritePropertyName("noseRightAlarOutTip"u8); + writer.WriteObjectValue(NoseRightAlarOutTip, options); + writer.WritePropertyName("upperLipTop"u8); + writer.WriteObjectValue(UpperLipTop, options); + writer.WritePropertyName("upperLipBottom"u8); + writer.WriteObjectValue(UpperLipBottom, options); + writer.WritePropertyName("underLipTop"u8); + writer.WriteObjectValue(UnderLipTop, options); + writer.WritePropertyName("underLipBottom"u8); + writer.WriteObjectValue(UnderLipBottom, options); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceLandmarks IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceLandmarks)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceLandmarks(document.RootElement, options); + } + + internal static FaceLandmarks DeserializeFaceLandmarks(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + LandmarkCoordinate pupilLeft = default; + LandmarkCoordinate pupilRight = default; + LandmarkCoordinate noseTip = default; + LandmarkCoordinate mouthLeft = default; + LandmarkCoordinate mouthRight = default; + LandmarkCoordinate eyebrowLeftOuter = default; + LandmarkCoordinate eyebrowLeftInner = default; + LandmarkCoordinate eyeLeftOuter = default; + LandmarkCoordinate eyeLeftTop = default; + LandmarkCoordinate eyeLeftBottom = default; + LandmarkCoordinate eyeLeftInner = default; + LandmarkCoordinate eyebrowRightInner = default; + LandmarkCoordinate eyebrowRightOuter = default; + LandmarkCoordinate eyeRightInner = default; + LandmarkCoordinate eyeRightTop = default; + LandmarkCoordinate eyeRightBottom = default; + LandmarkCoordinate eyeRightOuter = default; + LandmarkCoordinate noseRootLeft = default; + LandmarkCoordinate noseRootRight = default; + LandmarkCoordinate noseLeftAlarTop = default; + LandmarkCoordinate noseRightAlarTop = default; + LandmarkCoordinate noseLeftAlarOutTip = default; + LandmarkCoordinate noseRightAlarOutTip = default; + LandmarkCoordinate upperLipTop = default; + LandmarkCoordinate upperLipBottom = default; + LandmarkCoordinate underLipTop = default; + LandmarkCoordinate underLipBottom = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in 
element.EnumerateObject()) + { + if (property.NameEquals("pupilLeft"u8)) + { + pupilLeft = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("pupilRight"u8)) + { + pupilRight = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("noseTip"u8)) + { + noseTip = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("mouthLeft"u8)) + { + mouthLeft = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("mouthRight"u8)) + { + mouthRight = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyebrowLeftOuter"u8)) + { + eyebrowLeftOuter = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyebrowLeftInner"u8)) + { + eyebrowLeftInner = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeLeftOuter"u8)) + { + eyeLeftOuter = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeLeftTop"u8)) + { + eyeLeftTop = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeLeftBottom"u8)) + { + eyeLeftBottom = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeLeftInner"u8)) + { + eyeLeftInner = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyebrowRightInner"u8)) + { + eyebrowRightInner = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyebrowRightOuter"u8)) + { + eyebrowRightOuter = 
LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeRightInner"u8)) + { + eyeRightInner = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeRightTop"u8)) + { + eyeRightTop = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeRightBottom"u8)) + { + eyeRightBottom = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("eyeRightOuter"u8)) + { + eyeRightOuter = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("noseRootLeft"u8)) + { + noseRootLeft = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("noseRootRight"u8)) + { + noseRootRight = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("noseLeftAlarTop"u8)) + { + noseLeftAlarTop = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("noseRightAlarTop"u8)) + { + noseRightAlarTop = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("noseLeftAlarOutTip"u8)) + { + noseLeftAlarOutTip = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("noseRightAlarOutTip"u8)) + { + noseRightAlarOutTip = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("upperLipTop"u8)) + { + upperLipTop = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("upperLipBottom"u8)) + { + upperLipBottom = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, 
options); + continue; + } + if (property.NameEquals("underLipTop"u8)) + { + underLipTop = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (property.NameEquals("underLipBottom"u8)) + { + underLipBottom = LandmarkCoordinate.DeserializeLandmarkCoordinate(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceLandmarks( + pupilLeft, + pupilRight, + noseTip, + mouthLeft, + mouthRight, + eyebrowLeftOuter, + eyebrowLeftInner, + eyeLeftOuter, + eyeLeftTop, + eyeLeftBottom, + eyeLeftInner, + eyebrowRightInner, + eyebrowRightOuter, + eyeRightInner, + eyeRightTop, + eyeRightBottom, + eyeRightOuter, + noseRootLeft, + noseRootRight, + noseLeftAlarTop, + noseRightAlarTop, + noseLeftAlarOutTip, + noseRightAlarOutTip, + upperLipTop, + upperLipBottom, + underLipTop, + underLipBottom, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceLandmarks)} does not support writing '{options.Format}' format."); + } + } + + FaceLandmarks IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceLandmarks(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceLandmarks)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceLandmarks FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceLandmarks(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.cs new file mode 100644 index 0000000000000..5401caaf015e5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLandmarks.cs @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// A collection of 27-point face landmarks pointing to the important positions of face components. + public partial class FaceLandmarks + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The coordinates of the left eye pupil. + /// The coordinates of the right eye pupil. + /// The coordinates of the nose tip. + /// The coordinates of the mouth left. + /// The coordinates of the mouth right. + /// The coordinates of the left eyebrow outer. + /// The coordinates of the left eyebrow inner. + /// The coordinates of the left eye outer. + /// The coordinates of the left eye top. + /// The coordinates of the left eye bottom. + /// The coordinates of the left eye inner. + /// The coordinates of the right eyebrow inner. + /// The coordinates of the right eyebrow outer. + /// The coordinates of the right eye inner. + /// The coordinates of the right eye top. + /// The coordinates of the right eye bottom. + /// The coordinates of the right eye outer. + /// The coordinates of the nose root left. + /// The coordinates of the nose root right. + /// The coordinates of the nose left alar top. + /// The coordinates of the nose right alar top. + /// The coordinates of the nose left alar out tip. + /// The coordinates of the nose right alar out tip. + /// The coordinates of the upper lip top. + /// The coordinates of the upper lip bottom. + /// The coordinates of the under lip top. + /// The coordinates of the under lip bottom. + /// , , , , , , , , , , , , , , , , , , , , , , , , , or is null. 
+ internal FaceLandmarks(LandmarkCoordinate pupilLeft, LandmarkCoordinate pupilRight, LandmarkCoordinate noseTip, LandmarkCoordinate mouthLeft, LandmarkCoordinate mouthRight, LandmarkCoordinate eyebrowLeftOuter, LandmarkCoordinate eyebrowLeftInner, LandmarkCoordinate eyeLeftOuter, LandmarkCoordinate eyeLeftTop, LandmarkCoordinate eyeLeftBottom, LandmarkCoordinate eyeLeftInner, LandmarkCoordinate eyebrowRightInner, LandmarkCoordinate eyebrowRightOuter, LandmarkCoordinate eyeRightInner, LandmarkCoordinate eyeRightTop, LandmarkCoordinate eyeRightBottom, LandmarkCoordinate eyeRightOuter, LandmarkCoordinate noseRootLeft, LandmarkCoordinate noseRootRight, LandmarkCoordinate noseLeftAlarTop, LandmarkCoordinate noseRightAlarTop, LandmarkCoordinate noseLeftAlarOutTip, LandmarkCoordinate noseRightAlarOutTip, LandmarkCoordinate upperLipTop, LandmarkCoordinate upperLipBottom, LandmarkCoordinate underLipTop, LandmarkCoordinate underLipBottom) + { + Argument.AssertNotNull(pupilLeft, nameof(pupilLeft)); + Argument.AssertNotNull(pupilRight, nameof(pupilRight)); + Argument.AssertNotNull(noseTip, nameof(noseTip)); + Argument.AssertNotNull(mouthLeft, nameof(mouthLeft)); + Argument.AssertNotNull(mouthRight, nameof(mouthRight)); + Argument.AssertNotNull(eyebrowLeftOuter, nameof(eyebrowLeftOuter)); + Argument.AssertNotNull(eyebrowLeftInner, nameof(eyebrowLeftInner)); + Argument.AssertNotNull(eyeLeftOuter, nameof(eyeLeftOuter)); + Argument.AssertNotNull(eyeLeftTop, nameof(eyeLeftTop)); + Argument.AssertNotNull(eyeLeftBottom, nameof(eyeLeftBottom)); + Argument.AssertNotNull(eyeLeftInner, nameof(eyeLeftInner)); + Argument.AssertNotNull(eyebrowRightInner, nameof(eyebrowRightInner)); + Argument.AssertNotNull(eyebrowRightOuter, nameof(eyebrowRightOuter)); + Argument.AssertNotNull(eyeRightInner, nameof(eyeRightInner)); + Argument.AssertNotNull(eyeRightTop, nameof(eyeRightTop)); + Argument.AssertNotNull(eyeRightBottom, nameof(eyeRightBottom)); + Argument.AssertNotNull(eyeRightOuter, 
nameof(eyeRightOuter)); + Argument.AssertNotNull(noseRootLeft, nameof(noseRootLeft)); + Argument.AssertNotNull(noseRootRight, nameof(noseRootRight)); + Argument.AssertNotNull(noseLeftAlarTop, nameof(noseLeftAlarTop)); + Argument.AssertNotNull(noseRightAlarTop, nameof(noseRightAlarTop)); + Argument.AssertNotNull(noseLeftAlarOutTip, nameof(noseLeftAlarOutTip)); + Argument.AssertNotNull(noseRightAlarOutTip, nameof(noseRightAlarOutTip)); + Argument.AssertNotNull(upperLipTop, nameof(upperLipTop)); + Argument.AssertNotNull(upperLipBottom, nameof(upperLipBottom)); + Argument.AssertNotNull(underLipTop, nameof(underLipTop)); + Argument.AssertNotNull(underLipBottom, nameof(underLipBottom)); + + PupilLeft = pupilLeft; + PupilRight = pupilRight; + NoseTip = noseTip; + MouthLeft = mouthLeft; + MouthRight = mouthRight; + EyebrowLeftOuter = eyebrowLeftOuter; + EyebrowLeftInner = eyebrowLeftInner; + EyeLeftOuter = eyeLeftOuter; + EyeLeftTop = eyeLeftTop; + EyeLeftBottom = eyeLeftBottom; + EyeLeftInner = eyeLeftInner; + EyebrowRightInner = eyebrowRightInner; + EyebrowRightOuter = eyebrowRightOuter; + EyeRightInner = eyeRightInner; + EyeRightTop = eyeRightTop; + EyeRightBottom = eyeRightBottom; + EyeRightOuter = eyeRightOuter; + NoseRootLeft = noseRootLeft; + NoseRootRight = noseRootRight; + NoseLeftAlarTop = noseLeftAlarTop; + NoseRightAlarTop = noseRightAlarTop; + NoseLeftAlarOutTip = noseLeftAlarOutTip; + NoseRightAlarOutTip = noseRightAlarOutTip; + UpperLipTop = upperLipTop; + UpperLipBottom = upperLipBottom; + UnderLipTop = underLipTop; + UnderLipBottom = underLipBottom; + } + + /// Initializes a new instance of . + /// The coordinates of the left eye pupil. + /// The coordinates of the right eye pupil. + /// The coordinates of the nose tip. + /// The coordinates of the mouth left. + /// The coordinates of the mouth right. + /// The coordinates of the left eyebrow outer. + /// The coordinates of the left eyebrow inner. + /// The coordinates of the left eye outer. 
+ /// The coordinates of the left eye top. + /// The coordinates of the left eye bottom. + /// The coordinates of the left eye inner. + /// The coordinates of the right eyebrow inner. + /// The coordinates of the right eyebrow outer. + /// The coordinates of the right eye inner. + /// The coordinates of the right eye top. + /// The coordinates of the right eye bottom. + /// The coordinates of the right eye outer. + /// The coordinates of the nose root left. + /// The coordinates of the nose root right. + /// The coordinates of the nose left alar top. + /// The coordinates of the nose right alar top. + /// The coordinates of the nose left alar out tip. + /// The coordinates of the nose right alar out tip. + /// The coordinates of the upper lip top. + /// The coordinates of the upper lip bottom. + /// The coordinates of the under lip top. + /// The coordinates of the under lip bottom. + /// Keeps track of any properties unknown to the library. + internal FaceLandmarks(LandmarkCoordinate pupilLeft, LandmarkCoordinate pupilRight, LandmarkCoordinate noseTip, LandmarkCoordinate mouthLeft, LandmarkCoordinate mouthRight, LandmarkCoordinate eyebrowLeftOuter, LandmarkCoordinate eyebrowLeftInner, LandmarkCoordinate eyeLeftOuter, LandmarkCoordinate eyeLeftTop, LandmarkCoordinate eyeLeftBottom, LandmarkCoordinate eyeLeftInner, LandmarkCoordinate eyebrowRightInner, LandmarkCoordinate eyebrowRightOuter, LandmarkCoordinate eyeRightInner, LandmarkCoordinate eyeRightTop, LandmarkCoordinate eyeRightBottom, LandmarkCoordinate eyeRightOuter, LandmarkCoordinate noseRootLeft, LandmarkCoordinate noseRootRight, LandmarkCoordinate noseLeftAlarTop, LandmarkCoordinate noseRightAlarTop, LandmarkCoordinate noseLeftAlarOutTip, LandmarkCoordinate noseRightAlarOutTip, LandmarkCoordinate upperLipTop, LandmarkCoordinate upperLipBottom, LandmarkCoordinate underLipTop, LandmarkCoordinate underLipBottom, IDictionary serializedAdditionalRawData) + { + PupilLeft = pupilLeft; + PupilRight = pupilRight; + 
NoseTip = noseTip; + MouthLeft = mouthLeft; + MouthRight = mouthRight; + EyebrowLeftOuter = eyebrowLeftOuter; + EyebrowLeftInner = eyebrowLeftInner; + EyeLeftOuter = eyeLeftOuter; + EyeLeftTop = eyeLeftTop; + EyeLeftBottom = eyeLeftBottom; + EyeLeftInner = eyeLeftInner; + EyebrowRightInner = eyebrowRightInner; + EyebrowRightOuter = eyebrowRightOuter; + EyeRightInner = eyeRightInner; + EyeRightTop = eyeRightTop; + EyeRightBottom = eyeRightBottom; + EyeRightOuter = eyeRightOuter; + NoseRootLeft = noseRootLeft; + NoseRootRight = noseRootRight; + NoseLeftAlarTop = noseLeftAlarTop; + NoseRightAlarTop = noseRightAlarTop; + NoseLeftAlarOutTip = noseLeftAlarOutTip; + NoseRightAlarOutTip = noseRightAlarOutTip; + UpperLipTop = upperLipTop; + UpperLipBottom = upperLipBottom; + UnderLipTop = underLipTop; + UnderLipBottom = underLipBottom; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceLandmarks() + { + } + + /// The coordinates of the left eye pupil. + public LandmarkCoordinate PupilLeft { get; } + /// The coordinates of the right eye pupil. + public LandmarkCoordinate PupilRight { get; } + /// The coordinates of the nose tip. + public LandmarkCoordinate NoseTip { get; } + /// The coordinates of the mouth left. + public LandmarkCoordinate MouthLeft { get; } + /// The coordinates of the mouth right. + public LandmarkCoordinate MouthRight { get; } + /// The coordinates of the left eyebrow outer. + public LandmarkCoordinate EyebrowLeftOuter { get; } + /// The coordinates of the left eyebrow inner. + public LandmarkCoordinate EyebrowLeftInner { get; } + /// The coordinates of the left eye outer. + public LandmarkCoordinate EyeLeftOuter { get; } + /// The coordinates of the left eye top. + public LandmarkCoordinate EyeLeftTop { get; } + /// The coordinates of the left eye bottom. + public LandmarkCoordinate EyeLeftBottom { get; } + /// The coordinates of the left eye inner. 
+ public LandmarkCoordinate EyeLeftInner { get; } + /// The coordinates of the right eyebrow inner. + public LandmarkCoordinate EyebrowRightInner { get; } + /// The coordinates of the right eyebrow outer. + public LandmarkCoordinate EyebrowRightOuter { get; } + /// The coordinates of the right eye inner. + public LandmarkCoordinate EyeRightInner { get; } + /// The coordinates of the right eye top. + public LandmarkCoordinate EyeRightTop { get; } + /// The coordinates of the right eye bottom. + public LandmarkCoordinate EyeRightBottom { get; } + /// The coordinates of the right eye outer. + public LandmarkCoordinate EyeRightOuter { get; } + /// The coordinates of the nose root left. + public LandmarkCoordinate NoseRootLeft { get; } + /// The coordinates of the nose root right. + public LandmarkCoordinate NoseRootRight { get; } + /// The coordinates of the nose left alar top. + public LandmarkCoordinate NoseLeftAlarTop { get; } + /// The coordinates of the nose right alar top. + public LandmarkCoordinate NoseRightAlarTop { get; } + /// The coordinates of the nose left alar out tip. + public LandmarkCoordinate NoseLeftAlarOutTip { get; } + /// The coordinates of the nose right alar out tip. + public LandmarkCoordinate NoseRightAlarOutTip { get; } + /// The coordinates of the upper lip top. + public LandmarkCoordinate UpperLipTop { get; } + /// The coordinates of the upper lip bottom. + public LandmarkCoordinate UpperLipBottom { get; } + /// The coordinates of the under lip top. + public LandmarkCoordinate UnderLipTop { get; } + /// The coordinates of the under lip bottom. 
+ public LandmarkCoordinate UnderLipBottom { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.Serialization.cs new file mode 100644 index 0000000000000..999fb93becc79 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.Serialization.cs @@ -0,0 +1,203 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceList : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceList)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("faceListId"u8); + writer.WriteStringValue(FaceListId); + } + if (Optional.IsCollectionDefined(PersistedFaces)) + { + writer.WritePropertyName("persistedFaces"u8); + writer.WriteStartArray(); + foreach (var item in PersistedFaces) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceList IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceList)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceList(document.RootElement, options); + } + + internal static FaceList DeserializeFaceList(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + string faceListId = default; + IReadOnlyList persistedFaces = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("faceListId"u8)) + { + faceListId = property.Value.GetString(); + continue; + } + if (property.NameEquals("persistedFaces"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(FaceListFace.DeserializeFaceListFace(item, options)); + } + persistedFaces = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceList( + name, + userData, 
+ recognitionModel, + faceListId, + persistedFaces ?? new ChangeTrackingList(), + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceList)} does not support writing '{options.Format}' format."); + } + } + + FaceList IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceList(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceList)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceList FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceList(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.cs new file mode 100644 index 0000000000000..c35100ae00c62 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceList.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face list is a list of faces, up to 1,000 faces. + public partial class FaceList + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal FaceList(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + PersistedFaces = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Face ids of registered faces in the face list. + /// Keeps track of any properties unknown to the library. + internal FaceList(string name, string userData, FaceRecognitionModel? 
recognitionModel, string faceListId, IReadOnlyList persistedFaces, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + FaceListId = faceListId; + PersistedFaces = persistedFaces; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceList() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + public string FaceListId { get; } + /// Face ids of registered faces in the face list. + public IReadOnlyList PersistedFaces { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.Serialization.cs new file mode 100644 index 0000000000000..23edbcb5c086b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceListFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceListFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceListFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceListFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceListFace(document.RootElement, options); + } + + internal static FaceListFace DeserializeFaceListFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceListFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceListFace)} does not support writing '{options.Format}' format."); + } + } + + FaceListFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceListFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceListFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceListFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceListFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.cs new file mode 100644 index 0000000000000..608b85aca010a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for face list. + public partial class FaceListFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal FaceListFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal FaceListFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. The length limit is 1K. + public string UserData { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.Serialization.cs new file mode 100644 index 0000000000000..38211c318e506 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.Serialization.cs @@ -0,0 +1,169 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceListItem : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceListItem)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + writer.WritePropertyName("faceListId"u8); + writer.WriteStringValue(FaceListId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceListItem IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceListItem)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceListItem(document.RootElement, options); + } + + internal static FaceListItem DeserializeFaceListItem(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + string faceListId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("faceListId"u8)) + { + faceListId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceListItem(name, userData, recognitionModel, faceListId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceListItem)} does not support writing '{options.Format}' format."); + } + } + + FaceListItem IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceListItem(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceListItem)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceListItem FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceListItem(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.cs new file mode 100644 index 0000000000000..8f0730a6bf85c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceListItem.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face list item for list face list. 
+ public partial class FaceListItem + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// or is null. + internal FaceListItem(string name, string faceListId) + { + Argument.AssertNotNull(name, nameof(name)); + Argument.AssertNotNull(faceListId, nameof(faceListId)); + + Name = name; + FaceListId = faceListId; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Keeps track of any properties unknown to the library. + internal FaceListItem(string name, string userData, FaceRecognitionModel? recognitionModel, string faceListId, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + FaceListId = faceListId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
+ internal FaceListItem() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + public string FaceListId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLivenessDecision.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLivenessDecision.cs new file mode 100644 index 0000000000000..4e7368e13e54c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceLivenessDecision.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The outcome of the liveness classification. + public readonly partial struct FaceLivenessDecision : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceLivenessDecision(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string UncertainValue = "uncertain"; + private const string RealFaceValue = "realface"; + private const string SpoofFaceValue = "spoofface"; + + /// The algorithm could not classify the target face as either real or spoof. + public static FaceLivenessDecision Uncertain { get; } = new FaceLivenessDecision(UncertainValue); + /// The algorithm has classified the target face as real. + public static FaceLivenessDecision RealFace { get; } = new FaceLivenessDecision(RealFaceValue); + /// The algorithm has classified the target face as a spoof. 
+ public static FaceLivenessDecision SpoofFace { get; } = new FaceLivenessDecision(SpoofFaceValue); + /// Determines if two values are the same. + public static bool operator ==(FaceLivenessDecision left, FaceLivenessDecision right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceLivenessDecision left, FaceLivenessDecision right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator FaceLivenessDecision(string value) => new FaceLivenessDecision(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceLivenessDecision other && Equals(other); + /// + public bool Equals(FaceLivenessDecision other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs new file mode 100644 index 0000000000000..221077491cb21 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The status of long running operation. + public readonly partial struct FaceOperationStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceOperationStatus(string value) + { + _value = value ?? 
throw new ArgumentNullException(nameof(value)); + } + + private const string NotStartedValue = "notStarted"; + private const string RunningValue = "running"; + private const string SucceededValue = "succeeded"; + private const string FailedValue = "failed"; + + /// The operation is not started. + public static FaceOperationStatus NotStarted { get; } = new FaceOperationStatus(NotStartedValue); + /// The operation is still running. + public static FaceOperationStatus Running { get; } = new FaceOperationStatus(RunningValue); + /// The operation is succeeded. + public static FaceOperationStatus Succeeded { get; } = new FaceOperationStatus(SucceededValue); + /// The operation is failed. + public static FaceOperationStatus Failed { get; } = new FaceOperationStatus(FailedValue); + /// Determines if two values are the same. + public static bool operator ==(FaceOperationStatus left, FaceOperationStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceOperationStatus left, FaceOperationStatus right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator FaceOperationStatus(string value) => new FaceOperationStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceOperationStatus other && Equals(other); + /// + public bool Equals(FaceOperationStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.ComponentModel;

namespace Azure.AI.Vision.Face
{
    /// <summary> The recognition model for the face. </summary>
    public readonly partial struct FaceRecognitionModel : IEquatable<FaceRecognitionModel>
    {
        private readonly string _value;

        /// <summary> Initializes a new instance of <see cref="FaceRecognitionModel"/>. </summary>
        /// <param name="value"> The string value of the model. </param>
        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
        public FaceRecognitionModel(string value)
        {
            _value = value ?? throw new ArgumentNullException(nameof(value));
        }

        private const string Recognition01Value = "recognition_01";
        private const string Recognition02Value = "recognition_02";
        private const string Recognition03Value = "recognition_03";
        private const string Recognition04Value = "recognition_04";

        /// <summary> The default recognition model for "Detect". All those faceIds created before 2019 March are bonded with this recognition model. </summary>
        public static FaceRecognitionModel Recognition01 { get; } = new FaceRecognitionModel(Recognition01Value);
        /// <summary> Recognition model released in 2019 March. </summary>
        public static FaceRecognitionModel Recognition02 { get; } = new FaceRecognitionModel(Recognition02Value);
        /// <summary> Recognition model released in 2020 May. </summary>
        public static FaceRecognitionModel Recognition03 { get; } = new FaceRecognitionModel(Recognition03Value);
        /// <summary> Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy. </summary>
        public static FaceRecognitionModel Recognition04 { get; } = new FaceRecognitionModel(Recognition04Value);
        /// <summary> Determines if two <see cref="FaceRecognitionModel"/> values are the same. </summary>
        public static bool operator ==(FaceRecognitionModel left, FaceRecognitionModel right) => left.Equals(right);
        /// <summary> Determines if two <see cref="FaceRecognitionModel"/> values are not the same. </summary>
        public static bool operator !=(FaceRecognitionModel left, FaceRecognitionModel right) => !left.Equals(right);
        /// <summary> Converts a string to a <see cref="FaceRecognitionModel"/>. </summary>
        public static implicit operator FaceRecognitionModel(string value) => new FaceRecognitionModel(value);

        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override bool Equals(object obj) => obj is FaceRecognitionModel other && Equals(other);
        /// <inheritdoc />
        public bool Equals(FaceRecognitionModel other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);

        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        // Hash must agree with the case-insensitive Equals above: use the matching
        // StringComparer rather than the case-sensitive string.GetHashCode().
        public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0;
        /// <inheritdoc />
        public override string ToString() => _value;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Text.Json;
using Azure.Core;

namespace Azure.AI.Vision.Face
{
    // Serialization half of FaceRectangle: JSON wire format plus the
    // IJsonModel/IPersistableModel plumbing used by ModelReaderWriter.
    // NOTE(review): generic type arguments below were stripped in the pasted diff
    // and have been reconstructed from the standard Azure SDK generated pattern.
    public partial class FaceRectangle : IUtf8JsonSerializable, IJsonModel<FaceRectangle>
    {
        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<FaceRectangle>)this).Write(writer, ModelSerializationExtensions.WireOptions);

        void IJsonModel<FaceRectangle>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
        {
            // "W" (wire) format defers to GetFormatFromOptions; only JSON ("J") is supported.
            var format = options.Format == "W" ? ((IPersistableModel<FaceRectangle>)this).GetFormatFromOptions(options) : options.Format;
            if (format != "J")
            {
                throw new FormatException($"The model {nameof(FaceRectangle)} does not support writing '{format}' format.");
            }

            writer.WriteStartObject();
            writer.WritePropertyName("top"u8);
            writer.WriteNumberValue(Top);
            writer.WritePropertyName("left"u8);
            writer.WriteNumberValue(Left);
            writer.WritePropertyName("width"u8);
            writer.WriteNumberValue(Width);
            writer.WriteNumberValue(Height) is var _ ? default : default;
            if (options.Format != "W" && _serializedAdditionalRawData != null)
            {
                // Round-trip any properties this library version does not know about.
                foreach (var item in _serializedAdditionalRawData)
                {
                    writer.WritePropertyName(item.Key);
#if NET6_0_OR_GREATER
                    writer.WriteRawValue(item.Value);
#else
                    using (JsonDocument document = JsonDocument.Parse(item.Value))
                    {
                        JsonSerializer.Serialize(writer, document.RootElement);
                    }
#endif
                }
            }
            writer.WriteEndObject();
        }

        FaceRectangle IJsonModel<FaceRectangle>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FaceRectangle>)this).GetFormatFromOptions(options) : options.Format;
            if (format != "J")
            {
                throw new FormatException($"The model {nameof(FaceRectangle)} does not support reading '{format}' format.");
            }

            using JsonDocument document = JsonDocument.ParseValue(ref reader);
            return DeserializeFaceRectangle(document.RootElement, options);
        }

        internal static FaceRectangle DeserializeFaceRectangle(JsonElement element, ModelReaderWriterOptions options = null)
        {
            options ??= ModelSerializationExtensions.WireOptions;

            if (element.ValueKind == JsonValueKind.Null)
            {
                return null;
            }
            int top = default;
            int left = default;
            int width = default;
            int height = default;
            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
            foreach (var property in element.EnumerateObject())
            {
                if (property.NameEquals("top"u8))
                {
                    top = property.Value.GetInt32();
                    continue;
                }
                if (property.NameEquals("left"u8))
                {
                    left = property.Value.GetInt32();
                    continue;
                }
                if (property.NameEquals("width"u8))
                {
                    width = property.Value.GetInt32();
                    continue;
                }
                if (property.NameEquals("height"u8))
                {
                    height = property.Value.GetInt32();
                    continue;
                }
                if (options.Format != "W")
                {
                    // Preserve unknown properties for lossless round-tripping.
                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
                }
            }
            serializedAdditionalRawData = rawDataDictionary;
            return new FaceRectangle(top, left, width, height, serializedAdditionalRawData);
        }

        BinaryData IPersistableModel<FaceRectangle>.Write(ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FaceRectangle>)this).GetFormatFromOptions(options) : options.Format;

            switch (format)
            {
                case "J":
                    return ModelReaderWriter.Write(this, options);
                default:
                    throw new FormatException($"The model {nameof(FaceRectangle)} does not support writing '{options.Format}' format.");
            }
        }

        FaceRectangle IPersistableModel<FaceRectangle>.Create(BinaryData data, ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FaceRectangle>)this).GetFormatFromOptions(options) : options.Format;

            switch (format)
            {
                case "J":
                    {
                        using JsonDocument document = JsonDocument.Parse(data);
                        return DeserializeFaceRectangle(document.RootElement, options);
                    }
                default:
                    throw new FormatException($"The model {nameof(FaceRectangle)} does not support reading '{options.Format}' format.");
            }
        }

        string IPersistableModel<FaceRectangle>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";

        /// <summary> Deserializes the model from a raw response. </summary>
        /// <param name="response"> The response to deserialize the model from. </param>
        internal static FaceRectangle FromResponse(Response response)
        {
            using var document = JsonDocument.Parse(response.Content);
            return DeserializeFaceRectangle(document.RootElement);
        }

        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
        internal virtual RequestContent ToRequestContent()
        {
            var content = new Utf8JsonRequestContent();
            content.JsonWriter.WriteObjectValue<FaceRectangle>(this, ModelSerializationExtensions.WireOptions);
            return content;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.Collections.Generic;

namespace Azure.AI.Vision.Face
{
    /// <summary> A rectangle within which a face can be found. </summary>
    public partial class FaceRectangle
    {
        /// <summary>
        /// Keeps track of any properties unknown to the library.
        /// <para>
        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
        /// </para>
        /// <para>
        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
        /// </para>
        /// <para>
        /// Examples:
        /// <list type="bullet">
        /// <item>
        /// <term>BinaryData.FromObjectAsJson("foo")</term>
        /// <description>Creates a payload of "foo".</description>
        /// </item>
        /// <item>
        /// <term>BinaryData.FromString("\"foo\"")</term>
        /// <description>Creates a payload of "foo".</description>
        /// </item>
        /// <item>
        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
        /// <description>Creates a payload of { "key": "value" }.</description>
        /// </item>
        /// <item>
        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
        /// <description>Creates a payload of { "key": "value" }.</description>
        /// </item>
        /// </list>
        /// </para>
        /// </summary>
        private IDictionary<string, BinaryData> _serializedAdditionalRawData;

        /// <summary> Initializes a new instance of <see cref="FaceRectangle"/>. </summary>
        /// <param name="top"> The distance from the top edge of the image to the top edge of the rectangle, in pixels. </param>
        /// <param name="left"> The distance from the left edge of the image to the left edge of the rectangle, in pixels. </param>
        /// <param name="width"> The width of the rectangle, in pixels. </param>
        /// <param name="height"> The height of the rectangle, in pixels. </param>
        internal FaceRectangle(int top, int left, int width, int height)
        {
            Top = top;
            Left = left;
            Width = width;
            Height = height;
        }

        /// <summary> Initializes a new instance of <see cref="FaceRectangle"/>. </summary>
        /// <param name="top"> The distance from the top edge of the image to the top edge of the rectangle, in pixels. </param>
        /// <param name="left"> The distance from the left edge of the image to the left edge of the rectangle, in pixels. </param>
        /// <param name="width"> The width of the rectangle, in pixels. </param>
        /// <param name="height"> The height of the rectangle, in pixels. </param>
        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
        internal FaceRectangle(int top, int left, int width, int height, IDictionary<string, BinaryData> serializedAdditionalRawData)
        {
            Top = top;
            Left = left;
            Width = width;
            Height = height;
            _serializedAdditionalRawData = serializedAdditionalRawData;
        }

        /// <summary> Initializes a new instance of <see cref="FaceRectangle"/> for deserialization. </summary>
        internal FaceRectangle()
        {
        }

        /// <summary> The distance from the top edge of the image to the top edge of the rectangle, in pixels. </summary>
        public int Top { get; }
        /// <summary> The distance from the left edge of the image to the left edge of the rectangle, in pixels. </summary>
        public int Left { get; }
        /// <summary> The width of the rectangle, in pixels. </summary>
        public int Width { get; }
        /// <summary> The height of the rectangle, in pixels. </summary>
        public int Height { get; }
    }
}
+ internal FaceRectangle(int top, int left, int width, int height, IDictionary serializedAdditionalRawData) + { + Top = top; + Left = left; + Width = width; + Height = height; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceRectangle() + { + } + + /// The distance from the top edge if the image to the top edge of the rectangle, in pixels. + public int Top { get; } + /// The distance from the left edge if the image to the left edge of the rectangle, in pixels. + public int Left { get; } + /// The width of the rectangle, in pixels. + public int Width { get; } + /// The height of the rectangle, in pixels. + public int Height { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs new file mode 100644 index 0000000000000..1bea35cd86205 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs @@ -0,0 +1,1628 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated client. + /// The FaceSession service client. 
+ public partial class FaceSessionClient + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of FaceSessionClient for mocking. + protected FaceSessionClient() + { + } + + /// Initializes a new instance of FaceSessionClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. + public FaceSessionClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions()) + { + } + + /// Initializes a new instance of FaceSessionClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. + public FaceSessionClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions()) + { + } + + /// Initializes a new instance of FaceSessionClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. 
+ /// The options for configuring the client. + /// or is null. + public FaceSessionClient(Uri endpoint, AzureKeyCredential credential, AzureAIVisionFaceClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new AzureAIVisionFaceClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _keyCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// Initializes a new instance of FaceSessionClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// The options for configuring the client. + /// or is null. + public FaceSessionClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new AzureAIVisionFaceClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _tokenCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + } + + /// Create a new detect liveness session. + /// Request for creating liveness session. + /// The cancellation token to use. + /// is null. + /// + /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. + /// + /// Permissions includes... + /// > + /// * + /// * Ability to call /detectLiveness/singleModal for up to 3 retries. + /// * A token lifetime of 10 minutes. + /// + /// > [!NOTE] + /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries. + /// + /// + public virtual async Task> CreateLivenessSessionAsync(CreateLivenessSessionContent createLivenessSessionContent, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(createLivenessSessionContent, nameof(createLivenessSessionContent)); + + using RequestContent content = createLivenessSessionContent.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateLivenessSessionAsync(content, context).ConfigureAwait(false); + return Response.FromValue(CreateLivenessSessionResult.FromResponse(response), response); + } + + /// Create a new detect liveness session. + /// Request for creating liveness session. + /// The cancellation token to use. + /// is null. + /// + /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. + /// + /// Permissions includes... + /// > + /// * + /// * Ability to call /detectLiveness/singleModal for up to 3 retries. + /// * A token lifetime of 10 minutes. + /// + /// > [!NOTE] + /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. 
To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries. + /// + /// + public virtual Response CreateLivenessSession(CreateLivenessSessionContent createLivenessSessionContent, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(createLivenessSessionContent, nameof(createLivenessSessionContent)); + + using RequestContent content = createLivenessSessionContent.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreateLivenessSession(content, context); + return Response.FromValue(CreateLivenessSessionResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Create a new detect liveness session. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateLivenessSessionAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.CreateLivenessSession"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateLivenessSessionRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new detect liveness session. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response CreateLivenessSession(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.CreateLivenessSession"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateLivenessSessionRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete all session related information for matching the specified session id. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The unique ID to reference this session. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task DeleteLivenessSessionAsync(string sessionId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DeleteLivenessSession"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLivenessSessionRequest(sessionId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete all session related information for matching the specified session id. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The unique ID to reference this session. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeleteLivenessSession(string sessionId, RequestContext context = null) + { + Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DeleteLivenessSession"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteLivenessSessionRequest(sessionId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Get session result of detectLiveness/singleModal call. + /// The unique ID to reference this session. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. 
+ /// + public virtual async Task> GetLivenessSessionResultAsync(string sessionId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLivenessSessionResultAsync(sessionId, context).ConfigureAwait(false); + return Response.FromValue(LivenessSession.FromResponse(response), response); + } + + /// Get session result of detectLiveness/singleModal call. + /// The unique ID to reference this session. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetLivenessSessionResult(string sessionId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLivenessSessionResult(sessionId, context); + return Response.FromValue(LivenessSession.FromResponse(response), response); + } + + /// + /// [Protocol Method] Get session result of detectLiveness/singleModal call. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The unique ID to reference this session. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task GetLivenessSessionResultAsync(string sessionId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessSessionResult"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLivenessSessionResultRequest(sessionId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Get session result of detectLiveness/singleModal call. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The unique ID to reference this session. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLivenessSessionResult(string sessionId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessSessionResult"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLivenessSessionResultRequest(sessionId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Lists sessions for /detectLiveness/SingleModal. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. 
+ /// + /// List sessions from the last sessionId greater than the 'start'. + /// + /// The result should be ordered by sessionId in ascending order. + /// + /// + public virtual async Task>> GetLivenessSessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLivenessSessionsAsync(start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LivenessSessionItem.DeserializeLivenessSessionItem(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// Lists sessions for /detectLiveness/SingleModal. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// + /// List sessions from the last sessionId greater than the 'start'. + /// + /// The result should be ordered by sessionId in ascending order. + /// + /// + public virtual Response> GetLivenessSessions(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLivenessSessions(start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LivenessSessionItem.DeserializeLivenessSessionItem(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] Lists sessions for /detectLiveness/SingleModal. 
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetLivenessSessionsAsync(string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessSessions");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessSessionsRequest(start, top, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Lists sessions for /detectLiveness/SingleModal.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetLivenessSessions(string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessSessions");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessSessionsRequest(start, top, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Gets session requests and response body for the session. </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual async Task<Response<IReadOnlyList<LivenessSessionAuditEntry>>> GetLivenessSessionAuditEntriesAsync(string sessionId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLivenessSessionAuditEntriesAsync(sessionId, start, top, context).ConfigureAwait(false);
    IReadOnlyList<LivenessSessionAuditEntry> value = default;
    // Deserialize the raw JSON array payload into strongly typed audit entries.
    using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
    List<LivenessSessionAuditEntry> array = new List<LivenessSessionAuditEntry>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LivenessSessionAuditEntry.DeserializeLivenessSessionAuditEntry(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary> Gets session requests and response body for the session. </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual Response<IReadOnlyList<LivenessSessionAuditEntry>> GetLivenessSessionAuditEntries(string sessionId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLivenessSessionAuditEntries(sessionId, start, top, context);
    IReadOnlyList<LivenessSessionAuditEntry> value = default;
    // Deserialize the raw JSON array payload into strongly typed audit entries.
    using var document = JsonDocument.Parse(response.ContentStream);
    List<LivenessSessionAuditEntry> array = new List<LivenessSessionAuditEntry>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LivenessSessionAuditEntry.DeserializeLivenessSessionAuditEntry(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary>
/// [Protocol Method] Gets session requests and response body for the session.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetLivenessSessionAuditEntriesAsync(string sessionId, string start, int? top, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessSessionAuditEntries");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessSessionAuditEntriesRequest(sessionId, start, top, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Gets session requests and response body for the session.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetLivenessSessionAuditEntries(string sessionId, string start, int? top, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessSessionAuditEntries");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessSessionAuditEntriesRequest(sessionId, start, top, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Create a new liveness session with verify.
/// Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. </summary>
/// <param name="createLivenessSessionContent"> Request for creating liveness session. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="createLivenessSessionContent"/> is null. </exception>
/// <remarks>
/// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
///
/// Permissions includes...
/// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
/// * A token lifetime of 10 minutes.
///
/// Note:
/// * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
/// * To retrieve a result, use the Get Liveness With Verify Session.
/// * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
///
/// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.
/// Note: Extra measures should be taken to validate that the client is sending the expected VerifyImage.
/// </remarks>
internal virtual async Task<Response<CreateLivenessWithVerifySessionResult>> CreateLivenessWithVerifySessionAsync(CreateLivenessSessionContent createLivenessSessionContent, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(createLivenessSessionContent, nameof(createLivenessSessionContent));

    using RequestContent content = createLivenessSessionContent.ToRequestContent();
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await CreateLivenessWithVerifySessionAsync(content, context).ConfigureAwait(false);
    return Response.FromValue(CreateLivenessWithVerifySessionResult.FromResponse(response), response);
}

/// <summary> Create a new liveness session with verify. Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. </summary>
/// <param name="createLivenessSessionContent"> Request for creating liveness session. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="createLivenessSessionContent"/> is null. </exception>
/// <remarks>
/// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
///
/// Permissions includes...
/// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
/// * A token lifetime of 10 minutes.
///
/// Note:
/// * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
/// * To retrieve a result, use the Get Liveness With Verify Session.
/// * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
///
/// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.
/// Note: Extra measures should be taken to validate that the client is sending the expected VerifyImage.
/// </remarks>
internal virtual Response<CreateLivenessWithVerifySessionResult> CreateLivenessWithVerifySession(CreateLivenessSessionContent createLivenessSessionContent, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(createLivenessSessionContent, nameof(createLivenessSessionContent));

    using RequestContent content = createLivenessSessionContent.ToRequestContent();
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = CreateLivenessWithVerifySession(content, context);
    return Response.FromValue(CreateLivenessWithVerifySessionResult.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Create a new liveness session with verify. Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
internal virtual async Task<Response> CreateLivenessWithVerifySessionAsync(RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.CreateLivenessWithVerifySession");
    scope.Start();
    try
    {
        using HttpMessage message = CreateCreateLivenessWithVerifySessionRequest(content, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Create a new liveness session with verify. Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
internal virtual Response CreateLivenessWithVerifySession(RequestContent content, RequestContext context = null)
{
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.CreateLivenessWithVerifySession");
    scope.Start();
    try
    {
        using HttpMessage message = CreateCreateLivenessWithVerifySessionRequest(content, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Create a new liveness session with verify. Provide the verify image during session creation. </summary>
/// <param name="createLivenessWithVerifySessionContent"> Request of liveness with verify session creation. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="createLivenessWithVerifySessionContent"/> is null. </exception>
/// <remarks>
/// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
///
/// Permissions includes...
/// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
/// * A token lifetime of 10 minutes.
///
/// Note:
/// * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
/// * To retrieve a result, use the Get Liveness With Verify Session.
/// * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
///
/// Recommended Option: VerifyImage is provided during session creation.
/// </remarks>
internal virtual async Task<Response<CreateLivenessWithVerifySessionResult>> CreateLivenessWithVerifySessionWithVerifyImageAsync(CreateLivenessWithVerifySessionContent createLivenessWithVerifySessionContent, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(createLivenessWithVerifySessionContent, nameof(createLivenessWithVerifySessionContent));

    // The verify image is sent alongside the session parameters as multipart/form-data.
    using MultipartFormDataRequestContent content = createLivenessWithVerifySessionContent.ToMultipartRequestContent();
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await CreateLivenessWithVerifySessionWithVerifyImageAsync(content, content.ContentType, context).ConfigureAwait(false);
    return Response.FromValue(CreateLivenessWithVerifySessionResult.FromResponse(response), response);
}

/// <summary> Create a new liveness session with verify. Provide the verify image during session creation. </summary>
/// <param name="createLivenessWithVerifySessionContent"> Request of liveness with verify session creation. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="createLivenessWithVerifySessionContent"/> is null. </exception>
/// <remarks>
/// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.
///
/// Permissions includes...
/// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
/// * A token lifetime of 10 minutes.
///
/// Note:
/// * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
/// * To retrieve a result, use the Get Liveness With Verify Session.
/// * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
///
/// Recommended Option: VerifyImage is provided during session creation.
/// </remarks>
internal virtual Response<CreateLivenessWithVerifySessionResult> CreateLivenessWithVerifySessionWithVerifyImage(CreateLivenessWithVerifySessionContent createLivenessWithVerifySessionContent, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(createLivenessWithVerifySessionContent, nameof(createLivenessWithVerifySessionContent));

    // The verify image is sent alongside the session parameters as multipart/form-data.
    using MultipartFormDataRequestContent content = createLivenessWithVerifySessionContent.ToMultipartRequestContent();
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = CreateLivenessWithVerifySessionWithVerifyImage(content, content.ContentType, context);
    return Response.FromValue(CreateLivenessWithVerifySessionResult.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Create a new liveness session with verify. Provide the verify image during session creation.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="contentType"> The content type for the operation. Always multipart/form-data for this operation. Allowed values: "multipart/form-data". </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
internal virtual async Task<Response> CreateLivenessWithVerifySessionWithVerifyImageAsync(RequestContent content, string contentType, RequestContext context = null)
{
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.CreateLivenessWithVerifySessionWithVerifyImage");
    scope.Start();
    try
    {
        using HttpMessage message = CreateCreateLivenessWithVerifySessionWithVerifyImageRequest(content, contentType, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Create a new liveness session with verify. Provide the verify image during session creation.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="content"> The content to send as the body of the request. </param>
/// <param name="contentType"> The content type for the operation. Always multipart/form-data for this operation. Allowed values: "multipart/form-data". </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="content"/> is null. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
internal virtual Response CreateLivenessWithVerifySessionWithVerifyImage(RequestContent content, string contentType, RequestContext context = null)
{
    Argument.AssertNotNull(content, nameof(content));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.CreateLivenessWithVerifySessionWithVerifyImage");
    scope.Start();
    try
    {
        using HttpMessage message = CreateCreateLivenessWithVerifySessionWithVerifyImageRequest(content, contentType, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

// The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
/// <summary>
/// [Protocol Method] Delete all session related information for matching the specified session id.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> DeleteLivenessWithVerifySessionAsync(string sessionId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DeleteLivenessWithVerifySession");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteLivenessWithVerifySessionRequest(sessionId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

// The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method
/// <summary>
/// [Protocol Method] Delete all session related information for matching the specified session id.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response DeleteLivenessWithVerifySession(string sessionId, RequestContext context = null)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DeleteLivenessWithVerifySession");
    scope.Start();
    try
    {
        using HttpMessage message = CreateDeleteLivenessWithVerifySessionRequest(sessionId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Get session result of detectLivenessWithVerify/singleModal call. </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual async Task<Response<LivenessWithVerifySession>> GetLivenessWithVerifySessionResultAsync(string sessionId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLivenessWithVerifySessionResultAsync(sessionId, context).ConfigureAwait(false);
    return Response.FromValue(LivenessWithVerifySession.FromResponse(response), response);
}

/// <summary> Get session result of detectLivenessWithVerify/singleModal call. </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual Response<LivenessWithVerifySession> GetLivenessWithVerifySessionResult(string sessionId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLivenessWithVerifySessionResult(sessionId, context);
    return Response.FromValue(LivenessWithVerifySession.FromResponse(response), response);
}

/// <summary>
/// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetLivenessWithVerifySessionResultAsync(string sessionId, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessWithVerifySessionResult");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessWithVerifySessionResultRequest(sessionId, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetLivenessWithVerifySessionResult(string sessionId, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessWithVerifySessionResult");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessWithVerifySessionResultRequest(sessionId, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Lists sessions for /detectLivenessWithVerify/SingleModal. </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <remarks>
/// List sessions from the last sessionId greater than the "start".
///
/// The result should be ordered by sessionId in ascending order.
/// </remarks>
public virtual async Task<Response<IReadOnlyList<LivenessSessionItem>>> GetLivenessWithVerifySessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLivenessWithVerifySessionsAsync(start, top, context).ConfigureAwait(false);
    IReadOnlyList<LivenessSessionItem> value = default;
    // Deserialize the raw JSON array payload into strongly typed session items.
    using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
    List<LivenessSessionItem> array = new List<LivenessSessionItem>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LivenessSessionItem.DeserializeLivenessSessionItem(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary> Lists sessions for /detectLivenessWithVerify/SingleModal. </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <remarks>
/// List sessions from the last sessionId greater than the "start".
///
/// The result should be ordered by sessionId in ascending order.
/// </remarks>
public virtual Response<IReadOnlyList<LivenessSessionItem>> GetLivenessWithVerifySessions(string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLivenessWithVerifySessions(start, top, context);
    IReadOnlyList<LivenessSessionItem> value = default;
    // Deserialize the raw JSON array payload into strongly typed session items.
    using var document = JsonDocument.Parse(response.ContentStream);
    List<LivenessSessionItem> array = new List<LivenessSessionItem>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LivenessSessionItem.DeserializeLivenessSessionItem(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary>
/// [Protocol Method] Lists sessions for /detectLivenessWithVerify/SingleModal.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetLivenessWithVerifySessionsAsync(string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessWithVerifySessions");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessWithVerifySessionsRequest(start, top, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Lists sessions for /detectLivenessWithVerify/SingleModal.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual Response GetLivenessWithVerifySessions(string start, int? top, RequestContext context)
{
    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessWithVerifySessions");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessWithVerifySessionsRequest(start, top, context);
        return _pipeline.ProcessMessage(message, context);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary> Gets session requests and response body for the session. </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual async Task<Response<IReadOnlyList<LivenessSessionAuditEntry>>> GetLivenessWithVerifySessionAuditEntriesAsync(string sessionId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = await GetLivenessWithVerifySessionAuditEntriesAsync(sessionId, start, top, context).ConfigureAwait(false);
    IReadOnlyList<LivenessSessionAuditEntry> value = default;
    // Deserialize the raw JSON array payload into strongly typed audit entries.
    using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
    List<LivenessSessionAuditEntry> array = new List<LivenessSessionAuditEntry>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LivenessSessionAuditEntry.DeserializeLivenessSessionAuditEntry(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary> Gets session requests and response body for the session. </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
public virtual Response<IReadOnlyList<LivenessSessionAuditEntry>> GetLivenessWithVerifySessionAuditEntries(string sessionId, string start = null, int? top = null, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    RequestContext context = FromCancellationToken(cancellationToken);
    Response response = GetLivenessWithVerifySessionAuditEntries(sessionId, start, top, context);
    IReadOnlyList<LivenessSessionAuditEntry> value = default;
    // Deserialize the raw JSON array payload into strongly typed audit entries.
    using var document = JsonDocument.Parse(response.ContentStream);
    List<LivenessSessionAuditEntry> array = new List<LivenessSessionAuditEntry>();
    foreach (var item in document.RootElement.EnumerateArray())
    {
        array.Add(LivenessSessionAuditEntry.DeserializeLivenessSessionAuditEntry(item));
    }
    value = array;
    return Response.FromValue(value, response);
}

/// <summary>
/// [Protocol Method] Gets session requests and response body for the session.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
/// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
/// <exception cref="ArgumentNullException"> <paramref name="sessionId"/> is null. </exception>
/// <exception cref="ArgumentException"> <paramref name="sessionId"/> is an empty string, and was expected to be non-empty. </exception>
/// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
/// <returns> The response returned from the service. </returns>
public virtual async Task<Response> GetLivenessWithVerifySessionAuditEntriesAsync(string sessionId, string start, int? top, RequestContext context)
{
    Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId));

    using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessWithVerifySessionAuditEntries");
    scope.Start();
    try
    {
        using HttpMessage message = CreateGetLivenessWithVerifySessionAuditEntriesRequest(sessionId, start, top, context);
        return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}

/// <summary>
/// [Protocol Method] Gets session requests and response body for the session.
/// <list type="bullet">
/// <item>
/// <description>
/// This <see href="https://aka.ms/azsdk/net/protocol/quickstarts">protocol method</see> allows explicit creation of the request and processing of the response for advanced scenarios.
/// </description>
/// </item>
/// <item>
/// <description>
/// Please try the simpler convenience overload with strongly typed models first.
/// </description>
/// </item>
/// </list>
/// </summary>
/// <param name="sessionId"> The unique ID to reference this session. </param>
/// <param name="start"> List resources greater than the "start". It contains no more than 64 characters. Default is empty. </param>
/// <param name="top"> The number of items to list, ranging in [1, 1000]. Default is 1000. </param>
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLivenessWithVerifySessionAuditEntries(string sessionId, string start, int? top, RequestContext context) + { + Argument.AssertNotNullOrEmpty(sessionId, nameof(sessionId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetLivenessWithVerifySessionAuditEntries"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLivenessWithVerifySessionAuditEntriesRequest(sessionId, start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateLivenessSessionRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLiveness/singleModal/sessions", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteLivenessSessionRequest(string sessionId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLiveness/singleModal/sessions/", false); + uri.AppendPath(sessionId, true); + 
request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLivenessSessionResultRequest(string sessionId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLiveness/singleModal/sessions/", false); + uri.AppendPath(sessionId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLivenessSessionsRequest(string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLiveness/singleModal/sessions", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLivenessSessionAuditEntriesRequest(string sessionId, string start, int? 
top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLiveness/singleModal/sessions/", false); + uri.AppendPath(sessionId, true); + uri.AppendPath("/audit", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreateLivenessWithVerifySessionRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLivenessWithVerify/singleModal/sessions", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateCreateLivenessWithVerifySessionWithVerifyImageRequest(RequestContent content, string contentType, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLivenessWithVerify/singleModal/sessions", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("content-type", contentType); + request.Content = 
content; + return message; + } + + internal HttpMessage CreateDeleteLivenessWithVerifySessionRequest(string sessionId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLivenessWithVerify/singleModal/sessions/", false); + uri.AppendPath(sessionId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLivenessWithVerifySessionResultRequest(string sessionId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLivenessWithVerify/singleModal/sessions/", false); + uri.AppendPath(sessionId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLivenessWithVerifySessionsRequest(string start, int? 
top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLivenessWithVerify/singleModal/sessions", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLivenessWithVerifySessionAuditEntriesRequest(string sessionId, string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detectLivenessWithVerify/singleModal/sessions/", false); + uri.AppendPath(sessionId, true); + uri.AppendPath("/audit", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + } +} diff --git 
a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionStatus.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionStatus.cs new file mode 100644 index 0000000000000..ed3fc9f4bb780 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceSessionStatus.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The current status of the session. + public readonly partial struct FaceSessionStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceSessionStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NotStartedValue = "NotStarted"; + private const string StartedValue = "Started"; + private const string ResultAvailableValue = "ResultAvailable"; + + /// Session has not started. + public static FaceSessionStatus NotStarted { get; } = new FaceSessionStatus(NotStartedValue); + /// Session has started. + public static FaceSessionStatus Started { get; } = new FaceSessionStatus(StartedValue); + /// Session has available result. + public static FaceSessionStatus ResultAvailable { get; } = new FaceSessionStatus(ResultAvailableValue); + /// Determines if two values are the same. + public static bool operator ==(FaceSessionStatus left, FaceSessionStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceSessionStatus left, FaceSessionStatus right) => !left.Equals(right); + /// Converts a string to a . 
+ public static implicit operator FaceSessionStatus(string value) => new FaceSessionStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceSessionStatus other && Equals(other); + /// + public bool Equals(FaceSessionStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.Serialization.cs new file mode 100644 index 0000000000000..8ba4c2b48de15 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceVerificationResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceVerificationResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("isIdentical"u8); + writer.WriteBooleanValue(IsIdentical); + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceVerificationResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceVerificationResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceVerificationResult(document.RootElement, options); + } + + internal static FaceVerificationResult DeserializeFaceVerificationResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool isIdentical = default; + float confidence = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("isIdentical"u8)) + { + isIdentical = property.Value.GetBoolean(); + continue; + } + if 
(property.NameEquals("confidence"u8)) + { + confidence = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceVerificationResult(isIdentical, confidence, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceVerificationResult)} does not support writing '{options.Format}' format."); + } + } + + FaceVerificationResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceVerificationResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceVerificationResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceVerificationResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceVerificationResult(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.cs new file mode 100644 index 0000000000000..ad7abd235c877 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FaceVerificationResult.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Verify result. + public partial class FaceVerificationResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// True if the two faces belong to the same person or the face belongs to the person, otherwise false. + /// A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. 
This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data. + internal FaceVerificationResult(bool isIdentical, float confidence) + { + IsIdentical = isIdentical; + Confidence = confidence; + } + + /// Initializes a new instance of . + /// True if the two faces belong to the same person or the face belongs to the person, otherwise false. + /// A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data. + /// Keeps track of any properties unknown to the library. + internal FaceVerificationResult(bool isIdentical, float confidence, IDictionary serializedAdditionalRawData) + { + IsIdentical = isIdentical; + Confidence = confidence; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceVerificationResult() + { + } + + /// True if the two faces belong to the same person or the face belongs to the person, otherwise false. + public bool IsIdentical { get; } + /// A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data. 
+ public float Confidence { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.Serialization.cs new file mode 100644 index 0000000000000..c6b6eeec36a1a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FacialHair : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FacialHair)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("moustache"u8); + writer.WriteNumberValue(Moustache); + writer.WritePropertyName("beard"u8); + writer.WriteNumberValue(Beard); + writer.WritePropertyName("sideburns"u8); + writer.WriteNumberValue(Sideburns); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FacialHair IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FacialHair)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFacialHair(document.RootElement, options); + } + + internal static FacialHair DeserializeFacialHair(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + float moustache = default; + float beard = default; + float sideburns = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("moustache"u8)) + { + moustache = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("beard"u8)) + { + beard = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("sideburns"u8)) + { + sideburns = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FacialHair(moustache, beard, sideburns, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FacialHair)} does not support writing '{options.Format}' format."); + } + } + + FacialHair IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFacialHair(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FacialHair)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FacialHair FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFacialHair(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.cs new file mode 100644 index 0000000000000..b87cce6176e65 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FacialHair.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Properties describing facial hair attributes. + public partial class FacialHair + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + internal FacialHair(float moustache, float beard, float sideburns) + { + Moustache = moustache; + Beard = beard; + Sideburns = sideburns; + } + + /// Initializes a new instance of . + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + /// Keeps track of any properties unknown to the library. + internal FacialHair(float moustache, float beard, float sideburns, IDictionary serializedAdditionalRawData) + { + Moustache = moustache; + Beard = beard; + Sideburns = sideburns; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FacialHair() + { + } + + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + public float Moustache { get; } + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. + public float Beard { get; } + /// A number ranging from 0 to 1 indicating a level of confidence associated with a property. 
+ public float Sideburns { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.Serialization.cs new file mode 100644 index 0000000000000..768e059b824f4 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.Serialization.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class FindSimilarFromFaceListRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FindSimilarFromFaceListRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(Mode)) + { + writer.WritePropertyName("mode"u8); + writer.WriteStringValue(Mode.Value.ToString()); + } + writer.WritePropertyName("faceListId"u8); + writer.WriteStringValue(FaceListId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FindSimilarFromFaceListRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FindSimilarFromFaceListRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFindSimilarFromFaceListRequest(document.RootElement, options); + } + + internal static FindSimilarFromFaceListRequest DeserializeFindSimilarFromFaceListRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + int? maxNumOfCandidatesReturned = default; + FindSimilarMatchMode? mode = default; + string faceListId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("mode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + mode = new FindSimilarMatchMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("faceListId"u8)) + { + faceListId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FindSimilarFromFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, faceListId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = 
options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FindSimilarFromFaceListRequest)} does not support writing '{options.Format}' format."); + } + } + + FindSimilarFromFaceListRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFindSimilarFromFaceListRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FindSimilarFromFaceListRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FindSimilarFromFaceListRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFindSimilarFromFaceListRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.cs new file mode 100644 index 0000000000000..d08bc4246bc61 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/FindSimilarFromFaceListRequest.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.Collections.Generic;

namespace Azure.AI.Vision.Face
{
    /// <summary> Request body for "Find Similar" against a user-created Face List. </summary>
    internal partial class FindSimilarFromFaceListRequest
    {
        /// <summary>
        /// Keeps track of any properties unknown to the library, so unknown JSON members
        /// round-trip through serialization instead of being dropped.
        /// <para> To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions)"/>. </para>
        /// <para> To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>. </para>
        /// </summary>
        private IDictionary<string, BinaryData> _serializedAdditionalRawData;

        /// <summary> Initializes a new instance of <see cref="FindSimilarFromFaceListRequest"/>. </summary>
        /// <param name="faceId"> faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. </param>
        /// <param name="faceListId"> An existing user-specified unique candidate Face List, created in "Create Face List". Face List contains a set of persistedFaceIds which are persisted and will never expire. </param>
        /// <exception cref="ArgumentNullException"> <paramref name="faceListId"/> is null. </exception>
        public FindSimilarFromFaceListRequest(Guid faceId, string faceListId)
        {
            Argument.AssertNotNull(faceListId, nameof(faceListId));

            FaceId = faceId;
            FaceListId = faceListId;
        }

        /// <summary> Initializes a new instance of <see cref="FindSimilarFromFaceListRequest"/>. Used by the generated deserializer. </summary>
        /// <param name="faceId"> faceId of the query face. </param>
        /// <param name="maxNumOfCandidatesReturned"> The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. </param>
        /// <param name="mode"> Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. </param>
        /// <param name="faceListId"> An existing user-specified unique candidate Face List. </param>
        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
        internal FindSimilarFromFaceListRequest(Guid faceId, int? maxNumOfCandidatesReturned, FindSimilarMatchMode? mode, string faceListId, IDictionary<string, BinaryData> serializedAdditionalRawData)
        {
            FaceId = faceId;
            MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned;
            Mode = mode;
            FaceListId = faceListId;
            _serializedAdditionalRawData = serializedAdditionalRawData;
        }

        /// <summary> Initializes a new instance of <see cref="FindSimilarFromFaceListRequest"/> for deserialization. </summary>
        internal FindSimilarFromFaceListRequest()
        {
        }

        /// <summary> faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. </summary>
        public Guid FaceId { get; }
        /// <summary> The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. </summary>
        public int? MaxNumOfCandidatesReturned { get; set; }
        /// <summary> Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. </summary>
        public FindSimilarMatchMode? Mode { get; set; }
        /// <summary> An existing user-specified unique candidate Face List, created in "Create Face List". Face List contains a set of persistedFaceIds which are persisted and will never expire. </summary>
        public string FaceListId { get; }
    }
}

// ===== File: src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs =====
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>

#nullable disable

using System;
using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Text.Json;
using Azure.Core;

namespace Azure.AI.Vision.Face
{
    /// <summary> JSON (de)serialization support for <see cref="FindSimilarFromLargeFaceListRequest"/>. </summary>
    internal partial class FindSimilarFromLargeFaceListRequest : IUtf8JsonSerializable, IJsonModel<FindSimilarFromLargeFaceListRequest>
    {
        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<FindSimilarFromLargeFaceListRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);

        /// <summary> Writes this model as JSON. Only the "J" (JSON) format is supported. </summary>
        void IJsonModel<FindSimilarFromLargeFaceListRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
        {
            // "W" (wire) resolves to this model's own preferred format.
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;
            if (format != "J")
            {
                throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{format}' format.");
            }

            writer.WriteStartObject();
            writer.WritePropertyName("faceId"u8);
            writer.WriteStringValue(FaceId);
            if (Optional.IsDefined(MaxNumOfCandidatesReturned))
            {
                writer.WritePropertyName("maxNumOfCandidatesReturned"u8);
                writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value);
            }
            if (Optional.IsDefined(Mode))
            {
                writer.WritePropertyName("mode"u8);
                writer.WriteStringValue(Mode.Value.ToString());
            }
            writer.WritePropertyName("largeFaceListId"u8);
            writer.WriteStringValue(LargeFaceListId);
            if (options.Format != "W" && _serializedAdditionalRawData != null)
            {
                // Round-trip any properties this library version does not know about.
                foreach (var item in _serializedAdditionalRawData)
                {
                    writer.WritePropertyName(item.Key);
#if NET6_0_OR_GREATER
                    writer.WriteRawValue(item.Value);
#else
                    using (JsonDocument document = JsonDocument.Parse(item.Value))
                    {
                        JsonSerializer.Serialize(writer, document.RootElement);
                    }
#endif
                }
            }
            writer.WriteEndObject();
        }

        /// <summary> Reads a model instance from JSON. Only the "J" (JSON) format is supported. </summary>
        FindSimilarFromLargeFaceListRequest IJsonModel<FindSimilarFromLargeFaceListRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;
            if (format != "J")
            {
                throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{format}' format.");
            }

            using JsonDocument document = JsonDocument.ParseValue(ref reader);
            return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options);
        }

        /// <summary> Deserializes a <see cref="FindSimilarFromLargeFaceListRequest"/> from a <see cref="JsonElement"/>; returns null for a JSON null. </summary>
        internal static FindSimilarFromLargeFaceListRequest DeserializeFindSimilarFromLargeFaceListRequest(JsonElement element, ModelReaderWriterOptions options = null)
        {
            options ??= ModelSerializationExtensions.WireOptions;

            if (element.ValueKind == JsonValueKind.Null)
            {
                return null;
            }
            Guid faceId = default;
            int? maxNumOfCandidatesReturned = default;
            FindSimilarMatchMode? mode = default;
            string largeFaceListId = default;
            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
            foreach (var property in element.EnumerateObject())
            {
                if (property.NameEquals("faceId"u8))
                {
                    faceId = property.Value.GetGuid();
                    continue;
                }
                if (property.NameEquals("maxNumOfCandidatesReturned"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    maxNumOfCandidatesReturned = property.Value.GetInt32();
                    continue;
                }
                if (property.NameEquals("mode"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    mode = new FindSimilarMatchMode(property.Value.GetString());
                    continue;
                }
                if (property.NameEquals("largeFaceListId"u8))
                {
                    largeFaceListId = property.Value.GetString();
                    continue;
                }
                if (options.Format != "W")
                {
                    // Preserve unknown members so they survive a read/write round-trip.
                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
                }
            }
            serializedAdditionalRawData = rawDataDictionary;
            return new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, serializedAdditionalRawData);
        }

        /// <summary> Persists this model; delegates to <see cref="ModelReaderWriter"/> for the JSON format. </summary>
        BinaryData IPersistableModel<FindSimilarFromLargeFaceListRequest>.Write(ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;

            switch (format)
            {
                case "J":
                    return ModelReaderWriter.Write(this, options);
                default:
                    throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{options.Format}' format.");
            }
        }

        /// <summary> Rehydrates this model from persisted data. </summary>
        FindSimilarFromLargeFaceListRequest IPersistableModel<FindSimilarFromLargeFaceListRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarFromLargeFaceListRequest>)this).GetFormatFromOptions(options) : options.Format;

            switch (format)
            {
                case "J":
                    {
                        using JsonDocument document = JsonDocument.Parse(data);
                        return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options);
                    }
                default:
                    throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{options.Format}' format.");
            }
        }

        string IPersistableModel<FindSimilarFromLargeFaceListRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";

        /// <summary> Deserializes the model from a raw response. </summary>
        /// <param name="response"> The response to deserialize the model from. </param>
        internal static FindSimilarFromLargeFaceListRequest FromResponse(Response response)
        {
            using var document = JsonDocument.Parse(response.Content);
            return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement);
        }

        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
        internal virtual RequestContent ToRequestContent()
        {
            var content = new Utf8JsonRequestContent();
            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
            return content;
        }
    }
}

// ===== File: src/Generated/FindSimilarFromLargeFaceListRequest.cs =====
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.Collections.Generic;

namespace Azure.AI.Vision.Face
{
    /// <summary> Request body for "Find Similar" against a user-created Large Face List. </summary>
    internal partial class FindSimilarFromLargeFaceListRequest
    {
        /// <summary>
        /// Keeps track of any properties unknown to the library, so unknown JSON members
        /// round-trip through serialization instead of being dropped.
        /// <para> To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions)"/>. </para>
        /// <para> To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>. </para>
        /// </summary>
        private IDictionary<string, BinaryData> _serializedAdditionalRawData;

        /// <summary> Initializes a new instance of <see cref="FindSimilarFromLargeFaceListRequest"/>. </summary>
        /// <param name="faceId"> faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. </param>
        /// <param name="largeFaceListId"> An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. </param>
        /// <exception cref="ArgumentNullException"> <paramref name="largeFaceListId"/> is null. </exception>
        public FindSimilarFromLargeFaceListRequest(Guid faceId, string largeFaceListId)
        {
            Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId));

            FaceId = faceId;
            LargeFaceListId = largeFaceListId;
        }

        /// <summary> Initializes a new instance of <see cref="FindSimilarFromLargeFaceListRequest"/>. Used by the generated deserializer. </summary>
        /// <param name="faceId"> faceId of the query face. </param>
        /// <param name="maxNumOfCandidatesReturned"> The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. </param>
        /// <param name="mode"> Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. </param>
        /// <param name="largeFaceListId"> An existing user-specified unique candidate Large Face List. </param>
        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
        internal FindSimilarFromLargeFaceListRequest(Guid faceId, int? maxNumOfCandidatesReturned, FindSimilarMatchMode? mode, string largeFaceListId, IDictionary<string, BinaryData> serializedAdditionalRawData)
        {
            FaceId = faceId;
            MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned;
            Mode = mode;
            LargeFaceListId = largeFaceListId;
            _serializedAdditionalRawData = serializedAdditionalRawData;
        }

        /// <summary> Initializes a new instance of <see cref="FindSimilarFromLargeFaceListRequest"/> for deserialization. </summary>
        internal FindSimilarFromLargeFaceListRequest()
        {
        }

        /// <summary> faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. </summary>
        public Guid FaceId { get; }
        /// <summary> The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. </summary>
        public int? MaxNumOfCandidatesReturned { get; set; }
        /// <summary> Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. </summary>
        public FindSimilarMatchMode? Mode { get; set; }
        /// <summary> An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. </summary>
        public string LargeFaceListId { get; }
    }
}

// ===== File: src/Generated/FindSimilarMatchMode.cs =====
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.ComponentModel;

namespace Azure.AI.Vision.Face
{
    /// <summary> Similar face searching mode. </summary>
    public readonly partial struct FindSimilarMatchMode : IEquatable<FindSimilarMatchMode>
    {
        private readonly string _value;

        /// <summary> Initializes a new instance of <see cref="FindSimilarMatchMode"/>. </summary>
        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
        public FindSimilarMatchMode(string value)
        {
            _value = value ?? throw new ArgumentNullException(nameof(value));
        }

        private const string MatchPersonValue = "matchPerson";
        private const string MatchFaceValue = "matchFace";

        /// <summary> Match person. </summary>
        public static FindSimilarMatchMode MatchPerson { get; } = new FindSimilarMatchMode(MatchPersonValue);
        /// <summary> Match face. </summary>
        public static FindSimilarMatchMode MatchFace { get; } = new FindSimilarMatchMode(MatchFaceValue);
        /// <summary> Determines if two <see cref="FindSimilarMatchMode"/> values are the same. </summary>
        public static bool operator ==(FindSimilarMatchMode left, FindSimilarMatchMode right) => left.Equals(right);
        /// <summary> Determines if two <see cref="FindSimilarMatchMode"/> values are not the same. </summary>
        public static bool operator !=(FindSimilarMatchMode left, FindSimilarMatchMode right) => !left.Equals(right);
        /// <summary> Converts a string to a <see cref="FindSimilarMatchMode"/>. </summary>
        public static implicit operator FindSimilarMatchMode(string value) => new FindSimilarMatchMode(value);

        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override bool Equals(object obj) => obj is FindSimilarMatchMode other && Equals(other);
        /// <inheritdoc />
        public bool Equals(FindSimilarMatchMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);

        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        // BUGFIX: Equals above is case-insensitive (InvariantCultureIgnoreCase), so the hash
        // must be too; the previous ordinal `_value?.GetHashCode() ?? 0` gave equal values
        // different hash codes, violating the GetHashCode/Equals contract.
        public override int GetHashCode() => _value != null ? StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0;
        /// <inheritdoc />
        public override string ToString() => _value;
    }
}

// ===== File: src/Generated/FindSimilarRequest.Serialization.cs =====
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Text.Json;
using Azure.Core;

namespace Azure.AI.Vision.Face
{
    /// <summary> JSON (de)serialization support for <see cref="FindSimilarRequest"/>. </summary>
    internal partial class FindSimilarRequest : IUtf8JsonSerializable, IJsonModel<FindSimilarRequest>
    {
        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<FindSimilarRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);

        /// <summary> Writes this model as JSON. Only the "J" (JSON) format is supported. </summary>
        void IJsonModel<FindSimilarRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarRequest>)this).GetFormatFromOptions(options) : options.Format;
            if (format != "J")
            {
                throw new FormatException($"The model {nameof(FindSimilarRequest)} does not support writing '{format}' format.");
            }

            writer.WriteStartObject();
            writer.WritePropertyName("faceId"u8);
            writer.WriteStringValue(FaceId);
            if (Optional.IsDefined(MaxNumOfCandidatesReturned))
            {
                writer.WritePropertyName("maxNumOfCandidatesReturned"u8);
                writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value);
            }
            if (Optional.IsDefined(Mode))
            {
                writer.WritePropertyName("mode"u8);
                writer.WriteStringValue(Mode.Value.ToString());
            }
            writer.WritePropertyName("faceIds"u8);
            writer.WriteStartArray();
            foreach (var item in FaceIds)
            {
                writer.WriteStringValue(item);
            }
            writer.WriteEndArray();
            if (options.Format != "W" && _serializedAdditionalRawData != null)
            {
                // Round-trip any properties this library version does not know about.
                foreach (var item in _serializedAdditionalRawData)
                {
                    writer.WritePropertyName(item.Key);
#if NET6_0_OR_GREATER
                    writer.WriteRawValue(item.Value);
#else
                    using (JsonDocument document = JsonDocument.Parse(item.Value))
                    {
                        JsonSerializer.Serialize(writer, document.RootElement);
                    }
#endif
                }
            }
            writer.WriteEndObject();
        }

        /// <summary> Reads a model instance from JSON. Only the "J" (JSON) format is supported. </summary>
        FindSimilarRequest IJsonModel<FindSimilarRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarRequest>)this).GetFormatFromOptions(options) : options.Format;
            if (format != "J")
            {
                throw new FormatException($"The model {nameof(FindSimilarRequest)} does not support reading '{format}' format.");
            }

            using JsonDocument document = JsonDocument.ParseValue(ref reader);
            return DeserializeFindSimilarRequest(document.RootElement, options);
        }

        /// <summary> Deserializes a <see cref="FindSimilarRequest"/> from a <see cref="JsonElement"/>; returns null for a JSON null. </summary>
        internal static FindSimilarRequest DeserializeFindSimilarRequest(JsonElement element, ModelReaderWriterOptions options = null)
        {
            options ??= ModelSerializationExtensions.WireOptions;

            if (element.ValueKind == JsonValueKind.Null)
            {
                return null;
            }
            Guid faceId = default;
            int? maxNumOfCandidatesReturned = default;
            FindSimilarMatchMode? mode = default;
            IList<Guid> faceIds = default;
            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
            foreach (var property in element.EnumerateObject())
            {
                if (property.NameEquals("faceId"u8))
                {
                    faceId = property.Value.GetGuid();
                    continue;
                }
                if (property.NameEquals("maxNumOfCandidatesReturned"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    maxNumOfCandidatesReturned = property.Value.GetInt32();
                    continue;
                }
                if (property.NameEquals("mode"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    mode = new FindSimilarMatchMode(property.Value.GetString());
                    continue;
                }
                if (property.NameEquals("faceIds"u8))
                {
                    List<Guid> array = new List<Guid>();
                    foreach (var item in property.Value.EnumerateArray())
                    {
                        array.Add(item.GetGuid());
                    }
                    faceIds = array;
                    continue;
                }
                if (options.Format != "W")
                {
                    // Preserve unknown members so they survive a read/write round-trip.
                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
                }
            }
            serializedAdditionalRawData = rawDataDictionary;
            return new FindSimilarRequest(faceId, maxNumOfCandidatesReturned, mode, faceIds, serializedAdditionalRawData);
        }

        /// <summary> Persists this model; delegates to <see cref="ModelReaderWriter"/> for the JSON format. </summary>
        BinaryData IPersistableModel<FindSimilarRequest>.Write(ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarRequest>)this).GetFormatFromOptions(options) : options.Format;

            switch (format)
            {
                case "J":
                    return ModelReaderWriter.Write(this, options);
                default:
                    throw new FormatException($"The model {nameof(FindSimilarRequest)} does not support writing '{options.Format}' format.");
            }
        }

        /// <summary> Rehydrates this model from persisted data. </summary>
        FindSimilarRequest IPersistableModel<FindSimilarRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
        {
            var format = options.Format == "W" ? ((IPersistableModel<FindSimilarRequest>)this).GetFormatFromOptions(options) : options.Format;

            switch (format)
            {
                case "J":
                    {
                        using JsonDocument document = JsonDocument.Parse(data);
                        return DeserializeFindSimilarRequest(document.RootElement, options);
                    }
                default:
                    throw new FormatException($"The model {nameof(FindSimilarRequest)} does not support reading '{options.Format}' format.");
            }
        }

        string IPersistableModel<FindSimilarRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";

        /// <summary> Deserializes the model from a raw response. </summary>
        /// <param name="response"> The response to deserialize the model from. </param>
        internal static FindSimilarRequest FromResponse(Response response)
        {
            using var document = JsonDocument.Parse(response.Content);
            return DeserializeFindSimilarRequest(document.RootElement);
        }

        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
        internal virtual RequestContent ToRequestContent()
        {
            var content = new Utf8JsonRequestContent();
            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
            return content;
        }
    }
}

// ===== File: src/Generated/FindSimilarRequest.cs =====
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>

#nullable disable

using System;
using System.Collections.Generic;
using System.Linq;

namespace Azure.AI.Vision.Face
{
    /// <summary> Request body for "Find Similar" against an ad-hoc array of detected face ids. </summary>
    internal partial class FindSimilarRequest
    {
        /// <summary>
        /// Keeps track of any properties unknown to the library, so unknown JSON members
        /// round-trip through serialization instead of being dropped.
        /// <para> To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions)"/>. </para>
        /// <para> To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>. </para>
        /// </summary>
        private IDictionary<string, BinaryData> _serializedAdditionalRawData;

        /// <summary> Initializes a new instance of <see cref="FindSimilarRequest"/>. </summary>
        /// <param name="faceId"> faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. </param>
        /// <param name="faceIds"> An array of candidate faceIds. All of them are created by "Detect" and the faceIds will expire 24 hours after the detection call. The number of faceIds is limited to 1000. </param>
        /// <exception cref="ArgumentNullException"> <paramref name="faceIds"/> is null. </exception>
        public FindSimilarRequest(Guid faceId, IEnumerable<Guid> faceIds)
        {
            Argument.AssertNotNull(faceIds, nameof(faceIds));

            FaceId = faceId;
            FaceIds = faceIds.ToList();
        }

        /// <summary> Initializes a new instance of <see cref="FindSimilarRequest"/>. Used by the generated deserializer. </summary>
        /// <param name="faceId"> faceId of the query face. </param>
        /// <param name="maxNumOfCandidatesReturned"> The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. </param>
        /// <param name="mode"> Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. </param>
        /// <param name="faceIds"> An array of candidate faceIds. The number of faceIds is limited to 1000. </param>
        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
        internal FindSimilarRequest(Guid faceId, int? maxNumOfCandidatesReturned, FindSimilarMatchMode? mode, IList<Guid> faceIds, IDictionary<string, BinaryData> serializedAdditionalRawData)
        {
            FaceId = faceId;
            MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned;
            Mode = mode;
            FaceIds = faceIds;
            _serializedAdditionalRawData = serializedAdditionalRawData;
        }

        /// <summary> Initializes a new instance of <see cref="FindSimilarRequest"/> for deserialization. </summary>
        internal FindSimilarRequest()
        {
        }

        /// <summary> faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. </summary>
        public Guid FaceId { get; }
        /// <summary> The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. </summary>
        public int? MaxNumOfCandidatesReturned { get; set; }
        /// <summary> Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. </summary>
        public FindSimilarMatchMode? Mode { get; set; }
        /// <summary> An array of candidate faceIds. All of them are created by "Detect" and the faceIds will expire 24 hours after the detection call. The number of faceIds is limited to 1000. </summary>
        public IList<Guid> FaceIds { get; }
    }
}

// ===== File: src/Generated/GlassesType.cs =====
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.ComponentModel;

namespace Azure.AI.Vision.Face
{
    /// <summary> Glasses type of the face. </summary>
+ public readonly partial struct GlassesType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public GlassesType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NoGlassesValue = "noGlasses"; + private const string ReadingGlassesValue = "readingGlasses"; + private const string SunglassesValue = "sunglasses"; + private const string SwimmingGogglesValue = "swimmingGoggles"; + + /// No glasses on the face. + public static GlassesType NoGlasses { get; } = new GlassesType(NoGlassesValue); + /// Normal glasses on the face. + public static GlassesType ReadingGlasses { get; } = new GlassesType(ReadingGlassesValue); + /// Sunglasses on the face. + public static GlassesType Sunglasses { get; } = new GlassesType(SunglassesValue); + /// Swimming goggles on the face. + public static GlassesType SwimmingGoggles { get; } = new GlassesType(SwimmingGogglesValue); + /// Determines if two values are the same. + public static bool operator ==(GlassesType left, GlassesType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(GlassesType left, GlassesType right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator GlassesType(string value) => new GlassesType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is GlassesType other && Equals(other); + /// + public bool Equals(GlassesType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 
0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.Serialization.cs new file mode 100644 index 0000000000000..6ac7b9325288a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class GroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(GroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceIds"u8); + writer.WriteStartArray(); + foreach (var item in FaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + GroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(GroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeGroupRequest(document.RootElement, options); + } + + internal static GroupRequest DeserializeGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList faceIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + faceIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new GroupRequest(faceIds, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(GroupRequest)} does not support writing '{options.Format}' format."); + } + } + + GroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(GroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static GroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.cs new file mode 100644 index 0000000000000..1ee442ec44c79 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/GroupRequest.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The GroupRequest. + internal partial class GroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces. + /// is null. + public GroupRequest(IEnumerable faceIds) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + + FaceIds = faceIds.ToList(); + } + + /// Initializes a new instance of . + /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces. + /// Keeps track of any properties unknown to the library. + internal GroupRequest(IList faceIds, IDictionary serializedAdditionalRawData) + { + FaceIds = faceIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal GroupRequest() + { + } + + /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces. + public IList FaceIds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.Serialization.cs new file mode 100644 index 0000000000000..94129722b51c9 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class HairColor : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HairColor)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("color"u8); + writer.WriteStringValue(Color.ToString()); + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + HairColor IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HairColor)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeHairColor(document.RootElement, options); + } + + internal static HairColor DeserializeHairColor(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + HairColorType color = default; + float confidence = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("color"u8)) + { + color = new HairColorType(property.Value.GetString()); + continue; + } + if (property.NameEquals("confidence"u8)) + { + confidence = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new HairColor(color, confidence, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(HairColor)} does not support writing '{options.Format}' format."); + } + } + + HairColor IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeHairColor(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(HairColor)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static HairColor FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeHairColor(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.cs new file mode 100644 index 0000000000000..211ec6d15fc16 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColor.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// An array of candidate colors and confidence level in the presence of each. + public partial class HairColor + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Name of the hair color. + /// Confidence level of the color. Range between [0,1]. + internal HairColor(HairColorType color, float confidence) + { + Color = color; + Confidence = confidence; + } + + /// Initializes a new instance of . + /// Name of the hair color. + /// Confidence level of the color. Range between [0,1]. + /// Keeps track of any properties unknown to the library. + internal HairColor(HairColorType color, float confidence, IDictionary serializedAdditionalRawData) + { + Color = color; + Confidence = confidence; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal HairColor() + { + } + + /// Name of the hair color. + public HairColorType Color { get; } + /// Confidence level of the color. Range between [0,1]. + public float Confidence { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColorType.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColorType.cs new file mode 100644 index 0000000000000..1c0094ccfcaeb --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairColorType.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// Name of the hair color. + public readonly partial struct HairColorType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. 
+ public HairColorType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string UnknownHairColorValue = "unknown"; + private const string WhiteValue = "white"; + private const string GrayValue = "gray"; + private const string BlondValue = "blond"; + private const string BrownValue = "brown"; + private const string RedValue = "red"; + private const string BlackValue = "black"; + private const string OtherValue = "other"; + + /// Unknown. + public static HairColorType UnknownHairColor { get; } = new HairColorType(UnknownHairColorValue); + /// White. + public static HairColorType White { get; } = new HairColorType(WhiteValue); + /// Gray. + public static HairColorType Gray { get; } = new HairColorType(GrayValue); + /// Blond. + public static HairColorType Blond { get; } = new HairColorType(BlondValue); + /// Brown. + public static HairColorType Brown { get; } = new HairColorType(BrownValue); + /// Red. + public static HairColorType Red { get; } = new HairColorType(RedValue); + /// Black. + public static HairColorType Black { get; } = new HairColorType(BlackValue); + /// Other. + public static HairColorType Other { get; } = new HairColorType(OtherValue); + /// Determines if two values are the same. + public static bool operator ==(HairColorType left, HairColorType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(HairColorType left, HairColorType right) => !left.Equals(right); + /// Converts a string to a . 
+ public static implicit operator HairColorType(string value) => new HairColorType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is HairColorType other && Equals(other); + /// + public bool Equals(HairColorType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.Serialization.cs new file mode 100644 index 0000000000000..8e45029e1f634 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.Serialization.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class HairProperties : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HairProperties)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("bald"u8); + writer.WriteNumberValue(Bald); + writer.WritePropertyName("invisible"u8); + writer.WriteBooleanValue(Invisible); + writer.WritePropertyName("hairColor"u8); + writer.WriteStartArray(); + foreach (var item in HairColor) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + HairProperties IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HairProperties)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeHairProperties(document.RootElement, options); + } + + internal static HairProperties DeserializeHairProperties(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + float bald = default; + bool invisible = default; + IReadOnlyList hairColor = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("bald"u8)) + { + bald = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("invisible"u8)) + { + invisible = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("hairColor"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(Face.HairColor.DeserializeHairColor(item, options)); + } + hairColor = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new HairProperties(bald, invisible, hairColor, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(HairProperties)} does not support writing '{options.Format}' format."); + } + } + + HairProperties IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeHairProperties(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(HairProperties)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static HairProperties FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeHairProperties(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.cs new file mode 100644 index 0000000000000..1548a40cf3413 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HairProperties.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Properties describing hair attributes. + public partial class HairProperties + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// A number describing confidence level of whether the person is bald. + /// A boolean value describing whether the hair is visible in the image. + /// An array of candidate colors and confidence level in the presence of each. + /// is null. + internal HairProperties(float bald, bool invisible, IEnumerable hairColor) + { + Argument.AssertNotNull(hairColor, nameof(hairColor)); + + Bald = bald; + Invisible = invisible; + HairColor = hairColor.ToList(); + } + + /// Initializes a new instance of . + /// A number describing confidence level of whether the person is bald. + /// A boolean value describing whether the hair is visible in the image. + /// An array of candidate colors and confidence level in the presence of each. + /// Keeps track of any properties unknown to the library. 
+ internal HairProperties(float bald, bool invisible, IReadOnlyList hairColor, IDictionary serializedAdditionalRawData) + { + Bald = bald; + Invisible = invisible; + HairColor = hairColor; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal HairProperties() + { + } + + /// A number describing confidence level of whether the person is bald. + public float Bald { get; } + /// A boolean value describing whether the hair is visible in the image. + public bool Invisible { get; } + /// An array of candidate colors and confidence level in the presence of each. + public IReadOnlyList HairColor { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.Serialization.cs new file mode 100644 index 0000000000000..9627fa488522b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class HeadPose : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HeadPose)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("pitch"u8); + writer.WriteNumberValue(Pitch); + writer.WritePropertyName("roll"u8); + writer.WriteNumberValue(Roll); + writer.WritePropertyName("yaw"u8); + writer.WriteNumberValue(Yaw); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + HeadPose IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(HeadPose)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeHeadPose(document.RootElement, options); + } + + internal static HeadPose DeserializeHeadPose(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + float pitch = default; + float roll = default; + float yaw = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("pitch"u8)) + { + pitch = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("roll"u8)) + { + roll = property.Value.GetSingle(); + 
continue; + } + if (property.NameEquals("yaw"u8)) + { + yaw = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new HeadPose(pitch, roll, yaw, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(HeadPose)} does not support writing '{options.Format}' format."); + } + } + + HeadPose IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeHeadPose(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(HeadPose)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static HeadPose FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeHeadPose(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.cs new file mode 100644 index 0000000000000..52b57696e962e --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/HeadPose.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// 3-D roll/yaw/pitch angles for face direction. + public partial class HeadPose + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Value of angles. + /// Value of angles. + /// Value of angles. + internal HeadPose(float pitch, float roll, float yaw) + { + Pitch = pitch; + Roll = roll; + Yaw = yaw; + } + + /// Initializes a new instance of . + /// Value of angles. + /// Value of angles. + /// Value of angles. + /// Keeps track of any properties unknown to the library. 
+ internal HeadPose(float pitch, float roll, float yaw, IDictionary serializedAdditionalRawData) + { + Pitch = pitch; + Roll = roll; + Yaw = yaw; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal HeadPose() + { + } + + /// Value of angles. + public float Pitch { get; } + /// Value of angles. + public float Roll { get; } + /// Value of angles. + public float Yaw { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..ffb7a98ff75d2 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class IdentifyFromDynamicPersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromDynamicPersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceIds"u8); + writer.WriteStartArray(); + foreach (var item in FaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + writer.WritePropertyName("dynamicPersonGroupId"u8); + writer.WriteStringValue(DynamicPersonGroupId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(ConfidenceThreshold)) + { + writer.WritePropertyName("confidenceThreshold"u8); + writer.WriteNumberValue(ConfidenceThreshold.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + IdentifyFromDynamicPersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromDynamicPersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIdentifyFromDynamicPersonGroupRequest(document.RootElement, options); + } + + internal static IdentifyFromDynamicPersonGroupRequest DeserializeIdentifyFromDynamicPersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList faceIds = default; + string dynamicPersonGroupId = default; + int? maxNumOfCandidatesReturned = default; + float? confidenceThreshold = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + faceIds = array; + continue; + } + if (property.NameEquals("dynamicPersonGroupId"u8)) + { + dynamicPersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("confidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidenceThreshold = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IdentifyFromDynamicPersonGroupRequest(faceIds, 
dynamicPersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IdentifyFromDynamicPersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + IdentifyFromDynamicPersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeIdentifyFromDynamicPersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IdentifyFromDynamicPersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IdentifyFromDynamicPersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeIdentifyFromDynamicPersonGroupRequest(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.cs new file mode 100644 index 0000000000000..df0ea224ceed1 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromDynamicPersonGroupRequest.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The IdentifyFromDynamicPersonGroupRequest. + internal partial class IdentifyFromDynamicPersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against. + /// or is null. 
+ public IdentifyFromDynamicPersonGroupRequest(IEnumerable faceIds, string dynamicPersonGroupId) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(dynamicPersonGroupId, nameof(dynamicPersonGroupId)); + + FaceIds = faceIds.ToList(); + DynamicPersonGroupId = dynamicPersonGroupId; + } + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// Keeps track of any properties unknown to the library. + internal IdentifyFromDynamicPersonGroupRequest(IList faceIds, string dynamicPersonGroupId, int? maxNumOfCandidatesReturned, float? confidenceThreshold, IDictionary serializedAdditionalRawData) + { + FaceIds = faceIds; + DynamicPersonGroupId = dynamicPersonGroupId; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + ConfidenceThreshold = confidenceThreshold; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IdentifyFromDynamicPersonGroupRequest() + { + } + + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + public IList FaceIds { get; } + /// DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against. 
+ public string DynamicPersonGroupId { get; } + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + public int? MaxNumOfCandidatesReturned { get; set; } + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + public float? ConfidenceThreshold { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..032b339a0ef9c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class IdentifyFromLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceIds"u8); + writer.WriteStartArray(); + foreach (var item in FaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(ConfidenceThreshold)) + { + writer.WritePropertyName("confidenceThreshold"u8); + writer.WriteNumberValue(ConfidenceThreshold.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + IdentifyFromLargePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options); + } + + internal static IdentifyFromLargePersonGroupRequest DeserializeIdentifyFromLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList faceIds = default; + string largePersonGroupId = default; + int? maxNumOfCandidatesReturned = default; + float? confidenceThreshold = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + faceIds = array; + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("confidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidenceThreshold = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IdentifyFromLargePersonGroupRequest(faceIds, 
largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + IdentifyFromLargePersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IdentifyFromLargePersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs new file mode 100644 index 0000000000000..1a2d7512ca4f5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The IdentifyFromLargePersonGroupRequest. + internal partial class IdentifyFromLargePersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". 
Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// or is null. + public IdentifyFromLargePersonGroupRequest(IEnumerable faceIds, string largePersonGroupId) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + FaceIds = faceIds.ToList(); + LargePersonGroupId = largePersonGroupId; + } + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// Keeps track of any properties unknown to the library. + internal IdentifyFromLargePersonGroupRequest(IList faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned, float? confidenceThreshold, IDictionary serializedAdditionalRawData) + { + FaceIds = faceIds; + LargePersonGroupId = largePersonGroupId; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + ConfidenceThreshold = confidenceThreshold; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IdentifyFromLargePersonGroupRequest() + { + } + + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. 
+ public IList FaceIds { get; } + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + public string LargePersonGroupId { get; } + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + public int? MaxNumOfCandidatesReturned { get; set; } + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + public float? ConfidenceThreshold { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.Serialization.cs new file mode 100644 index 0000000000000..d2ad65fb2b1d2 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.Serialization.cs @@ -0,0 +1,193 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class IdentifyFromPersonDirectoryRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromPersonDirectoryRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceIds"u8); + writer.WriteStartArray(); + foreach (var item in FaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + writer.WritePropertyName("personIds"u8); + writer.WriteStartArray(); + foreach (var item in PersonIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(ConfidenceThreshold)) + { + writer.WritePropertyName("confidenceThreshold"u8); + writer.WriteNumberValue(ConfidenceThreshold.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + IdentifyFromPersonDirectoryRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromPersonDirectoryRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIdentifyFromPersonDirectoryRequest(document.RootElement, options); + } + + internal static IdentifyFromPersonDirectoryRequest DeserializeIdentifyFromPersonDirectoryRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList faceIds = default; + IList personIds = default; + int? maxNumOfCandidatesReturned = default; + float? confidenceThreshold = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + faceIds = array; + continue; + } + if (property.NameEquals("personIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + personIds = array; + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("confidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidenceThreshold = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return 
new IdentifyFromPersonDirectoryRequest(faceIds, personIds, maxNumOfCandidatesReturned, confidenceThreshold, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IdentifyFromPersonDirectoryRequest)} does not support writing '{options.Format}' format."); + } + } + + IdentifyFromPersonDirectoryRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeIdentifyFromPersonDirectoryRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IdentifyFromPersonDirectoryRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IdentifyFromPersonDirectoryRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeIdentifyFromPersonDirectoryRequest(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.cs new file mode 100644 index 0000000000000..ae300a421c909 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonDirectoryRequest.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The IdentifyFromPersonDirectoryRequest. + internal partial class IdentifyFromPersonDirectoryRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// Array of personIds created in Person Directory "Create Person". The valid number of personIds is between [1,30]. + /// or is null. 
+ public IdentifyFromPersonDirectoryRequest(IEnumerable faceIds, IEnumerable personIds) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(personIds, nameof(personIds)); + + FaceIds = faceIds.ToList(); + PersonIds = personIds.ToList(); + } + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// Array of personIds created in Person Directory "Create Person". The valid number of personIds is between [1,30]. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// Keeps track of any properties unknown to the library. + internal IdentifyFromPersonDirectoryRequest(IList faceIds, IList personIds, int? maxNumOfCandidatesReturned, float? confidenceThreshold, IDictionary serializedAdditionalRawData) + { + FaceIds = faceIds; + PersonIds = personIds; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + ConfidenceThreshold = confidenceThreshold; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IdentifyFromPersonDirectoryRequest() + { + } + + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + public IList FaceIds { get; } + /// Array of personIds created in Person Directory "Create Person". The valid number of personIds is between [1,30]. + public IList PersonIds { get; } + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + public int? 
MaxNumOfCandidatesReturned { get; set; } + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + public float? ConfidenceThreshold { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..9964600f76917 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class IdentifyFromPersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromPersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceIds"u8); + writer.WriteStartArray(); + foreach (var item in FaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + writer.WritePropertyName("personGroupId"u8); + writer.WriteStringValue(PersonGroupId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(ConfidenceThreshold)) + { + writer.WritePropertyName("confidenceThreshold"u8); + writer.WriteNumberValue(ConfidenceThreshold.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + IdentifyFromPersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromPersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIdentifyFromPersonGroupRequest(document.RootElement, options); + } + + internal static IdentifyFromPersonGroupRequest DeserializeIdentifyFromPersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IList faceIds = default; + string personGroupId = default; + int? maxNumOfCandidatesReturned = default; + float? confidenceThreshold = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + faceIds = array; + continue; + } + if (property.NameEquals("personGroupId"u8)) + { + personGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("confidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidenceThreshold = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IdentifyFromPersonGroupRequest(faceIds, personGroupId, maxNumOfCandidatesReturned, 
confidenceThreshold, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IdentifyFromPersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + IdentifyFromPersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeIdentifyFromPersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IdentifyFromPersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IdentifyFromPersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeIdentifyFromPersonGroupRequest(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.cs new file mode 100644 index 0000000000000..13316825a639b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/IdentifyFromPersonGroupRequest.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The IdentifyFromPersonGroupRequest. + internal partial class IdentifyFromPersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// personGroupId of the target Person Group, created by "Create Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. 
+ /// or is null. + public IdentifyFromPersonGroupRequest(IEnumerable faceIds, string personGroupId) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(personGroupId, nameof(personGroupId)); + + FaceIds = faceIds.ToList(); + PersonGroupId = personGroupId; + } + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// personGroupId of the target Person Group, created by "Create Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// Keeps track of any properties unknown to the library. + internal IdentifyFromPersonGroupRequest(IList faceIds, string personGroupId, int? maxNumOfCandidatesReturned, float? confidenceThreshold, IDictionary serializedAdditionalRawData) + { + FaceIds = faceIds; + PersonGroupId = personGroupId; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + ConfidenceThreshold = confidenceThreshold; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IdentifyFromPersonGroupRequest() + { + } + + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + public IList FaceIds { get; } + /// personGroupId of the target Person Group, created by "Create Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. 
+ public string PersonGroupId { get; } + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + public int? MaxNumOfCandidatesReturned { get; set; } + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + public float? ConfidenceThreshold { get; set; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Argument.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Argument.cs new file mode 100644 index 0000000000000..bc73d052fd30e --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Argument.cs @@ -0,0 +1,129 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + internal static class Argument + { + public static void AssertNotNull(T value, string name) + { + if (value is null) + { + throw new ArgumentNullException(name); + } + } + + public static void AssertNotNull(T? 
value, string name) + where T : struct + { + if (!value.HasValue) + { + throw new ArgumentNullException(name); + } + } + + public static void AssertNotNullOrEmpty(IEnumerable value, string name) + { + if (value is null) + { + throw new ArgumentNullException(name); + } + if (value is ICollection collectionOfT && collectionOfT.Count == 0) + { + throw new ArgumentException("Value cannot be an empty collection.", name); + } + if (value is ICollection collection && collection.Count == 0) + { + throw new ArgumentException("Value cannot be an empty collection.", name); + } + using IEnumerator e = value.GetEnumerator(); + if (!e.MoveNext()) + { + throw new ArgumentException("Value cannot be an empty collection.", name); + } + } + + public static void AssertNotNullOrEmpty(string value, string name) + { + if (value is null) + { + throw new ArgumentNullException(name); + } + if (value.Length == 0) + { + throw new ArgumentException("Value cannot be an empty string.", name); + } + } + + public static void AssertNotNullOrWhiteSpace(string value, string name) + { + if (value is null) + { + throw new ArgumentNullException(name); + } + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value cannot be empty or contain only white-space characters.", name); + } + } + + public static void AssertNotDefault(ref T value, string name) + where T : struct, IEquatable + { + if (value.Equals(default)) + { + throw new ArgumentException("Value cannot be empty.", name); + } + } + + public static void AssertInRange(T value, T minimum, T maximum, string name) + where T : notnull, IComparable + { + if (minimum.CompareTo(value) > 0) + { + throw new ArgumentOutOfRangeException(name, "Value is less than the minimum allowed."); + } + if (maximum.CompareTo(value) < 0) + { + throw new ArgumentOutOfRangeException(name, "Value is greater than the maximum allowed."); + } + } + + public static void AssertEnumDefined(Type enumType, object value, string name) + { + if 
(!Enum.IsDefined(enumType, value)) + { + throw new ArgumentException($"Value not defined for {enumType.FullName}.", name); + } + } + + public static T CheckNotNull(T value, string name) + where T : class + { + AssertNotNull(value, name); + return value; + } + + public static string CheckNotNullOrEmpty(string value, string name) + { + AssertNotNullOrEmpty(value, name); + return value; + } + + public static void AssertNull(T value, string name, string message = null) + { + if (value != null) + { + throw new ArgumentException(message ?? "Value must be null.", name); + } + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingDictionary.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingDictionary.cs new file mode 100644 index 0000000000000..6558c088c9b59 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingDictionary.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + internal class ChangeTrackingDictionary : IDictionary, IReadOnlyDictionary where TKey : notnull + { + private IDictionary _innerDictionary; + + public ChangeTrackingDictionary() + { + } + + public ChangeTrackingDictionary(IDictionary dictionary) + { + if (dictionary == null) + { + return; + } + _innerDictionary = new Dictionary(dictionary); + } + + public ChangeTrackingDictionary(IReadOnlyDictionary dictionary) + { + if (dictionary == null) + { + return; + } + _innerDictionary = new Dictionary(); + foreach (var pair in dictionary) + { + _innerDictionary.Add(pair); + } + } + + public bool IsUndefined => _innerDictionary == null; + + public int Count => IsUndefined ? 0 : EnsureDictionary().Count; + + public bool IsReadOnly => IsUndefined ? 
false : EnsureDictionary().IsReadOnly; + + public ICollection Keys => IsUndefined ? Array.Empty() : EnsureDictionary().Keys; + + public ICollection Values => IsUndefined ? Array.Empty() : EnsureDictionary().Values; + + public TValue this[TKey key] + { + get + { + if (IsUndefined) + { + throw new KeyNotFoundException(nameof(key)); + } + return EnsureDictionary()[key]; + } + set + { + EnsureDictionary()[key] = value; + } + } + + IEnumerable IReadOnlyDictionary.Keys => Keys; + + IEnumerable IReadOnlyDictionary.Values => Values; + + public IEnumerator> GetEnumerator() + { + if (IsUndefined) + { + IEnumerator> enumerateEmpty() + { + yield break; + } + return enumerateEmpty(); + } + return EnsureDictionary().GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + public void Add(KeyValuePair item) + { + EnsureDictionary().Add(item); + } + + public void Clear() + { + EnsureDictionary().Clear(); + } + + public bool Contains(KeyValuePair item) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().Contains(item); + } + + public void CopyTo(KeyValuePair[] array, int index) + { + if (IsUndefined) + { + return; + } + EnsureDictionary().CopyTo(array, index); + } + + public bool Remove(KeyValuePair item) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().Remove(item); + } + + public void Add(TKey key, TValue value) + { + EnsureDictionary().Add(key, value); + } + + public bool ContainsKey(TKey key) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().ContainsKey(key); + } + + public bool Remove(TKey key) + { + if (IsUndefined) + { + return false; + } + return EnsureDictionary().Remove(key); + } + + public bool TryGetValue(TKey key, out TValue value) + { + if (IsUndefined) + { + value = default; + return false; + } + return EnsureDictionary().TryGetValue(key, out value); + } + + public IDictionary EnsureDictionary() + { + return _innerDictionary ??= new Dictionary(); + 
} + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingList.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingList.cs new file mode 100644 index 0000000000000..2c71b511450b5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ChangeTrackingList.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + internal class ChangeTrackingList : IList, IReadOnlyList + { + private IList _innerList; + + public ChangeTrackingList() + { + } + + public ChangeTrackingList(IList innerList) + { + if (innerList != null) + { + _innerList = innerList; + } + } + + public ChangeTrackingList(IReadOnlyList innerList) + { + if (innerList != null) + { + _innerList = innerList.ToList(); + } + } + + public bool IsUndefined => _innerList == null; + + public int Count => IsUndefined ? 0 : EnsureList().Count; + + public bool IsReadOnly => IsUndefined ? 
false : EnsureList().IsReadOnly; + + public T this[int index] + { + get + { + if (IsUndefined) + { + throw new ArgumentOutOfRangeException(nameof(index)); + } + return EnsureList()[index]; + } + set + { + if (IsUndefined) + { + throw new ArgumentOutOfRangeException(nameof(index)); + } + EnsureList()[index] = value; + } + } + + public void Reset() + { + _innerList = null; + } + + public IEnumerator GetEnumerator() + { + if (IsUndefined) + { + IEnumerator enumerateEmpty() + { + yield break; + } + return enumerateEmpty(); + } + return EnsureList().GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + public void Add(T item) + { + EnsureList().Add(item); + } + + public void Clear() + { + EnsureList().Clear(); + } + + public bool Contains(T item) + { + if (IsUndefined) + { + return false; + } + return EnsureList().Contains(item); + } + + public void CopyTo(T[] array, int arrayIndex) + { + if (IsUndefined) + { + return; + } + EnsureList().CopyTo(array, arrayIndex); + } + + public bool Remove(T item) + { + if (IsUndefined) + { + return false; + } + return EnsureList().Remove(item); + } + + public int IndexOf(T item) + { + if (IsUndefined) + { + return -1; + } + return EnsureList().IndexOf(item); + } + + public void Insert(int index, T item) + { + EnsureList().Insert(index, item); + } + + public void RemoveAt(int index) + { + if (IsUndefined) + { + throw new ArgumentOutOfRangeException(nameof(index)); + } + EnsureList().RemoveAt(index); + } + + public IList EnsureList() + { + return _innerList ??= new List(); + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ModelSerializationExtensions.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ModelSerializationExtensions.cs new file mode 100644 index 0000000000000..1e015ddfd0b3f --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/ModelSerializationExtensions.cs @@ -0,0 +1,398 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Text.Json; +using System.Xml; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal static class ModelSerializationExtensions + { + internal static readonly ModelReaderWriterOptions WireOptions = new ModelReaderWriterOptions("W"); + + public static object GetObject(this JsonElement element) + { + switch (element.ValueKind) + { + case JsonValueKind.String: + return element.GetString(); + case JsonValueKind.Number: + if (element.TryGetInt32(out int intValue)) + { + return intValue; + } + if (element.TryGetInt64(out long longValue)) + { + return longValue; + } + return element.GetDouble(); + case JsonValueKind.True: + return true; + case JsonValueKind.False: + return false; + case JsonValueKind.Undefined: + case JsonValueKind.Null: + return null; + case JsonValueKind.Object: + var dictionary = new Dictionary(); + foreach (var jsonProperty in element.EnumerateObject()) + { + dictionary.Add(jsonProperty.Name, jsonProperty.Value.GetObject()); + } + return dictionary; + case JsonValueKind.Array: + var list = new List(); + foreach (var item in element.EnumerateArray()) + { + list.Add(item.GetObject()); + } + return list.ToArray(); + default: + throw new NotSupportedException($"Not supported value kind {element.ValueKind}"); + } + } + + public static byte[] GetBytesFromBase64(this JsonElement element, string format) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + + return format switch + { + "U" => TypeFormatters.FromBase64UrlString(element.GetRequiredString()), + "D" => element.GetBytesFromBase64(), + _ => throw new ArgumentException($"Format is not supported: '{format}'", nameof(format)) + }; + } + + public static DateTimeOffset GetDateTimeOffset(this JsonElement element, string format) => 
format switch + { + "U" when element.ValueKind == JsonValueKind.Number => DateTimeOffset.FromUnixTimeSeconds(element.GetInt64()), + _ => TypeFormatters.ParseDateTimeOffset(element.GetString(), format) + }; + + public static TimeSpan GetTimeSpan(this JsonElement element, string format) => TypeFormatters.ParseTimeSpan(element.GetString(), format); + + public static char GetChar(this JsonElement element) + { + if (element.ValueKind == JsonValueKind.String) + { + var text = element.GetString(); + if (text == null || text.Length != 1) + { + throw new NotSupportedException($"Cannot convert \"{text}\" to a char"); + } + return text[0]; + } + else + { + throw new NotSupportedException($"Cannot convert {element.ValueKind} to a char"); + } + } + + [Conditional("DEBUG")] + public static void ThrowNonNullablePropertyIsNull(this JsonProperty property) + { + throw new JsonException($"A property '{property.Name}' defined as non-nullable but received as null from the service. This exception only happens in DEBUG builds of the library and would be ignored in the release build"); + } + + public static string GetRequiredString(this JsonElement element) + { + var value = element.GetString(); + if (value == null) + { + throw new InvalidOperationException($"The requested operation requires an element of type 'String', but the target element has type '{element.ValueKind}'."); + } + return value; + } + + public static void WriteStringValue(this Utf8JsonWriter writer, DateTimeOffset value, string format) + { + writer.WriteStringValue(TypeFormatters.ToString(value, format)); + } + + public static void WriteStringValue(this Utf8JsonWriter writer, DateTime value, string format) + { + writer.WriteStringValue(TypeFormatters.ToString(value, format)); + } + + public static void WriteStringValue(this Utf8JsonWriter writer, TimeSpan value, string format) + { + writer.WriteStringValue(TypeFormatters.ToString(value, format)); + } + + public static void WriteStringValue(this Utf8JsonWriter writer, 
char value) + { + writer.WriteStringValue(value.ToString(CultureInfo.InvariantCulture)); + } + + public static void WriteBase64StringValue(this Utf8JsonWriter writer, byte[] value, string format) + { + if (value == null) + { + writer.WriteNullValue(); + return; + } + switch (format) + { + case "U": + writer.WriteStringValue(TypeFormatters.ToBase64UrlString(value)); + break; + case "D": + writer.WriteBase64StringValue(value); + break; + default: + throw new ArgumentException($"Format is not supported: '{format}'", nameof(format)); + } + } + + public static void WriteNumberValue(this Utf8JsonWriter writer, DateTimeOffset value, string format) + { + if (format != "U") + { + throw new ArgumentOutOfRangeException(nameof(format), "Only 'U' format is supported when writing a DateTimeOffset as a Number."); + } + writer.WriteNumberValue(value.ToUnixTimeSeconds()); + } + + public static void WriteObjectValue(this Utf8JsonWriter writer, T value, ModelReaderWriterOptions options = null) + { + switch (value) + { + case null: + writer.WriteNullValue(); + break; + case IJsonModel jsonModel: + jsonModel.Write(writer, options ?? 
WireOptions); + break; + case IUtf8JsonSerializable serializable: + serializable.Write(writer); + break; + case byte[] bytes: + writer.WriteBase64StringValue(bytes); + break; + case BinaryData bytes0: + writer.WriteBase64StringValue(bytes0); + break; + case JsonElement json: + json.WriteTo(writer); + break; + case int i: + writer.WriteNumberValue(i); + break; + case decimal d: + writer.WriteNumberValue(d); + break; + case double d0: + if (double.IsNaN(d0)) + { + writer.WriteStringValue("NaN"); + } + else + { + writer.WriteNumberValue(d0); + } + break; + case float f: + writer.WriteNumberValue(f); + break; + case long l: + writer.WriteNumberValue(l); + break; + case string s: + writer.WriteStringValue(s); + break; + case bool b: + writer.WriteBooleanValue(b); + break; + case Guid g: + writer.WriteStringValue(g); + break; + case DateTimeOffset dateTimeOffset: + writer.WriteStringValue(dateTimeOffset, "O"); + break; + case DateTime dateTime: + writer.WriteStringValue(dateTime, "O"); + break; + case IEnumerable> enumerable: + writer.WriteStartObject(); + foreach (var pair in enumerable) + { + writer.WritePropertyName(pair.Key); + writer.WriteObjectValue(pair.Value, options); + } + writer.WriteEndObject(); + break; + case IEnumerable objectEnumerable: + writer.WriteStartArray(); + foreach (var item in objectEnumerable) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + break; + case TimeSpan timeSpan: + writer.WriteStringValue(timeSpan, "P"); + break; + default: + throw new NotSupportedException($"Not supported type {value.GetType()}"); + } + } + + public static void WriteObjectValue(this Utf8JsonWriter writer, object value, ModelReaderWriterOptions options = null) + { + writer.WriteObjectValue(value, options); + } + + internal static class TypeFormatters + { + private const string RoundtripZFormat = "yyyy-MM-ddTHH:mm:ss.fffffffZ"; + public const string DefaultNumberFormat = "G"; + + public static string ToString(bool value) => value ? 
"true" : "false"; + + public static string ToString(DateTime value, string format) => value.Kind switch + { + DateTimeKind.Utc => ToString((DateTimeOffset)value, format), + _ => throw new NotSupportedException($"DateTime {value} has a Kind of {value.Kind}. Azure SDK requires it to be UTC. You can call DateTime.SpecifyKind to change Kind property value to DateTimeKind.Utc.") + }; + + public static string ToString(DateTimeOffset value, string format) => format switch + { + "D" => value.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture), + "U" => value.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture), + "O" => value.ToUniversalTime().ToString(RoundtripZFormat, CultureInfo.InvariantCulture), + "o" => value.ToUniversalTime().ToString(RoundtripZFormat, CultureInfo.InvariantCulture), + "R" => value.ToString("r", CultureInfo.InvariantCulture), + _ => value.ToString(format, CultureInfo.InvariantCulture) + }; + + public static string ToString(TimeSpan value, string format) => format switch + { + "P" => XmlConvert.ToString(value), + _ => value.ToString(format, CultureInfo.InvariantCulture) + }; + + public static string ToString(byte[] value, string format) => format switch + { + "U" => ToBase64UrlString(value), + "D" => Convert.ToBase64String(value), + _ => throw new ArgumentException($"Format is not supported: '{format}'", nameof(format)) + }; + + public static string ToBase64UrlString(byte[] value) + { + int numWholeOrPartialInputBlocks = checked(value.Length + 2) / 3; + int size = checked(numWholeOrPartialInputBlocks * 4); + char[] output = new char[size]; + + int numBase64Chars = Convert.ToBase64CharArray(value, 0, value.Length, output, 0); + + int i = 0; + for (; i < numBase64Chars; i++) + { + char ch = output[i]; + if (ch == '+') + { + output[i] = '-'; + } + else + { + if (ch == '/') + { + output[i] = '_'; + } + else + { + if (ch == '=') + { + break; + } + } + } + } + + return new string(output, 0, i); + } + + public static byte[] FromBase64UrlString(string 
value) + { + int paddingCharsToAdd = (value.Length % 4) switch + { + 0 => 0, + 2 => 2, + 3 => 1, + _ => throw new InvalidOperationException("Malformed input") + }; + char[] output = new char[(value.Length + paddingCharsToAdd)]; + int i = 0; + for (; i < value.Length; i++) + { + char ch = value[i]; + if (ch == '-') + { + output[i] = '+'; + } + else + { + if (ch == '_') + { + output[i] = '/'; + } + else + { + output[i] = ch; + } + } + } + + for (; i < output.Length; i++) + { + output[i] = '='; + } + + return Convert.FromBase64CharArray(output, 0, output.Length); + } + + public static DateTimeOffset ParseDateTimeOffset(string value, string format) => format switch + { + "U" => DateTimeOffset.FromUnixTimeSeconds(long.Parse(value, CultureInfo.InvariantCulture)), + _ => DateTimeOffset.Parse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal) + }; + + public static TimeSpan ParseTimeSpan(string value, string format) => format switch + { + "P" => XmlConvert.ToTimeSpan(value), + _ => TimeSpan.ParseExact(value, format, CultureInfo.InvariantCulture) + }; + + public static string ConvertToString(object value, string format = null) => value switch + { + null => "null", + string s => s, + bool b => ToString(b), + int or float or double or long or decimal => ((IFormattable)value).ToString(DefaultNumberFormat, CultureInfo.InvariantCulture), + byte[] b0 when format != null => ToString(b0, format), + IEnumerable s0 => string.Join(",", s0), + DateTimeOffset dateTime when format != null => ToString(dateTime, format), + TimeSpan timeSpan when format != null => ToString(timeSpan, format), + TimeSpan timeSpan0 => XmlConvert.ToString(timeSpan0), + Guid guid => guid.ToString(), + BinaryData binaryData => ConvertToString(binaryData.ToArray(), format), + _ => value.ToString() + }; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/MultipartFormDataRequestContent.cs 
b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/MultipartFormDataRequestContent.cs new file mode 100644 index 0000000000000..5b57b2e4c5acc --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/MultipartFormDataRequestContent.cs @@ -0,0 +1,203 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Globalization; +using System.IO; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + internal class MultipartFormDataRequestContent : RequestContent + { + private readonly System.Net.Http.MultipartFormDataContent _multipartContent; + private static readonly Random _random = new Random(); + private static readonly char[] _boundaryValues = "0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz".ToCharArray(); + + public MultipartFormDataRequestContent() + { + _multipartContent = new System.Net.Http.MultipartFormDataContent(CreateBoundary()); + } + + public string ContentType + { + get + { + return _multipartContent.Headers.ContentType.ToString(); + } + } + + internal HttpContent HttpContent => _multipartContent; + + private static string CreateBoundary() + { + Span chars = new char[70]; + byte[] random = new byte[70]; + _random.NextBytes(random); + int mask = 255 >> 2; + for (int i = 0; i < 70; i++) + { + chars[i] = _boundaryValues[random[i] & mask]; + } + return chars.ToString(); + } + + public void Add(string content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + Add(new StringContent(content), name, filename, contentType); + } + + public void Add(int content, string name, string filename = null, string contentType = null) + { + 
Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + string value = content.ToString("G", CultureInfo.InvariantCulture); + Add(new StringContent(value), name, filename, contentType); + } + + public void Add(long content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + string value = content.ToString("G", CultureInfo.InvariantCulture); + Add(new StringContent(value), name, filename, contentType); + } + + public void Add(float content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + string value = content.ToString("G", CultureInfo.InvariantCulture); + Add(new StringContent(value), name, filename, contentType); + } + + public void Add(double content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + string value = content.ToString("G", CultureInfo.InvariantCulture); + Add(new StringContent(value), name, filename, contentType); + } + + public void Add(decimal content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + string value = content.ToString("G", CultureInfo.InvariantCulture); + Add(new StringContent(value), name, filename, contentType); + } + + public void Add(bool content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + string value = content ? 
"true" : "false"; + Add(new StringContent(value), name, filename, contentType); + } + + public void Add(Stream content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + Add(new StreamContent(content), name, filename, contentType); + } + + public void Add(byte[] content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + Add(new ByteArrayContent(content), name, filename, contentType); + } + + public void Add(BinaryData content, string name, string filename = null, string contentType = null) + { + Argument.AssertNotNull(content, nameof(content)); + Argument.AssertNotNullOrEmpty(name, nameof(name)); + + Add(new ByteArrayContent(content.ToArray()), name, filename, contentType); + } + + private void Add(HttpContent content, string name, string filename, string contentType) + { + if (filename != null) + { + Argument.AssertNotNullOrEmpty(filename, nameof(filename)); + AddFilenameHeader(content, name, filename); + } + if (contentType != null) + { + Argument.AssertNotNullOrEmpty(contentType, nameof(contentType)); + AddContentTypeHeader(content, contentType); + } + _multipartContent.Add(content, name); + } + + public static void AddFilenameHeader(HttpContent content, string name, string filename) + { + ContentDispositionHeaderValue header = new ContentDispositionHeaderValue("form-data") { Name = name, FileName = filename }; + content.Headers.ContentDisposition = header; + } + + public static void AddContentTypeHeader(HttpContent content, string contentType) + { + MediaTypeHeaderValue header = new MediaTypeHeaderValue(contentType); + content.Headers.ContentType = header; + } + + public override bool TryComputeLength(out long length) + { + if (_multipartContent.Headers.ContentLength is long contentLength) + { + length = 
contentLength; + return true; + } + length = 0; + return false; + } + + public override void WriteTo(Stream stream, CancellationToken cancellationToken = default) + { +#if NET6_0_OR_GREATER + _multipartContent.CopyTo(stream, default, cancellationToken); +#else +#pragma warning disable AZC0107 + _multipartContent.CopyToAsync(stream).EnsureCompleted(); +#pragma warning restore AZC0107 +#endif + } + + public override async Task WriteToAsync(Stream stream, CancellationToken cancellationToken = default) + { +#if NET6_0_OR_GREATER + await _multipartContent.CopyToAsync(stream, cancellationToken).ConfigureAwait(false); +#else + await _multipartContent.CopyToAsync(stream).ConfigureAwait(false); +#endif + } + + public override void Dispose() + { + _multipartContent.Dispose(); + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Optional.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Optional.cs new file mode 100644 index 0000000000000..185e31275d274 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Optional.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using System.Text.Json; + +namespace Azure.AI.Vision.Face +{ + internal static class Optional + { + public static bool IsCollectionDefined(IEnumerable collection) + { + return !(collection is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined); + } + + public static bool IsCollectionDefined(IDictionary collection) + { + return !(collection is ChangeTrackingDictionary changeTrackingDictionary && changeTrackingDictionary.IsUndefined); + } + + public static bool IsCollectionDefined(IReadOnlyDictionary collection) + { + return !(collection is ChangeTrackingDictionary changeTrackingDictionary && changeTrackingDictionary.IsUndefined); + } + + public static bool IsDefined(T? 
value) + where T : struct + { + return value.HasValue; + } + + public static bool IsDefined(object value) + { + return value != null; + } + + public static bool IsDefined(JsonElement value) + { + return value.ValueKind != JsonValueKind.Undefined; + } + + public static bool IsDefined(string value) + { + return value != null; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Utf8JsonRequestContent.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Utf8JsonRequestContent.cs new file mode 100644 index 0000000000000..9f4c076065924 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/Internal/Utf8JsonRequestContent.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.IO; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal class Utf8JsonRequestContent : RequestContent + { + private readonly MemoryStream _stream; + private readonly RequestContent _content; + + public Utf8JsonRequestContent() + { + _stream = new MemoryStream(); + _content = Create(_stream); + JsonWriter = new Utf8JsonWriter(_stream); + } + + public Utf8JsonWriter JsonWriter { get; } + + public override async Task WriteToAsync(Stream stream, CancellationToken cancellationToken = default) + { + await JsonWriter.FlushAsync().ConfigureAwait(false); + await _content.WriteToAsync(stream, cancellationToken).ConfigureAwait(false); + } + + public override void WriteTo(Stream stream, CancellationToken cancellationToken = default) + { + JsonWriter.Flush(); + _content.WriteTo(stream, cancellationToken); + } + + public override bool TryComputeLength(out long length) + { + length = JsonWriter.BytesCommitted + JsonWriter.BytesPending; + return true; + } + + public override void Dispose() + { + JsonWriter.Dispose(); + _content.Dispose(); + _stream.Dispose(); + } + } +} 
diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.Serialization.cs new file mode 100644 index 0000000000000..fda4a7e3e76c3 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LandmarkCoordinate : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LandmarkCoordinate)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("x"u8); + writer.WriteNumberValue(X); + writer.WritePropertyName("y"u8); + writer.WriteNumberValue(Y); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LandmarkCoordinate IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LandmarkCoordinate)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLandmarkCoordinate(document.RootElement, options); + } + + internal static LandmarkCoordinate DeserializeLandmarkCoordinate(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + float x = default; + float y = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("x"u8)) + { + x = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("y"u8)) + { + y = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LandmarkCoordinate(x, y, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LandmarkCoordinate)} does not support writing '{options.Format}' format."); + } + } + + LandmarkCoordinate IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLandmarkCoordinate(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LandmarkCoordinate)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LandmarkCoordinate FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLandmarkCoordinate(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.cs new file mode 100644 index 0000000000000..71b7906240e9d --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LandmarkCoordinate.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Landmark coordinates within an image. + public partial class LandmarkCoordinate + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The horizontal component, in pixels. + /// The vertical component, in pixels. + internal LandmarkCoordinate(float x, float y) + { + X = x; + Y = y; + } + + /// Initializes a new instance of . + /// The horizontal component, in pixels. + /// The vertical component, in pixels. + /// Keeps track of any properties unknown to the library. + internal LandmarkCoordinate(float x, float y, IDictionary serializedAdditionalRawData) + { + X = x; + Y = y; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LandmarkCoordinate() + { + } + + /// The horizontal component, in pixels. + public float X { get; } + /// The vertical component, in pixels. + public float Y { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs new file mode 100644 index 0000000000000..d52d68e0bc120 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargeFaceList : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceList)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("largeFaceListId"u8); + writer.WriteStringValue(LargeFaceListId); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargeFaceList IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceList)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargeFaceList(document.RootElement, options); + } + + internal static LargeFaceList DeserializeLargeFaceList(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + string largeFaceListId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("largeFaceListId"u8)) + { + largeFaceListId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargeFaceList(name, userData, recognitionModel, largeFaceListId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargeFaceList)} does not support writing '{options.Format}' format."); + } + } + + LargeFaceList IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargeFaceList(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargeFaceList)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargeFaceList FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargeFaceList(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs new file mode 100644 index 0000000000000..586366e513366 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Large face list is a list of faces, up to 1,000,000 faces. + public partial class LargeFaceList + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargeFaceList(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Keeps track of any properties unknown to the library. + internal LargeFaceList(string name, string userData, FaceRecognitionModel? recognitionModel, string largeFaceListId, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + LargeFaceListId = largeFaceListId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
+ internal LargeFaceList() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + public string LargeFaceListId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs new file mode 100644 index 0000000000000..2f9621a39bbcf --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargeFaceListFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargeFaceListFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargeFaceListFace(document.RootElement, options); + } + + internal static LargeFaceListFace DeserializeLargeFaceListFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargeFaceListFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support writing '{options.Format}' format."); + } + } + + LargeFaceListFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargeFaceListFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargeFaceListFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargeFaceListFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs new file mode 100644 index 0000000000000..6a0efa75f2a00 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for large face list. + public partial class LargeFaceListFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal LargeFaceListFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal LargeFaceListFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. The length limit is 1K. + public string UserData { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs new file mode 100644 index 0000000000000..0a39ef9f4066b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargePersonGroup : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargePersonGroup IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargePersonGroup(document.RootElement, options); + } + + internal static LargePersonGroup DeserializeLargePersonGroup(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? recognitionModel = default; + string largePersonGroupId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargePersonGroup(name, userData, recognitionModel, largePersonGroupId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support writing '{options.Format}' format."); + } + } + + LargePersonGroup IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargePersonGroup(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargePersonGroup)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargePersonGroup FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargePersonGroup(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs new file mode 100644 index 0000000000000..a38758c190fd3 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The container of the uploaded person data, including face recognition feature, and up to 1,000,000 people. + public partial class LargePersonGroup + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargePersonGroup(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// ID of the container. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroup(string name, string userData, FaceRecognitionModel? recognitionModel, string largePersonGroupId, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + LargePersonGroupId = largePersonGroupId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
+ internal LargePersonGroup() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// ID of the container. + public string LargePersonGroupId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs new file mode 100644 index 0000000000000..eb363bd7f27c2 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargePersonGroupPerson : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + } + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsCollectionDefined(PersistedFaceIds)) + { + writer.WritePropertyName("persistedFaceIds"u8); + writer.WriteStartArray(); + foreach (var item in PersistedFaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargePersonGroupPerson IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargePersonGroupPerson(document.RootElement, options); + } + + internal static LargePersonGroupPerson DeserializeLargePersonGroupPerson(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + string name = default; + string userData = default; + IReadOnlyList persistedFaceIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("persistedFaceIds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + persistedFaceIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargePersonGroupPerson(personId, name, userData, persistedFaceIds ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support writing '{options.Format}' format."); + } + } + + LargePersonGroupPerson IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargePersonGroupPerson(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargePersonGroupPerson FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargePersonGroupPerson(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs new file mode 100644 index 0000000000000..1884b1812ec41 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The person in a specified large person group. To add face to this person, please call "Add Large Person Group Person Face". + public partial class LargePersonGroupPerson + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargePersonGroupPerson(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + PersistedFaceIds = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// ID of the person. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Face ids of registered faces in the person. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroupPerson(Guid personId, string name, string userData, IReadOnlyList persistedFaceIds, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + Name = name; + UserData = userData; + PersistedFaceIds = persistedFaceIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
+ internal LargePersonGroupPerson() + { + } + + /// ID of the person. + public Guid PersonId { get; } + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Face ids of registered faces in the person. + public IReadOnlyList PersistedFaceIds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs new file mode 100644 index 0000000000000..a602eb3457e01 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargePersonGroupPersonFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargePersonGroupPersonFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargePersonGroupPersonFace(document.RootElement, options); + } + + internal static LargePersonGroupPersonFace DeserializeLargePersonGroupPersonFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargePersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support writing '{options.Format}' format."); + } + } + + LargePersonGroupPersonFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargePersonGroupPersonFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargePersonGroupPersonFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargePersonGroupPersonFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs new file mode 100644 index 0000000000000..56aa4d0061ffe --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for large person group person. + public partial class LargePersonGroupPersonFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal LargePersonGroupPersonFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroupPersonFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. The length limit is 1K. + public string UserData { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.Serialization.cs new file mode 100644 index 0000000000000..cd2baa5b49068 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.Serialization.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class ListFaceResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListFaceResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + writer.WritePropertyName("persistedFaceIds"u8); + writer.WriteStartArray(); + foreach (var item in PersistedFaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + ListFaceResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListFaceResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListFaceResult(document.RootElement, options); + } + + internal static ListFaceResult DeserializeListFaceResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + IReadOnlyList persistedFaceIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("persistedFaceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + persistedFaceIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListFaceResult(personId, persistedFaceIds, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListFaceResult)} does not support writing '{options.Format}' format."); + } + } + + ListFaceResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeListFaceResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListFaceResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ListFaceResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeListFaceResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.cs new file mode 100644 index 0000000000000..9525f9620bce8 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListFaceResult.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Response of list face of person. + public partial class ListFaceResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Id of person. + /// Array of persisted face ids. + /// is null. + internal ListFaceResult(Guid personId, IEnumerable persistedFaceIds) + { + Argument.AssertNotNull(persistedFaceIds, nameof(persistedFaceIds)); + + PersonId = personId; + PersistedFaceIds = persistedFaceIds.ToList(); + } + + /// Initializes a new instance of . + /// Id of person. + /// Array of persisted face ids. + /// Keeps track of any properties unknown to the library. + internal ListFaceResult(Guid personId, IReadOnlyList persistedFaceIds, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + PersistedFaceIds = persistedFaceIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListFaceResult() + { + } + + /// Id of person. + public Guid PersonId { get; } + /// Array of persisted face ids. + public IReadOnlyList PersistedFaceIds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.Serialization.cs new file mode 100644 index 0000000000000..bc4fbfe35b927 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class ListGroupReferenceResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListGroupReferenceResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("dynamicPersonGroupIds"u8); + writer.WriteStartArray(); + foreach (var item in DynamicPersonGroupIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + ListGroupReferenceResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListGroupReferenceResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListGroupReferenceResult(document.RootElement, options); + } + + internal static ListGroupReferenceResult DeserializeListGroupReferenceResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList dynamicPersonGroupIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("dynamicPersonGroupIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + dynamicPersonGroupIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListGroupReferenceResult(dynamicPersonGroupIds, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListGroupReferenceResult)} does not support writing '{options.Format}' format."); + } + } + + ListGroupReferenceResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeListGroupReferenceResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListGroupReferenceResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ListGroupReferenceResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeListGroupReferenceResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.cs new file mode 100644 index 0000000000000..4ab4ba0e9ca9a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListGroupReferenceResult.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Response of list dynamic person group of person. + public partial class ListGroupReferenceResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of PersonDirectory DynamicPersonGroup ids. + /// is null. + internal ListGroupReferenceResult(IEnumerable dynamicPersonGroupIds) + { + Argument.AssertNotNull(dynamicPersonGroupIds, nameof(dynamicPersonGroupIds)); + + DynamicPersonGroupIds = dynamicPersonGroupIds.ToList(); + } + + /// Initializes a new instance of . + /// Array of PersonDirectory DynamicPersonGroup ids. + /// Keeps track of any properties unknown to the library. + internal ListGroupReferenceResult(IReadOnlyList dynamicPersonGroupIds, IDictionary serializedAdditionalRawData) + { + DynamicPersonGroupIds = dynamicPersonGroupIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListGroupReferenceResult() + { + } + + /// Array of PersonDirectory DynamicPersonGroup ids. + public IReadOnlyList DynamicPersonGroupIds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.Serialization.cs new file mode 100644 index 0000000000000..e8bff82b84e20 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.Serialization.cs @@ -0,0 +1,145 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class ListPersonResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListPersonResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personIds"u8); + writer.WriteStartArray(); + foreach (var item in PersonIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + ListPersonResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ListPersonResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeListPersonResult(document.RootElement, options); + } + + internal static ListPersonResult DeserializeListPersonResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList personIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + personIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ListPersonResult(personIds, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ListPersonResult)} does not support writing '{options.Format}' format."); + } + } + + ListPersonResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeListPersonResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ListPersonResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ListPersonResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeListPersonResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.cs new file mode 100644 index 0000000000000..84db9e776251c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/ListPersonResult.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Response of list dynamic person group person. + public partial class ListPersonResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of PersonDirectory Person ids. + /// is null. + internal ListPersonResult(IEnumerable personIds) + { + Argument.AssertNotNull(personIds, nameof(personIds)); + + PersonIds = personIds.ToList(); + } + + /// Initializes a new instance of . + /// Array of PersonDirectory Person ids. + /// Keeps track of any properties unknown to the library. + internal ListPersonResult(IReadOnlyList personIds, IDictionary serializedAdditionalRawData) + { + PersonIds = personIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal ListPersonResult() + { + } + + /// Array of PersonDirectory Person ids. + public IReadOnlyList PersonIds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs new file mode 100644 index 0000000000000..c5fc48820d5f9 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The model version used for liveness classification. + public readonly partial struct LivenessModel : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public LivenessModel(string value) + { + _value = value ?? 
throw new ArgumentNullException(nameof(value)); + } + + private const string V20200215Preview01Value = "2020-02-15-preview.01"; + private const string V20211112Preview03Value = "2021-11-12-preview.03"; + private const string V20221015Preview04Value = "2022-10-15-preview.04"; + private const string V20230302Preview05Value = "2023-03-02-preview.05"; + + /// 2020-02-15-preview.01. + public static LivenessModel V20200215Preview01 { get; } = new LivenessModel(V20200215Preview01Value); + /// 2021-11-12-preview.03. + public static LivenessModel V20211112Preview03 { get; } = new LivenessModel(V20211112Preview03Value); + /// 2022-10-15-preview.04. + public static LivenessModel V20221015Preview04 { get; } = new LivenessModel(V20221015Preview04Value); + /// 2023-03-02-preview.05. + public static LivenessModel V20230302Preview05 { get; } = new LivenessModel(V20230302Preview05Value); + /// Determines if two values are the same. + public static bool operator ==(LivenessModel left, LivenessModel right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(LivenessModel left, LivenessModel right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator LivenessModel(string value) => new LivenessModel(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is LivenessModel other && Equals(other); + /// + public bool Equals(LivenessModel other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 
0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs new file mode 100644 index 0000000000000..cb1e9d541a9bb --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The operation mode for the liveness modal. + public readonly partial struct LivenessOperationMode : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public LivenessOperationMode(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string PassiveValue = "Passive"; + + /// The operation mode for the liveness modal. + public static LivenessOperationMode Passive { get; } = new LivenessOperationMode(PassiveValue); + /// Determines if two values are the same. + public static bool operator ==(LivenessOperationMode left, LivenessOperationMode right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(LivenessOperationMode left, LivenessOperationMode right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator LivenessOperationMode(string value) => new LivenessOperationMode(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is LivenessOperationMode other && Equals(other); + /// + public bool Equals(LivenessOperationMode other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 
0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.Serialization.cs new file mode 100644 index 0000000000000..f836f261ef50f --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.Serialization.cs @@ -0,0 +1,159 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessOutputsTarget : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessOutputsTarget)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceRectangle"u8); + writer.WriteObjectValue(FaceRectangle, options); + writer.WritePropertyName("fileName"u8); + writer.WriteStringValue(FileName); + writer.WritePropertyName("timeOffsetWithinFile"u8); + writer.WriteNumberValue(TimeOffsetWithinFile); + writer.WritePropertyName("imageType"u8); + writer.WriteStringValue(ImageType.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LivenessOutputsTarget IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessOutputsTarget)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessOutputsTarget(document.RootElement, options); + } + + internal static LivenessOutputsTarget DeserializeLivenessOutputsTarget(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + FaceRectangle faceRectangle = default; + string fileName = default; + int timeOffsetWithinFile = default; + FaceImageType imageType = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceRectangle"u8)) + { + faceRectangle = FaceRectangle.DeserializeFaceRectangle(property.Value, options); + continue; + } + if (property.NameEquals("fileName"u8)) + { + fileName = property.Value.GetString(); + continue; + } + if (property.NameEquals("timeOffsetWithinFile"u8)) + { + timeOffsetWithinFile = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("imageType"u8)) + { + imageType = new FaceImageType(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LivenessOutputsTarget(faceRectangle, fileName, timeOffsetWithinFile, imageType, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessOutputsTarget)} does not support writing '{options.Format}' format."); + } + } + + LivenessOutputsTarget IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessOutputsTarget(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessOutputsTarget)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LivenessOutputsTarget FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessOutputsTarget(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.cs new file mode 100644 index 0000000000000..4469cf9f8d50f --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessOutputsTarget.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The liveness classification for target face. + public partial class LivenessOutputsTarget + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The face region where the liveness classification was made on. + /// The file name which contains the face rectangle where the liveness classification was made on. + /// The time offset within the file of the frame which contains the face rectangle where the liveness classification was made on. + /// The image type which contains the face rectangle where the liveness classification was made on. + /// or is null. + internal LivenessOutputsTarget(FaceRectangle faceRectangle, string fileName, int timeOffsetWithinFile, FaceImageType imageType) + { + Argument.AssertNotNull(faceRectangle, nameof(faceRectangle)); + Argument.AssertNotNull(fileName, nameof(fileName)); + + FaceRectangle = faceRectangle; + FileName = fileName; + TimeOffsetWithinFile = timeOffsetWithinFile; + ImageType = imageType; + } + + /// Initializes a new instance of . + /// The face region where the liveness classification was made on. + /// The file name which contains the face rectangle where the liveness classification was made on. 
+ /// The time offset within the file of the frame which contains the face rectangle where the liveness classification was made on. + /// The image type which contains the face rectangle where the liveness classification was made on. + /// Keeps track of any properties unknown to the library. + internal LivenessOutputsTarget(FaceRectangle faceRectangle, string fileName, int timeOffsetWithinFile, FaceImageType imageType, IDictionary serializedAdditionalRawData) + { + FaceRectangle = faceRectangle; + FileName = fileName; + TimeOffsetWithinFile = timeOffsetWithinFile; + ImageType = imageType; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LivenessOutputsTarget() + { + } + + /// The face region where the liveness classification was made on. + public FaceRectangle FaceRectangle { get; } + /// The file name which contains the face rectangle where the liveness classification was made on. + public string FileName { get; } + /// The time offset within the file of the frame which contains the face rectangle where the liveness classification was made on. + public int TimeOffsetWithinFile { get; } + /// The image type which contains the face rectangle where the liveness classification was made on. + public FaceImageType ImageType { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.Serialization.cs new file mode 100644 index 0000000000000..092cb634cafcc --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.Serialization.cs @@ -0,0 +1,181 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessResponseBody : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessResponseBody)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (Optional.IsDefined(LivenessDecision)) + { + writer.WritePropertyName("livenessDecision"u8); + writer.WriteStringValue(LivenessDecision.Value.ToString()); + } + if (Optional.IsDefined(Target)) + { + writer.WritePropertyName("target"u8); + writer.WriteObjectValue(Target, options); + } + if (Optional.IsDefined(ModelVersionUsed)) + { + writer.WritePropertyName("modelVersionUsed"u8); + writer.WriteStringValue(ModelVersionUsed.Value.ToString()); + } + if (Optional.IsDefined(VerifyResult)) + { + writer.WritePropertyName("verifyResult"u8); + writer.WriteObjectValue(VerifyResult, options); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + + LivenessResponseBody IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessResponseBody)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessResponseBody(document.RootElement, options); + } + + internal static LivenessResponseBody DeserializeLivenessResponseBody(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + FaceLivenessDecision? livenessDecision = default; + LivenessOutputsTarget target = default; + LivenessModel? modelVersionUsed = default; + LivenessWithVerifyOutputs verifyResult = default; + IReadOnlyDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("livenessDecision"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + livenessDecision = new FaceLivenessDecision(property.Value.GetString()); + continue; + } + if (property.NameEquals("target"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + target = LivenessOutputsTarget.DeserializeLivenessOutputsTarget(property.Value, options); + continue; + } + if (property.NameEquals("modelVersionUsed"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + modelVersionUsed = new LivenessModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("verifyResult"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + verifyResult = LivenessWithVerifyOutputs.DeserializeLivenessWithVerifyOutputs(property.Value, options); + continue; + } + additionalPropertiesDictionary.Add(property.Name, 
BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new LivenessResponseBody(livenessDecision, target, modelVersionUsed, verifyResult, additionalProperties); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessResponseBody)} does not support writing '{options.Format}' format."); + } + } + + LivenessResponseBody IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessResponseBody(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessResponseBody)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LivenessResponseBody FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessResponseBody(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.cs new file mode 100644 index 0000000000000..5d831412b0a9a --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessResponseBody.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The response body of detect liveness API call. + public partial class LivenessResponseBody + { + /// Initializes a new instance of . + internal LivenessResponseBody() + { + AdditionalProperties = new ChangeTrackingDictionary(); + } + + /// Initializes a new instance of . + /// The liveness classification for the target face. + /// Specific targets used for liveness classification. + /// The model version used for liveness classification. + /// The face verification output. Only available when the request is liveness with verify. + /// Additional Properties. + internal LivenessResponseBody(FaceLivenessDecision? livenessDecision, LivenessOutputsTarget target, LivenessModel? modelVersionUsed, LivenessWithVerifyOutputs verifyResult, IReadOnlyDictionary additionalProperties) + { + LivenessDecision = livenessDecision; + Target = target; + ModelVersionUsed = modelVersionUsed; + VerifyResult = verifyResult; + AdditionalProperties = additionalProperties; + } + + /// The liveness classification for the target face. + public FaceLivenessDecision? LivenessDecision { get; } + /// Specific targets used for liveness classification. + public LivenessOutputsTarget Target { get; } + /// The model version used for liveness classification. 
+ public LivenessModel? ModelVersionUsed { get; } + /// The face verification output. Only available when the request is liveness with verify. + public LivenessWithVerifyOutputs VerifyResult { get; } + /// + /// Additional Properties + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public IReadOnlyDictionary AdditionalProperties { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.Serialization.cs new file mode 100644 index 0000000000000..c55c1e05f9dd0 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.Serialization.cs @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessSession : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessSession)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + } + writer.WritePropertyName("createdDateTime"u8); + writer.WriteStringValue(CreatedDateTime, "O"); + if (Optional.IsDefined(SessionStartDateTime)) + { + writer.WritePropertyName("sessionStartDateTime"u8); + writer.WriteStringValue(SessionStartDateTime.Value, "O"); + } + writer.WritePropertyName("sessionExpired"u8); + writer.WriteBooleanValue(SessionExpired); + if (Optional.IsDefined(DeviceCorrelationId)) + { + writer.WritePropertyName("deviceCorrelationId"u8); + writer.WriteStringValue(DeviceCorrelationId); + } + if (Optional.IsDefined(AuthTokenTimeToLiveInSeconds)) + { + writer.WritePropertyName("authTokenTimeToLiveInSeconds"u8); + writer.WriteNumberValue(AuthTokenTimeToLiveInSeconds.Value); + } + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + if (Optional.IsDefined(Result)) + { + writer.WritePropertyName("result"u8); + writer.WriteObjectValue(Result, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LivenessSession IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessSession)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessSession(document.RootElement, options); + } + + internal static LivenessSession DeserializeLivenessSession(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string id = default; + DateTimeOffset createdDateTime = default; + DateTimeOffset? sessionStartDateTime = default; + bool sessionExpired = default; + string deviceCorrelationId = default; + int? authTokenTimeToLiveInSeconds = default; + FaceSessionStatus status = default; + LivenessSessionAuditEntry result = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("id"u8)) + { + id = property.Value.GetString(); + continue; + } + if (property.NameEquals("createdDateTime"u8)) + { + createdDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("sessionStartDateTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sessionStartDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("sessionExpired"u8)) + { + sessionExpired = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("deviceCorrelationId"u8)) + { + deviceCorrelationId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authTokenTimeToLiveInSeconds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authTokenTimeToLiveInSeconds = property.Value.GetInt32(); + continue; + } + if 
(property.NameEquals("status"u8)) + { + status = new FaceSessionStatus(property.Value.GetString()); + continue; + } + if (property.NameEquals("result"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + result = LivenessSessionAuditEntry.DeserializeLivenessSessionAuditEntry(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LivenessSession( + id, + createdDateTime, + sessionStartDateTime, + sessionExpired, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + status, + result, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessSession)} does not support writing '{options.Format}' format."); + } + } + + LivenessSession IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessSession(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessSession)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static LivenessSession FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessSession(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.cs new file mode 100644 index 0000000000000..bda0898e67ff5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSession.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Session result of detect liveness. + public partial class LivenessSession + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// DateTime when this session was created. + /// Whether or not the session is expired. + /// The current status of the session. 
+ internal LivenessSession(DateTimeOffset createdDateTime, bool sessionExpired, FaceSessionStatus status) + { + CreatedDateTime = createdDateTime; + SessionExpired = sessionExpired; + Status = status; + } + + /// Initializes a new instance of . + /// The unique ID to reference this session. + /// DateTime when this session was created. + /// DateTime when this session was started by the client. + /// Whether or not the session is expired. + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// The current status of the session. + /// The latest session audit result only populated if status == 'ResultAvailable'. + /// Keeps track of any properties unknown to the library. + internal LivenessSession(string id, DateTimeOffset createdDateTime, DateTimeOffset? sessionStartDateTime, bool sessionExpired, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, FaceSessionStatus status, LivenessSessionAuditEntry result, IDictionary serializedAdditionalRawData) + { + Id = id; + CreatedDateTime = createdDateTime; + SessionStartDateTime = sessionStartDateTime; + SessionExpired = sessionExpired; + DeviceCorrelationId = deviceCorrelationId; + AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; + Status = status; + Result = result; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LivenessSession() + { + } + + /// The unique ID to reference this session. + public string Id { get; } + /// DateTime when this session was created. + public DateTimeOffset CreatedDateTime { get; } + /// DateTime when this session was started by the client. + public DateTimeOffset? SessionStartDateTime { get; } + /// Whether or not the session is expired. 
+ public bool SessionExpired { get; } + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + public string DeviceCorrelationId { get; } + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + public int? AuthTokenTimeToLiveInSeconds { get; } + /// The current status of the session. + public FaceSessionStatus Status { get; } + /// The latest session audit result only populated if status == 'ResultAvailable'. + public LivenessSessionAuditEntry Result { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs new file mode 100644 index 0000000000000..8db9413667150 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs @@ -0,0 +1,200 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessSessionAuditEntry : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessSessionAuditEntry)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("id"u8); + writer.WriteNumberValue(Id); + writer.WritePropertyName("sessionId"u8); + writer.WriteStringValue(SessionId); + writer.WritePropertyName("requestId"u8); + writer.WriteStringValue(RequestId); + writer.WritePropertyName("clientRequestId"u8); + writer.WriteStringValue(ClientRequestId); + writer.WritePropertyName("receivedDateTime"u8); + writer.WriteStringValue(ReceivedDateTime, "O"); + writer.WritePropertyName("request"u8); + writer.WriteObjectValue(Request, options); + writer.WritePropertyName("response"u8); + writer.WriteObjectValue(Response, options); + writer.WritePropertyName("digest"u8); + writer.WriteStringValue(Digest); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LivenessSessionAuditEntry IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessSessionAuditEntry)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessSessionAuditEntry(document.RootElement, options); + } + + internal static LivenessSessionAuditEntry DeserializeLivenessSessionAuditEntry(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + long id = default; + string sessionId = default; + string requestId = default; + string clientRequestId = default; + DateTimeOffset receivedDateTime = default; + AuditRequestInfo request = default; + AuditLivenessResponseInfo response = default; + string digest = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("id"u8)) + { + id = property.Value.GetInt64(); + continue; + } + if (property.NameEquals("sessionId"u8)) + { + sessionId = property.Value.GetString(); + continue; + } + if (property.NameEquals("requestId"u8)) + { + requestId = property.Value.GetString(); + continue; + } + if (property.NameEquals("clientRequestId"u8)) + { + clientRequestId = property.Value.GetString(); + continue; + } + if (property.NameEquals("receivedDateTime"u8)) + { + receivedDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("request"u8)) + { + request = AuditRequestInfo.DeserializeAuditRequestInfo(property.Value, options); + continue; + } + if (property.NameEquals("response"u8)) + { + response = AuditLivenessResponseInfo.DeserializeAuditLivenessResponseInfo(property.Value, options); + continue; + } + if (property.NameEquals("digest"u8)) + { + digest = 
property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LivenessSessionAuditEntry( + id, + sessionId, + requestId, + clientRequestId, + receivedDateTime, + request, + response, + digest, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessSessionAuditEntry)} does not support writing '{options.Format}' format."); + } + } + + LivenessSessionAuditEntry IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessSessionAuditEntry(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessSessionAuditEntry)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LivenessSessionAuditEntry FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessSessionAuditEntry(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs new file mode 100644 index 0000000000000..9993915402406 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Audit entry for a request in session. + public partial class LivenessSessionAuditEntry + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The unique id to refer to this audit request. Use this id with the 'start' query parameter to continue on to the next page of audit results. + /// The unique sessionId of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding session DELETE operation. 
+ /// The unique requestId that is returned by the service to the client in the 'apim-request-id' header. + /// The unique clientRequestId that is sent by the client in the 'client-request-id' header. + /// The UTC DateTime that the request was received. + /// The request of this entry. + /// The response of this entry. + /// The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. + /// , , , , or is null. + internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, string clientRequestId, DateTimeOffset receivedDateTime, AuditRequestInfo request, AuditLivenessResponseInfo response, string digest) + { + Argument.AssertNotNull(sessionId, nameof(sessionId)); + Argument.AssertNotNull(requestId, nameof(requestId)); + Argument.AssertNotNull(clientRequestId, nameof(clientRequestId)); + Argument.AssertNotNull(request, nameof(request)); + Argument.AssertNotNull(response, nameof(response)); + Argument.AssertNotNull(digest, nameof(digest)); + + Id = id; + SessionId = sessionId; + RequestId = requestId; + ClientRequestId = clientRequestId; + ReceivedDateTime = receivedDateTime; + Request = request; + Response = response; + Digest = digest; + } + + /// Initializes a new instance of . + /// The unique id to refer to this audit request. Use this id with the 'start' query parameter to continue on to the next page of audit results. + /// The unique sessionId of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding session DELETE operation. + /// The unique requestId that is returned by the service to the client in the 'apim-request-id' header. 
+ /// The unique clientRequestId that is sent by the client in the 'client-request-id' header. + /// The UTC DateTime that the request was received. + /// The request of this entry. + /// The response of this entry. + /// The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. + /// Keeps track of any properties unknown to the library. + internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, string clientRequestId, DateTimeOffset receivedDateTime, AuditRequestInfo request, AuditLivenessResponseInfo response, string digest, IDictionary serializedAdditionalRawData) + { + Id = id; + SessionId = sessionId; + RequestId = requestId; + ClientRequestId = clientRequestId; + ReceivedDateTime = receivedDateTime; + Request = request; + Response = response; + Digest = digest; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LivenessSessionAuditEntry() + { + } + + /// The unique id to refer to this audit request. Use this id with the 'start' query parameter to continue on to the next page of audit results. + public long Id { get; } + /// The unique sessionId of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding session DELETE operation. + public string SessionId { get; } + /// The unique requestId that is returned by the service to the client in the 'apim-request-id' header. + public string RequestId { get; } + /// The unique clientRequestId that is sent by the client in the 'client-request-id' header. + public string ClientRequestId { get; } + /// The UTC DateTime that the request was received. 
+ public DateTimeOffset ReceivedDateTime { get; } + /// The request of this entry. + public AuditRequestInfo Request { get; } + /// The response of this entry. + public AuditLivenessResponseInfo Response { get; } + /// The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. + public string Digest { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.Serialization.cs new file mode 100644 index 0000000000000..b6a246ecab3ca --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.Serialization.cs @@ -0,0 +1,202 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessSessionItem : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessSessionItem)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + } + writer.WritePropertyName("createdDateTime"u8); + writer.WriteStringValue(CreatedDateTime, "O"); + if (Optional.IsDefined(SessionStartDateTime)) + { + writer.WritePropertyName("sessionStartDateTime"u8); + writer.WriteStringValue(SessionStartDateTime.Value, "O"); + } + writer.WritePropertyName("sessionExpired"u8); + writer.WriteBooleanValue(SessionExpired); + if (Optional.IsDefined(DeviceCorrelationId)) + { + writer.WritePropertyName("deviceCorrelationId"u8); + writer.WriteStringValue(DeviceCorrelationId); + } + if (Optional.IsDefined(AuthTokenTimeToLiveInSeconds)) + { + writer.WritePropertyName("authTokenTimeToLiveInSeconds"u8); + writer.WriteNumberValue(AuthTokenTimeToLiveInSeconds.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LivenessSessionItem IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessSessionItem)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessSessionItem(document.RootElement, options); + } + + internal static LivenessSessionItem DeserializeLivenessSessionItem(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string id = default; + DateTimeOffset createdDateTime = default; + DateTimeOffset? sessionStartDateTime = default; + bool sessionExpired = default; + string deviceCorrelationId = default; + int? authTokenTimeToLiveInSeconds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("id"u8)) + { + id = property.Value.GetString(); + continue; + } + if (property.NameEquals("createdDateTime"u8)) + { + createdDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("sessionStartDateTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sessionStartDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("sessionExpired"u8)) + { + sessionExpired = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("deviceCorrelationId"u8)) + { + deviceCorrelationId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authTokenTimeToLiveInSeconds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authTokenTimeToLiveInSeconds = property.Value.GetInt32(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, 
BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LivenessSessionItem( + id, + createdDateTime, + sessionStartDateTime, + sessionExpired, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessSessionItem)} does not support writing '{options.Format}' format."); + } + } + + LivenessSessionItem IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessSessionItem(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessSessionItem)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LivenessSessionItem FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessSessionItem(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.cs new file mode 100644 index 0000000000000..e46e4901a8a14 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessSessionItem.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Session data returned for enumeration. + public partial class LivenessSessionItem + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// DateTime when this session was created. + /// Whether or not the session is expired. + internal LivenessSessionItem(DateTimeOffset createdDateTime, bool sessionExpired) + { + CreatedDateTime = createdDateTime; + SessionExpired = sessionExpired; + } + + /// Initializes a new instance of . + /// The unique ID to reference this session. + /// DateTime when this session was created. 
+ /// DateTime when this session was started by the client. + /// Whether or not the session is expired. + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// Keeps track of any properties unknown to the library. + internal LivenessSessionItem(string id, DateTimeOffset createdDateTime, DateTimeOffset? sessionStartDateTime, bool sessionExpired, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary serializedAdditionalRawData) + { + Id = id; + CreatedDateTime = createdDateTime; + SessionStartDateTime = sessionStartDateTime; + SessionExpired = sessionExpired; + DeviceCorrelationId = deviceCorrelationId; + AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LivenessSessionItem() + { + } + + /// The unique ID to reference this session. + public string Id { get; } + /// DateTime when this session was created. + public DateTimeOffset CreatedDateTime { get; } + /// DateTime when this session was started by the client. + public DateTimeOffset? SessionStartDateTime { get; } + /// Whether or not the session is expired. + public bool SessionExpired { get; } + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + public string DeviceCorrelationId { get; } + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + public int? 
AuthTokenTimeToLiveInSeconds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.Serialization.cs new file mode 100644 index 0000000000000..74f0209588bb2 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessWithVerifyImage : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessWithVerifyImage)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceRectangle"u8); + writer.WriteObjectValue(FaceRectangle, options); + writer.WritePropertyName("qualityForRecognition"u8); + writer.WriteStringValue(QualityForRecognition.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LivenessWithVerifyImage IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessWithVerifyImage)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessWithVerifyImage(document.RootElement, options); + } + + internal static LivenessWithVerifyImage DeserializeLivenessWithVerifyImage(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + FaceRectangle faceRectangle = default; + QualityForRecognition qualityForRecognition = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceRectangle"u8)) + { + faceRectangle = FaceRectangle.DeserializeFaceRectangle(property.Value, options); + continue; + } + if (property.NameEquals("qualityForRecognition"u8)) + { + qualityForRecognition = new QualityForRecognition(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LivenessWithVerifyImage(faceRectangle, qualityForRecognition, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessWithVerifyImage)} does not support writing '{options.Format}' format."); + } + } + + LivenessWithVerifyImage IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessWithVerifyImage(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessWithVerifyImage)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LivenessWithVerifyImage FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessWithVerifyImage(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.cs new file mode 100644 index 0000000000000..2cb09a6725eff --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyImage.cs @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The detail of face for verification. + public partial class LivenessWithVerifyImage + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The face region where the comparison image's classification was made. + /// Quality of face image for recognition. + /// is null. + internal LivenessWithVerifyImage(FaceRectangle faceRectangle, QualityForRecognition qualityForRecognition) + { + Argument.AssertNotNull(faceRectangle, nameof(faceRectangle)); + + FaceRectangle = faceRectangle; + QualityForRecognition = qualityForRecognition; + } + + /// Initializes a new instance of . + /// The face region where the comparison image's classification was made. + /// Quality of face image for recognition. + /// Keeps track of any properties unknown to the library. + internal LivenessWithVerifyImage(FaceRectangle faceRectangle, QualityForRecognition qualityForRecognition, IDictionary serializedAdditionalRawData) + { + FaceRectangle = faceRectangle; + QualityForRecognition = qualityForRecognition; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. 
+ internal LivenessWithVerifyImage() + { + } + + /// The face region where the comparison image's classification was made. + public FaceRectangle FaceRectangle { get; } + /// Quality of face image for recognition. + public QualityForRecognition QualityForRecognition { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.Serialization.cs new file mode 100644 index 0000000000000..723538b6f3e08 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessWithVerifyOutputs : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessWithVerifyOutputs)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("verifyImage"u8); + writer.WriteObjectValue(VerifyImage, options); + writer.WritePropertyName("matchConfidence"u8); + writer.WriteNumberValue(MatchConfidence); + writer.WritePropertyName("isIdentical"u8); + writer.WriteBooleanValue(IsIdentical); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LivenessWithVerifyOutputs IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessWithVerifyOutputs)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessWithVerifyOutputs(document.RootElement, options); + } + + internal static LivenessWithVerifyOutputs DeserializeLivenessWithVerifyOutputs(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + LivenessWithVerifyImage verifyImage = default; + float matchConfidence = default; + bool isIdentical = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("verifyImage"u8)) + { + verifyImage = LivenessWithVerifyImage.DeserializeLivenessWithVerifyImage(property.Value, options); + continue; + } + if (property.NameEquals("matchConfidence"u8)) + { + matchConfidence = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("isIdentical"u8)) + { + isIdentical = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LivenessWithVerifyOutputs(verifyImage, matchConfidence, isIdentical, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessWithVerifyOutputs)} does not support writing '{options.Format}' format."); + } + } + + LivenessWithVerifyOutputs IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessWithVerifyOutputs(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessWithVerifyOutputs)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LivenessWithVerifyOutputs FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessWithVerifyOutputs(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.cs new file mode 100644 index 0000000000000..45c6d6405b7fa --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifyOutputs.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The face verification output. + public partial class LivenessWithVerifyOutputs + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The detail of face for verification. + /// The target face liveness face and comparison image face verification confidence. + /// Whether the target liveness face and comparison image face match. + /// is null. + internal LivenessWithVerifyOutputs(LivenessWithVerifyImage verifyImage, float matchConfidence, bool isIdentical) + { + Argument.AssertNotNull(verifyImage, nameof(verifyImage)); + + VerifyImage = verifyImage; + MatchConfidence = matchConfidence; + IsIdentical = isIdentical; + } + + /// Initializes a new instance of . + /// The detail of face for verification. + /// The target face liveness face and comparison image face verification confidence. + /// Whether the target liveness face and comparison image face match. + /// Keeps track of any properties unknown to the library. 
+ internal LivenessWithVerifyOutputs(LivenessWithVerifyImage verifyImage, float matchConfidence, bool isIdentical, IDictionary serializedAdditionalRawData) + { + VerifyImage = verifyImage; + MatchConfidence = matchConfidence; + IsIdentical = isIdentical; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LivenessWithVerifyOutputs() + { + } + + /// The detail of face for verification. + public LivenessWithVerifyImage VerifyImage { get; } + /// The target face liveness face and comparison image face verification confidence. + public float MatchConfidence { get; } + /// Whether the target liveness face and comparison image face match. + public bool IsIdentical { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.Serialization.cs new file mode 100644 index 0000000000000..49201dd5552df --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.Serialization.cs @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LivenessWithVerifySession : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessWithVerifySession)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("id"u8); + writer.WriteStringValue(Id); + } + writer.WritePropertyName("createdDateTime"u8); + writer.WriteStringValue(CreatedDateTime, "O"); + if (Optional.IsDefined(SessionStartDateTime)) + { + writer.WritePropertyName("sessionStartDateTime"u8); + writer.WriteStringValue(SessionStartDateTime.Value, "O"); + } + writer.WritePropertyName("sessionExpired"u8); + writer.WriteBooleanValue(SessionExpired); + if (Optional.IsDefined(DeviceCorrelationId)) + { + writer.WritePropertyName("deviceCorrelationId"u8); + writer.WriteStringValue(DeviceCorrelationId); + } + if (Optional.IsDefined(AuthTokenTimeToLiveInSeconds)) + { + writer.WritePropertyName("authTokenTimeToLiveInSeconds"u8); + writer.WriteNumberValue(AuthTokenTimeToLiveInSeconds.Value); + } + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + if (Optional.IsDefined(Result)) + { + writer.WritePropertyName("result"u8); + writer.WriteObjectValue(Result, options); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LivenessWithVerifySession IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LivenessWithVerifySession)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLivenessWithVerifySession(document.RootElement, options); + } + + internal static LivenessWithVerifySession DeserializeLivenessWithVerifySession(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string id = default; + DateTimeOffset createdDateTime = default; + DateTimeOffset? sessionStartDateTime = default; + bool sessionExpired = default; + string deviceCorrelationId = default; + int? authTokenTimeToLiveInSeconds = default; + FaceSessionStatus status = default; + LivenessSessionAuditEntry result = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("id"u8)) + { + id = property.Value.GetString(); + continue; + } + if (property.NameEquals("createdDateTime"u8)) + { + createdDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("sessionStartDateTime"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sessionStartDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("sessionExpired"u8)) + { + sessionExpired = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("deviceCorrelationId"u8)) + { + deviceCorrelationId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authTokenTimeToLiveInSeconds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authTokenTimeToLiveInSeconds = 
property.Value.GetInt32(); + continue; + } + if (property.NameEquals("status"u8)) + { + status = new FaceSessionStatus(property.Value.GetString()); + continue; + } + if (property.NameEquals("result"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + result = LivenessSessionAuditEntry.DeserializeLivenessSessionAuditEntry(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LivenessWithVerifySession( + id, + createdDateTime, + sessionStartDateTime, + sessionExpired, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + status, + result, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LivenessWithVerifySession)} does not support writing '{options.Format}' format."); + } + } + + LivenessWithVerifySession IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLivenessWithVerifySession(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LivenessWithVerifySession)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static LivenessWithVerifySession FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLivenessWithVerifySession(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.cs new file mode 100644 index 0000000000000..9aaec1db8d474 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/LivenessWithVerifySession.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Session result of detect liveness with verify. + public partial class LivenessWithVerifySession + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// DateTime when this session was created. + /// Whether or not the session is expired. 
+ /// The current status of the session. + internal LivenessWithVerifySession(DateTimeOffset createdDateTime, bool sessionExpired, FaceSessionStatus status) + { + CreatedDateTime = createdDateTime; + SessionExpired = sessionExpired; + Status = status; + } + + /// Initializes a new instance of . + /// The unique ID to reference this session. + /// DateTime when this session was created. + /// DateTime when this session was started by the client. + /// Whether or not the session is expired. + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// The current status of the session. + /// The latest session audit result only populated if status == 'ResultAvailable'. + /// Keeps track of any properties unknown to the library. + internal LivenessWithVerifySession(string id, DateTimeOffset createdDateTime, DateTimeOffset? sessionStartDateTime, bool sessionExpired, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, FaceSessionStatus status, LivenessSessionAuditEntry result, IDictionary serializedAdditionalRawData) + { + Id = id; + CreatedDateTime = createdDateTime; + SessionStartDateTime = sessionStartDateTime; + SessionExpired = sessionExpired; + DeviceCorrelationId = deviceCorrelationId; + AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; + Status = status; + Result = result; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LivenessWithVerifySession() + { + } + + /// The unique ID to reference this session. + public string Id { get; } + /// DateTime when this session was created. + public DateTimeOffset CreatedDateTime { get; } + /// DateTime when this session was started by the client. + public DateTimeOffset? 
SessionStartDateTime { get; } + /// Whether or not the session is expired. + public bool SessionExpired { get; } + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + public string DeviceCorrelationId { get; } + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + public int? AuthTokenTimeToLiveInSeconds { get; } + /// The current status of the session. + public FaceSessionStatus Status { get; } + /// The latest session audit result only populated if status == 'ResultAvailable'. + public LivenessSessionAuditEntry Result { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.Serialization.cs new file mode 100644 index 0000000000000..279293e0b1bc0 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class MaskProperties : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MaskProperties)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("noseAndMouthCovered"u8); + writer.WriteBooleanValue(NoseAndMouthCovered); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(Type.ToString()); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + MaskProperties IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(MaskProperties)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeMaskProperties(document.RootElement, options); + } + + internal static MaskProperties DeserializeMaskProperties(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool noseAndMouthCovered = default; + MaskType type = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("noseAndMouthCovered"u8)) + { + noseAndMouthCovered = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("type"u8)) + { + type = 
new MaskType(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new MaskProperties(noseAndMouthCovered, type, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(MaskProperties)} does not support writing '{options.Format}' format."); + } + } + + MaskProperties IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeMaskProperties(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(MaskProperties)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static MaskProperties FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeMaskProperties(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.cs new file mode 100644 index 0000000000000..52b549af75870 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskProperties.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Properties describing the presence of a mask on a given face. + public partial class MaskProperties + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// A boolean value indicating whether nose and mouth are covered. + /// Type of the mask. + internal MaskProperties(bool noseAndMouthCovered, MaskType type) + { + NoseAndMouthCovered = noseAndMouthCovered; + Type = type; + } + + /// Initializes a new instance of . + /// A boolean value indicating whether nose and mouth are covered. + /// Type of the mask. 
+ /// Keeps track of any properties unknown to the library. + internal MaskProperties(bool noseAndMouthCovered, MaskType type, IDictionary serializedAdditionalRawData) + { + NoseAndMouthCovered = noseAndMouthCovered; + Type = type; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal MaskProperties() + { + } + + /// A boolean value indicating whether nose and mouth are covered. + public bool NoseAndMouthCovered { get; } + /// Type of the mask. + public MaskType Type { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskType.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskType.cs new file mode 100644 index 0000000000000..d42e78132adaf --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/MaskType.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// Type of the mask. + public readonly partial struct MaskType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public MaskType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string FaceMaskValue = "faceMask"; + private const string NoMaskValue = "noMask"; + private const string OtherMaskOrOcclusionValue = "otherMaskOrOcclusion"; + private const string UncertainValue = "uncertain"; + + /// Face mask. + public static MaskType FaceMask { get; } = new MaskType(FaceMaskValue); + /// No mask. + public static MaskType NoMask { get; } = new MaskType(NoMaskValue); + /// Other types of mask or occlusion. + public static MaskType OtherMaskOrOcclusion { get; } = new MaskType(OtherMaskOrOcclusionValue); + /// Uncertain. 
+ public static MaskType Uncertain { get; } = new MaskType(UncertainValue); + /// Determines if two values are the same. + public static bool operator ==(MaskType left, MaskType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(MaskType left, MaskType right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator MaskType(string value) => new MaskType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is MaskType other && Equals(other); + /// + public bool Equals(MaskType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseLevel.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseLevel.cs new file mode 100644 index 0000000000000..1f770813738cc --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseLevel.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// Indicates level of noise. + public readonly partial struct NoiseLevel : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public NoiseLevel(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string LowValue = "low"; + private const string MediumValue = "medium"; + private const string HighValue = "high"; + + /// Low noise level. + public static NoiseLevel Low { get; } = new NoiseLevel(LowValue); + /// Medium noise level. 
+ public static NoiseLevel Medium { get; } = new NoiseLevel(MediumValue); + /// High noise level. + public static NoiseLevel High { get; } = new NoiseLevel(HighValue); + /// Determines if two values are the same. + public static bool operator ==(NoiseLevel left, NoiseLevel right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(NoiseLevel left, NoiseLevel right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator NoiseLevel(string value) => new NoiseLevel(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is NoiseLevel other && Equals(other); + /// + public bool Equals(NoiseLevel other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.Serialization.cs new file mode 100644 index 0000000000000..601e419f59ad8 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class NoiseProperties : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NoiseProperties)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("noiseLevel"u8); + writer.WriteStringValue(NoiseLevel.ToString()); + writer.WritePropertyName("value"u8); + writer.WriteNumberValue(Value); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + NoiseProperties IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(NoiseProperties)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeNoiseProperties(document.RootElement, options); + } + + internal static NoiseProperties DeserializeNoiseProperties(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + NoiseLevel noiseLevel = default; + float value = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("noiseLevel"u8)) + { + noiseLevel = new NoiseLevel(property.Value.GetString()); + continue; + } + if (property.NameEquals("value"u8)) + { + value = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new NoiseProperties(noiseLevel, value, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(NoiseProperties)} does not support writing '{options.Format}' format."); + } + } + + NoiseProperties IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeNoiseProperties(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(NoiseProperties)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static NoiseProperties FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeNoiseProperties(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.cs new file mode 100644 index 0000000000000..2404e073a2e62 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/NoiseProperties.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Properties describing noise level of the image. + public partial class NoiseProperties + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// An enum value indicating level of noise. + /// A number indicating level of noise level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level. + internal NoiseProperties(NoiseLevel noiseLevel, float value) + { + NoiseLevel = noiseLevel; + Value = value; + } + + /// Initializes a new instance of . + /// An enum value indicating level of noise. + /// A number indicating level of noise level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level. + /// Keeps track of any properties unknown to the library. + internal NoiseProperties(NoiseLevel noiseLevel, float value, IDictionary serializedAdditionalRawData) + { + NoiseLevel = noiseLevel; + Value = value; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal NoiseProperties() + { + } + + /// An enum value indicating level of noise. + public NoiseLevel NoiseLevel { get; } + /// A number indicating level of noise level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level. 
+ public float Value { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.Serialization.cs new file mode 100644 index 0000000000000..beef90070b679 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class OcclusionProperties : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(OcclusionProperties)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("foreheadOccluded"u8); + writer.WriteBooleanValue(ForeheadOccluded); + writer.WritePropertyName("eyeOccluded"u8); + writer.WriteBooleanValue(EyeOccluded); + writer.WritePropertyName("mouthOccluded"u8); + writer.WriteBooleanValue(MouthOccluded); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + OcclusionProperties IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(OcclusionProperties)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeOcclusionProperties(document.RootElement, options); + } + + internal static OcclusionProperties DeserializeOcclusionProperties(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool foreheadOccluded = default; + bool eyeOccluded = default; + bool mouthOccluded = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("foreheadOccluded"u8)) + { + foreheadOccluded = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("eyeOccluded"u8)) + { + eyeOccluded = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("mouthOccluded"u8)) + { + mouthOccluded = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new OcclusionProperties(foreheadOccluded, eyeOccluded, mouthOccluded, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(OcclusionProperties)} does not support writing '{options.Format}' format."); + } + } + + OcclusionProperties IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeOcclusionProperties(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(OcclusionProperties)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static OcclusionProperties FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeOcclusionProperties(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.cs new file mode 100644 index 0000000000000..6b86d41527b91 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/OcclusionProperties.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Properties describing occlusions on a given face. + public partial class OcclusionProperties + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// A boolean value indicating whether forehead is occluded. + /// A boolean value indicating whether eyes are occluded. + /// A boolean value indicating whether the mouth is occluded. + internal OcclusionProperties(bool foreheadOccluded, bool eyeOccluded, bool mouthOccluded) + { + ForeheadOccluded = foreheadOccluded; + EyeOccluded = eyeOccluded; + MouthOccluded = mouthOccluded; + } + + /// Initializes a new instance of . + /// A boolean value indicating whether forehead is occluded. + /// A boolean value indicating whether eyes are occluded. + /// A boolean value indicating whether the mouth is occluded. + /// Keeps track of any properties unknown to the library. 
+ internal OcclusionProperties(bool foreheadOccluded, bool eyeOccluded, bool mouthOccluded, IDictionary serializedAdditionalRawData) + { + ForeheadOccluded = foreheadOccluded; + EyeOccluded = eyeOccluded; + MouthOccluded = mouthOccluded; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal OcclusionProperties() + { + } + + /// A boolean value indicating whether forehead is occluded. + public bool ForeheadOccluded { get; } + /// A boolean value indicating whether eyes are occluded. + public bool EyeOccluded { get; } + /// A boolean value indicating whether the mouth is occluded. + public bool MouthOccluded { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.Serialization.cs new file mode 100644 index 0000000000000..291b1c7d5dfc2 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class PersonDirectoryFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonDirectoryFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + PersonDirectoryFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonDirectoryFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePersonDirectoryFace(document.RootElement, options); + } + + internal static PersonDirectoryFace DeserializePersonDirectoryFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PersonDirectoryFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PersonDirectoryFace)} does not support writing '{options.Format}' format."); + } + } + + PersonDirectoryFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializePersonDirectoryFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PersonDirectoryFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static PersonDirectoryFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializePersonDirectoryFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.cs new file mode 100644 index 0000000000000..43134d6899191 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for person directory person. + public partial class PersonDirectoryFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal PersonDirectoryFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal PersonDirectoryFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. The length limit is 1K. + public string UserData { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.Serialization.cs new file mode 100644 index 0000000000000..718f4870e8e4c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.Serialization.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class PersonDirectoryPerson : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonDirectoryPerson)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + } + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + PersonDirectoryPerson IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonDirectoryPerson)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePersonDirectoryPerson(document.RootElement, options); + } + + internal static PersonDirectoryPerson DeserializePersonDirectoryPerson(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PersonDirectoryPerson(personId, name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PersonDirectoryPerson)} does not support writing '{options.Format}' format."); + } + } + + PersonDirectoryPerson IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializePersonDirectoryPerson(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PersonDirectoryPerson)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static PersonDirectoryPerson FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializePersonDirectoryPerson(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.cs new file mode 100644 index 0000000000000..3de9a0ff2398d --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonDirectoryPerson.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Person resource for person directory. + public partial class PersonDirectoryPerson + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal PersonDirectoryPerson(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// Person ID of the person. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal PersonDirectoryPerson(Guid personId, string name, string userData, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal PersonDirectoryPerson() + { + } + + /// Person ID of the person. + public Guid PersonId { get; } + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. 
+ public string UserData { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.Serialization.cs new file mode 100644 index 0000000000000..160123edd68df --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class PersonGroup : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonGroup)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("personGroupId"u8); + writer.WriteStringValue(PersonGroupId); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + PersonGroup IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonGroup)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePersonGroup(document.RootElement, options); + } + + internal static PersonGroup DeserializePersonGroup(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? 
recognitionModel = default; + string personGroupId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("personGroupId"u8)) + { + personGroupId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PersonGroup(name, userData, recognitionModel, personGroupId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PersonGroup)} does not support writing '{options.Format}' format."); + } + } + + PersonGroup IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializePersonGroup(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PersonGroup)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static PersonGroup FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializePersonGroup(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.cs new file mode 100644 index 0000000000000..789e9eb32ba7b --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroup.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The container of the uploaded person data, including face recognition feature, and up to 10,000 persons. To handle larger scale face identification problem, please consider using Large Person Group. + public partial class PersonGroup + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal PersonGroup(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// ID of the container. + /// Keeps track of any properties unknown to the library. + internal PersonGroup(string name, string userData, FaceRecognitionModel? recognitionModel, string personGroupId, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + PersonGroupId = personGroupId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal PersonGroup() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// ID of the container. 
+ public string PersonGroupId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.Serialization.cs new file mode 100644 index 0000000000000..3696f5ce5095f --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class PersonGroupPerson : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonGroupPerson)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + } + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsCollectionDefined(PersistedFaceIds)) + { + writer.WritePropertyName("persistedFaceIds"u8); + writer.WriteStartArray(); + foreach (var item in PersistedFaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + PersonGroupPerson IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonGroupPerson)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePersonGroupPerson(document.RootElement, options); + } + + internal static PersonGroupPerson DeserializePersonGroupPerson(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + string name = default; + string userData = default; + IReadOnlyList persistedFaceIds = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("persistedFaceIds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + persistedFaceIds = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PersonGroupPerson(personId, name, userData, persistedFaceIds ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PersonGroupPerson)} does not support writing '{options.Format}' format."); + } + } + + PersonGroupPerson IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializePersonGroupPerson(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PersonGroupPerson)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static PersonGroupPerson FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializePersonGroupPerson(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.cs new file mode 100644 index 0000000000000..b901f8303bb3e --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPerson.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The person in a specified person group. To add face to this person, please call "Add Large Person Group Person Face". + public partial class PersonGroupPerson + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal PersonGroupPerson(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + PersistedFaceIds = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// ID of the person. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Face ids of registered faces in the person. + /// Keeps track of any properties unknown to the library. + internal PersonGroupPerson(Guid personId, string name, string userData, IReadOnlyList persistedFaceIds, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + Name = name; + UserData = userData; + PersistedFaceIds = persistedFaceIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal PersonGroupPerson() + { + } + + /// ID of the person. 
+ public Guid PersonId { get; } + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Face ids of registered faces in the person. + public IReadOnlyList PersistedFaceIds { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.Serialization.cs new file mode 100644 index 0000000000000..4c9bd8e9de745 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class PersonGroupPersonFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonGroupPersonFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + PersonGroupPersonFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(PersonGroupPersonFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializePersonGroupPersonFace(document.RootElement, options); + } + + internal static PersonGroupPersonFace DeserializePersonGroupPersonFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new PersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(PersonGroupPersonFace)} does not support writing '{options.Format}' format."); + } + } + + PersonGroupPersonFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializePersonGroupPersonFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(PersonGroupPersonFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static PersonGroupPersonFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializePersonGroupPersonFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.cs new file mode 100644 index 0000000000000..0727b5495ebe3 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/PersonGroupPersonFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for person group person. + public partial class PersonGroupPersonFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal PersonGroupPersonFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal PersonGroupPersonFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. The length limit is 1K. + public string UserData { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/QualityForRecognition.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/QualityForRecognition.cs new file mode 100644 index 0000000000000..9ff5272faf615 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/QualityForRecognition.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// Indicates quality of image for recognition. + public readonly partial struct QualityForRecognition : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. 
+ public QualityForRecognition(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string LowValue = "low"; + private const string MediumValue = "medium"; + private const string HighValue = "high"; + + /// Low quality. + public static QualityForRecognition Low { get; } = new QualityForRecognition(LowValue); + /// Medium quality. + public static QualityForRecognition Medium { get; } = new QualityForRecognition(MediumValue); + /// High quality. + public static QualityForRecognition High { get; } = new QualityForRecognition(HighValue); + /// Determines if two values are the same. + public static bool operator ==(QualityForRecognition left, QualityForRecognition right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(QualityForRecognition left, QualityForRecognition right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator QualityForRecognition(string value) => new QualityForRecognition(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is QualityForRecognition other && Equals(other); + /// + public bool Equals(QualityForRecognition other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.Serialization.cs new file mode 100644 index 0000000000000..db1cddfe5c5db --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class VerifyFaceToFaceRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFaceToFaceRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId1"u8); + writer.WriteStringValue(FaceId1); + writer.WritePropertyName("faceId2"u8); + writer.WriteStringValue(FaceId2); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + VerifyFaceToFaceRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFaceToFaceRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVerifyFaceToFaceRequest(document.RootElement, options); + } + + internal static VerifyFaceToFaceRequest DeserializeVerifyFaceToFaceRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId1 = default; + Guid faceId2 = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId1"u8)) + { + faceId1 = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("faceId2"u8)) + { + faceId2 = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VerifyFaceToFaceRequest(faceId1, faceId2, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VerifyFaceToFaceRequest)} does not support writing '{options.Format}' format."); + } + } + + VerifyFaceToFaceRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeVerifyFaceToFaceRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VerifyFaceToFaceRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VerifyFaceToFaceRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeVerifyFaceToFaceRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.cs new file mode 100644 index 0000000000000..1bef94257da83 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFaceToFaceRequest.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The VerifyFaceToFaceRequest. + internal partial class VerifyFaceToFaceRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . 
+ /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The faceId of one face, come from "Detect". + /// The faceId of another face, come from "Detect". + public VerifyFaceToFaceRequest(Guid faceId1, Guid faceId2) + { + FaceId1 = faceId1; + FaceId2 = faceId2; + } + + /// Initializes a new instance of . + /// The faceId of one face, come from "Detect". + /// The faceId of another face, come from "Detect". + /// Keeps track of any properties unknown to the library. + internal VerifyFaceToFaceRequest(Guid faceId1, Guid faceId2, IDictionary serializedAdditionalRawData) + { + FaceId1 = faceId1; + FaceId2 = faceId2; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VerifyFaceToFaceRequest() + { + } + + /// The faceId of one face, come from "Detect". + public Guid FaceId1 { get; } + /// The faceId of another face, come from "Detect". + public Guid FaceId2 { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..fa9cc27b3b68c --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class VerifyFromLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + VerifyFromLargePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement, options); + } + + internal static VerifyFromLargePersonGroupRequest DeserializeVerifyFromLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + string largePersonGroupId = default; + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + VerifyFromLargePersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VerifyFromLargePersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs new file mode 100644 index 0000000000000..a6c86321af912 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The VerifyFromLargePersonGroupRequest. + internal partial class VerifyFromLargePersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The faceId of the face, come from "Detect". + /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// is null. + public VerifyFromLargePersonGroupRequest(Guid faceId, string largePersonGroupId, Guid personId) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + FaceId = faceId; + LargePersonGroupId = largePersonGroupId; + PersonId = personId; + } + + /// Initializes a new instance of . + /// The faceId of the face, come from "Detect". + /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// Keeps track of any properties unknown to the library. 
+ internal VerifyFromLargePersonGroupRequest(Guid faceId, string largePersonGroupId, Guid personId, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + LargePersonGroupId = largePersonGroupId; + PersonId = personId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VerifyFromLargePersonGroupRequest() + { + } + + /// The faceId of the face, come from "Detect". + public Guid FaceId { get; } + /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + public string LargePersonGroupId { get; } + /// Specify a certain person in Large Person Group. + public Guid PersonId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.Serialization.cs new file mode 100644 index 0000000000000..cd30007780ce5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class VerifyFromPersonDirectoryRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromPersonDirectoryRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + VerifyFromPersonDirectoryRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromPersonDirectoryRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVerifyFromPersonDirectoryRequest(document.RootElement, options); + } + + internal static VerifyFromPersonDirectoryRequest DeserializeVerifyFromPersonDirectoryRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if 
(property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VerifyFromPersonDirectoryRequest(faceId, personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VerifyFromPersonDirectoryRequest)} does not support writing '{options.Format}' format."); + } + } + + VerifyFromPersonDirectoryRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeVerifyFromPersonDirectoryRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VerifyFromPersonDirectoryRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VerifyFromPersonDirectoryRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeVerifyFromPersonDirectoryRequest(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.cs new file mode 100644 index 0000000000000..7b33b4ec4aa23 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonDirectoryRequest.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The VerifyFromPersonDirectoryRequest. + internal partial class VerifyFromPersonDirectoryRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The faceId of the face, come from "Detect". + /// Specify a certain person in PersonDirectory Person. + public VerifyFromPersonDirectoryRequest(Guid faceId, Guid personId) + { + FaceId = faceId; + PersonId = personId; + } + + /// Initializes a new instance of . + /// The faceId of the face, come from "Detect". 
+ /// Specify a certain person in PersonDirectory Person. + /// Keeps track of any properties unknown to the library. + internal VerifyFromPersonDirectoryRequest(Guid faceId, Guid personId, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + PersonId = personId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VerifyFromPersonDirectoryRequest() + { + } + + /// The faceId of the face, come from "Detect". + public Guid FaceId { get; } + /// Specify a certain person in PersonDirectory Person. + public Guid PersonId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.Serialization.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.Serialization.cs new file mode 100644 index 0000000000000..6b10ac70d4027 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.Serialization.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class VerifyFromPersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromPersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("personGroupId"u8); + writer.WriteStringValue(PersonGroupId); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + VerifyFromPersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromPersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVerifyFromPersonGroupRequest(document.RootElement, options); + } + + internal static VerifyFromPersonGroupRequest DeserializeVerifyFromPersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + string personGroupId = default; + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("personGroupId"u8)) + { + personGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VerifyFromPersonGroupRequest(faceId, personGroupId, personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(VerifyFromPersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + VerifyFromPersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeVerifyFromPersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(VerifyFromPersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static VerifyFromPersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeVerifyFromPersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.cs b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.cs new file mode 100644 index 0000000000000..2ee7a2f600382 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Generated/VerifyFromPersonGroupRequest.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The VerifyFromPersonGroupRequest. + internal partial class VerifyFromPersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The faceId of the face, come from "Detect". + /// Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in "Create Person Group". + /// Specify a certain person in Person Group. + /// is null. + public VerifyFromPersonGroupRequest(Guid faceId, string personGroupId, Guid personId) + { + Argument.AssertNotNull(personGroupId, nameof(personGroupId)); + + FaceId = faceId; + PersonGroupId = personGroupId; + PersonId = personId; + } + + /// Initializes a new instance of . + /// The faceId of the face, come from "Detect". + /// Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in "Create Person Group". + /// Specify a certain person in Person Group. + /// Keeps track of any properties unknown to the library. 
+ internal VerifyFromPersonGroupRequest(Guid faceId, string personGroupId, Guid personId, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + PersonGroupId = personGroupId; + PersonId = personId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal VerifyFromPersonGroupRequest() + { + } + + /// The faceId of the face, come from "Detect". + public Guid FaceId { get; } + /// Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in "Create Person Group". + public string PersonGroupId { get; } + /// Specify a certain person in Person Group. + public Guid PersonId { get; } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/src/Properties/AssemblyInfo.cs b/sdk/vision/Azure.AI.Vision.Face/src/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000000..5776b8398c596 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/src/Properties/AssemblyInfo.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("Azure.AI.Vision.Face.Tests, PublicKey = 0024000004800000940000000602000000240000525341310004000001000100d15ddcb29688295338af4b7686603fe614abd555e09efba8fb88ee09e1f7b1ccaeed2e8f823fa9eef3fdd60217fc012ea67d2479751a0b8c087a4185541b851bd8b16f8d91b840e51b1cb0ba6fe647997e57429265e85ef62d565db50a69ae1647d54d7bd855e4db3d8a91510e5bcbd0edfbbecaa20a7bd9ae74593daa7b11b4")] + + +// Replace Microsoft.Test with the correct resource provider namepace for your service and uncomment. +// See https://docs.microsoft.com/en-us/azure/azure-resource-manager/management/azure-services-resource-providers +// for the list of possible values. 
+[assembly: Azure.Core.AzureResourceProviderNamespace("Microsoft.Template")] diff --git a/sdk/vision/Azure.AI.Vision.Face/tests/Azure.AI.Vision.Face.Tests.csproj b/sdk/vision/Azure.AI.Vision.Face/tests/Azure.AI.Vision.Face.Tests.csproj new file mode 100644 index 0000000000000..107d8d47b7dee --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/tests/Azure.AI.Vision.Face.Tests.csproj @@ -0,0 +1,20 @@ + + + $(RequiredTargetFrameworks) + + $(NoWarn);CS1591 + + + + + + + + + + + + + + + diff --git a/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceAdministrationClient.cs b/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceAdministrationClient.cs new file mode 100644 index 0000000000000..50aba71555fa5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceAdministrationClient.cs @@ -0,0 +1,6774 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.Vision.Face.Samples +{ + public partial class Samples_FaceAdministrationClient + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_CreateFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = client.CreateFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_CreateFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = await client.CreateFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_CreateFaceList_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateFaceList("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_CreateFaceList_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateFaceListAsync("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_CreateFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel = "recognition_01", + }); + Response response = client.CreateFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_CreateFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent 
content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel = "recognition_01", + }); + Response response = await client.CreateFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_CreateFaceList_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateFaceList("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_CreateFaceList_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateFaceListAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_DeleteFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteFaceList(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_DeleteFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteFaceListAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + 
[Ignore("Only validating compilation of examples")] + public void Example_FaceList_DeleteFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteFaceList(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_DeleteFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteFaceListAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetFaceList("", null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("faceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetFaceListAsync("", null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + 
Console.WriteLine(result.GetProperty("faceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetFaceList_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetFaceList(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetFaceList_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetFaceListAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetFaceList("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("faceListId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + 
AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetFaceListAsync("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("faceListId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaces")[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetFaceList_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetFaceList("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetFaceList_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetFaceListAsync("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_UpdateFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new 
object()); + Response response = client.UpdateFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_UpdateFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdateFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_UpdateFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdateFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_UpdateFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.UpdateFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_GetFaceLists_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response 
response = client.GetFaceLists(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("faceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_GetFaceLists_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetFaceListsAsync(null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("faceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_GetFaceLists_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetFaceLists(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_GetFaceLists_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetFaceListsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_GetFaceLists_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetFaceLists(true, null); 
+ + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("faceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_GetFaceLists_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetFaceListsAsync(true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("faceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceList_GetFaceLists_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetFaceLists(returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceList_GetFaceLists_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetFaceListsAsync(returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + 
public void Example_FaceListFace_AddFaceListFaceFromUrl_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddFaceListFaceFromUrl("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFaceFromUrl_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = await client.AddFaceListFaceFromUrlAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_AddFaceListFaceFromUrl_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddFaceListFaceFromUrl("", new Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFaceFromUrl_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_AddFaceListFaceFromUrl_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddFaceListFaceFromUrl("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFaceFromUrl_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = await client.AddFaceListFaceFromUrlAsync("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_AddFaceListFaceFromUrl_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = 
client.AddFaceListFaceFromUrl("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFaceFromUrl_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_AddFaceListFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddFaceListFace("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.AddFaceListFaceAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of 
examples")] + public void Example_FaceListFace_AddFaceListFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddFaceListFace("", BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_AddFaceListFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddFaceListFace("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.AddFaceListFaceAsync("", content, targetFace: new int[] { 
1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_AddFaceListFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddFaceListFace("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_AddFaceListFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_DeleteFaceListFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_DeleteFaceListFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceListFace_DeleteFaceListFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceListFace_DeleteFaceListFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_CreateLargeFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = client.CreateLargeFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_CreateLargeFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client 
= new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = await client.CreateLargeFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_CreateLargeFaceList_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateLargeFaceList("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_CreateLargeFaceList_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateLargeFaceListAsync("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_CreateLargeFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel = "recognition_01", + }); + Response response = client.CreateLargeFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_CreateLargeFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent 
content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel = "recognition_01", + }); + Response response = await client.CreateLargeFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_CreateLargeFaceList_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateLargeFaceList("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_CreateLargeFaceList_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateLargeFaceListAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_DeleteLargeFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargeFaceList(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_DeleteLargeFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await 
client.DeleteLargeFaceListAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_DeleteLargeFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargeFaceList(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_DeleteLargeFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargeFaceListAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceList("", null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListAsync("", null, null); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceList_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceList(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceList_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceList("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListAsync("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceList_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceList("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceList_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListAsync("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_UpdateLargeFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdateLargeFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async 
Task Example_LargeFaceList_UpdateLargeFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdateLargeFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_UpdateLargeFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdateLargeFaceList("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_UpdateLargeFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.UpdateLargeFaceListAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceLists(null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListsAsync(null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetLargeFaceLists(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetLargeFaceListsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceLists("", 1234, true, null); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListsAsync("", 1234, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetLargeFaceLists(start: "", top: 1234, returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetLargeFaceListsAsync(start: "", top: 1234, 
returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListTrainingStatus("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListTrainingStatusAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + 
+ Response<FaceCollectionTrainingResult> response = client.GetLargeFaceListTrainingStatus(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response<FaceCollectionTrainingResult> response = await client.GetLargeFaceListTrainingStatusAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListTrainingStatus("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + Console.WriteLine(result.GetProperty("message").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListTrainingStatusAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + 
Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + Console.WriteLine(result.GetProperty("message").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListTrainingStatus(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargeFaceListTrainingStatus_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListTrainingStatusAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddLargeFaceListFaceFromUrl("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_ShortVersion_Async() + { + 
Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = await client.AddLargeFaceListFaceFromUrlAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response<AddFaceResult> response = client.AddLargeFaceListFaceFromUrl("", new Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response<AddFaceResult> response = await client.AddLargeFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddLargeFaceListFaceFromUrl("", content, targetFace: new int[] { 1234 }, detectionModel: 
"detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = await client.AddLargeFaceListFaceFromUrlAsync("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddLargeFaceListFaceFromUrl("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFaceFromUrl_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddLargeFaceListFaceFromUrlAsync("", new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, 
detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddLargeFaceListFace("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.AddLargeFaceListFaceAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddLargeFaceListFace("", BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = 
new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddLargeFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddLargeFaceListFace("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.AddLargeFaceListFaceAsync("", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_AddLargeFaceListFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response 
response = client.AddLargeFaceListFace("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_AddLargeFaceListFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddLargeFaceListFaceAsync("", BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_DeleteLargeFaceListFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_DeleteLargeFaceListFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_DeleteLargeFaceListFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_DeleteLargeFaceListFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListFaceAsync("", 
Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_UpdateLargeFaceListFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdateLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_UpdateLargeFaceListFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdateLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_UpdateLargeFaceListFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response response = client.UpdateLargeFaceListFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_UpdateLargeFaceListFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response response = await client.UpdateLargeFaceListFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFaces_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListFaces("", null, null, null); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFaces_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListFacesAsync("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFaces_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response<IReadOnlyList<LargeFaceListFace>> response = client.GetLargeFaceListFaces(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFaces_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response<IReadOnlyList<LargeFaceListFace>> response = await client.GetLargeFaceListFacesAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFaces_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargeFaceListFaces("", "", 1234, null); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFaces_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargeFaceListFacesAsync("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetLargeFaceListFaces_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetLargeFaceListFaces("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetLargeFaceListFaces_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetLargeFaceListFacesAsync("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_CreatePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = client.CreatePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_CreatePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = await client.CreatePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_CreatePersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreatePersonGroup("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_CreatePersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreatePersonGroupAsync("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_CreatePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel 
= "recognition_01", + }); + Response response = client.CreatePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_CreatePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel = "recognition_01", + }); + Response response = await client.CreatePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_CreatePersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreatePersonGroup("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_CreatePersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreatePersonGroupAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_DeletePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = 
client.DeletePersonGroup(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_DeletePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeletePersonGroupAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_DeletePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeletePersonGroup(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_DeletePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeletePersonGroupAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroup("", null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupAsync("", null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroup(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroup("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + 
Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupAsync("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroup("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupAsync("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_UpdatePersonGroup_ShortVersion() + { + Uri endpoint = new 
Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdatePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_UpdatePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdatePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_UpdatePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdatePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_UpdatePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.UpdatePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_PersonGroup_GetPersonGroups_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroups(null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_GetPersonGroups_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupsAsync(null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_GetPersonGroups_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetPersonGroups(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_GetPersonGroups_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetPersonGroupsAsync(); + } + + [Test] + [Ignore("Only validating compilation of 
examples")] + public void Example_PersonGroup_GetPersonGroups_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroups("", 1234, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroup_GetPersonGroups_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupsAsync("", 1234, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("personGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroup_GetPersonGroups_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetPersonGroups(start: "", top: 1234, returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_PersonGroup_GetPersonGroups_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetPersonGroupsAsync(start: "", top: 1234, returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupTrainingStatus("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupTrainingStatusAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupTrainingStatus(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupTrainingStatusAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupTrainingStatus("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + Console.WriteLine(result.GetProperty("message").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupTrainingStatusAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + Console.WriteLine(result.GetProperty("message").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupTrainingStatus(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetPersonGroupTrainingStatus_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupTrainingStatusAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_CreatePersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = client.CreatePersonGroupPerson("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_CreatePersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = await client.CreatePersonGroupPersonAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_CreatePersonGroupPerson_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreatePersonGroupPerson("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_CreatePersonGroupPerson_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreatePersonGroupPersonAsync("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_CreatePersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response 
response = client.CreatePersonGroupPerson("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_CreatePersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.CreatePersonGroupPersonAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_CreatePersonGroupPerson_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreatePersonGroupPerson("", "", userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_CreatePersonGroupPerson_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreatePersonGroupPersonAsync("", "", userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_DeletePersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeletePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_DeletePersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeletePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_DeletePersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeletePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_DeletePersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeletePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPerson_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPerson_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonAsync("", 
Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPerson_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + 
Response response = client.GetPersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPerson_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_UpdatePersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdatePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_UpdatePersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdatePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_UpdatePersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient 
client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdatePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_UpdatePersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.UpdatePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPersons_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPersons("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPersons_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonsAsync("", null, null, null); + + JsonElement 
result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPersons_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetPersonGroupPersons(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPersons_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetPersonGroupPersonsAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPersons_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPersons("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPersons_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential 
credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonsAsync("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPerson_GetPersonGroupPersons_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetPersonGroupPersons("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPerson_GetPersonGroupPersons_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetPersonGroupPersonsAsync("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddPersonGroupPersonFaceFromUrl("", 
Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddPersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new 
Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddPersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + Response response = client.AddPersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFaceFromUrl_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddPersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = 
RequestContent.Create(new object()); + Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_AddPersonGroupPersonFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_AddPersonGroupPersonFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), 
BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_DeletePersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeletePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_DeletePersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeletePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_DeletePersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeletePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_DeletePersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new 
Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeletePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_GetPersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_GetPersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_GetPersonGroupPersonFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_GetPersonGroupPersonFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_GetPersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_GetPersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), 
Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_GetPersonGroupPersonFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_GetPersonGroupPersonFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_UpdatePersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdatePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating 
compilation of examples")] + public async Task Example_PersonGroupPersonFace_UpdatePersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdatePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonGroupPersonFace_UpdatePersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response response = client.UpdatePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonGroupPersonFace_UpdatePersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response response = await client.UpdatePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public 
void Example_LargePersonGroup_CreateLargePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = client.CreateLargePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_CreateLargePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = await client.CreateLargePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_CreateLargePersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateLargePersonGroup("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_CreateLargePersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateLargePersonGroupAsync("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_CreateLargePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); 
+ AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel = "recognition_01", + }); + Response response = client.CreateLargePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_CreateLargePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + recognitionModel = "recognition_01", + }); + Response response = await client.CreateLargePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_CreateLargePersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateLargePersonGroup("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_CreateLargePersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateLargePersonGroupAsync("", "", userData: "", recognitionModel: FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_LargePersonGroup_DeleteLargePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargePersonGroup(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_DeleteLargePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargePersonGroupAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_DeleteLargePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargePersonGroup(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_DeleteLargePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargePersonGroupAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroup("", null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupAsync("", null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroup(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargePersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroup_AllParameters() + { + 
Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroup("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupAsync("", true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("recognitionModel").ToString()); + Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroup("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargePersonGroup_AllParameters_Convenience_Async() + { + Uri 
endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupAsync("", returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_UpdateLargePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdateLargePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_UpdateLargePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdateLargePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_UpdateLargePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdateLargePersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_LargePersonGroup_UpdateLargePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.UpdateLargePersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetLargePersonGroups_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroups(null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroups_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupsAsync(null, null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetLargePersonGroups_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); 
+ FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetLargePersonGroups(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroups_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetLargePersonGroupsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetLargePersonGroups_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroups("", 1234, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroups_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupsAsync("", 1234, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + 
Console.WriteLine(result[0].GetProperty("recognitionModel").ToString()); + Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetLargePersonGroups_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetLargePersonGroups(start: "", top: 1234, returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroups_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetLargePersonGroupsAsync(start: "", top: 1234, returnRecognitionModel: true); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupTrainingStatus("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupTrainingStatusAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupTrainingStatus(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupTrainingStatusAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = 
client.GetLargePersonGroupTrainingStatus("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + Console.WriteLine(result.GetProperty("message").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupTrainingStatusAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + Console.WriteLine(result.GetProperty("message").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupTrainingStatus(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetLargePersonGroupTrainingStatus_AllParameters_Convenience_Async() + 
{ + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupTrainingStatusAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = client.CreateLargePersonGroupPerson("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = await client.CreateLargePersonGroupPersonAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateLargePersonGroupPerson("", ""); + } 
+ + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateLargePersonGroupPersonAsync("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.CreateLargePersonGroupPerson("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.CreateLargePersonGroupPersonAsync("", content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + 
AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateLargePersonGroupPerson("", "", userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_CreateLargePersonGroupPerson_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateLargePersonGroupPersonAsync("", "", userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_DeleteLargePersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_DeleteLargePersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_DeleteLargePersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient 
client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_DeleteLargePersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetLargePersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_GetLargePersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetLargePersonGroupPerson_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_GetLargePersonGroupPerson_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetLargePersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_LargePersonGroupPerson_GetLargePersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetLargePersonGroupPerson_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_GetLargePersonGroupPerson_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_UpdateLargePersonGroupPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdateLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_UpdateLargePersonGroupPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdateLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_UpdateLargePersonGroupPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdateLargePersonGroupPerson("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_UpdateLargePersonGroupPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await 
client.UpdateLargePersonGroupPersonAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetLargePersonGroupPersons_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPersons("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_GetLargePersonGroupPersons_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonsAsync("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetLargePersonGroupPersons_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetLargePersonGroupPersons(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_LargePersonGroupPerson_GetLargePersonGroupPersons_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetLargePersonGroupPersonsAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPerson_GetLargePersonGroupPersons_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPersons("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_GetLargePersonGroupPersons_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonsAsync("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + 
public void Example_LargePersonGroupPerson_GetLargePersonGroupPersons_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetLargePersonGroupPersons("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPerson_GetLargePersonGroupPersons_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetLargePersonGroupPersonsAsync("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new 
+ { + url = "http://localhost:3000", + }); + Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), 
content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddLargePersonGroupPersonFaceFromUrl("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFaceFromUrl_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddLargePersonGroupPersonFaceFromUrlAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential 
credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object())); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent 
content = RequestContent.Create(new object()); + Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.AddLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_AddLargePersonGroupPersonFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.AddLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_DeleteLargePersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_DeleteLargePersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_DeleteLargePersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.DeleteLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_DeleteLargePersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.DeleteLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + Console.WriteLine(response.Status); + 
} + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_GetLargePersonGroupPersonFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_UpdateLargePersonGroupPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdateLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_UpdateLargePersonGroupPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient 
client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdateLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroupPersonFace_UpdateLargePersonGroupPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response response = client.UpdateLargePersonGroupPersonFace("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroupPersonFace_UpdateLargePersonGroupPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response response = await client.UpdateLargePersonGroupPersonFaceAsync("", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_GetPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_GetPerson_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPerson_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_PersonDirectoryPerson_GetPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_GetPerson_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPerson_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_UpdatePerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdatePerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_UpdatePerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdatePersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_UpdatePerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdatePerson(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_PersonDirectoryPerson_UpdatePerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.UpdatePersonAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_GetPersons_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersons(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPersons_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonsAsync(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_GetPersons_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client 
= new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetPersons(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPersons_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetPersonsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_GetPersons_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersons("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPersons_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonsAsync("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_PersonDirectoryPerson_GetPersons_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetPersons(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_GetPersons_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetPersonsAsync(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = 
new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupReferences(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupReferences_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupReferencesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential 
credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential 
= new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_UpdatePersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdatePersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_UpdatePersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdatePersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_UpdatePersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response 
response = client.UpdatePersonFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_UpdatePersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userData = "", + }); + Response response = await client.UpdatePersonFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFaces_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFaces_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + + 
JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFaces_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFaces_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFaces_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFaces_AllParameters_Async() 
+ { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01"); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("persistedFaceIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_GetPersonFaces_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetPersonFaces(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_GetPersonFaces_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetPersonFacesAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_CreateDynamicPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = client.CreateDynamicPersonGroup("", content); + + 
Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_CreateDynamicPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Response response = await client.CreateDynamicPersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_CreateDynamicPersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateDynamicPersonGroup("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_CreateDynamicPersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateDynamicPersonGroupAsync("", ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_CreateDynamicPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.CreateDynamicPersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + 
[Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_CreateDynamicPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.CreateDynamicPersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_CreateDynamicPersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.CreateDynamicPersonGroup("", "", userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_CreateDynamicPersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.CreateDynamicPersonGroupAsync("", "", userData: ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_GetDynamicPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroup("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + 
Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_GetDynamicPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_GetDynamicPersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroup(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_GetDynamicPersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_GetDynamicPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroup("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + 
Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_GetDynamicPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_GetDynamicPersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroup(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_GetDynamicPersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_UpdateDynamicPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient 
client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.UpdateDynamicPersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_UpdateDynamicPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.UpdateDynamicPersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_UpdateDynamicPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = client.UpdateDynamicPersonGroup("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_UpdateDynamicPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Response response = await client.UpdateDynamicPersonGroupAsync("", content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_DynamicPersonGroup_GetDynamicPersonGroups_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroups(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_GetDynamicPersonGroups_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupsAsync(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_GetDynamicPersonGroups_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetDynamicPersonGroups(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_GetDynamicPersonGroups_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await 
client.GetDynamicPersonGroupsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_GetDynamicPersonGroups_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroups("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_GetDynamicPersonGroups_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupsAsync("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_GetDynamicPersonGroups_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = client.GetDynamicPersonGroups(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_DynamicPersonGroup_GetDynamicPersonGroups_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response> response = await client.GetDynamicPersonGroupsAsync(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupPersons("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupPersonsAsync("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupPersons(""); + } + + [Test] + [Ignore("Only validating 
compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupPersonsAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupPersons("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupPersonsAsync("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("personIds")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = client.GetDynamicPersonGroupPersons("", 
start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_GetDynamicPersonGroupPersons_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Response response = await client.GetDynamicPersonGroupPersonsAsync("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_TrainLargeFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.TrainLargeFaceList(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_TrainLargeFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.TrainLargeFaceListAsync(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_TrainLargeFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.TrainLargeFaceList(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_TrainLargeFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.TrainLargeFaceListAsync(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_TrainPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.TrainPersonGroup(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_TrainPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.TrainPersonGroupAsync(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_TrainPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.TrainPersonGroup(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_TrainPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.TrainPersonGroupAsync(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_FaceAdministrationClient_TrainLargePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.TrainLargePersonGroup(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_TrainLargePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.TrainLargePersonGroupAsync(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_TrainLargePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.TrainLargePersonGroup(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_TrainLargePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.TrainLargePersonGroupAsync(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_CreatePerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = 
RequestContent.Create(new + { + name = "", + }); + Operation operation = client.CreatePerson(WaitUntil.Completed, content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_CreatePerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + }); + Operation operation = await client.CreatePersonAsync(WaitUntil.Completed, content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_CreatePerson_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.CreatePerson(WaitUntil.Completed, ""); + PersonDirectoryPerson responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_CreatePerson_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await 
client.CreatePersonAsync(WaitUntil.Completed, ""); + PersonDirectoryPerson responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_CreatePerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Operation operation = client.CreatePerson(WaitUntil.Completed, content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_CreatePerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + }); + Operation operation = await client.CreatePersonAsync(WaitUntil.Completed, content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("personId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_CreatePerson_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.CreatePerson(WaitUntil.Completed, "", userData: ""); + PersonDirectoryPerson responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_CreatePerson_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.CreatePersonAsync(WaitUntil.Completed, "", userData: ""); + PersonDirectoryPerson responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_DeletePerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.DeletePerson(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_DeletePerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeletePersonAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryPerson_DeletePerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + 
Operation operation = client.DeletePerson(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryPerson_DeletePerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeletePersonAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFaceFromUrl_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_AddPersonFaceFromUrl_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content); + BinaryData 
responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFaceFromUrl_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000")); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_AddPersonFaceFromUrl_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000")); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFaceFromUrl_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: 
new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_AddPersonFaceFromUrl_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + url = "http://localhost:3000", + }); + Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFaceFromUrl_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.AddPersonFaceFromUrl(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + 
[Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_AddPersonFaceFromUrl_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.AddPersonFaceFromUrlAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, new Uri("http://localhost:3000"), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_AddPersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", 
content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object())); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_AddPersonFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object())); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: new int[] { 1234 }, 
detectionModel: "detection_01", userData: ""); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_AddPersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", content, targetFace: new int[] { 1234 }, detectionModel: "detection_01", userData: ""); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_AddPersonFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.AddPersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task 
Example_PersonDirectoryFace_AddPersonFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.AddPersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, BinaryData.FromObjectAsJson(new object()), targetFace: new int[] { 1234 }, detectionModel: FaceDetectionModel.Detection01, userData: ""); + PersonDirectoryFace responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_DeletePersonFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_DeletePersonFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_DeletePersonFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new 
FaceAdministrationClient(endpoint, credential); + + Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_DeletePersonFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_PersonDirectoryFace_DeletePersonFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_DeletePersonFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "recognition_01", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_PersonDirectoryFace_DeletePersonFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.DeletePersonFace(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_PersonDirectoryFace_DeletePersonFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeletePersonFaceAsync(WaitUntil.Completed, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), FaceRecognitionModel.Recognition01, Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + addPersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async 
Task Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + addPersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + DynamicPersonGroup responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + DynamicPersonGroup responseData = operation.Value; + } + + [Test] + 
[Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + addPersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + addPersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", content); + BinaryData responseData = operation.Value; + + JsonElement result = JsonDocument.Parse(responseData.ToStream()).RootElement; + Console.WriteLine(result.GetProperty("dynamicPersonGroupId").ToString()); + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("userData").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.CreateDynamicPersonGroupWithPerson(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, userData: ""); + DynamicPersonGroup responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_CreateDynamicPersonGroupWithPerson_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.CreateDynamicPersonGroupWithPersonAsync(WaitUntil.Completed, "", "", new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, userData: ""); + DynamicPersonGroup responseData = operation.Value; + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_DeleteDynamicPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.DeleteDynamicPersonGroup(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_DeleteDynamicPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeleteDynamicPersonGroupAsync(WaitUntil.Completed, ""); + } + + [Test] + 
[Ignore("Only validating compilation of examples")] + public void Example_DynamicPersonGroup_DeleteDynamicPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = client.DeleteDynamicPersonGroup(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_DynamicPersonGroup_DeleteDynamicPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + Operation operation = await client.DeleteDynamicPersonGroupAsync(WaitUntil.Completed, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceAdministrationClient_UpdateDynamicPersonGroupWithPersonChanges_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Operation operation = client.UpdateDynamicPersonGroupWithPersonChanges(WaitUntil.Completed, "", content); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_UpdateDynamicPersonGroupWithPersonChanges_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Operation operation = await client.UpdateDynamicPersonGroupWithPersonChangesAsync(WaitUntil.Completed, "", content); + } + + [Test] + [Ignore("Only validating compilation of 
examples")] + public void Example_FaceAdministrationClient_UpdateDynamicPersonGroupWithPersonChanges_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + addPersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + removePersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Operation operation = client.UpdateDynamicPersonGroupWithPersonChanges(WaitUntil.Completed, "", content); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceAdministrationClient_UpdateDynamicPersonGroupWithPersonChanges_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceAdministrationClient client = new FaceAdministrationClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + name = "", + userData = "", + addPersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + removePersonIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Operation operation = await client.UpdateDynamicPersonGroupWithPersonChangesAsync(WaitUntil.Completed, "", content); + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs b/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs new file mode 100644 index 0000000000000..0bdd161386f52 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs @@ -0,0 +1,1640 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.Vision.Face.Samples +{ + public partial class Samples_FaceClient + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilar_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = client.FindSimilar(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilar_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = await client.FindSimilarAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilar_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = 
client.FindSimilar(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilar_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilar_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = client.FindSimilar(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilar_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceIds = new 
object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = await client.FindSimilarAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilar_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.FindSimilar(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilar_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceListId = "", + }); + Response response = 
client.FindSimilarFromFaceList(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceListId = "", + }); + Response response = await client.FindSimilarFromFaceListAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromFaceList_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.FindSimilarFromFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromFaceList_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarFromFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new 
FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceListId = "", + }); + Response response = client.FindSimilarFromFaceList(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + faceListId = "", + }); + Response response = await client.FindSimilarFromFaceListAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromFaceList_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.FindSimilarFromFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromFaceList_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarFromFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromLargeFaceList_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largeFaceListId = "", + }); + Response response = client.FindSimilarFromLargeFaceList(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largeFaceListId = "", + }); + Response response = await client.FindSimilarFromLargeFaceListAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_FaceClient_FindSimilarFromLargeFaceList_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromLargeFaceList_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + largeFaceListId = "", + }); + Response response = client.FindSimilarFromLargeFaceList(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using 
RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + maxNumOfCandidatesReturned = 1234, + mode = "matchPerson", + largeFaceListId = "", + }); + Response response = await client.FindSimilarFromLargeFaceListAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromLargeFaceList_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", maxNumOfCandidatesReturned: 1234, mode: FindSimilarMatchMode.MatchPerson); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { 
+"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", + }); + Response response = client.IdentifyFromPersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", + }); + Response response = await client.IdentifyFromPersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new 
Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = client.IdentifyFromPersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = await client.IdentifyFromPersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + 
Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", + }); + Response response = client.IdentifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + 
Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", + }); + Response response = await client.IdentifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { 
Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = client.IdentifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + largePersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = await client.IdentifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + 
[Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonDirectory_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = client.IdentifyFromPersonDirectory(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonDirectory_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = await client.IdentifyFromPersonDirectoryAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonDirectory_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromPersonDirectory(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonDirectory_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromPersonDirectoryAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonDirectory_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = client.IdentifyFromPersonDirectory(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonDirectory_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + personIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = await client.IdentifyFromPersonDirectoryAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + 
[Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromPersonDirectory_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromPersonDirectory(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromPersonDirectory_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromPersonDirectoryAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromDynamicPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", + }); + Response response = client.IdentifyFromDynamicPersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + 
Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromDynamicPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", + }); + Response response = await client.IdentifyFromDynamicPersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromDynamicPersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromDynamicPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromDynamicPersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromDynamicPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, ""); + } + + [Test] + [Ignore("Only validating compilation of 
examples")] + public void Example_FaceClient_IdentifyFromDynamicPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = client.IdentifyFromDynamicPersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromDynamicPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + dynamicPersonGroupId = "", + maxNumOfCandidatesReturned = 1234, + confidenceThreshold = 123.45F, + }); + Response response = await client.IdentifyFromDynamicPersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_FaceClient_IdentifyFromDynamicPersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromDynamicPersonGroup(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromDynamicPersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromDynamicPersonGroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }, "", maxNumOfCandidatesReturned: 1234, confidenceThreshold: 123.45F); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFaceToFace_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFaceToFace(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFaceToFace_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, 
credential); + + using RequestContent content = RequestContent.Create(new + { + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFaceToFaceAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFaceToFace_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFaceToFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFaceToFace_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFaceToFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFaceToFace_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFaceToFace(content); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFaceToFace_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId1 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + faceId2 = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFaceToFaceAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFaceToFace_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFaceToFace(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFaceToFace_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFaceToFaceAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_FaceClient_VerifyFromPersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFromPersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromPersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFromPersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromPersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFromPersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of 
examples")] + public async Task Example_FaceClient_VerifyFromPersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFromPersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromPersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFromPersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromPersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFromPersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + 
+ [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromPersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFromPersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromPersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFromPersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFromLargePersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = 
"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + largePersonGroupId = "", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFromLargePersonGroup(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await 
client.VerifyFromLargePersonGroupAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), "", Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromPersonDirectory_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFromPersonDirectory(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromPersonDirectory_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFromPersonDirectoryAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromPersonDirectory_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + 
Response response = client.VerifyFromPersonDirectory(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromPersonDirectory_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFromPersonDirectoryAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromPersonDirectory_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = client.VerifyFromPersonDirectory(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromPersonDirectory_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + personId = "73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a", + }); + Response response = await client.VerifyFromPersonDirectoryAsync(content); + + JsonElement result = 
JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromPersonDirectory_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.VerifyFromPersonDirectory(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromPersonDirectory_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFromPersonDirectoryAsync(Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a"), Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_Group_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = client.Group(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); + Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_Group_ShortVersion_Async() + { + Uri 
endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = await client.GroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); + Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_Group_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.Group(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_Group_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.GroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_Group_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = client.Group(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); 
+ Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_Group_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a" + }, + }); + Response response = await client.GroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); + Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_Group_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = client.Group(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_Group_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.GroupAsync(new Guid[] { Guid.Parse("73f411fe-4f43-4b4b-9cbd-6828d8f4cf9a") }); + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs b/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs new file mode 100644 index 0000000000000..ba531b3cb3e60 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs @@ -0,0 +1,1204 @@ +// Copyright (c) 
Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.Vision.Face.Samples +{ + public partial class Samples_FaceSessionClient + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_CreateLivenessSession_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + livenessOperationMode = "Passive", + }); + Response response = client.CreateLivenessSession(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("authToken").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_CreateLivenessSession_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + livenessOperationMode = "Passive", + }); + Response response = await client.CreateLivenessSessionAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("authToken").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_CreateLivenessSession_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential 
credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive); + Response response = client.CreateLivenessSession(createLivenessSessionContent); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_CreateLivenessSession_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive); + Response response = await client.CreateLivenessSessionAsync(createLivenessSessionContent); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_CreateLivenessSession_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + livenessOperationMode = "Passive", + sendResultsToClient = true, + deviceCorrelationIdSetInClient = true, + deviceCorrelationId = "", + authTokenTimeToLiveInSeconds = 1234, + }); + Response response = client.CreateLivenessSession(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("authToken").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_CreateLivenessSession_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client 
= new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + livenessOperationMode = "Passive", + sendResultsToClient = true, + deviceCorrelationIdSetInClient = true, + deviceCorrelationId = "", + authTokenTimeToLiveInSeconds = 1234, + }); + Response response = await client.CreateLivenessSessionAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("authToken").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_CreateLivenessSession_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive) + { + SendResultsToClient = true, + DeviceCorrelationIdSetInClient = true, + DeviceCorrelationId = "", + AuthTokenTimeToLiveInSeconds = 1234, + }; + Response response = client.CreateLivenessSession(createLivenessSessionContent); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_CreateLivenessSession_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + CreateLivenessSessionContent createLivenessSessionContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive) + { + SendResultsToClient = true, + DeviceCorrelationIdSetInClient = true, + DeviceCorrelationId = "", + AuthTokenTimeToLiveInSeconds = 1234, + }; + Response response = await client.CreateLivenessSessionAsync(createLivenessSessionContent); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_LivenessSession_DeleteLivenessSession_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.DeleteLivenessSession(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_DeleteLivenessSession_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.DeleteLivenessSessionAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_DeleteLivenessSession_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.DeleteLivenessSession(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_DeleteLivenessSession_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.DeleteLivenessSessionAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_GetLivenessSessionResult_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response 
response = client.GetLivenessSessionResult("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessionResult_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessSessionResultAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_GetLivenessSessionResult_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessSessionResult(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessionResult_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessSessionResultAsync(""); + } + + [Test] + [Ignore("Only validating 
compilation of examples")] + public void Example_LivenessSession_GetLivenessSessionResult_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessSessionResult("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessionResult_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessSessionResultAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); + 
Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_GetLivenessSessionResult_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessSessionResult(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessionResult_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessSessionResultAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_GetLivenessSessions_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessSessions(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + } + + [Test] + [Ignore("Only validating 
compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessions_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessSessionsAsync(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_GetLivenessSessions_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessSessions(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessions_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.GetLivenessSessionsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_GetLivenessSessions_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessSessions("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + 
Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessions_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessSessionsAsync("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessSession_GetLivenessSessions_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessSessions(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessSession_GetLivenessSessions_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.GetLivenessSessionsAsync(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetLivenessSessionAuditEntries_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessSessionAuditEntries("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessSessionAuditEntries_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await 
client.GetLivenessSessionAuditEntriesAsync("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetLivenessSessionAuditEntries_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessSessionAuditEntries(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessSessionAuditEntries_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.GetLivenessSessionAuditEntriesAsync(""); + } + + [Test] + [Ignore("Only validating compilation of 
examples")] + public void Example_FaceSessionClient_GetLivenessSessionAuditEntries_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessSessionAuditEntries("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + 
Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + 
Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessSessionAuditEntries_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessSessionAuditEntriesAsync("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + 
Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + 
Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetLivenessSessionAuditEntries_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessSessionAuditEntries("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessSessionAuditEntries_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.GetLivenessSessionAuditEntriesAsync("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_DeleteLivenessWithVerifySession_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.DeleteLivenessWithVerifySession(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_DeleteLivenessWithVerifySession_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await 
client.DeleteLivenessWithVerifySessionAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_DeleteLivenessWithVerifySession_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.DeleteLivenessWithVerifySession(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_DeleteLivenessWithVerifySession_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.DeleteLivenessWithVerifySessionAsync(""); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessionResult("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential 
= new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessWithVerifySessionResultAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessionResult(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessWithVerifySessionResultAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessionResult("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + 
Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessWithVerifySessionResultAsync("", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("id").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionStartDateTime").ToString()); + Console.WriteLine(result.GetProperty("sessionExpired").ToString()); + Console.WriteLine(result.GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result.GetProperty("authTokenTimeToLiveInSeconds").ToString()); + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("id").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("sessionId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("requestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("clientRequestId").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("request").GetProperty("userAgent").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("statusCode").ToString()); + 
Console.WriteLine(result.GetProperty("result").GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result.GetProperty("result").GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessionResult(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessionResult_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessWithVerifySessionResultAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessions(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential 
credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessWithVerifySessionsAsync(null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessWithVerifySessions(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.GetLivenessWithVerifySessionsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessions("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + 
Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessWithVerifySessionsAsync("", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("createdDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionStartDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("sessionExpired").ToString()); + Console.WriteLine(result[0].GetProperty("deviceCorrelationId").ToString()); + Console.WriteLine(result[0].GetProperty("authTokenTimeToLiveInSeconds").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessWithVerifySessions(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LivenessWithVerifySession_GetLivenessWithVerifySessions_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + 
FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.GetLivenessWithVerifySessionsAsync(start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessionAuditEntries("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response 
response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("", null, null, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessWithVerifySessionAuditEntries(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await 
client.GetLivenessWithVerifySessionAuditEntriesAsync(""); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetLivenessWithVerifySessionAuditEntries("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + 
Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + 
Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("", "", 1234, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("id").ToString()); + Console.WriteLine(result[0].GetProperty("sessionId").ToString()); + Console.WriteLine(result[0].GetProperty("requestId").ToString()); + Console.WriteLine(result[0].GetProperty("clientRequestId").ToString()); + Console.WriteLine(result[0].GetProperty("receivedDateTime").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("url").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("method").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentLength").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("contentType").ToString()); + Console.WriteLine(result[0].GetProperty("request").GetProperty("userAgent").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("livenessDecision").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("top").ToString()); + 
Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("fileName").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("timeOffsetWithinFile").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("target").GetProperty("imageType").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("modelVersionUsed").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("faceRectangle").GetProperty("height").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("verifyImage").GetProperty("qualityForRecognition").ToString()); + 
Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("matchConfidence").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("body").GetProperty("verifyResult").GetProperty("isIdentical").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); + Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); + Console.WriteLine(result[0].GetProperty("digest").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.GetLivenessWithVerifySessionAuditEntries("", start: "", top: 1234); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEntries_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("", start: "", top: 1234); + } + } +} diff --git a/sdk/vision/Azure.AI.Vision.Face/tsp-location.yaml b/sdk/vision/Azure.AI.Vision.Face/tsp-location.yaml new file mode 100644 index 0000000000000..f7db8392b60d5 --- /dev/null +++ b/sdk/vision/Azure.AI.Vision.Face/tsp-location.yaml @@ -0,0 +1,5 @@ +repo: Azure/azure-rest-api-specs +directory: specification/ai/Face +commit: 4d15e8e8e0c72b7e3e2f74186da4b9ed950ce2c2 +additionalDirectories: [] +