@azure-rest/ai-vision-face 1.0.0-alpha.20250210.1 → 1.0.0-alpha.20250211.1

This diff compares the contents of two package versions as published to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
Files changed (102)
  1. package/README.md +17 -23
  2. package/dist/browser/clientDefinitions.d.ts +15 -241
  3. package/dist/browser/clientDefinitions.d.ts.map +1 -1
  4. package/dist/browser/clientDefinitions.js.map +1 -1
  5. package/dist/browser/faceClient.d.ts.map +1 -1
  6. package/dist/browser/faceClient.js +1 -1
  7. package/dist/browser/faceClient.js.map +1 -1
  8. package/dist/browser/isUnexpected.d.ts +3 -29
  9. package/dist/browser/isUnexpected.d.ts.map +1 -1
  10. package/dist/browser/isUnexpected.js +7 -31
  11. package/dist/browser/isUnexpected.js.map +1 -1
  12. package/dist/browser/models.d.ts +37 -40
  13. package/dist/browser/models.d.ts.map +1 -1
  14. package/dist/browser/models.js.map +1 -1
  15. package/dist/browser/outputModels.d.ts +139 -215
  16. package/dist/browser/outputModels.d.ts.map +1 -1
  17. package/dist/browser/outputModels.js.map +1 -1
  18. package/dist/browser/parameters.d.ts +115 -195
  19. package/dist/browser/parameters.d.ts.map +1 -1
  20. package/dist/browser/parameters.js.map +1 -1
  21. package/dist/browser/pollingHelper.d.ts +1 -8
  22. package/dist/browser/pollingHelper.d.ts.map +1 -1
  23. package/dist/browser/pollingHelper.js.map +1 -1
  24. package/dist/browser/responses.d.ts +9 -434
  25. package/dist/browser/responses.d.ts.map +1 -1
  26. package/dist/browser/responses.js.map +1 -1
  27. package/dist/commonjs/clientDefinitions.d.ts +15 -241
  28. package/dist/commonjs/clientDefinitions.d.ts.map +1 -1
  29. package/dist/commonjs/clientDefinitions.js.map +1 -1
  30. package/dist/commonjs/faceClient.d.ts.map +1 -1
  31. package/dist/commonjs/faceClient.js +1 -1
  32. package/dist/commonjs/faceClient.js.map +1 -1
  33. package/dist/commonjs/isUnexpected.d.ts +3 -29
  34. package/dist/commonjs/isUnexpected.d.ts.map +1 -1
  35. package/dist/commonjs/isUnexpected.js +7 -31
  36. package/dist/commonjs/isUnexpected.js.map +1 -1
  37. package/dist/commonjs/models.d.ts +37 -40
  38. package/dist/commonjs/models.d.ts.map +1 -1
  39. package/dist/commonjs/models.js.map +1 -1
  40. package/dist/commonjs/outputModels.d.ts +139 -215
  41. package/dist/commonjs/outputModels.d.ts.map +1 -1
  42. package/dist/commonjs/outputModels.js.map +1 -1
  43. package/dist/commonjs/parameters.d.ts +115 -195
  44. package/dist/commonjs/parameters.d.ts.map +1 -1
  45. package/dist/commonjs/parameters.js.map +1 -1
  46. package/dist/commonjs/pollingHelper.d.ts +1 -8
  47. package/dist/commonjs/pollingHelper.d.ts.map +1 -1
  48. package/dist/commonjs/pollingHelper.js.map +1 -1
  49. package/dist/commonjs/responses.d.ts +9 -434
  50. package/dist/commonjs/responses.d.ts.map +1 -1
  51. package/dist/commonjs/responses.js.map +1 -1
  52. package/dist/esm/clientDefinitions.d.ts +15 -241
  53. package/dist/esm/clientDefinitions.d.ts.map +1 -1
  54. package/dist/esm/clientDefinitions.js.map +1 -1
  55. package/dist/esm/faceClient.d.ts.map +1 -1
  56. package/dist/esm/faceClient.js +1 -1
  57. package/dist/esm/faceClient.js.map +1 -1
  58. package/dist/esm/isUnexpected.d.ts +3 -29
  59. package/dist/esm/isUnexpected.d.ts.map +1 -1
  60. package/dist/esm/isUnexpected.js +7 -31
  61. package/dist/esm/isUnexpected.js.map +1 -1
  62. package/dist/esm/models.d.ts +37 -40
  63. package/dist/esm/models.d.ts.map +1 -1
  64. package/dist/esm/models.js.map +1 -1
  65. package/dist/esm/outputModels.d.ts +139 -215
  66. package/dist/esm/outputModels.d.ts.map +1 -1
  67. package/dist/esm/outputModels.js.map +1 -1
  68. package/dist/esm/parameters.d.ts +115 -195
  69. package/dist/esm/parameters.d.ts.map +1 -1
  70. package/dist/esm/parameters.js.map +1 -1
  71. package/dist/esm/pollingHelper.d.ts +1 -8
  72. package/dist/esm/pollingHelper.d.ts.map +1 -1
  73. package/dist/esm/pollingHelper.js.map +1 -1
  74. package/dist/esm/responses.d.ts +9 -434
  75. package/dist/esm/responses.d.ts.map +1 -1
  76. package/dist/esm/responses.js.map +1 -1
  77. package/dist/react-native/clientDefinitions.d.ts +15 -241
  78. package/dist/react-native/clientDefinitions.d.ts.map +1 -1
  79. package/dist/react-native/clientDefinitions.js.map +1 -1
  80. package/dist/react-native/faceClient.d.ts.map +1 -1
  81. package/dist/react-native/faceClient.js +1 -1
  82. package/dist/react-native/faceClient.js.map +1 -1
  83. package/dist/react-native/isUnexpected.d.ts +3 -29
  84. package/dist/react-native/isUnexpected.d.ts.map +1 -1
  85. package/dist/react-native/isUnexpected.js +7 -31
  86. package/dist/react-native/isUnexpected.js.map +1 -1
  87. package/dist/react-native/models.d.ts +37 -40
  88. package/dist/react-native/models.d.ts.map +1 -1
  89. package/dist/react-native/models.js.map +1 -1
  90. package/dist/react-native/outputModels.d.ts +139 -215
  91. package/dist/react-native/outputModels.d.ts.map +1 -1
  92. package/dist/react-native/outputModels.js.map +1 -1
  93. package/dist/react-native/parameters.d.ts +115 -195
  94. package/dist/react-native/parameters.d.ts.map +1 -1
  95. package/dist/react-native/parameters.js.map +1 -1
  96. package/dist/react-native/pollingHelper.d.ts +1 -8
  97. package/dist/react-native/pollingHelper.d.ts.map +1 -1
  98. package/dist/react-native/pollingHelper.js.map +1 -1
  99. package/dist/react-native/responses.d.ts +9 -434
  100. package/dist/react-native/responses.d.ts.map +1 -1
  101. package/dist/react-native/responses.js.map +1 -1
  102. package/package.json +1 -1
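
Every dist flavor (browser, commonjs, esm, react-native) changes identically, the signature of a regenerated client: the generated clientDefinitions, isUnexpected, models, outputModels, parameters, pollingHelper, and responses files all shrink sharply, suggesting whole operation groups were cut. The hunk that follows is one of the outputModels.js.map sourcemaps; its embedded sourcesContent carries the substance of the change. Most visibly, LivenessSessionOutput is reshaped: 'readonly id' becomes 'readonly sessionId', the status enum moves from FaceSessionStatusOutput ("NotStarted", "Started", "ResultAvailable") to OperationStateOutput ("NotStarted", "Running", "Succeeded", "Failed", "Canceled"), an optional modelVersion ("2024-11-15") appears, and the single audit-entry 'result' is replaced by per-attempt 'results.attempts'. A minimal sketch of consuming the new shape, assuming the trimmed interfaces below (hand-copied from the + side, not the package's full exports) stay accurate:

// Trimmed copies of the new outputModels declarations, for illustration only.
type OperationStateOutput = string; // "NotStarted" | "Running" | "Succeeded" | "Failed" | "Canceled"
type LivenessDecisionOutput = string; // "uncertain" | "realface" | "spoofface"

interface LivenessResultOutput {
  livenessDecision?: LivenessDecisionOutput;
  digest: string;
}

interface LivenessSessionAttemptOutput {
  attemptId: number; // starts from 1
  attemptStatus: OperationStateOutput;
  result?: LivenessResultOutput; // absent when the attempt errored
  error?: { code: string; message: string };
}

interface LivenessSessionOutput {
  readonly sessionId: string; // previously `id`
  status: OperationStateOutput;
  results: { attempts: Array<LivenessSessionAttemptOutput> }; // previously a single `result` audit entry
}

// Under the new shape, callers walk the attempts array instead of reading one result.
function latestDecision(session: LivenessSessionOutput): LivenessDecisionOutput | undefined {
  const attempts = session.results.attempts;
  const last = attempts[attempts.length - 1];
  return last?.result?.livenessDecision;
}
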
package/dist/…/outputModels.js.map
@@ -1 +1 @@
- {"version":3,"file":"outputModels.js","sourceRoot":"","sources":["../../src/outputModels.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\n/** Long running operation resource for person directory. */\nexport interface OperationResultOutput {\n /** Operation ID of the operation. */\n readonly operationId: string;\n /**\n * Current status of the operation.\n *\n * Possible values: \"notStarted\", \"running\", \"succeeded\", \"failed\"\n */\n status: OperationStatusOutput;\n /** Date and time the operation was created. */\n createdTime: string;\n /** Date and time the operation was last updated. */\n lastActionTime?: string;\n /** Date and time the operation was finished. */\n finishedTime?: string;\n /** Message for the operation. */\n message?: string;\n}\n\n/** A response containing error details. */\nexport interface FaceErrorResponseOutput {\n /** The error object. */\n error: FaceErrorOutput;\n}\n\n/** The error object. For comprehensive details on error codes and messages returned by the Face Service, please refer to the following link: https://aka.ms/face-error-codes-and-messages. */\nexport interface FaceErrorOutput {\n /** One of a server-defined set of error codes. */\n code: string;\n /** A human-readable representation of the error. */\n message: string;\n}\n\n/** Response for detect API. */\nexport interface FaceDetectionResultOutput {\n /** Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true. */\n faceId?: string;\n /**\n * The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** A rectangle area for the face location on image. */\n faceRectangle: FaceRectangleOutput;\n /** An array of 27-point face landmarks pointing to the important positions of face components. To return this, it requires 'returnFaceLandmarks' parameter to be true. */\n faceLandmarks?: FaceLandmarksOutput;\n /** Face attributes for detected face. */\n faceAttributes?: FaceAttributesOutput;\n}\n\n/** A rectangle within which a face can be found. */\nexport interface FaceRectangleOutput {\n /** The distance from the top edge if the image to the top edge of the rectangle, in pixels. */\n top: number;\n /** The distance from the left edge if the image to the left edge of the rectangle, in pixels. */\n left: number;\n /** The width of the rectangle, in pixels. */\n width: number;\n /** The height of the rectangle, in pixels. */\n height: number;\n}\n\n/** A collection of 27-point face landmarks pointing to the important positions of face components. */\nexport interface FaceLandmarksOutput {\n /** The coordinates of the left eye pupil. */\n pupilLeft: LandmarkCoordinateOutput;\n /** The coordinates of the right eye pupil. */\n pupilRight: LandmarkCoordinateOutput;\n /** The coordinates of the nose tip. */\n noseTip: LandmarkCoordinateOutput;\n /** The coordinates of the mouth left. */\n mouthLeft: LandmarkCoordinateOutput;\n /** The coordinates of the mouth right. */\n mouthRight: LandmarkCoordinateOutput;\n /** The coordinates of the left eyebrow outer. */\n eyebrowLeftOuter: LandmarkCoordinateOutput;\n /** The coordinates of the left eyebrow inner. 
*/\n eyebrowLeftInner: LandmarkCoordinateOutput;\n /** The coordinates of the left eye outer. */\n eyeLeftOuter: LandmarkCoordinateOutput;\n /** The coordinates of the left eye top. */\n eyeLeftTop: LandmarkCoordinateOutput;\n /** The coordinates of the left eye bottom. */\n eyeLeftBottom: LandmarkCoordinateOutput;\n /** The coordinates of the left eye inner. */\n eyeLeftInner: LandmarkCoordinateOutput;\n /** The coordinates of the right eyebrow inner. */\n eyebrowRightInner: LandmarkCoordinateOutput;\n /** The coordinates of the right eyebrow outer. */\n eyebrowRightOuter: LandmarkCoordinateOutput;\n /** The coordinates of the right eye inner. */\n eyeRightInner: LandmarkCoordinateOutput;\n /** The coordinates of the right eye top. */\n eyeRightTop: LandmarkCoordinateOutput;\n /** The coordinates of the right eye bottom. */\n eyeRightBottom: LandmarkCoordinateOutput;\n /** The coordinates of the right eye outer. */\n eyeRightOuter: LandmarkCoordinateOutput;\n /** The coordinates of the nose root left. */\n noseRootLeft: LandmarkCoordinateOutput;\n /** The coordinates of the nose root right. */\n noseRootRight: LandmarkCoordinateOutput;\n /** The coordinates of the nose left alar top. */\n noseLeftAlarTop: LandmarkCoordinateOutput;\n /** The coordinates of the nose right alar top. */\n noseRightAlarTop: LandmarkCoordinateOutput;\n /** The coordinates of the nose left alar out tip. */\n noseLeftAlarOutTip: LandmarkCoordinateOutput;\n /** The coordinates of the nose right alar out tip. */\n noseRightAlarOutTip: LandmarkCoordinateOutput;\n /** The coordinates of the upper lip top. */\n upperLipTop: LandmarkCoordinateOutput;\n /** The coordinates of the upper lip bottom. */\n upperLipBottom: LandmarkCoordinateOutput;\n /** The coordinates of the under lip top. */\n underLipTop: LandmarkCoordinateOutput;\n /** The coordinates of the under lip bottom. */\n underLipBottom: LandmarkCoordinateOutput;\n}\n\n/** Landmark coordinates within an image. */\nexport interface LandmarkCoordinateOutput {\n /** The horizontal component, in pixels. */\n x: number;\n /** The vertical component, in pixels. */\n y: number;\n}\n\n/** Face attributes for the detected face. */\nexport interface FaceAttributesOutput {\n /** Age in years. */\n age?: number;\n /** Smile intensity, a number between [0,1]. */\n smile?: number;\n /** Properties describing facial hair attributes. */\n facialHair?: FacialHairOutput;\n /**\n * Glasses type if any of the face.\n *\n * Possible values: \"noGlasses\", \"readingGlasses\", \"sunglasses\", \"swimmingGoggles\"\n */\n glasses?: GlassesTypeOutput;\n /** 3-D roll/yaw/pitch angles for face direction. */\n headPose?: HeadPoseOutput;\n /** Properties describing hair attributes. */\n hair?: HairPropertiesOutput;\n /** Properties describing occlusions on a given face. */\n occlusion?: OcclusionPropertiesOutput;\n /** Properties describing any accessories on a given face. */\n accessories?: Array<AccessoryItemOutput>;\n /** Properties describing any presence of blur within the image. */\n blur?: BlurPropertiesOutput;\n /** Properties describing exposure level of the image. */\n exposure?: ExposurePropertiesOutput;\n /** Properties describing noise level of the image. */\n noise?: NoisePropertiesOutput;\n /** Properties describing the presence of a mask on a given face. 
*/\n mask?: MaskPropertiesOutput;\n /**\n * Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on.\n *\n * Possible values: \"low\", \"medium\", \"high\"\n */\n qualityForRecognition?: QualityForRecognitionOutput;\n}\n\n/** Properties describing facial hair attributes. */\nexport interface FacialHairOutput {\n /** A number ranging from 0 to 1 indicating a level of confidence associated with a property. */\n moustache: number;\n /** A number ranging from 0 to 1 indicating a level of confidence associated with a property. */\n beard: number;\n /** A number ranging from 0 to 1 indicating a level of confidence associated with a property. */\n sideburns: number;\n}\n\n/** 3-D roll/yaw/pitch angles for face direction. */\nexport interface HeadPoseOutput {\n /** Value of angles. */\n pitch: number;\n /** Value of angles. */\n roll: number;\n /** Value of angles. */\n yaw: number;\n}\n\n/** Properties describing hair attributes. */\nexport interface HairPropertiesOutput {\n /** A number describing confidence level of whether the person is bald. */\n bald: number;\n /** A boolean value describing whether the hair is visible in the image. */\n invisible: boolean;\n /** An array of candidate colors and confidence level in the presence of each. */\n hairColor: Array<HairColorOutput>;\n}\n\n/** An array of candidate colors and confidence level in the presence of each. */\nexport interface HairColorOutput {\n /**\n * Name of the hair color.\n *\n * Possible values: \"unknown\", \"white\", \"gray\", \"blond\", \"brown\", \"red\", \"black\", \"other\"\n */\n color: HairColorTypeOutput;\n /** Confidence level of the color. Range between [0,1]. */\n confidence: number;\n}\n\n/** Properties describing occlusions on a given face. */\nexport interface OcclusionPropertiesOutput {\n /** A boolean value indicating whether forehead is occluded. */\n foreheadOccluded: boolean;\n /** A boolean value indicating whether eyes are occluded. */\n eyeOccluded: boolean;\n /** A boolean value indicating whether the mouth is occluded. */\n mouthOccluded: boolean;\n}\n\n/** Accessory item and corresponding confidence level. */\nexport interface AccessoryItemOutput {\n /**\n * Type of the accessory.\n *\n * Possible values: \"headwear\", \"glasses\", \"mask\"\n */\n type: AccessoryTypeOutput;\n /** Confidence level of the accessory type. Range between [0,1]. */\n confidence: number;\n}\n\n/** Properties describing any presence of blur within the image. */\nexport interface BlurPropertiesOutput {\n /**\n * An enum value indicating level of blurriness.\n *\n * Possible values: \"low\", \"medium\", \"high\"\n */\n blurLevel: BlurLevelOutput;\n /** A number indicating level of blurriness ranging from 0 to 1. */\n value: number;\n}\n\n/** Properties describing exposure level of the image. */\nexport interface ExposurePropertiesOutput {\n /**\n * An enum value indicating level of exposure.\n *\n * Possible values: \"underExposure\", \"goodExposure\", \"overExposure\"\n */\n exposureLevel: ExposureLevelOutput;\n /** A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. */\n value: number;\n}\n\n/** Properties describing noise level of the image. 
*/\nexport interface NoisePropertiesOutput {\n /**\n * An enum value indicating level of noise.\n *\n * Possible values: \"low\", \"medium\", \"high\"\n */\n noiseLevel: NoiseLevelOutput;\n /** A number indicating level of noise level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level. */\n value: number;\n}\n\n/** Properties describing the presence of a mask on a given face. */\nexport interface MaskPropertiesOutput {\n /** A boolean value indicating whether nose and mouth are covered. */\n noseAndMouthCovered: boolean;\n /**\n * Type of the mask.\n *\n * Possible values: \"faceMask\", \"noMask\", \"otherMaskOrOcclusion\", \"uncertain\"\n */\n type: MaskTypeOutput;\n}\n\n/** Response body for find similar face operation. */\nexport interface FindSimilarResultOutput {\n /** Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. */\n confidence: number;\n /** faceId of candidate face when find by faceIds. faceId is created by \"Detect\" and will expire 24 hours after the detection call. */\n faceId?: string;\n /** persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire. */\n persistedFaceId?: string;\n}\n\n/** Identify result. */\nexport interface IdentificationResultOutput {\n /** faceId of the query face. */\n faceId: string;\n /** Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. */\n candidates: Array<IdentificationCandidateOutput>;\n}\n\n/** Candidate for identify call. */\nexport interface IdentificationCandidateOutput {\n /** personId of candidate person. */\n personId: string;\n /** Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. */\n confidence: number;\n}\n\n/** Verify result. */\nexport interface VerificationResultOutput {\n /** True if the two faces belong to the same person or the face belongs to the person, otherwise false. */\n isIdentical: boolean;\n /** A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data. */\n confidence: number;\n}\n\n/** Response body for group face operation. */\nexport interface GroupingResultOutput {\n /** A partition of the original faces based on face similarity. Groups are ranked by number of faces. */\n groups: string[][];\n /** Face ids array of faces that cannot find any similar faces from original faces. */\n messyGroup: string[];\n}\n\n/** Face list is a list of faces, up to 1,000 faces. */\nexport interface FaceListOutput {\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /**\n * Name of recognition model. 
Recognition model is used when the face features are extracted and associated with detected faceIds.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. */\n readonly faceListId: string;\n /** Face ids of registered faces in the face list. */\n persistedFaces?: Array<FaceListFaceOutput>;\n}\n\n/** Face resource for face list. */\nexport interface FaceListFaceOutput {\n /** Face ID of the face. */\n readonly persistedFaceId: string;\n /** User-provided data attached to the face. The length limit is 1K. */\n userData?: string;\n}\n\n/** Face list item for list face list. */\nexport interface FaceListItemOutput {\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /**\n * Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. */\n faceListId: string;\n}\n\n/** Response body for adding face. */\nexport interface AddFaceResultOutput {\n /** Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in \"Detect\" and will expire in 24 hours after the detection call. */\n persistedFaceId: string;\n}\n\n/** Large face list is a list of faces, up to 1,000,000 faces. */\nexport interface LargeFaceListOutput {\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /**\n * Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. */\n readonly largeFaceListId: string;\n}\n\n/** Training result of a container */\nexport interface TrainingResultOutput {\n /**\n * Training status of the container.\n *\n * Possible values: \"notStarted\", \"running\", \"succeeded\", \"failed\"\n */\n status: OperationStatusOutput;\n /** A combined UTC date and time string that describes the created time of the person group, large person group or large face list. */\n createdDateTime: string;\n /** A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. */\n lastActionDateTime: string;\n /** A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. */\n lastSuccessfulTrainingDateTime: string;\n /** Show failure message when training failed (omitted when training succeed). */\n message?: string;\n}\n\n/** Face resource for large face list. */\nexport interface LargeFaceListFaceOutput {\n /** Face ID of the face. */\n readonly persistedFaceId: string;\n /** User-provided data attached to the face. The length limit is 1K. 
*/\n userData?: string;\n}\n\n/** The container of the uploaded person data, including face recognition feature, and up to 10,000 persons. To handle larger scale face identification problem, please consider using Large Person Group. */\nexport interface PersonGroupOutput {\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /**\n * Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** ID of the container. */\n readonly personGroupId: string;\n}\n\n/** Response of create person. */\nexport interface CreatePersonResultOutput {\n /** Person ID of the person. */\n personId: string;\n}\n\n/** The person in a specified person group. To add face to this person, please call \"Add Large Person Group Person Face\". */\nexport interface PersonGroupPersonOutput {\n /** ID of the person. */\n readonly personId: string;\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /** Face ids of registered faces in the person. */\n persistedFaceIds?: string[];\n}\n\n/** Face resource for person group person. */\nexport interface PersonGroupPersonFaceOutput {\n /** Face ID of the face. */\n readonly persistedFaceId: string;\n /** User-provided data attached to the face. The length limit is 1K. */\n userData?: string;\n}\n\n/** The container of the uploaded person data, including face recognition feature, and up to 1,000,000 people. */\nexport interface LargePersonGroupOutput {\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /**\n * Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** ID of the container. */\n readonly largePersonGroupId: string;\n}\n\n/** The person in a specified large person group. To add face to this person, please call \"Add Large Person Group Person Face\". */\nexport interface LargePersonGroupPersonOutput {\n /** ID of the person. */\n readonly personId: string;\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /** Face ids of registered faces in the person. */\n persistedFaceIds?: string[];\n}\n\n/** Face resource for large person group person. */\nexport interface LargePersonGroupPersonFaceOutput {\n /** Face ID of the face. */\n readonly persistedFaceId: string;\n /** User-provided data attached to the face. The length limit is 1K. */\n userData?: string;\n}\n\n/** Response of liveness session creation. */\nexport interface CreateLivenessSessionResultOutput {\n /** The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. */\n sessionId: string;\n /** Bearer token to provide authentication for the Vision SDK running on a client application. 
This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. */\n authToken: string;\n}\n\n/** Session result of detect liveness. */\nexport interface LivenessSessionOutput {\n /** The unique ID to reference this session. */\n readonly id: string;\n /** DateTime when this session was created. */\n createdDateTime: string;\n /** DateTime when this session was started by the client. */\n sessionStartDateTime?: string;\n /** Whether or not the session is expired. */\n sessionExpired: boolean;\n /** Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. */\n deviceCorrelationId?: string;\n /** Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. */\n authTokenTimeToLiveInSeconds?: number;\n /**\n * The current status of the session.\n *\n * Possible values: \"NotStarted\", \"Started\", \"ResultAvailable\"\n */\n status: FaceSessionStatusOutput;\n /** The latest session audit result only populated if status == 'ResultAvailable'. */\n result?: LivenessSessionAuditEntryOutput;\n}\n\n/** Audit entry for a request in session. */\nexport interface LivenessSessionAuditEntryOutput {\n /** The unique id to refer to this audit request. Use this id with the 'start' query parameter to continue on to the next page of audit results. */\n id: number;\n /** The unique sessionId of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding session DELETE operation. */\n sessionId: string;\n /** The unique requestId that is returned by the service to the client in the 'apim-request-id' header. */\n requestId: string;\n /** The unique clientRequestId that is sent by the client in the 'client-request-id' header. */\n clientRequestId: string;\n /** The UTC DateTime that the request was received. */\n receivedDateTime: string;\n /** The request of this entry. */\n request: AuditRequestInfoOutput;\n /** The response of this entry. */\n response: AuditLivenessResponseInfoOutput;\n /** The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. */\n digest: string;\n /** The image ID of the session request. */\n sessionImageId?: string;\n /** The sha256 hash of the verify-image in the request. */\n verifyImageHash?: string;\n}\n\n/** Audit entry for a request in the session. */\nexport interface AuditRequestInfoOutput {\n /** The relative URL and query of the liveness request. */\n url: string;\n /** The HTTP method of the request (i.e., GET, POST, DELETE). */\n method: string;\n /** The length of the request body in bytes. */\n contentLength?: number;\n /** The content type of the request. */\n contentType: string;\n /** The user agent used to submit the request. */\n userAgent?: string;\n}\n\n/** Audit entry for a response in the session. */\nexport interface AuditLivenessResponseInfoOutput {\n /** The response body. The schema of this field will depend on the request.url and request.method used by the client. */\n body: LivenessResponseBodyOutput;\n /** The HTTP status code returned to the client. 
*/\n statusCode: number;\n /** The server measured latency for this request in milliseconds. */\n latencyInMilliseconds: number;\n}\n\n/** The response body of detect liveness API call. */\nexport interface LivenessResponseBodyOutput extends Record<string, any> {\n /**\n * The liveness classification for the target face.\n *\n * Possible values: \"uncertain\", \"realface\", \"spoofface\"\n */\n livenessDecision?: LivenessDecisionOutput;\n /** Specific targets used for liveness classification. */\n target?: LivenessOutputsTargetOutput;\n /**\n * The model version used for liveness classification.\n *\n * Possible values: \"2022-10-15-preview.04\", \"2023-12-20-preview.06\"\n */\n modelVersionUsed?: LivenessModelOutput;\n /** The face verification output. Only available when the request is liveness with verify. */\n verifyResult?: LivenessWithVerifyOutputsOutput;\n}\n\n/** The liveness classification for target face. */\nexport interface LivenessOutputsTargetOutput {\n /** The face region where the liveness classification was made on. */\n faceRectangle: FaceRectangleOutput;\n /** The file name which contains the face rectangle where the liveness classification was made on. */\n fileName: string;\n /** The time offset within the file of the frame which contains the face rectangle where the liveness classification was made on. */\n timeOffsetWithinFile: number;\n /**\n * The image type which contains the face rectangle where the liveness classification was made on.\n *\n * Possible values: \"Color\", \"Infrared\", \"Depth\"\n */\n imageType: ImageTypeOutput;\n}\n\n/** The face verification output. */\nexport interface LivenessWithVerifyOutputsOutput {\n /** The detail of face for verification. */\n verifyImage: LivenessWithVerifyImageOutput;\n /** The target face liveness face and comparison image face verification confidence. */\n matchConfidence: number;\n /** Whether the target liveness face and comparison image face match. */\n isIdentical: boolean;\n}\n\n/** The detail of face for verification. */\nexport interface LivenessWithVerifyImageOutput {\n /** The face region where the comparison image's classification was made. */\n faceRectangle: FaceRectangleOutput;\n /**\n * Quality of face image for recognition.\n *\n * Possible values: \"low\", \"medium\", \"high\"\n */\n qualityForRecognition: QualityForRecognitionOutput;\n}\n\n/** Session data returned for enumeration. */\nexport interface LivenessSessionItemOutput {\n /** The unique ID to reference this session. */\n readonly id: string;\n /** DateTime when this session was created. */\n createdDateTime: string;\n /** DateTime when this session was started by the client. */\n sessionStartDateTime?: string;\n /** Whether or not the session is expired. */\n sessionExpired: boolean;\n /** Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. */\n deviceCorrelationId?: string;\n /** Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. */\n authTokenTimeToLiveInSeconds?: number;\n}\n\n/** Response of liveness session with verify creation with verify image provided. */\nexport interface CreateLivenessWithVerifySessionResultOutput {\n /** The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. 
*/\n sessionId: string;\n /** Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. */\n authToken: string;\n /** The detail of face for verification. */\n verifyImage?: LivenessWithVerifyImageOutput;\n}\n\n/** Session result of detect liveness with verify. */\nexport interface LivenessWithVerifySessionOutput {\n /** The unique ID to reference this session. */\n readonly id: string;\n /** DateTime when this session was created. */\n createdDateTime: string;\n /** DateTime when this session was started by the client. */\n sessionStartDateTime?: string;\n /** Whether or not the session is expired. */\n sessionExpired: boolean;\n /** Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. */\n deviceCorrelationId?: string;\n /** Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. */\n authTokenTimeToLiveInSeconds?: number;\n /**\n * The current status of the session.\n *\n * Possible values: \"NotStarted\", \"Started\", \"ResultAvailable\"\n */\n status: FaceSessionStatusOutput;\n /** The latest session audit result only populated if status == 'ResultAvailable'. */\n result?: LivenessSessionAuditEntryOutput;\n}\n\n/** Person resource for person directory */\nexport interface PersonDirectoryPersonOutput {\n /** Person ID of the person. */\n readonly personId: string;\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n}\n\n/** Response of list dynamic person group of person. */\nexport interface ListGroupReferenceResultOutput {\n /** Array of PersonDirectory DynamicPersonGroup ids. */\n dynamicPersonGroupIds: string[];\n}\n\n/** Face resource for person directory person. */\nexport interface PersonDirectoryFaceOutput {\n /** Face ID of the face. */\n readonly persistedFaceId: string;\n /** User-provided data attached to the face. The length limit is 1K. */\n userData?: string;\n}\n\n/** Response of list face of person. */\nexport interface ListFaceResultOutput {\n /** Id of person. */\n personId: string;\n /** Array of persisted face ids. */\n persistedFaceIds: string[];\n}\n\n/** A container that references Person Directory \"Create Person\". */\nexport interface DynamicPersonGroupOutput {\n /** ID of the dynamic person group. */\n readonly dynamicPersonGroupId: string;\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n}\n\n/** Response of list dynamic person group person. */\nexport interface ListPersonResultOutput {\n /** Array of PersonDirectory Person ids. 
*/\n personIds: string[];\n}\n\n/** Alias for OperationStatusOutput */\nexport type OperationStatusOutput = string;\n/** Alias for RecognitionModelOutput */\nexport type RecognitionModelOutput = string;\n/** Alias for GlassesTypeOutput */\nexport type GlassesTypeOutput = string;\n/** Alias for HairColorTypeOutput */\nexport type HairColorTypeOutput = string;\n/** Alias for AccessoryTypeOutput */\nexport type AccessoryTypeOutput = string;\n/** Alias for BlurLevelOutput */\nexport type BlurLevelOutput = string;\n/** Alias for ExposureLevelOutput */\nexport type ExposureLevelOutput = string;\n/** Alias for NoiseLevelOutput */\nexport type NoiseLevelOutput = string;\n/** Alias for MaskTypeOutput */\nexport type MaskTypeOutput = string;\n/** Alias for QualityForRecognitionOutput */\nexport type QualityForRecognitionOutput = string;\n/** Alias for LivenessModelOutput */\nexport type LivenessModelOutput = string;\n/** Alias for FaceSessionStatusOutput */\nexport type FaceSessionStatusOutput = string;\n/** Alias for LivenessDecisionOutput */\nexport type LivenessDecisionOutput = string;\n/** Alias for ImageTypeOutput */\nexport type ImageTypeOutput = string;\n"]}
+ {"version":3,"file":"outputModels.js","sourceRoot":"","sources":["../../src/outputModels.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\n/** Response for detect API. */\nexport interface FaceDetectionResultOutput {\n /** Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true. */\n faceId?: string;\n /**\n * The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** A rectangle area for the face location on image. */\n faceRectangle: FaceRectangleOutput;\n /** An array of 27-point face landmarks pointing to the important positions of face components. To return this, it requires 'returnFaceLandmarks' parameter to be true. */\n faceLandmarks?: FaceLandmarksOutput;\n /** Face attributes for detected face. */\n faceAttributes?: FaceAttributesOutput;\n}\n\n/** A rectangle within which a face can be found. */\nexport interface FaceRectangleOutput {\n /** The distance from the top edge if the image to the top edge of the rectangle, in pixels. */\n top: number;\n /** The distance from the left edge if the image to the left edge of the rectangle, in pixels. */\n left: number;\n /** The width of the rectangle, in pixels. */\n width: number;\n /** The height of the rectangle, in pixels. */\n height: number;\n}\n\n/** A collection of 27-point face landmarks pointing to the important positions of face components. */\nexport interface FaceLandmarksOutput {\n /** The coordinates of the left eye pupil. */\n pupilLeft: LandmarkCoordinateOutput;\n /** The coordinates of the right eye pupil. */\n pupilRight: LandmarkCoordinateOutput;\n /** The coordinates of the nose tip. */\n noseTip: LandmarkCoordinateOutput;\n /** The coordinates of the mouth left. */\n mouthLeft: LandmarkCoordinateOutput;\n /** The coordinates of the mouth right. */\n mouthRight: LandmarkCoordinateOutput;\n /** The coordinates of the left eyebrow outer. */\n eyebrowLeftOuter: LandmarkCoordinateOutput;\n /** The coordinates of the left eyebrow inner. */\n eyebrowLeftInner: LandmarkCoordinateOutput;\n /** The coordinates of the left eye outer. */\n eyeLeftOuter: LandmarkCoordinateOutput;\n /** The coordinates of the left eye top. */\n eyeLeftTop: LandmarkCoordinateOutput;\n /** The coordinates of the left eye bottom. */\n eyeLeftBottom: LandmarkCoordinateOutput;\n /** The coordinates of the left eye inner. */\n eyeLeftInner: LandmarkCoordinateOutput;\n /** The coordinates of the right eyebrow inner. */\n eyebrowRightInner: LandmarkCoordinateOutput;\n /** The coordinates of the right eyebrow outer. */\n eyebrowRightOuter: LandmarkCoordinateOutput;\n /** The coordinates of the right eye inner. */\n eyeRightInner: LandmarkCoordinateOutput;\n /** The coordinates of the right eye top. */\n eyeRightTop: LandmarkCoordinateOutput;\n /** The coordinates of the right eye bottom. */\n eyeRightBottom: LandmarkCoordinateOutput;\n /** The coordinates of the right eye outer. */\n eyeRightOuter: LandmarkCoordinateOutput;\n /** The coordinates of the nose root left. */\n noseRootLeft: LandmarkCoordinateOutput;\n /** The coordinates of the nose root right. 
*/\n noseRootRight: LandmarkCoordinateOutput;\n /** The coordinates of the nose left alar top. */\n noseLeftAlarTop: LandmarkCoordinateOutput;\n /** The coordinates of the nose right alar top. */\n noseRightAlarTop: LandmarkCoordinateOutput;\n /** The coordinates of the nose left alar out tip. */\n noseLeftAlarOutTip: LandmarkCoordinateOutput;\n /** The coordinates of the nose right alar out tip. */\n noseRightAlarOutTip: LandmarkCoordinateOutput;\n /** The coordinates of the upper lip top. */\n upperLipTop: LandmarkCoordinateOutput;\n /** The coordinates of the upper lip bottom. */\n upperLipBottom: LandmarkCoordinateOutput;\n /** The coordinates of the under lip top. */\n underLipTop: LandmarkCoordinateOutput;\n /** The coordinates of the under lip bottom. */\n underLipBottom: LandmarkCoordinateOutput;\n}\n\n/** Landmark coordinates within an image. */\nexport interface LandmarkCoordinateOutput {\n /** The horizontal component, in pixels. */\n x: number;\n /** The vertical component, in pixels. */\n y: number;\n}\n\n/** Face attributes for the detected face. */\nexport interface FaceAttributesOutput {\n /** Age in years. */\n age?: number;\n /** Smile intensity, a number between [0,1]. */\n smile?: number;\n /** Properties describing facial hair attributes. */\n facialHair?: FacialHairOutput;\n /**\n * Glasses type if any of the face.\n *\n * Possible values: \"noGlasses\", \"readingGlasses\", \"sunglasses\", \"swimmingGoggles\"\n */\n glasses?: GlassesTypeOutput;\n /** 3-D roll/yaw/pitch angles for face direction. */\n headPose?: HeadPoseOutput;\n /** Properties describing hair attributes. */\n hair?: HairPropertiesOutput;\n /** Properties describing occlusions on a given face. */\n occlusion?: OcclusionPropertiesOutput;\n /** Properties describing any accessories on a given face. */\n accessories?: Array<AccessoryItemOutput>;\n /** Properties describing any presence of blur within the image. */\n blur?: BlurPropertiesOutput;\n /** Properties describing exposure level of the image. */\n exposure?: ExposurePropertiesOutput;\n /** Properties describing noise level of the image. */\n noise?: NoisePropertiesOutput;\n /** Properties describing the presence of a mask on a given face. */\n mask?: MaskPropertiesOutput;\n /**\n * Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on.\n *\n * Possible values: \"low\", \"medium\", \"high\"\n */\n qualityForRecognition?: QualityForRecognitionOutput;\n}\n\n/** Properties describing facial hair attributes. */\nexport interface FacialHairOutput {\n /** A number ranging from 0 to 1 indicating a level of confidence associated with a property. */\n moustache: number;\n /** A number ranging from 0 to 1 indicating a level of confidence associated with a property. */\n beard: number;\n /** A number ranging from 0 to 1 indicating a level of confidence associated with a property. */\n sideburns: number;\n}\n\n/** 3-D roll/yaw/pitch angles for face direction. */\nexport interface HeadPoseOutput {\n /** Value of angles. */\n pitch: number;\n /** Value of angles. */\n roll: number;\n /** Value of angles. */\n yaw: number;\n}\n\n/** Properties describing hair attributes. */\nexport interface HairPropertiesOutput {\n /** A number describing confidence level of whether the person is bald. */\n bald: number;\n /** A boolean value describing whether the hair is visible in the image. 
*/\n invisible: boolean;\n /** An array of candidate colors and confidence level in the presence of each. */\n hairColor: Array<HairColorOutput>;\n}\n\n/** An array of candidate colors and confidence level in the presence of each. */\nexport interface HairColorOutput {\n /**\n * Name of the hair color.\n *\n * Possible values: \"unknown\", \"white\", \"gray\", \"blond\", \"brown\", \"red\", \"black\", \"other\"\n */\n color: HairColorTypeOutput;\n /** Confidence level of the color. Range between [0,1]. */\n confidence: number;\n}\n\n/** Properties describing occlusions on a given face. */\nexport interface OcclusionPropertiesOutput {\n /** A boolean value indicating whether forehead is occluded. */\n foreheadOccluded: boolean;\n /** A boolean value indicating whether eyes are occluded. */\n eyeOccluded: boolean;\n /** A boolean value indicating whether the mouth is occluded. */\n mouthOccluded: boolean;\n}\n\n/** Accessory item and corresponding confidence level. */\nexport interface AccessoryItemOutput {\n /**\n * Type of the accessory.\n *\n * Possible values: \"headwear\", \"glasses\", \"mask\"\n */\n type: AccessoryTypeOutput;\n /** Confidence level of the accessory type. Range between [0,1]. */\n confidence: number;\n}\n\n/** Properties describing any presence of blur within the image. */\nexport interface BlurPropertiesOutput {\n /**\n * An enum value indicating level of blurriness.\n *\n * Possible values: \"low\", \"medium\", \"high\"\n */\n blurLevel: BlurLevelOutput;\n /** A number indicating level of blurriness ranging from 0 to 1. */\n value: number;\n}\n\n/** Properties describing exposure level of the image. */\nexport interface ExposurePropertiesOutput {\n /**\n * An enum value indicating level of exposure.\n *\n * Possible values: \"underExposure\", \"goodExposure\", \"overExposure\"\n */\n exposureLevel: ExposureLevelOutput;\n /** A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. */\n value: number;\n}\n\n/** Properties describing noise level of the image. */\nexport interface NoisePropertiesOutput {\n /**\n * An enum value indicating level of noise.\n *\n * Possible values: \"low\", \"medium\", \"high\"\n */\n noiseLevel: NoiseLevelOutput;\n /** A number indicating level of noise level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level. */\n value: number;\n}\n\n/** Properties describing the presence of a mask on a given face. */\nexport interface MaskPropertiesOutput {\n /** A boolean value indicating whether nose and mouth are covered. */\n noseAndMouthCovered: boolean;\n /**\n * Type of the mask.\n *\n * Possible values: \"faceMask\", \"noMask\", \"otherMaskOrOcclusion\", \"uncertain\"\n */\n type: MaskTypeOutput;\n}\n\n/** A response containing error details. */\nexport interface FaceErrorResponseOutput {\n /** The error object. */\n error: FaceErrorOutput;\n}\n\n/** The error object. For comprehensive details on error codes and messages returned by the Face Service, please refer to the following link: https://aka.ms/face-error-codes-and-messages. */\nexport interface FaceErrorOutput {\n /** One of a server-defined set of error codes. */\n code: string;\n /** A human-readable representation of the error. */\n message: string;\n}\n\n/** Response body for find similar face operation. 
*/\nexport interface FindSimilarResultOutput {\n /** Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. */\n confidence: number;\n /** faceId of candidate face when find by faceIds. faceId is created by \"Detect\" and will expire 24 hours after the detection call. */\n faceId?: string;\n /** persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire. */\n persistedFaceId?: string;\n}\n\n/** Identify result. */\nexport interface IdentificationResultOutput {\n /** faceId of the query face. */\n faceId: string;\n /** Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. */\n candidates: Array<IdentificationCandidateOutput>;\n}\n\n/** Candidate for identify call. */\nexport interface IdentificationCandidateOutput {\n /** personId of candidate person. */\n personId: string;\n /** Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. */\n confidence: number;\n}\n\n/** Verify result. */\nexport interface VerificationResultOutput {\n /** True if the two faces belong to the same person or the face belongs to the person, otherwise false. */\n isIdentical: boolean;\n /** A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data. */\n confidence: number;\n}\n\n/** Response body for group face operation. */\nexport interface GroupingResultOutput {\n /** A partition of the original faces based on face similarity. Groups are ranked by number of faces. */\n groups: string[][];\n /** Face ids array of faces that cannot find any similar faces from original faces. */\n messyGroup: string[];\n}\n\n/** Face list is a list of faces, up to 1,000 faces. */\nexport interface FaceListOutput {\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /**\n * Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds.\n *\n * Possible values: \"recognition_01\", \"recognition_02\", \"recognition_03\", \"recognition_04\"\n */\n recognitionModel?: RecognitionModelOutput;\n /** Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. */\n readonly faceListId: string;\n /** Face ids of registered faces in the face list. */\n persistedFaces?: Array<FaceListFaceOutput>;\n}\n\n/** Face resource for face list. */\nexport interface FaceListFaceOutput {\n /** Face ID of the face. */\n readonly persistedFaceId: string;\n /** User-provided data attached to the face. The length limit is 1K. */\n userData?: string;\n}\n\n/** Face list item for list face list. */\nexport interface FaceListItemOutput {\n /** User defined name, maximum length is 128. */\n name: string;\n /** Optional user defined data. Length should not exceed 16K. */\n userData?: string;\n /**\n * Name of recognition model. 
(remainder of the outputModels.d.ts.map diff omitted: the source map's embedded sourcesContent is a verbatim copy of the outputModels.d.ts declarations — face list, person group, and liveness session output interfaces plus the string *Output type aliases — whose readable diff is listed under outputModels.d.ts in the file list above.)
@@ -1,11 +1,19 @@
 import type { RequestParameters } from "@azure-rest/core-client";
-import type { DetectionModel, RecognitionModel, FaceAttributeType, FindSimilarMatchMode, CreateCollectionRequest, UserDefinedFieldsForUpdate, AddFaceFromUrlRequest, FaceUserData, UserDefinedFields, CreateLivenessSessionContent, CreateLivenessWithVerifySessionMultipartContent, CreateLivenessWithVerifySessionJsonContent } from "./models.js";
-export type GetOperationResultParameters = RequestParameters;
+import type { DetectionModel, RecognitionModel, FaceAttributeType, FindSimilarMatchMode, CreateCollectionRequest, UserDefinedFieldsForUpdate, AddFaceFromUrlRequest, FaceUserData, UserDefinedFields, CreateLivenessSessionContent, CreateLivenessWithVerifySessionContent } from "./models.js";
 export interface DetectFromUrlBodyParam {
     body: {
         url: string;
     };
 }
+/** This is the wrapper object for the parameter `returnFaceAttributes` with explode set to false and style set to form. */
+export interface DetectFromUrlReturnFaceAttributesQueryParam {
+    /** Value of the parameter */
+    value: FaceAttributeType[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface DetectFromUrlQueryParamProperties {
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
@@ -22,7 +30,7 @@ export interface DetectFromUrlQueryParamProperties {
     /** Return faceIds of the detected faces or not. The default value is true. */
     returnFaceId?: boolean;
     /** Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. */
-    returnFaceAttributes?: FaceAttributeType[];
+    returnFaceAttributes?: FaceAttributeType[] | DetectFromUrlReturnFaceAttributesQueryParam;
     /** Return face landmarks of the detected faces or not. The default value is false. */
     returnFaceLandmarks?: boolean;
     /** Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. */
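
This is the pattern repeated throughout the release: each array-valued query parameter gains a wrapper interface that spells out its wire serialization (form style, not exploded, i.e. a single comma-joined pair such as returnFaceAttributes=headPose,glasses), and the property widens to accept either the bare array or the wrapper. A minimal sketch of both shapes against the /detect route, assuming the package's default client factory — the endpoint, key, and image URL are placeholders:

import createFaceClient, { isUnexpected } from "@azure-rest/ai-vision-face";
import { AzureKeyCredential } from "@azure/core-auth";

const client = createFaceClient(
  "https://<your-resource>.cognitiveservices.azure.com", // placeholder endpoint
  new AzureKeyCredential("<api-key>"), // placeholder key
);

async function detectAttributes(): Promise<void> {
  const response = await client.path("/detect").post({
    contentType: "application/json",
    body: { url: "https://example.com/face.jpg" }, // placeholder image URL
    queryParameters: {
      returnFaceId: false,
      // Old shape, still accepted: returnFaceAttributes: ["headPose", "glasses"]
      // New wrapper shape, pinning form-style, non-exploded serialization,
      // i.e. ...&returnFaceAttributes=headPose,glasses
      returnFaceAttributes: {
        value: ["headPose", "glasses"],
        explode: false,
        style: "form",
      },
    },
  });
  if (isUnexpected(response)) {
    throw new Error(response.body.error.message);
  }
  console.log(response.body); // detected faces with headPose and glasses attributes
}

detectAttributes().catch(console.error);

Passing the bare array keeps the previous behavior; the wrapper only matters when a caller needs to state explicitly how the array is encoded on the wire.
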
@@ -46,6 +54,15 @@ export interface DetectBodyParam {
      */
     body: string | Uint8Array | ReadableStream<Uint8Array> | NodeJS.ReadableStream;
 }
+/** This is the wrapper object for the parameter `returnFaceAttributes` with explode set to false and style set to form. */
+export interface DetectReturnFaceAttributesQueryParam {
+    /** Value of the parameter */
+    value: FaceAttributeType[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface DetectQueryParamProperties {
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
@@ -62,7 +79,7 @@ export interface DetectQueryParamProperties {
     /** Return faceIds of the detected faces or not. The default value is true. */
     returnFaceId?: boolean;
     /** Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. */
-    returnFaceAttributes?: FaceAttributeType[];
+    returnFaceAttributes?: FaceAttributeType[] | DetectReturnFaceAttributesQueryParam;
     /** Return face landmarks of the detected faces or not. The default value is false. */
     returnFaceLandmarks?: boolean;
     /** Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. */
@@ -83,6 +100,15 @@ export interface DetectFromSessionImageIdBodyParam {
         sessionImageId: string;
     };
 }
+/** This is the wrapper object for the parameter `returnFaceAttributes` with explode set to false and style set to form. */
+export interface DetectFromSessionImageIdReturnFaceAttributesQueryParam {
+    /** Value of the parameter */
+    value: FaceAttributeType[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface DetectFromSessionImageIdQueryParamProperties {
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
@@ -99,7 +125,7 @@ export interface DetectFromSessionImageIdQueryParamProperties {
     /** Return faceIds of the detected faces or not. The default value is true. */
     returnFaceId?: boolean;
     /** Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. */
-    returnFaceAttributes?: FaceAttributeType[];
+    returnFaceAttributes?: FaceAttributeType[] | DetectFromSessionImageIdReturnFaceAttributesQueryParam;
     /** Return face landmarks of the detected faces or not. The default value is false. */
     returnFaceLandmarks?: boolean;
     /** Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. */
@@ -242,9 +268,18 @@ export type GetFaceListsParameters = GetFaceListsQueryParam & RequestParameters;
 export interface AddFaceListFaceFromUrlBodyParam {
     body: AddFaceFromUrlRequest;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddFaceListFaceFromUrlTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddFaceListFaceFromUrlQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddFaceListFaceFromUrlTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
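
The same wrapper is added for targetFace across all eight add-face variants that follow (face list, large face list, person group, and large person group; URL and binary bodies alike). One hedged example, assuming the client from the previous sketch, a face list id my-face-list that already exists, and the /facelists/{faceListId}/persistedfaces route as documented in the Face REST API:

import createFaceClient, { isUnexpected } from "@azure-rest/ai-vision-face";
import { AzureKeyCredential } from "@azure/core-auth";

const client = createFaceClient(
  "https://<your-resource>.cognitiveservices.azure.com", // placeholder endpoint
  new AzureKeyCredential("<api-key>"), // placeholder key
);

async function addFaceToList(): Promise<void> {
  const response = await client
    .path("/facelists/{faceListId}/persistedfaces", "my-face-list") // hypothetical list id
    .post({
      contentType: "application/json",
      body: { url: "https://example.com/group-photo.jpg" }, // placeholder image URL
      queryParameters: {
        detectionModel: "detection_01",
        // targetFace picks one face out of a multi-face image as
        // [left, top, width, height] pixels. The bare tuple still
        // type-checks: targetFace: [10, 10, 100, 100]
        // The wrapper pins the encoding: ...&targetFace=10,10,100,100
        targetFace: { value: [10, 10, 100, 100], explode: false, style: "form" },
      },
    });
  if (isUnexpected(response)) {
    throw new Error(response.body.error.message);
  }
  console.log(response.body.persistedFaceId); // persisted face id; does not expire
}

addFaceToList().catch(console.error);
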
@@ -266,9 +301,18 @@ export interface AddFaceListFaceBodyParam {
      */
     body: string | Uint8Array | ReadableStream<Uint8Array> | NodeJS.ReadableStream;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddFaceListFaceTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddFaceListFaceQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddFaceListFaceTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
@@ -321,9 +365,18 @@ export type TrainLargeFaceListParameters = RequestParameters;
 export interface AddLargeFaceListFaceFromUrlBodyParam {
     body: AddFaceFromUrlRequest;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddLargeFaceListFaceFromUrlTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddLargeFaceListFaceFromUrlQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddLargeFaceListFaceFromUrlTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
@@ -345,9 +398,18 @@ export interface AddLargeFaceListFaceBodyParam {
      */
     body: string | Uint8Array | ReadableStream<Uint8Array> | NodeJS.ReadableStream;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddLargeFaceListFaceTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddLargeFaceListFaceQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddLargeFaceListFaceTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
@@ -435,9 +497,18 @@ export type GetPersonGroupPersonsParameters = GetPersonGroupPersonsQueryParam &
 export interface AddPersonGroupPersonFaceFromUrlBodyParam {
     body: AddFaceFromUrlRequest;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddPersonGroupPersonFaceFromUrlTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddPersonGroupPersonFaceFromUrlQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddPersonGroupPersonFaceFromUrlTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
@@ -459,9 +530,18 @@ export interface AddPersonGroupPersonFaceBodyParam {
      */
     body: string | Uint8Array | ReadableStream<Uint8Array> | NodeJS.ReadableStream;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddPersonGroupPersonFaceTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddPersonGroupPersonFaceQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddPersonGroupPersonFaceTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
@@ -539,9 +619,18 @@ export type GetLargePersonGroupPersonsParameters = GetLargePersonGroupPersonsQue
 export interface AddLargePersonGroupPersonFaceFromUrlBodyParam {
     body: AddFaceFromUrlRequest;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddLargePersonGroupPersonFaceFromUrlTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddLargePersonGroupPersonFaceFromUrlQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddLargePersonGroupPersonFaceFromUrlTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
@@ -563,9 +652,18 @@ export interface AddLargePersonGroupPersonFaceBodyParam {
      */
     body: string | Uint8Array | ReadableStream<Uint8Array> | NodeJS.ReadableStream;
 }
+/** This is the wrapper object for the parameter `targetFace` with explode set to false and style set to form. */
+export interface AddLargePersonGroupPersonFaceTargetFaceQueryParam {
+    /** Value of the parameter */
+    value: number[];
+    /** Should we explode the value? */
+    explode: false;
+    /** Style of the value */
+    style: "form";
+}
 export interface AddLargePersonGroupPersonFaceQueryParamProperties {
     /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
+    targetFace?: number[] | AddLargePersonGroupPersonFaceTargetFaceQueryParam;
     /**
      * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
      *
@@ -596,194 +694,16 @@ export interface CreateLivenessSessionBodyParam {
 export type CreateLivenessSessionParameters = CreateLivenessSessionBodyParam & RequestParameters;
 export type DeleteLivenessSessionParameters = RequestParameters;
 export type GetLivenessSessionResultParameters = RequestParameters;
-export interface GetLivenessSessionsQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetLivenessSessionsQueryParam {
-    queryParameters?: GetLivenessSessionsQueryParamProperties;
-}
-export type GetLivenessSessionsParameters = GetLivenessSessionsQueryParam & RequestParameters;
-export interface GetLivenessSessionAuditEntriesQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetLivenessSessionAuditEntriesQueryParam {
-    queryParameters?: GetLivenessSessionAuditEntriesQueryParamProperties;
-}
-export type GetLivenessSessionAuditEntriesParameters = GetLivenessSessionAuditEntriesQueryParam & RequestParameters;
-export interface CreateLivenessWithVerifySessionWithVerifyImageBodyParam {
+export interface CreateLivenessWithVerifySessionBodyParam {
     /** Request content of liveness with verify session creation. */
-    body: CreateLivenessWithVerifySessionMultipartContent;
+    body: CreateLivenessWithVerifySessionContent;
 }
-export interface CreateLivenessWithVerifySessionWithVerifyImageMediaTypesParam {
+export interface CreateLivenessWithVerifySessionMediaTypesParam {
     /** The content type for the operation. Always multipart/form-data for this operation. */
     contentType: "multipart/form-data";
 }
-export type CreateLivenessWithVerifySessionWithVerifyImageParameters = CreateLivenessWithVerifySessionWithVerifyImageMediaTypesParam & CreateLivenessWithVerifySessionWithVerifyImageBodyParam & RequestParameters;
-export interface CreateLivenessWithVerifySessionBodyParam {
-    /** Body parameter. */
-    body: CreateLivenessWithVerifySessionJsonContent;
-}
-export type CreateLivenessWithVerifySessionParameters = CreateLivenessWithVerifySessionBodyParam & RequestParameters;
+export type CreateLivenessWithVerifySessionParameters = CreateLivenessWithVerifySessionMediaTypesParam & CreateLivenessWithVerifySessionBodyParam & RequestParameters;
 export type DeleteLivenessWithVerifySessionParameters = RequestParameters;
 export type GetLivenessWithVerifySessionResultParameters = RequestParameters;
-export interface GetLivenessWithVerifySessionsQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetLivenessWithVerifySessionsQueryParam {
-    queryParameters?: GetLivenessWithVerifySessionsQueryParamProperties;
-}
-export type GetLivenessWithVerifySessionsParameters = GetLivenessWithVerifySessionsQueryParam & RequestParameters;
-export interface GetLivenessWithVerifySessionAuditEntriesQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetLivenessWithVerifySessionAuditEntriesQueryParam {
-    queryParameters?: GetLivenessWithVerifySessionAuditEntriesQueryParamProperties;
-}
-export type GetLivenessWithVerifySessionAuditEntriesParameters = GetLivenessWithVerifySessionAuditEntriesQueryParam & RequestParameters;
 export type GetSessionImageParameters = RequestParameters;
-export interface CreatePersonBodyParam {
-    body: UserDefinedFields;
-}
-export type CreatePersonParameters = CreatePersonBodyParam & RequestParameters;
-export type DeletePersonParameters = RequestParameters;
-export type GetPersonParameters = RequestParameters;
-export interface UpdatePersonBodyParam {
-    body: UserDefinedFieldsForUpdate;
-}
-export type UpdatePersonParameters = UpdatePersonBodyParam & RequestParameters;
-export interface GetPersonsQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetPersonsQueryParam {
-    queryParameters?: GetPersonsQueryParamProperties;
-}
-export type GetPersonsParameters = GetPersonsQueryParam & RequestParameters;
-export interface GetDynamicPersonGroupReferencesQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetDynamicPersonGroupReferencesQueryParam {
-    queryParameters?: GetDynamicPersonGroupReferencesQueryParamProperties;
-}
-export type GetDynamicPersonGroupReferencesParameters = GetDynamicPersonGroupReferencesQueryParam & RequestParameters;
-export interface AddPersonFaceBodyParam {
-    /**
-     * The image to be analyzed
-     *
-     * Value may contain any sequence of octets
-     */
-    body: string | Uint8Array | ReadableStream<Uint8Array> | NodeJS.ReadableStream;
-}
-export interface AddPersonFaceQueryParamProperties {
-    /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
-    /**
-     * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
-     *
-     * Possible values: "detection_01", "detection_02", "detection_03"
-     */
-    detectionModel?: DetectionModel;
-    /** User-provided data attached to the face. The size limit is 1K. */
-    userData?: string;
-}
-export interface AddPersonFaceQueryParam {
-    queryParameters?: AddPersonFaceQueryParamProperties;
-}
-export interface AddPersonFaceMediaTypesParam {
-    /** The format of the HTTP payload. */
-    contentType: "application/octet-stream";
-}
-export type AddPersonFaceParameters = AddPersonFaceQueryParam & AddPersonFaceMediaTypesParam & AddPersonFaceBodyParam & RequestParameters;
-export interface AddPersonFaceFromUrlBodyParam {
-    body: {
-        url: string;
-    };
-}
-export interface AddPersonFaceFromUrlQueryParamProperties {
-    /** A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. */
-    targetFace?: number[];
-    /**
-     * The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
-     *
-     * Possible values: "detection_01", "detection_02", "detection_03"
-     */
-    detectionModel?: DetectionModel;
-    /** User-provided data attached to the face. The size limit is 1K. */
-    userData?: string;
-}
-export interface AddPersonFaceFromUrlQueryParam {
-    queryParameters?: AddPersonFaceFromUrlQueryParamProperties;
-}
-export type AddPersonFaceFromUrlParameters = AddPersonFaceFromUrlQueryParam & AddPersonFaceFromUrlBodyParam & RequestParameters;
-export type DeletePersonFaceParameters = RequestParameters;
-export type GetPersonFaceParameters = RequestParameters;
-export interface UpdatePersonFaceBodyParam {
-    body: FaceUserData;
-}
-export type UpdatePersonFaceParameters = UpdatePersonFaceBodyParam & RequestParameters;
-export type GetPersonFacesParameters = RequestParameters;
-export interface CreateDynamicPersonGroupWithPersonBodyParam {
-    body: {
-        name: string;
-        userData?: string;
-        addPersonIds: string[];
-    };
-}
-export type CreateDynamicPersonGroupWithPersonParameters = CreateDynamicPersonGroupWithPersonBodyParam & RequestParameters;
-export interface CreateDynamicPersonGroupBodyParam {
-    body: UserDefinedFields;
-}
-export type CreateDynamicPersonGroupParameters = CreateDynamicPersonGroupBodyParam & RequestParameters;
-export type DeleteDynamicPersonGroupParameters = RequestParameters;
-export type GetDynamicPersonGroupParameters = RequestParameters;
-export interface UpdateDynamicPersonGroupWithPersonChangesBodyParam {
-    body: {
-        name?: string;
-        userData?: string;
-        addPersonIds?: string[];
-        removePersonIds?: string[];
-    };
-}
-export type UpdateDynamicPersonGroupWithPersonChangesParameters = UpdateDynamicPersonGroupWithPersonChangesBodyParam & RequestParameters;
-export interface UpdateDynamicPersonGroupBodyParam {
-    body: UserDefinedFieldsForUpdate;
-}
-export type UpdateDynamicPersonGroupParameters = UpdateDynamicPersonGroupBodyParam & RequestParameters;
-export interface GetDynamicPersonGroupsQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetDynamicPersonGroupsQueryParam {
-    queryParameters?: GetDynamicPersonGroupsQueryParamProperties;
-}
-export type GetDynamicPersonGroupsParameters = GetDynamicPersonGroupsQueryParam & RequestParameters;
-export interface GetDynamicPersonGroupPersonsQueryParamProperties {
-    /** List resources greater than the "start". It contains no more than 64 characters. Default is empty. */
-    start?: string;
-    /** The number of items to list, ranging in [1, 1000]. Default is 1000. */
-    top?: number;
-}
-export interface GetDynamicPersonGroupPersonsQueryParam {
-    queryParameters?: GetDynamicPersonGroupPersonsQueryParamProperties;
-}
-export type GetDynamicPersonGroupPersonsParameters = GetDynamicPersonGroupPersonsQueryParam & RequestParameters;
 //# sourceMappingURL=parameters.d.ts.map
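
The last hunk carries the two substantive breaks in this file. First, the separate JSON (CreateLivenessWithVerifySessionJsonContent) and multipart (CreateLivenessWithVerifySessionMultipartContent) request types collapse into a single CreateLivenessWithVerifySessionContent sent as multipart/form-data. Second, everything else is pure removal: the session list and audit-entry parameter types, and the entire Person Directory and dynamic person group surface (CreatePerson, AddPersonFace, CreateDynamicPersonGroup, and the rest), so code referencing those types will no longer compile against this build. A sketch of the consolidated session creation — the route and the exact field names of CreateLivenessWithVerifySessionContent are assumptions based on newer Face API versions, so verify them against clientDefinitions.d.ts and models.d.ts:

import { readFile } from "node:fs/promises";
import createFaceClient, { isUnexpected } from "@azure-rest/ai-vision-face";
import { AzureKeyCredential } from "@azure/core-auth";

const client = createFaceClient(
  "https://<your-resource>.cognitiveservices.azure.com", // placeholder endpoint
  new AzureKeyCredential("<api-key>"), // placeholder key
);

async function createVerifySession(): Promise<void> {
  // One multipart request now carries both the session settings and the
  // optional reference image; previously the JSON-only variant could not
  // include a verify image.
  const response = await client
    .path("/detectLivenessWithVerify-sessions") // assumed route; see clientDefinitions.d.ts
    .post({
      contentType: "multipart/form-data",
      body: {
        livenessOperationMode: "Passive",
        deviceCorrelationId: "<device-correlation-id>", // placeholder; a GUID is expected
        verifyImage: await readFile("./reference-face.jpg"), // binary form part, assumed field name
      },
    });
  if (isUnexpected(response)) {
    throw new Error(response.body.error.message);
  }
  // sessionId plus the short-lived authToken to hand to the client-side Vision SDK
  console.log(response.body);
}

createVerifySession().catch(console.error);

Collapsing the two content types means the optional reference image is just another form field on one operation rather than a reason to pick between two differently typed endpoints.
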