azure_cognitiveservices_face 0.19.5

Files changed (79)
  1. checksums.yaml +7 -0
  2. data/LICENSE.txt +21 -0
  3. data/lib/1.0/generated/azure_cognitiveservices_face/face.rb +1711 -0
  4. data/lib/1.0/generated/azure_cognitiveservices_face/face_client.rb +159 -0
  5. data/lib/1.0/generated/azure_cognitiveservices_face/face_list_operations.rb +1269 -0
  6. data/lib/1.0/generated/azure_cognitiveservices_face/large_face_list_operations.rb +1894 -0
  7. data/lib/1.0/generated/azure_cognitiveservices_face/large_person_group_operations.rb +886 -0
  8. data/lib/1.0/generated/azure_cognitiveservices_face/large_person_group_person.rb +1477 -0
  9. data/lib/1.0/generated/azure_cognitiveservices_face/models/accessory.rb +59 -0
  10. data/lib/1.0/generated/azure_cognitiveservices_face/models/accessory_type.rb +17 -0
  11. data/lib/1.0/generated/azure_cognitiveservices_face/models/apierror.rb +47 -0
  12. data/lib/1.0/generated/azure_cognitiveservices_face/models/apply_snapshot_request.rb +69 -0
  13. data/lib/1.0/generated/azure_cognitiveservices_face/models/blur.rb +60 -0
  14. data/lib/1.0/generated/azure_cognitiveservices_face/models/blur_level.rb +17 -0
  15. data/lib/1.0/generated/azure_cognitiveservices_face/models/coordinate.rb +57 -0
  16. data/lib/1.0/generated/azure_cognitiveservices_face/models/detected_face.rb +95 -0
  17. data/lib/1.0/generated/azure_cognitiveservices_face/models/detection_model.rb +16 -0
  18. data/lib/1.0/generated/azure_cognitiveservices_face/models/emotion.rb +124 -0
  19. data/lib/1.0/generated/azure_cognitiveservices_face/models/error.rb +57 -0
  20. data/lib/1.0/generated/azure_cognitiveservices_face/models/exposure.rb +62 -0
  21. data/lib/1.0/generated/azure_cognitiveservices_face/models/exposure_level.rb +17 -0
  22. data/lib/1.0/generated/azure_cognitiveservices_face/models/face_attribute_type.rb +28 -0
  23. data/lib/1.0/generated/azure_cognitiveservices_face/models/face_attributes.rb +214 -0
  24. data/lib/1.0/generated/azure_cognitiveservices_face/models/face_landmarks.rb +360 -0
  25. data/lib/1.0/generated/azure_cognitiveservices_face/models/face_list.rb +101 -0
  26. data/lib/1.0/generated/azure_cognitiveservices_face/models/face_rectangle.rb +81 -0
  27. data/lib/1.0/generated/azure_cognitiveservices_face/models/facial_hair.rb +68 -0
  28. data/lib/1.0/generated/azure_cognitiveservices_face/models/find_similar_match_mode.rb +16 -0
  29. data/lib/1.0/generated/azure_cognitiveservices_face/models/find_similar_request.rb +143 -0
  30. data/lib/1.0/generated/azure_cognitiveservices_face/models/gender.rb +16 -0
  31. data/lib/1.0/generated/azure_cognitiveservices_face/models/glasses_type.rb +18 -0
  32. data/lib/1.0/generated/azure_cognitiveservices_face/models/group_request.rb +58 -0
  33. data/lib/1.0/generated/azure_cognitiveservices_face/models/group_result.rb +83 -0
  34. data/lib/1.0/generated/azure_cognitiveservices_face/models/hair.rb +80 -0
  35. data/lib/1.0/generated/azure_cognitiveservices_face/models/hair_color.rb +60 -0
  36. data/lib/1.0/generated/azure_cognitiveservices_face/models/hair_color_type.rb +22 -0
  37. data/lib/1.0/generated/azure_cognitiveservices_face/models/head_pose.rb +68 -0
  38. data/lib/1.0/generated/azure_cognitiveservices_face/models/identify_candidate.rb +59 -0
  39. data/lib/1.0/generated/azure_cognitiveservices_face/models/identify_request.rb +123 -0
  40. data/lib/1.0/generated/azure_cognitiveservices_face/models/identify_result.rb +69 -0
  41. data/lib/1.0/generated/azure_cognitiveservices_face/models/image_url.rb +47 -0
  42. data/lib/1.0/generated/azure_cognitiveservices_face/models/large_face_list.rb +81 -0
  43. data/lib/1.0/generated/azure_cognitiveservices_face/models/large_person_group.rb +81 -0
  44. data/lib/1.0/generated/azure_cognitiveservices_face/models/makeup.rb +59 -0
  45. data/lib/1.0/generated/azure_cognitiveservices_face/models/meta_data_contract.rb +72 -0
  46. data/lib/1.0/generated/azure_cognitiveservices_face/models/name_and_user_data_contract.rb +64 -0
  47. data/lib/1.0/generated/azure_cognitiveservices_face/models/noise.rb +62 -0
  48. data/lib/1.0/generated/azure_cognitiveservices_face/models/noise_level.rb +17 -0
  49. data/lib/1.0/generated/azure_cognitiveservices_face/models/occlusion.rb +70 -0
  50. data/lib/1.0/generated/azure_cognitiveservices_face/models/operation_status.rb +112 -0
  51. data/lib/1.0/generated/azure_cognitiveservices_face/models/operation_status_type.rb +18 -0
  52. data/lib/1.0/generated/azure_cognitiveservices_face/models/persisted_face.rb +63 -0
  53. data/lib/1.0/generated/azure_cognitiveservices_face/models/person.rb +89 -0
  54. data/lib/1.0/generated/azure_cognitiveservices_face/models/person_group.rb +81 -0
  55. data/lib/1.0/generated/azure_cognitiveservices_face/models/recognition_model.rb +16 -0
  56. data/lib/1.0/generated/azure_cognitiveservices_face/models/similar_face.rb +73 -0
  57. data/lib/1.0/generated/azure_cognitiveservices_face/models/snapshot.rb +136 -0
  58. data/lib/1.0/generated/azure_cognitiveservices_face/models/snapshot_apply_mode.rb +15 -0
  59. data/lib/1.0/generated/azure_cognitiveservices_face/models/snapshot_object_type.rb +18 -0
  60. data/lib/1.0/generated/azure_cognitiveservices_face/models/take_snapshot_request.rb +101 -0
  61. data/lib/1.0/generated/azure_cognitiveservices_face/models/training_status.rb +107 -0
  62. data/lib/1.0/generated/azure_cognitiveservices_face/models/training_status_type.rb +18 -0
  63. data/lib/1.0/generated/azure_cognitiveservices_face/models/update_face_request.rb +50 -0
  64. data/lib/1.0/generated/azure_cognitiveservices_face/models/update_snapshot_request.rb +73 -0
  65. data/lib/1.0/generated/azure_cognitiveservices_face/models/verify_face_to_face_request.rb +57 -0
  66. data/lib/1.0/generated/azure_cognitiveservices_face/models/verify_face_to_person_request.rb +95 -0
  67. data/lib/1.0/generated/azure_cognitiveservices_face/models/verify_result.rb +62 -0
  68. data/lib/1.0/generated/azure_cognitiveservices_face/module_definition.rb +9 -0
  69. data/lib/1.0/generated/azure_cognitiveservices_face/person_group_operations.rb +865 -0
  70. data/lib/1.0/generated/azure_cognitiveservices_face/person_group_person.rb +1528 -0
  71. data/lib/1.0/generated/azure_cognitiveservices_face/snapshot_operations.rb +983 -0
  72. data/lib/1.0/generated/azure_cognitiveservices_face.rb +94 -0
  73. data/lib/azure_cognitiveservices_face.rb +6 -0
  74. data/lib/module_definition.rb +7 -0
  75. data/lib/profiles/latest/face_latest_profile_client.rb +38 -0
  76. data/lib/profiles/latest/face_module_definition.rb +8 -0
  77. data/lib/profiles/latest/modules/face_profile_module.rb +309 -0
  78. data/lib/version.rb +7 -0
  79. metadata +196 -0
data/lib/1.0/generated/azure_cognitiveservices_face/face.rb
@@ -0,0 +1,1711 @@
+ # encoding: utf-8
+ # Code generated by Microsoft (R) AutoRest Code Generator.
+ # Changes may cause incorrect behavior and will be lost if the code is
+ # regenerated.
+
+ module Azure::CognitiveServices::Face::V1_0
+ #
+ # An API for face detection, verification, and identification.
+ #
+ class Face
+ include MsRestAzure
+
+ #
+ # Creates and initializes a new instance of the Face class.
+ # @param client service class for accessing basic functionality.
+ #
+ def initialize(client)
+ @client = client
+ end
+
+ # @return [FaceClient] reference to the FaceClient
+ attr_reader :client
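A quick usage sketch (not part of the generated file): this operations class is normally reached through a configured FaceClient rather than constructed directly. The CognitiveServicesCredentials class, the `endpoint` accessor, the environment variable and the `face` attribute on FaceClient are assumptions based on the usual ms_rest_azure / AutoRest Ruby layout.

require 'azure_cognitiveservices_face'

# Hypothetical setup; the key source, region and the `face` accessor name are assumptions.
credentials = MsRestAzure::CognitiveServicesCredentials.new(ENV['FACE_SUBSCRIPTION_KEY'])
client = Azure::CognitiveServices::Face::V1_0::FaceClient.new(credentials)
client.endpoint = 'https://westus.api.cognitive.microsoft.com'
face_operations = client.face   # instance of the Face operations class above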
23
+
24
+ #
25
+ # Given query face's faceId, to search the similar-looking faces from a faceId
26
+ # array, a face list or a large face list. faceId array contains the faces
27
+ # created by [Face -
28
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236),
29
+ # which will expire 24 hours after creation. A "faceListId" is created by
30
+ # [FaceList -
31
+ # Create](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524b)
32
+ # containing persistedFaceIds that will not expire. And a "largeFaceListId" is
33
+ # created by [LargeFaceList -
34
+ # Create](/docs/services/563879b61984550e40cbbe8d/operations/5a157b68d2de3616c086f2cc)
35
+ # containing persistedFaceIds that will also not expire. Depending on the input
36
+ # the returned similar faces list contains faceIds or persistedFaceIds ranked
37
+ # by similarity.
38
+ # <br/>Find similar has two working modes, "matchPerson" and "matchFace".
39
+ # "matchPerson" is the default mode that it tries to find faces of the same
40
+ # person as possible by using internal same-person thresholds. It is useful to
41
+ # find a known person's other photos. Note that an empty list will be returned
42
+ # if no faces pass the internal thresholds. "matchFace" mode ignores
43
+ # same-person thresholds and returns ranked similar faces anyway, even the
44
+ # similarity is low. It can be used in the cases like searching
45
+ # celebrity-looking faces.
46
+ # <br/>The 'recognitionModel' associated with the query face's faceId should be
47
+ # the same as the 'recognitionModel' used by the target faceId array, face list
48
+ # or large face list.
49
+ #
50
+ #
51
+ # @param face_id FaceId of the query face. User needs to call Face - Detect
52
+ # first to get a valid faceId. Note that this faceId is not persisted and will
53
+ # expire 24 hours after the detection call
54
+ # @param face_list_id [String] An existing user-specified unique candidate face
55
+ # list, created in Face List - Create a Face List. Face list contains a set of
56
+ # persistedFaceIds which are persisted and will never expire. Parameter
57
+ # faceListId, largeFaceListId and faceIds should not be provided at the same
58
+ # time.
59
+ # @param large_face_list_id [String] An existing user-specified unique
60
+ # candidate large face list, created in LargeFaceList - Create. Large face list
61
+ # contains a set of persistedFaceIds which are persisted and will never expire.
62
+ # Parameter faceListId, largeFaceListId and faceIds should not be provided at
63
+ # the same time.
64
+ # @param face_ids An array of candidate faceIds. All of them are created by
65
+ # Face - Detect and the faceIds will expire 24 hours after the detection call.
66
+ # The number of faceIds is limited to 1000. Parameter faceListId,
67
+ # largeFaceListId and faceIds should not be provided at the same time.
68
+ # @param max_num_of_candidates_returned [Integer] The number of top similar
69
+ # faces returned. The valid range is [1, 1000].
70
+ # @param mode [FindSimilarMatchMode] Similar face searching mode. It can be
71
+ # "matchPerson" or "matchFace". Possible values include: 'matchPerson',
72
+ # 'matchFace'
73
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
74
+ # will be added to the HTTP request.
75
+ #
76
+ # @return [Array] operation results.
77
+ #
78
+ def find_similar(face_id, face_list_id:nil, large_face_list_id:nil, face_ids:nil, max_num_of_candidates_returned:20, mode:nil, custom_headers:nil)
79
+ response = find_similar_async(face_id, face_list_id:face_list_id, large_face_list_id:large_face_list_id, face_ids:face_ids, max_num_of_candidates_returned:max_num_of_candidates_returned, mode:mode, custom_headers:custom_headers).value!
80
+ response.body unless response.nil?
81
+ end
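A minimal sketch of calling the synchronous wrapper above, assuming `client` is a configured FaceClient exposing this class as `client.face` and that `query_face_id` came from a Face - Detect call within the last 24 hours; the list id and the SimilarFace attribute names used below are assumptions based on the model classes shipped in this gem.

# Exactly one of face_list_id, large_face_list_id or face_ids should be supplied.
matches = client.face.find_similar(
  query_face_id,
  large_face_list_id: 'my-large-face-list',        # hypothetical list id
  max_num_of_candidates_returned: 5,
  mode: 'matchPerson'                              # or 'matchFace'
)
matches.each { |m| puts "#{m.persisted_face_id}: #{m.confidence}" }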
82
+
83
+ #
84
+ # Given query face's faceId, to search the similar-looking faces from a faceId
85
+ # array, a face list or a large face list. faceId array contains the faces
86
+ # created by [Face -
87
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236),
88
+ # which will expire 24 hours after creation. A "faceListId" is created by
89
+ # [FaceList -
90
+ # Create](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524b)
91
+ # containing persistedFaceIds that will not expire. And a "largeFaceListId" is
92
+ # created by [LargeFaceList -
93
+ # Create](/docs/services/563879b61984550e40cbbe8d/operations/5a157b68d2de3616c086f2cc)
94
+ # containing persistedFaceIds that will also not expire. Depending on the input
95
+ # the returned similar faces list contains faceIds or persistedFaceIds ranked
96
+ # by similarity.
97
+ # <br/>Find similar has two working modes, "matchPerson" and "matchFace".
98
+ # "matchPerson" is the default mode that it tries to find faces of the same
99
+ # person as possible by using internal same-person thresholds. It is useful to
100
+ # find a known person's other photos. Note that an empty list will be returned
101
+ # if no faces pass the internal thresholds. "matchFace" mode ignores
102
+ # same-person thresholds and returns ranked similar faces anyway, even the
103
+ # similarity is low. It can be used in the cases like searching
104
+ # celebrity-looking faces.
105
+ # <br/>The 'recognitionModel' associated with the query face's faceId should be
106
+ # the same as the 'recognitionModel' used by the target faceId array, face list
107
+ # or large face list.
108
+ #
109
+ #
110
+ # @param face_id FaceId of the query face. User needs to call Face - Detect
111
+ # first to get a valid faceId. Note that this faceId is not persisted and will
112
+ # expire 24 hours after the detection call
113
+ # @param face_list_id [String] An existing user-specified unique candidate face
114
+ # list, created in Face List - Create a Face List. Face list contains a set of
115
+ # persistedFaceIds which are persisted and will never expire. Parameter
116
+ # faceListId, largeFaceListId and faceIds should not be provided at the same
117
+ # time.
118
+ # @param large_face_list_id [String] An existing user-specified unique
119
+ # candidate large face list, created in LargeFaceList - Create. Large face list
120
+ # contains a set of persistedFaceIds which are persisted and will never expire.
121
+ # Parameter faceListId, largeFaceListId and faceIds should not be provided at
122
+ # the same time.
123
+ # @param face_ids An array of candidate faceIds. All of them are created by
124
+ # Face - Detect and the faceIds will expire 24 hours after the detection call.
125
+ # The number of faceIds is limited to 1000. Parameter faceListId,
126
+ # largeFaceListId and faceIds should not be provided at the same time.
127
+ # @param max_num_of_candidates_returned [Integer] The number of top similar
128
+ # faces returned. The valid range is [1, 1000].
129
+ # @param mode [FindSimilarMatchMode] Similar face searching mode. It can be
130
+ # "matchPerson" or "matchFace". Possible values include: 'matchPerson',
131
+ # 'matchFace'
132
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
133
+ # will be added to the HTTP request.
134
+ #
135
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
136
+ #
137
+ def find_similar_with_http_info(face_id, face_list_id:nil, large_face_list_id:nil, face_ids:nil, max_num_of_candidates_returned:20, mode:nil, custom_headers:nil)
138
+ find_similar_async(face_id, face_list_id:face_list_id, large_face_list_id:large_face_list_id, face_ids:face_ids, max_num_of_candidates_returned:max_num_of_candidates_returned, mode:mode, custom_headers:custom_headers).value!
139
+ end
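The *_with_http_info variant returns the full AzureOperationResponse instead of only the deserialized body; a rough sketch, under the same assumptions as above:

op = client.face.find_similar_with_http_info(query_face_id, face_ids: candidate_face_ids)
puts op.response.status      # underlying HTTP status, 200 on success
puts op.request_id           # x-ms-request-id echoed by the service, when present
matches = op.body            # same Array of SimilarFace that find_similar returns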
140
+
141
+ #
142
+ # Given query face's faceId, to search the similar-looking faces from a faceId
143
+ # array, a face list or a large face list. faceId array contains the faces
144
+ # created by [Face -
145
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236),
146
+ # which will expire 24 hours after creation. A "faceListId" is created by
147
+ # [FaceList -
148
+ # Create](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039524b)
149
+ # containing persistedFaceIds that will not expire. And a "largeFaceListId" is
150
+ # created by [LargeFaceList -
151
+ # Create](/docs/services/563879b61984550e40cbbe8d/operations/5a157b68d2de3616c086f2cc)
152
+ # containing persistedFaceIds that will also not expire. Depending on the input
153
+ # the returned similar faces list contains faceIds or persistedFaceIds ranked
154
+ # by similarity.
155
+ # <br/>Find similar has two working modes, "matchPerson" and "matchFace".
156
+ # "matchPerson" is the default mode that it tries to find faces of the same
157
+ # person as possible by using internal same-person thresholds. It is useful to
158
+ # find a known person's other photos. Note that an empty list will be returned
159
+ # if no faces pass the internal thresholds. "matchFace" mode ignores
160
+ # same-person thresholds and returns ranked similar faces anyway, even the
161
+ # similarity is low. It can be used in the cases like searching
162
+ # celebrity-looking faces.
163
+ # <br/>The 'recognitionModel' associated with the query face's faceId should be
164
+ # the same as the 'recognitionModel' used by the target faceId array, face list
165
+ # or large face list.
166
+ #
167
+ #
168
+ # @param face_id FaceId of the query face. User needs to call Face - Detect
169
+ # first to get a valid faceId. Note that this faceId is not persisted and will
170
+ # expire 24 hours after the detection call
171
+ # @param face_list_id [String] An existing user-specified unique candidate face
172
+ # list, created in Face List - Create a Face List. Face list contains a set of
173
+ # persistedFaceIds which are persisted and will never expire. Parameter
174
+ # faceListId, largeFaceListId and faceIds should not be provided at the same
175
+ # time.
176
+ # @param large_face_list_id [String] An existing user-specified unique
177
+ # candidate large face list, created in LargeFaceList - Create. Large face list
178
+ # contains a set of persistedFaceIds which are persisted and will never expire.
179
+ # Parameter faceListId, largeFaceListId and faceIds should not be provided at
180
+ # the same time.
181
+ # @param face_ids An array of candidate faceIds. All of them are created by
182
+ # Face - Detect and the faceIds will expire 24 hours after the detection call.
183
+ # The number of faceIds is limited to 1000. Parameter faceListId,
184
+ # largeFaceListId and faceIds should not be provided at the same time.
185
+ # @param max_num_of_candidates_returned [Integer] The number of top similar
186
+ # faces returned. The valid range is [1, 1000].
187
+ # @param mode [FindSimilarMatchMode] Similar face searching mode. It can be
188
+ # "matchPerson" or "matchFace". Possible values include: 'matchPerson',
189
+ # 'matchFace'
190
+ # @param [Hash{String => String}] A hash of custom headers that will be added
191
+ # to the HTTP request.
192
+ #
193
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
194
+ #
195
+ def find_similar_async(face_id, face_list_id:nil, large_face_list_id:nil, face_ids:nil, max_num_of_candidates_returned:20, mode:nil, custom_headers:nil)
196
+ fail ArgumentError, '@client.endpoint is nil' if @client.endpoint.nil?
197
+ fail ArgumentError, 'face_id is nil' if face_id.nil?
198
+ fail ArgumentError, "'face_list_id' should satisfy the constraint - 'MaxLength': '64'" if !face_list_id.nil? && face_list_id.length > 64
199
+ fail ArgumentError, "'face_list_id' should satisfy the constraint - 'Pattern': '^[a-z0-9-_]+$'" if !face_list_id.nil? && face_list_id.match(Regexp.new('^^[a-z0-9-_]+$$')).nil?
200
+ fail ArgumentError, "'large_face_list_id' should satisfy the constraint - 'MaxLength': '64'" if !large_face_list_id.nil? && large_face_list_id.length > 64
201
+ fail ArgumentError, "'large_face_list_id' should satisfy the constraint - 'Pattern': '^[a-z0-9-_]+$'" if !large_face_list_id.nil? && large_face_list_id.match(Regexp.new('^^[a-z0-9-_]+$$')).nil?
202
+ fail ArgumentError, "'face_ids' should satisfy the constraint - 'MaxItems': '1000'" if !face_ids.nil? && face_ids.length > 1000
203
+ fail ArgumentError, "'max_num_of_candidates_returned' should satisfy the constraint - 'InclusiveMaximum': '1000'" if !max_num_of_candidates_returned.nil? && max_num_of_candidates_returned > 1000
204
+ fail ArgumentError, "'max_num_of_candidates_returned' should satisfy the constraint - 'InclusiveMinimum': '1'" if !max_num_of_candidates_returned.nil? && max_num_of_candidates_returned < 1
205
+
206
+ body = FindSimilarRequest.new
207
+ unless face_id.nil? && face_list_id.nil? && large_face_list_id.nil? && face_ids.nil? && max_num_of_candidates_returned.nil? && mode.nil?
208
+ body.face_id = face_id
209
+ body.face_list_id = face_list_id
210
+ body.large_face_list_id = large_face_list_id
211
+ body.face_ids = face_ids
212
+ body.max_num_of_candidates_returned = max_num_of_candidates_returned
213
+ body.mode = mode
214
+ end
215
+
216
+ request_headers = {}
217
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
218
+
219
+ # Set Headers
220
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
221
+ request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
222
+
223
+ # Serialize Request
224
+ request_mapper = Azure::CognitiveServices::Face::V1_0::Models::FindSimilarRequest.mapper()
225
+ request_content = @client.serialize(request_mapper, body)
226
+ request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
227
+
228
+ path_template = 'findsimilars'
229
+
230
+ request_url = @base_url || @client.base_url
231
+ request_url = request_url.gsub('{Endpoint}', @client.endpoint)
232
+
233
+ options = {
234
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
235
+ body: request_content,
236
+ headers: request_headers.merge(custom_headers || {}),
237
+ base_url: request_url
238
+ }
239
+ promise = @client.make_request_async(:post, path_template, options)
240
+
241
+ promise = promise.then do |result|
242
+ http_response = result.response
243
+ status_code = http_response.status
244
+ response_content = http_response.body
245
+ unless status_code == 200
246
+ error_model = JSON.load(response_content)
247
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
248
+ end
249
+
250
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
251
+ result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
252
+ result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
253
+ # Deserialize Response
254
+ if status_code == 200
255
+ begin
256
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
257
+ result_mapper = {
258
+ client_side_validation: true,
259
+ required: false,
260
+ serialized_name: 'parsed_response',
261
+ type: {
262
+ name: 'Sequence',
263
+ element: {
264
+ client_side_validation: true,
265
+ required: false,
266
+ serialized_name: 'SimilarFaceElementType',
267
+ type: {
268
+ name: 'Composite',
269
+ class_name: 'SimilarFace'
270
+ }
271
+ }
272
+ }
273
+ }
274
+ result.body = @client.deserialize(result_mapper, parsed_response)
275
+ rescue Exception => e
276
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
277
+ end
278
+ end
279
+
280
+ result
281
+ end
282
+
283
+ promise.execute
284
+ end
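The *_async variant returns a Concurrent::Promise, so several requests can be issued before blocking on any of them; a sketch under the same assumptions:

promise = client.face.find_similar_async(query_face_id, face_ids: candidate_face_ids)
# ... issue other requests or do unrelated work here ...
result  = promise.value!     # blocks until the HTTP call completes, raises on transport errors
matches = result.body        # deserialized Array of SimilarFace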
285
+
286
+ #
287
+ # Divide candidate faces into groups based on face similarity.<br />
288
+ # * The output is one or more disjointed face groups and a messyGroup. A face
289
+ # group contains faces that have similar looking, often of the same person.
290
+ # Face groups are ranked by group size, i.e. number of faces. Notice that faces
291
+ # belonging to a same person might be split into several groups in the result.
292
+ # * MessyGroup is a special face group containing faces that cannot find any
293
+ # similar counterpart face from original faces. The messyGroup will not appear
294
+ # in the result if all faces found their counterparts.
295
+ # * Group API needs at least 2 candidate faces and 1000 at most. We suggest to
296
+ # try [Face -
297
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a)
298
+ # when you only have 2 candidate faces.
299
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
300
+ # the same.
301
+ #
302
+ #
303
+ # @param face_ids Array of candidate faceId created by Face - Detect. The
304
+ # maximum is 1000 faces
305
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
306
+ # will be added to the HTTP request.
307
+ #
308
+ # @return [GroupResult] operation results.
309
+ #
310
+ def group(face_ids, custom_headers:nil)
311
+ response = group_async(face_ids, custom_headers:custom_headers).value!
312
+ response.body unless response.nil?
313
+ end
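A short grouping sketch, assuming `face_ids` holds 2 to 1000 faceIds from Face - Detect; the GroupResult attribute names (groups, messy_group) follow the model class shipped in this gem.

result = client.face.group(face_ids)
result.groups.each_with_index do |group, i|
  puts "group #{i}: #{group.length} similar faces"
end
puts "unmatched faces: #{result.messy_group.length}" unless result.messy_group.nil?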
314
+
315
+ #
316
+ # Divide candidate faces into groups based on face similarity.<br />
317
+ # * The output is one or more disjointed face groups and a messyGroup. A face
318
+ # group contains faces that have similar looking, often of the same person.
319
+ # Face groups are ranked by group size, i.e. number of faces. Notice that faces
320
+ # belonging to a same person might be split into several groups in the result.
321
+ # * MessyGroup is a special face group containing faces that cannot find any
322
+ # similar counterpart face from original faces. The messyGroup will not appear
323
+ # in the result if all faces found their counterparts.
324
+ # * Group API needs at least 2 candidate faces and 1000 at most. We suggest to
325
+ # try [Face -
326
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a)
327
+ # when you only have 2 candidate faces.
328
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
329
+ # the same.
330
+ #
331
+ #
332
+ # @param face_ids Array of candidate faceId created by Face - Detect. The
333
+ # maximum is 1000 faces
334
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
335
+ # will be added to the HTTP request.
336
+ #
337
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
338
+ #
339
+ def group_with_http_info(face_ids, custom_headers:nil)
340
+ group_async(face_ids, custom_headers:custom_headers).value!
341
+ end
342
+
343
+ #
344
+ # Divide candidate faces into groups based on face similarity.<br />
345
+ # * The output is one or more disjointed face groups and a messyGroup. A face
346
+ # group contains faces that have similar looking, often of the same person.
347
+ # Face groups are ranked by group size, i.e. number of faces. Notice that faces
348
+ # belonging to a same person might be split into several groups in the result.
349
+ # * MessyGroup is a special face group containing faces that cannot find any
350
+ # similar counterpart face from original faces. The messyGroup will not appear
351
+ # in the result if all faces found their counterparts.
352
+ # * Group API needs at least 2 candidate faces and 1000 at most. We suggest to
353
+ # try [Face -
354
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a)
355
+ # when you only have 2 candidate faces.
356
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
357
+ # the same.
358
+ #
359
+ #
360
+ # @param face_ids Array of candidate faceId created by Face - Detect. The
361
+ # maximum is 1000 faces
362
+ # @param [Hash{String => String}] A hash of custom headers that will be added
363
+ # to the HTTP request.
364
+ #
365
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
366
+ #
367
+ def group_async(face_ids, custom_headers:nil)
368
+ fail ArgumentError, '@client.endpoint is nil' if @client.endpoint.nil?
369
+ fail ArgumentError, 'face_ids is nil' if face_ids.nil?
370
+ fail ArgumentError, "'face_ids' should satisfy the constraint - 'MaxItems': '1000'" if !face_ids.nil? && face_ids.length > 1000
371
+
372
+ body = GroupRequest.new
373
+ unless face_ids.nil?
374
+ body.face_ids = face_ids
375
+ end
376
+
377
+ request_headers = {}
378
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
379
+
380
+ # Set Headers
381
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
382
+ request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
383
+
384
+ # Serialize Request
385
+ request_mapper = Azure::CognitiveServices::Face::V1_0::Models::GroupRequest.mapper()
386
+ request_content = @client.serialize(request_mapper, body)
387
+ request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
388
+
389
+ path_template = 'group'
390
+
391
+ request_url = @base_url || @client.base_url
392
+ request_url = request_url.gsub('{Endpoint}', @client.endpoint)
393
+
394
+ options = {
395
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
396
+ body: request_content,
397
+ headers: request_headers.merge(custom_headers || {}),
398
+ base_url: request_url
399
+ }
400
+ promise = @client.make_request_async(:post, path_template, options)
401
+
402
+ promise = promise.then do |result|
403
+ http_response = result.response
404
+ status_code = http_response.status
405
+ response_content = http_response.body
406
+ unless status_code == 200
407
+ error_model = JSON.load(response_content)
408
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
409
+ end
410
+
411
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
412
+ result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
413
+ result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
414
+ # Deserialize Response
415
+ if status_code == 200
416
+ begin
417
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
418
+ result_mapper = Azure::CognitiveServices::Face::V1_0::Models::GroupResult.mapper()
419
+ result.body = @client.deserialize(result_mapper, parsed_response)
420
+ rescue Exception => e
421
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
422
+ end
423
+ end
424
+
425
+ result
426
+ end
427
+
428
+ promise.execute
429
+ end
430
+
431
+ #
432
+ # 1-to-many identification to find the closest matches of the specific query
433
+ # person face from a person group or large person group.
434
+ # <br/> For each face in the faceIds array, Face Identify will compute
435
+ # similarities between the query face and all the faces in the person group
436
+ # (given by personGroupId) or large person group (given by largePersonGroupId),
437
+ # and return candidate person(s) for that face ranked by similarity confidence.
438
+ # The person group/large person group should be trained to make it ready for
439
+ # identification. See more in [PersonGroup -
440
+ # Train](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395249)
441
+ # and [LargePersonGroup -
442
+ # Train](/docs/services/563879b61984550e40cbbe8d/operations/599ae2d16ac60f11b48b5aa4).
443
+ # <br/>
444
+ #
445
+ # Remarks:<br />
446
+ # * The algorithm allows more than one face to be identified independently at
447
+ # the same request, but no more than 10 faces.
448
+ # * Each person in the person group/large person group could have more than one
449
+ # face, but no more than 248 faces.
450
+ # * Higher face image quality means better identification precision. Please
451
+ # consider high-quality faces: frontal, clear, and face size is 200x200 pixels
452
+ # (100 pixels between eyes) or bigger.
453
+ # * Number of candidates returned is restricted by maxNumOfCandidatesReturned
454
+ # and confidenceThreshold. If no person is identified, the returned candidates
455
+ # will be an empty array.
456
+ # * Try [Face - Find
457
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
458
+ # when you need to find similar faces from a face list/large face list instead
459
+ # of a person group/large person group.
460
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
461
+ # the same as the 'recognitionModel' used by the target person group or large
462
+ # person group.
463
+ #
464
+ #
465
+ # @param face_ids Array of query faces faceIds, created by the Face - Detect.
466
+ # Each of the faces are identified independently. The valid number of faceIds
467
+ # is between [1, 10].
468
+ # @param person_group_id [String] PersonGroupId of the target person group,
469
+ # created by PersonGroup - Create. Parameter personGroupId and
470
+ # largePersonGroupId should not be provided at the same time.
471
+ # @param large_person_group_id [String] LargePersonGroupId of the target large
472
+ # person group, created by LargePersonGroup - Create. Parameter personGroupId
473
+ # and largePersonGroupId should not be provided at the same time.
474
+ # @param max_num_of_candidates_returned [Integer] The range of
475
+ # maxNumOfCandidatesReturned is between 1 and 5 (default is 1).
476
+ # @param confidence_threshold [Float] Confidence threshold of identification,
477
+ # used to judge whether one face belong to one person. The range of
478
+ # confidenceThreshold is [0, 1] (default specified by algorithm).
479
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
480
+ # will be added to the HTTP request.
481
+ #
482
+ # @return [Array] operation results.
483
+ #
484
+ def identify(face_ids, person_group_id:nil, large_person_group_id:nil, max_num_of_candidates_returned:1, confidence_threshold:nil, custom_headers:nil)
485
+ response = identify_async(face_ids, person_group_id:person_group_id, large_person_group_id:large_person_group_id, max_num_of_candidates_returned:max_num_of_candidates_returned, confidence_threshold:confidence_threshold, custom_headers:custom_headers).value!
486
+ response.body unless response.nil?
487
+ end
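A sketch of identification against an already-trained large person group; the group id is hypothetical and the IdentifyResult / IdentifyCandidate attribute names follow the model classes in this gem.

results = client.face.identify(
  detected_face_ids,                     # 1 to 10 faceIds from Face - Detect
  large_person_group_id: 'employees',    # hypothetical group, must be trained first
  max_num_of_candidates_returned: 1,
  confidence_threshold: 0.6
)
results.each do |r|
  best = r.candidates.first
  puts best ? "#{r.face_id} -> #{best.person_id} (#{best.confidence})" : "#{r.face_id} -> no match"
end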
488
+
489
+ #
490
+ # 1-to-many identification to find the closest matches of the specific query
491
+ # person face from a person group or large person group.
492
+ # <br/> For each face in the faceIds array, Face Identify will compute
493
+ # similarities between the query face and all the faces in the person group
494
+ # (given by personGroupId) or large person group (given by largePersonGroupId),
495
+ # and return candidate person(s) for that face ranked by similarity confidence.
496
+ # The person group/large person group should be trained to make it ready for
497
+ # identification. See more in [PersonGroup -
498
+ # Train](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395249)
499
+ # and [LargePersonGroup -
500
+ # Train](/docs/services/563879b61984550e40cbbe8d/operations/599ae2d16ac60f11b48b5aa4).
501
+ # <br/>
502
+ #
503
+ # Remarks:<br />
504
+ # * The algorithm allows more than one face to be identified independently at
505
+ # the same request, but no more than 10 faces.
506
+ # * Each person in the person group/large person group could have more than one
507
+ # face, but no more than 248 faces.
508
+ # * Higher face image quality means better identification precision. Please
509
+ # consider high-quality faces: frontal, clear, and face size is 200x200 pixels
510
+ # (100 pixels between eyes) or bigger.
511
+ # * Number of candidates returned is restricted by maxNumOfCandidatesReturned
512
+ # and confidenceThreshold. If no person is identified, the returned candidates
513
+ # will be an empty array.
514
+ # * Try [Face - Find
515
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
516
+ # when you need to find similar faces from a face list/large face list instead
517
+ # of a person group/large person group.
518
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
519
+ # the same as the 'recognitionModel' used by the target person group or large
520
+ # person group.
521
+ #
522
+ #
523
+ # @param face_ids Array of query faces faceIds, created by the Face - Detect.
524
+ # Each of the faces are identified independently. The valid number of faceIds
525
+ # is between [1, 10].
526
+ # @param person_group_id [String] PersonGroupId of the target person group,
527
+ # created by PersonGroup - Create. Parameter personGroupId and
528
+ # largePersonGroupId should not be provided at the same time.
529
+ # @param large_person_group_id [String] LargePersonGroupId of the target large
530
+ # person group, created by LargePersonGroup - Create. Parameter personGroupId
531
+ # and largePersonGroupId should not be provided at the same time.
532
+ # @param max_num_of_candidates_returned [Integer] The range of
533
+ # maxNumOfCandidatesReturned is between 1 and 5 (default is 1).
534
+ # @param confidence_threshold [Float] Confidence threshold of identification,
535
+ # used to judge whether one face belong to one person. The range of
536
+ # confidenceThreshold is [0, 1] (default specified by algorithm).
537
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
538
+ # will be added to the HTTP request.
539
+ #
540
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
541
+ #
542
+ def identify_with_http_info(face_ids, person_group_id:nil, large_person_group_id:nil, max_num_of_candidates_returned:1, confidence_threshold:nil, custom_headers:nil)
543
+ identify_async(face_ids, person_group_id:person_group_id, large_person_group_id:large_person_group_id, max_num_of_candidates_returned:max_num_of_candidates_returned, confidence_threshold:confidence_threshold, custom_headers:custom_headers).value!
544
+ end
545
+
546
+ #
547
+ # 1-to-many identification to find the closest matches of the specific query
548
+ # person face from a person group or large person group.
549
+ # <br/> For each face in the faceIds array, Face Identify will compute
550
+ # similarities between the query face and all the faces in the person group
551
+ # (given by personGroupId) or large person group (given by largePersonGroupId),
552
+ # and return candidate person(s) for that face ranked by similarity confidence.
553
+ # The person group/large person group should be trained to make it ready for
554
+ # identification. See more in [PersonGroup -
555
+ # Train](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395249)
556
+ # and [LargePersonGroup -
557
+ # Train](/docs/services/563879b61984550e40cbbe8d/operations/599ae2d16ac60f11b48b5aa4).
558
+ # <br/>
559
+ #
560
+ # Remarks:<br />
561
+ # * The algorithm allows more than one face to be identified independently at
562
+ # the same request, but no more than 10 faces.
563
+ # * Each person in the person group/large person group could have more than one
564
+ # face, but no more than 248 faces.
565
+ # * Higher face image quality means better identification precision. Please
566
+ # consider high-quality faces: frontal, clear, and face size is 200x200 pixels
567
+ # (100 pixels between eyes) or bigger.
568
+ # * Number of candidates returned is restricted by maxNumOfCandidatesReturned
569
+ # and confidenceThreshold. If no person is identified, the returned candidates
570
+ # will be an empty array.
571
+ # * Try [Face - Find
572
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
573
+ # when you need to find similar faces from a face list/large face list instead
574
+ # of a person group/large person group.
575
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
576
+ # the same as the 'recognitionModel' used by the target person group or large
577
+ # person group.
578
+ #
579
+ #
580
+ # @param face_ids Array of query faces faceIds, created by the Face - Detect.
581
+ # Each of the faces are identified independently. The valid number of faceIds
582
+ # is between [1, 10].
583
+ # @param person_group_id [String] PersonGroupId of the target person group,
584
+ # created by PersonGroup - Create. Parameter personGroupId and
585
+ # largePersonGroupId should not be provided at the same time.
586
+ # @param large_person_group_id [String] LargePersonGroupId of the target large
587
+ # person group, created by LargePersonGroup - Create. Parameter personGroupId
588
+ # and largePersonGroupId should not be provided at the same time.
589
+ # @param max_num_of_candidates_returned [Integer] The range of
590
+ # maxNumOfCandidatesReturned is between 1 and 5 (default is 1).
591
+ # @param confidence_threshold [Float] Confidence threshold of identification,
592
+ # used to judge whether one face belong to one person. The range of
593
+ # confidenceThreshold is [0, 1] (default specified by algorithm).
594
+ # @param [Hash{String => String}] A hash of custom headers that will be added
595
+ # to the HTTP request.
596
+ #
597
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
598
+ #
599
+ def identify_async(face_ids, person_group_id:nil, large_person_group_id:nil, max_num_of_candidates_returned:1, confidence_threshold:nil, custom_headers:nil)
600
+ fail ArgumentError, '@client.endpoint is nil' if @client.endpoint.nil?
601
+ fail ArgumentError, 'face_ids is nil' if face_ids.nil?
602
+ fail ArgumentError, "'face_ids' should satisfy the constraint - 'MaxItems': '10'" if !face_ids.nil? && face_ids.length > 10
603
+ fail ArgumentError, "'person_group_id' should satisfy the constraint - 'MaxLength': '64'" if !person_group_id.nil? && person_group_id.length > 64
604
+ fail ArgumentError, "'person_group_id' should satisfy the constraint - 'Pattern': '^[a-z0-9-_]+$'" if !person_group_id.nil? && person_group_id.match(Regexp.new('^^[a-z0-9-_]+$$')).nil?
605
+ fail ArgumentError, "'large_person_group_id' should satisfy the constraint - 'MaxLength': '64'" if !large_person_group_id.nil? && large_person_group_id.length > 64
606
+ fail ArgumentError, "'large_person_group_id' should satisfy the constraint - 'Pattern': '^[a-z0-9-_]+$'" if !large_person_group_id.nil? && large_person_group_id.match(Regexp.new('^^[a-z0-9-_]+$$')).nil?
607
+ fail ArgumentError, "'max_num_of_candidates_returned' should satisfy the constraint - 'InclusiveMaximum': '5'" if !max_num_of_candidates_returned.nil? && max_num_of_candidates_returned > 5
608
+ fail ArgumentError, "'max_num_of_candidates_returned' should satisfy the constraint - 'InclusiveMinimum': '1'" if !max_num_of_candidates_returned.nil? && max_num_of_candidates_returned < 1
609
+
610
+ body = IdentifyRequest.new
611
+ unless face_ids.nil? && person_group_id.nil? && large_person_group_id.nil? && max_num_of_candidates_returned.nil? && confidence_threshold.nil?
612
+ body.face_ids = face_ids
613
+ body.person_group_id = person_group_id
614
+ body.large_person_group_id = large_person_group_id
615
+ body.max_num_of_candidates_returned = max_num_of_candidates_returned
616
+ body.confidence_threshold = confidence_threshold
617
+ end
618
+
619
+ request_headers = {}
620
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
621
+
622
+ # Set Headers
623
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
624
+ request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
625
+
626
+ # Serialize Request
627
+ request_mapper = Azure::CognitiveServices::Face::V1_0::Models::IdentifyRequest.mapper()
628
+ request_content = @client.serialize(request_mapper, body)
629
+ request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
630
+
631
+ path_template = 'identify'
632
+
633
+ request_url = @base_url || @client.base_url
634
+ request_url = request_url.gsub('{Endpoint}', @client.endpoint)
635
+
636
+ options = {
637
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
638
+ body: request_content,
639
+ headers: request_headers.merge(custom_headers || {}),
640
+ base_url: request_url
641
+ }
642
+ promise = @client.make_request_async(:post, path_template, options)
643
+
644
+ promise = promise.then do |result|
645
+ http_response = result.response
646
+ status_code = http_response.status
647
+ response_content = http_response.body
648
+ unless status_code == 200
649
+ error_model = JSON.load(response_content)
650
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
651
+ end
652
+
653
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
654
+ result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
655
+ result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
656
+ # Deserialize Response
657
+ if status_code == 200
658
+ begin
659
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
660
+ result_mapper = {
661
+ client_side_validation: true,
662
+ required: false,
663
+ serialized_name: 'parsed_response',
664
+ type: {
665
+ name: 'Sequence',
666
+ element: {
667
+ client_side_validation: true,
668
+ required: false,
669
+ serialized_name: 'IdentifyResultElementType',
670
+ type: {
671
+ name: 'Composite',
672
+ class_name: 'IdentifyResult'
673
+ }
674
+ }
675
+ }
676
+ }
677
+ result.body = @client.deserialize(result_mapper, parsed_response)
678
+ rescue Exception => e
679
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
680
+ end
681
+ end
682
+
683
+ result
684
+ end
685
+
686
+ promise.execute
687
+ end
688
+
689
+ #
690
+ # Verify whether two faces belong to a same person or whether one face belongs
691
+ # to a person.
692
+ # <br/>
693
+ # Remarks:<br />
694
+ # * Higher face image quality means better identification precision. Please
695
+ # consider high-quality faces: frontal, clear, and face size is 200x200 pixels
696
+ # (100 pixels between eyes) or bigger.
697
+ # * For the scenarios that are sensitive to accuracy please make your own
698
+ # judgment.
699
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
700
+ # the same as the 'recognitionModel' used by the target face, person group or
701
+ # large person group.
702
+ #
703
+ #
704
+ # @param face_id1 FaceId of the first face, comes from Face - Detect
705
+ # @param face_id2 FaceId of the second face, comes from Face - Detect
706
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
707
+ # will be added to the HTTP request.
708
+ #
709
+ # @return [VerifyResult] operation results.
710
+ #
711
+ def verify_face_to_face(face_id1, face_id2, custom_headers:nil)
712
+ response = verify_face_to_face_async(face_id1, face_id2, custom_headers:custom_headers).value!
713
+ response.body unless response.nil?
714
+ end
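A sketch of face-to-face verification, assuming both faceIds come from recent Face - Detect calls made with the same recognitionModel; the VerifyResult attribute names follow the model class in this gem.

verification = client.face.verify_face_to_face(face_id_a, face_id_b)
if verification.is_identical
  puts "same person, confidence #{verification.confidence}"
else
  puts "different people, confidence #{verification.confidence}"
end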
715
+
716
+ #
717
+ # Verify whether two faces belong to a same person or whether one face belongs
718
+ # to a person.
719
+ # <br/>
720
+ # Remarks:<br />
721
+ # * Higher face image quality means better identification precision. Please
722
+ # consider high-quality faces: frontal, clear, and face size is 200x200 pixels
723
+ # (100 pixels between eyes) or bigger.
724
+ # * For the scenarios that are sensitive to accuracy please make your own
725
+ # judgment.
726
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
727
+ # the same as the 'recognitionModel' used by the target face, person group or
728
+ # large person group.
729
+ #
730
+ #
731
+ # @param face_id1 FaceId of the first face, comes from Face - Detect
732
+ # @param face_id2 FaceId of the second face, comes from Face - Detect
733
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
734
+ # will be added to the HTTP request.
735
+ #
736
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
737
+ #
738
+ def verify_face_to_face_with_http_info(face_id1, face_id2, custom_headers:nil)
739
+ verify_face_to_face_async(face_id1, face_id2, custom_headers:custom_headers).value!
740
+ end
741
+
742
+ #
743
+ # Verify whether two faces belong to a same person or whether one face belongs
744
+ # to a person.
745
+ # <br/>
746
+ # Remarks:<br />
747
+ # * Higher face image quality means better identification precision. Please
748
+ # consider high-quality faces: frontal, clear, and face size is 200x200 pixels
749
+ # (100 pixels between eyes) or bigger.
750
+ # * For the scenarios that are sensitive to accuracy please make your own
751
+ # judgment.
752
+ # * The 'recognitionModel' associated with the query faces' faceIds should be
753
+ # the same as the 'recognitionModel' used by the target face, person group or
754
+ # large person group.
755
+ #
756
+ #
757
+ # @param face_id1 FaceId of the first face, comes from Face - Detect
758
+ # @param face_id2 FaceId of the second face, comes from Face - Detect
759
+ # @param [Hash{String => String}] A hash of custom headers that will be added
760
+ # to the HTTP request.
761
+ #
762
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
763
+ #
764
+ def verify_face_to_face_async(face_id1, face_id2, custom_headers:nil)
765
+ fail ArgumentError, '@client.endpoint is nil' if @client.endpoint.nil?
766
+ fail ArgumentError, 'face_id1 is nil' if face_id1.nil?
767
+ fail ArgumentError, 'face_id2 is nil' if face_id2.nil?
768
+
769
+ body = VerifyFaceToFaceRequest.new
770
+ unless face_id1.nil? && face_id2.nil?
771
+ body.face_id1 = face_id1
772
+ body.face_id2 = face_id2
773
+ end
774
+
775
+ request_headers = {}
776
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
777
+
778
+ # Set Headers
779
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
780
+ request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
781
+
782
+ # Serialize Request
783
+ request_mapper = Azure::CognitiveServices::Face::V1_0::Models::VerifyFaceToFaceRequest.mapper()
784
+ request_content = @client.serialize(request_mapper, body)
785
+ request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
786
+
787
+ path_template = 'verify'
788
+
789
+ request_url = @base_url || @client.base_url
790
+ request_url = request_url.gsub('{Endpoint}', @client.endpoint)
791
+
792
+ options = {
793
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
794
+ body: request_content,
795
+ headers: request_headers.merge(custom_headers || {}),
796
+ base_url: request_url
797
+ }
798
+ promise = @client.make_request_async(:post, path_template, options)
799
+
800
+ promise = promise.then do |result|
801
+ http_response = result.response
802
+ status_code = http_response.status
803
+ response_content = http_response.body
804
+ unless status_code == 200
805
+ error_model = JSON.load(response_content)
806
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
807
+ end
808
+
809
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
810
+ result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
811
+ result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
812
+ # Deserialize Response
813
+ if status_code == 200
814
+ begin
815
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
816
+ result_mapper = Azure::CognitiveServices::Face::V1_0::Models::VerifyResult.mapper()
817
+ result.body = @client.deserialize(result_mapper, parsed_response)
818
+ rescue Exception => e
819
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
820
+ end
821
+ end
822
+
823
+ result
824
+ end
825
+
826
+ promise.execute
827
+ end
828
+
829
+ #
830
+ # Detect human faces in an image, return face rectangles, and optionally with
831
+ # faceIds, landmarks, and attributes.<br />
832
+ # * No image will be stored. Only the extracted face feature will be stored on
833
+ # server. The faceId is an identifier of the face feature and will be used in
834
+ # [Face -
835
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
836
+ # [Face -
837
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
838
+ # and [Face - Find
839
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
840
+ # The stored face feature(s) will expire and be deleted 24 hours after the
841
+ # original detection call.
842
+ # * Optional parameters include faceId, landmarks, and attributes. Attributes
843
+ # include age, gender, headPose, smile, facialHair, glasses, emotion, hair,
844
+ # makeup, occlusion, accessories, blur, exposure and noise. Some of the results
845
+ # returned for specific attributes may not be highly accurate.
846
+ # * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed
847
+ # image file size is from 1KB to 6MB.
848
+ # * Up to 100 faces can be returned for an image. Faces are ranked by face
849
+ # rectangle size from large to small.
850
+ # * For optimal results when querying [Face -
851
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
852
+ # [Face -
853
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
854
+ # and [Face - Find
855
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
856
+ # ('returnFaceId' is true), please use faces that are: frontal, clear, and with
857
+ # a minimum size of 200x200 pixels (100 pixels between eyes).
858
+ # * The minimum detectable face size is 36x36 pixels in an image no larger than
859
+ # 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will
860
+ # need a proportionally larger minimum face size.
861
+ # * Different 'detectionModel' values can be provided. To use and compare
862
+ # different detection models, please refer to [How to specify a detection
863
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
864
+ # | Model | Recommended use-case(s) |
865
+ # | ---------- | -------- |
866
+ # | 'detection_01': | The default detection model for [Face -
867
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
868
+ # Recommend for near frontal face detection. For scenarios with exceptionally
869
+ # large angle (head-pose) faces, occluded faces or wrong image orientation, the
870
+ # faces in such cases may not be detected. |
871
+ # | 'detection_02': | Detection model released in 2019 May with improved
872
+ # accuracy especially on small, side and blurry faces. |
873
+ #
874
+ # * Different 'recognitionModel' values are provided. If follow-up operations
875
+ # like Verify, Identify, Find Similar are needed, please specify the
876
+ # recognition model with 'recognitionModel' parameter. The default value for
877
+ # 'recognitionModel' is 'recognition_01', if latest model needed, please
878
+ # explicitly specify the model you need in this parameter. Once specified, the
879
+ # detected faceIds will be associated with the specified recognition model.
880
+ # More details, please refer to [How to specify a recognition
881
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
882
+ # | Model | Recommended use-case(s) |
883
+ # | ---------- | -------- |
884
+ # | 'recognition_01': | The default recognition model for [Face -
885
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
886
+ # All those faceIds created before 2019 March are bonded with this recognition
887
+ # model. |
888
+ # | 'recognition_02': | Recognition model released in 2019 March.
889
+ # 'recognition_02' is recommended since its overall accuracy is improved
890
+ # compared with 'recognition_01'. |
891
+ #
892
+ # @param url [String] Publicly reachable URL of an image
893
+ # @param return_face_id [Boolean] A value indicating whether the operation
894
+ # should return faceIds of detected faces.
895
+ # @param return_face_landmarks [Boolean] A value indicating whether the
896
+ # operation should return landmarks of the detected faces.
897
+ # @param return_face_attributes [Array<FaceAttributeType>] Analyze and return
898
+ # the one or more specified face attributes in the comma-separated string like
899
+ # "returnFaceAttributes=age,gender". Supported face attributes include age,
900
+ # gender, headPose, smile, facialHair, glasses and emotion. Note that each face
901
+ # attribute analysis has additional computational and time cost.
902
+ # @param recognition_model [RecognitionModel] Name of recognition model.
903
+ # Recognition model is used when the face features are extracted and associated
904
+ # with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition
905
+ # model name can be provided when performing Face - Detect or (Large)FaceList -
906
+ # Create or (Large)PersonGroup - Create. The default value is 'recognition_01',
907
+ # but if the latest model is needed, please explicitly specify the model you need.
908
+ # Possible values include: 'recognition_01', 'recognition_02'
909
+ # @param return_recognition_model [Boolean] A value indicating whether the
910
+ # operation should return 'recognitionModel' in response.
911
+ # @param detection_model [DetectionModel] Name of detection model. Detection
912
+ # model is used to detect faces in the submitted image. A detection model name
913
+ # can be provided when performing Face - Detect or (Large)FaceList - Add Face
914
+ # or (Large)PersonGroup - Add Face. The default value is 'detection_01', if
915
+ # another model is needed, please explicitly specify it. Possible values
916
+ # include: 'detection_01', 'detection_02'
917
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
918
+ # will be added to the HTTP request.
919
+ #
920
+ # @return [Array] operation results.
921
+ #
922
+ def detect_with_url(url, return_face_id:true, return_face_landmarks:false, return_face_attributes:nil, recognition_model:nil, return_recognition_model:false, detection_model:nil, custom_headers:nil)
923
+ response = detect_with_url_async(url, return_face_id:return_face_id, return_face_landmarks:return_face_landmarks, return_face_attributes:return_face_attributes, recognition_model:recognition_model, return_recognition_model:return_recognition_model, detection_model:detection_model, custom_headers:custom_headers).value!
924
+ response.body unless response.nil?
925
+ end
926
+
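To make the synchronous URL-based call above concrete, here is a minimal usage sketch. It is illustrative only and not part of the generated client: the credential class, endpoint, and image URL are assumptions, and it presumes the Face operations group is exposed as `client.face` by the generated FaceClient.

    # Illustrative sketch -- the key, endpoint and image URL below are placeholders.
    require 'azure_cognitiveservices_face'

    credentials = MsRestAzure::CognitiveServicesCredentials.new('<subscription key>')
    client = Azure::CognitiveServices::Face::V1_0::FaceClient.new(credentials)
    client.endpoint = 'https://<your-resource>.cognitiveservices.azure.com'

    faces = client.face.detect_with_url(
      'https://example.com/photo.jpg',             # placeholder image URL
      return_face_landmarks: true,
      return_face_attributes: ['age', 'emotion'],  # FaceAttributeType values
      recognition_model: 'recognition_02',
      detection_model: 'detection_02'
    )

    faces.each do |face|
      rect = face.face_rectangle
      puts "#{face.face_id}: #{rect.width}x#{rect.height} at (#{rect.left}, #{rect.top})"
    end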
927
+ #
928
+ # Detect human faces in an image, return face rectangles, and optionally with
929
+ # faceIds, landmarks, and attributes.<br />
930
+ # * No image will be stored. Only the extracted face feature will be stored on
931
+ # server. The faceId is an identifier of the face feature and will be used in
932
+ # [Face -
933
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
934
+ # [Face -
935
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
936
+ # and [Face - Find
937
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
938
+ # The stored face feature(s) will expire and be deleted 24 hours after the
939
+ # original detection call.
940
+ # * Optional parameters include faceId, landmarks, and attributes. Attributes
941
+ # include age, gender, headPose, smile, facialHair, glasses, emotion, hair,
942
+ # makeup, occlusion, accessories, blur, exposure and noise. Some of the results
943
+ # returned for specific attributes may not be highly accurate.
944
+ # * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed
945
+ # image file size is from 1KB to 6MB.
946
+ # * Up to 100 faces can be returned for an image. Faces are ranked by face
947
+ # rectangle size from large to small.
948
+ # * For optimal results when querying [Face -
949
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
950
+ # [Face -
951
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
952
+ # and [Face - Find
953
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
954
+ # ('returnFaceId' is true), please use faces that are: frontal, clear, and with
955
+ # a minimum size of 200x200 pixels (100 pixels between eyes).
956
+ # * The minimum detectable face size is 36x36 pixels in an image no larger than
957
+ # 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will
958
+ # need a proportionally larger minimum face size.
959
+ # * Different 'detectionModel' values can be provided. To use and compare
960
+ # different detection models, please refer to [How to specify a detection
961
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
962
+ # | Model | Recommended use-case(s) |
963
+ # | ---------- | -------- |
964
+ # | 'detection_01': | The default detection model for [Face -
965
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
966
+ # Recommended for near-frontal face detection. For scenarios with exceptionally
967
+ # large angle (head-pose) faces, occluded faces or wrong image orientation, the
968
+ # faces in such cases may not be detected. |
969
+ # | 'detection_02': | Detection model released in May 2019 with improved
970
+ # accuracy especially on small, side and blurry faces. |
971
+ #
972
+ # * Different 'recognitionModel' values are provided. If follow-up operations
973
+ # like Verify, Identify, Find Similar are needed, please specify the
974
+ # recognition model with 'recognitionModel' parameter. The default value for
975
+ # 'recognitionModel' is 'recognition_01'; if the latest model is needed, please
976
+ # explicitly specify the model you need in this parameter. Once specified, the
977
+ # detected faceIds will be associated with the specified recognition model.
978
+ # For more details, please refer to [How to specify a recognition
979
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
980
+ # | Model | Recommended use-case(s) |
981
+ # | ---------- | -------- |
982
+ # | 'recognition_01': | The default recognition model for [Face -
983
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
984
+ # All faceIds created before March 2019 are bound to this recognition
985
+ # model. |
986
+ # | 'recognition_02': | Recognition model released in March 2019.
987
+ # 'recognition_02' is recommended since its overall accuracy is improved
988
+ # compared with 'recognition_01'. |
989
+ #
990
+ # @param url [String] Publicly reachable URL of an image
991
+ # @param return_face_id [Boolean] A value indicating whether the operation
992
+ # should return faceIds of detected faces.
993
+ # @param return_face_landmarks [Boolean] A value indicating whether the
994
+ # operation should return landmarks of the detected faces.
995
+ # @param return_face_attributes [Array<FaceAttributeType>] Analyze and return
996
+ # the one or more specified face attributes in the comma-separated string like
997
+ # "returnFaceAttributes=age,gender". Supported face attributes include age,
998
+ # gender, headPose, smile, facialHair, glasses and emotion. Note that each face
999
+ # attribute analysis has additional computational and time cost.
1000
+ # @param recognition_model [RecognitionModel] Name of recognition model.
1001
+ # Recognition model is used when the face features are extracted and associated
1002
+ # with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition
1003
+ # model name can be provided when performing Face - Detect or (Large)FaceList -
1004
+ # Create or (Large)PersonGroup - Create. The default value is 'recognition_01',
1005
+ # but if the latest model is needed, please explicitly specify the model you need.
1006
+ # Possible values include: 'recognition_01', 'recognition_02'
1007
+ # @param return_recognition_model [Boolean] A value indicating whether the
1008
+ # operation should return 'recognitionModel' in response.
1009
+ # @param detection_model [DetectionModel] Name of detection model. Detection
1010
+ # model is used to detect faces in the submitted image. A detection model name
1011
+ # can be provided when performing Face - Detect or (Large)FaceList - Add Face
1012
+ # or (Large)PersonGroup - Add Face. The default value is 'detection_01', if
1013
+ # another model is needed, please explicitly specify it. Possible values
1014
+ # include: 'detection_01', 'detection_02'
1015
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
1016
+ # will be added to the HTTP request.
1017
+ #
1018
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
1019
+ #
1020
+ def detect_with_url_with_http_info(url, return_face_id:true, return_face_landmarks:false, return_face_attributes:nil, recognition_model:nil, return_recognition_model:false, detection_model:nil, custom_headers:nil)
1021
+ detect_with_url_async(url, return_face_id:return_face_id, return_face_landmarks:return_face_landmarks, return_face_attributes:return_face_attributes, recognition_model:recognition_model, return_recognition_model:return_recognition_model, detection_model:detection_model, custom_headers:custom_headers).value!
1022
+ end
1023
+
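The `_with_http_info` variant returns the full MsRestAzure::AzureOperationResponse instead of just the deserialized body, which is handy when the status code or request id matters. A brief sketch, reusing the hypothetical `client` from the earlier example:

    op = client.face.detect_with_url_with_http_info('https://example.com/photo.jpg')
    puts op.response.status   # raw HTTP status, 200 on success
    puts op.request_id        # x-ms-request-id header, when the service returns one
    faces = op.body           # same Array of DetectedFace models as detect_with_url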
1024
+ #
1025
+ # Detect human faces in an image, return face rectangles, and optionally with
1026
+ # faceIds, landmarks, and attributes.<br />
1027
+ # * No image will be stored. Only the extracted face feature will be stored on
1028
+ # server. The faceId is an identifier of the face feature and will be used in
1029
+ # [Face -
1030
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1031
+ # [Face -
1032
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1033
+ # and [Face - Find
1034
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
1035
+ # The stored face feature(s) will expire and be deleted 24 hours after the
1036
+ # original detection call.
1037
+ # * Optional parameters include faceId, landmarks, and attributes. Attributes
1038
+ # include age, gender, headPose, smile, facialHair, glasses, emotion, hair,
1039
+ # makeup, occlusion, accessories, blur, exposure and noise. Some of the results
1040
+ # returned for specific attributes may not be highly accurate.
1041
+ # * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed
1042
+ # image file size is from 1KB to 6MB.
1043
+ # * Up to 100 faces can be returned for an image. Faces are ranked by face
1044
+ # rectangle size from large to small.
1045
+ # * For optimal results when querying [Face -
1046
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1047
+ # [Face -
1048
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1049
+ # and [Face - Find
1050
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
1051
+ # ('returnFaceId' is true), please use faces that are: frontal, clear, and with
1052
+ # a minimum size of 200x200 pixels (100 pixels between eyes).
1053
+ # * The minimum detectable face size is 36x36 pixels in an image no larger than
1054
+ # 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will
1055
+ # need a proportionally larger minimum face size.
1056
+ # * Different 'detectionModel' values can be provided. To use and compare
1057
+ # different detection models, please refer to [How to specify a detection
1058
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
1059
+ # | Model | Recommended use-case(s) |
1060
+ # | ---------- | -------- |
1061
+ # | 'detection_01': | The default detection model for [Face -
1062
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1063
+ # Recommended for near-frontal face detection. For scenarios with exceptionally
1064
+ # large angle (head-pose) faces, occluded faces or wrong image orientation, the
1065
+ # faces in such cases may not be detected. |
1066
+ # | 'detection_02': | Detection model released in May 2019 with improved
1067
+ # accuracy especially on small, side and blurry faces. |
1068
+ #
1069
+ # * Different 'recognitionModel' values are provided. If follow-up operations
1070
+ # like Verify, Identify, Find Similar are needed, please specify the
1071
+ # recognition model with 'recognitionModel' parameter. The default value for
1072
+ # 'recognitionModel' is 'recognition_01'; if the latest model is needed, please
1073
+ # explicitly specify the model you need in this parameter. Once specified, the
1074
+ # detected faceIds will be associated with the specified recognition model.
1075
+ # For more details, please refer to [How to specify a recognition
1076
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
1077
+ # | Model | Recommended use-case(s) |
1078
+ # | ---------- | -------- |
1079
+ # | 'recognition_01': | The default recognition model for [Face -
1080
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1081
+ # All faceIds created before March 2019 are bound to this recognition
1082
+ # model. |
1083
+ # | 'recognition_02': | Recognition model released in March 2019.
1084
+ # 'recognition_02' is recommended since its overall accuracy is improved
1085
+ # compared with 'recognition_01'. |
1086
+ #
1087
+ # @param url [String] Publicly reachable URL of an image
1088
+ # @param return_face_id [Boolean] A value indicating whether the operation
1089
+ # should return faceIds of detected faces.
1090
+ # @param return_face_landmarks [Boolean] A value indicating whether the
1091
+ # operation should return landmarks of the detected faces.
1092
+ # @param return_face_attributes [Array<FaceAttributeType>] Analyze and return
1093
+ # the one or more specified face attributes in the comma-separated string like
1094
+ # "returnFaceAttributes=age,gender". Supported face attributes include age,
1095
+ # gender, headPose, smile, facialHair, glasses and emotion. Note that each face
1096
+ # attribute analysis has additional computational and time cost.
1097
+ # @param recognition_model [RecognitionModel] Name of recognition model.
1098
+ # Recognition model is used when the face features are extracted and associated
1099
+ # with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition
1100
+ # model name can be provided when performing Face - Detect or (Large)FaceList -
1101
+ # Create or (Large)PersonGroup - Create. The default value is 'recognition_01',
1102
+ # but if the latest model is needed, please explicitly specify the model you need.
1103
+ # Possible values include: 'recognition_01', 'recognition_02'
1104
+ # @param return_recognition_model [Boolean] A value indicating whether the
1105
+ # operation should return 'recognitionModel' in response.
1106
+ # @param detection_model [DetectionModel] Name of detection model. Detection
1107
+ # model is used to detect faces in the submitted image. A detection model name
1108
+ # can be provided when performing Face - Detect or (Large)FaceList - Add Face
1109
+ # or (Large)PersonGroup - Add Face. The default value is 'detection_01', if
1110
+ # another model is needed, please explicitly specify it. Possible values
1111
+ # include: 'detection_01', 'detection_02'
1112
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
1113
+ # to the HTTP request.
1114
+ #
1115
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
1116
+ #
1117
+ def detect_with_url_async(url, return_face_id:true, return_face_landmarks:false, return_face_attributes:nil, recognition_model:nil, return_recognition_model:false, detection_model:nil, custom_headers:nil)
1118
+ fail ArgumentError, '@client.endpoint is nil' if @client.endpoint.nil?
1119
+ fail ArgumentError, 'url is nil' if url.nil?
1120
+
1121
+ image_url = Azure::CognitiveServices::Face::V1_0::Models::ImageUrl.new
1122
+ unless url.nil?
1123
+ image_url.url = url
1124
+ end
1125
+
1126
+ request_headers = {}
1127
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
1128
+
1129
+ # Set Headers
1130
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
1131
+ request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
1132
+
1133
+ # Serialize Request
1134
+ request_mapper = Azure::CognitiveServices::Face::V1_0::Models::ImageUrl.mapper()
1135
+ request_content = @client.serialize(request_mapper, image_url)
1136
+ request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
1137
+
1138
+ path_template = 'detect'
1139
+
1140
+ request_url = @base_url || @client.base_url
1141
+ request_url = request_url.gsub('{Endpoint}', @client.endpoint)
1142
+
1143
+ options = {
1144
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
1145
+ query_params: {'returnFaceId' => return_face_id,'returnFaceLandmarks' => return_face_landmarks,'returnFaceAttributes' => return_face_attributes.nil? ? nil : return_face_attributes.join(','),'recognitionModel' => recognition_model,'returnRecognitionModel' => return_recognition_model,'detectionModel' => detection_model},
1146
+ body: request_content,
1147
+ headers: request_headers.merge(custom_headers || {}),
1148
+ base_url: request_url
1149
+ }
1150
+ promise = @client.make_request_async(:post, path_template, options)
1151
+
1152
+ promise = promise.then do |result|
1153
+ http_response = result.response
1154
+ status_code = http_response.status
1155
+ response_content = http_response.body
1156
+ unless status_code == 200
1157
+ error_model = JSON.load(response_content)
1158
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
1159
+ end
1160
+
1161
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
1162
+ result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
1163
+ result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
1164
+ # Deserialize Response
1165
+ if status_code == 200
1166
+ begin
1167
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
1168
+ result_mapper = {
1169
+ client_side_validation: true,
1170
+ required: false,
1171
+ serialized_name: 'parsed_response',
1172
+ type: {
1173
+ name: 'Sequence',
1174
+ element: {
1175
+ client_side_validation: true,
1176
+ required: false,
1177
+ serialized_name: 'DetectedFaceElementType',
1178
+ type: {
1179
+ name: 'Composite',
1180
+ class_name: 'DetectedFace'
1181
+ }
1182
+ }
1183
+ }
1184
+ }
1185
+ result.body = @client.deserialize(result_mapper, parsed_response)
1186
+ rescue Exception => e
1187
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
1188
+ end
1189
+ end
1190
+
1191
+ result
1192
+ end
1193
+
1194
+ promise.execute
1195
+ end
1196
+
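For non-blocking use, `detect_with_url_async` returns a Concurrent::Promise that resolves to the same operation response. A minimal sketch, again assuming the hypothetical `client` shown earlier:

    promise = client.face.detect_with_url_async('https://example.com/photo.jpg',
                                                return_face_attributes: ['age'])
    # ... other work can happen while the request is in flight ...
    faces = promise.value!.body   # value! blocks; a non-200 status raises MsRest::HttpOperationError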
1197
+ #
1198
+ # Verify whether two faces belong to the same person. Compares a face Id with a
1199
+ # Person Id
1200
+ #
1201
+ # @param face_id [String] FaceId of the face, returned by Face - Detect.
1202
+ # @param person_id [String] Specify a certain person in a person group or a large person
1203
+ # group. personId is created in PersonGroup Person - Create or LargePersonGroup
1204
+ # Person - Create.
1205
+ # @param person_group_id [String] Using existing personGroupId and personId for
1206
+ # fast loading a specified person. personGroupId is created in PersonGroup -
1207
+ # Create. Parameter personGroupId and largePersonGroupId should not be provided
1208
+ # at the same time.
1209
+ # @param large_person_group_id [String] Using existing largePersonGroupId and
1210
+ # personId for fast loading a specified person. largePersonGroupId is created
1211
+ # in LargePersonGroup - Create. Parameter personGroupId and largePersonGroupId
1212
+ # should not be provided at the same time.
1213
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
1214
+ # will be added to the HTTP request.
1215
+ #
1216
+ # @return [VerifyResult] operation results.
1217
+ #
1218
+ def verify_face_to_person(face_id, person_id, person_group_id:nil, large_person_group_id:nil, custom_headers:nil)
1219
+ response = verify_face_to_person_async(face_id, person_id, person_group_id:person_group_id, large_person_group_id:large_person_group_id, custom_headers:custom_headers).value!
1220
+ response.body unless response.nil?
1221
+ end
1222
+
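A hedged sketch of the face-to-person flow follows. It assumes a trained person group and an existing personId; the group name and GUID below are placeholders, as is the detect call that supplies the faceId.

    face_id = client.face.detect_with_url('https://example.com/photo.jpg').first.face_id
    result = client.face.verify_face_to_person(
      face_id,
      '25985303-c537-4467-b41d-bdb45cd95ca1',   # placeholder personId from PersonGroup Person - Create
      person_group_id: 'my-group'               # placeholder, created via PersonGroup - Create
    )
    puts "identical=#{result.is_identical} confidence=#{result.confidence}"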
1223
+ #
1224
+ # Verify whether two faces belong to the same person. Compares a face Id with a
1225
+ # Person Id
1226
+ #
1227
+ # @param face_id [String] FaceId of the face, returned by Face - Detect.
1228
+ # @param person_id [String] Specify a certain person in a person group or a large person
1229
+ # group. personId is created in PersonGroup Person - Create or LargePersonGroup
1230
+ # Person - Create.
1231
+ # @param person_group_id [String] Using existing personGroupId and personId for
1232
+ # fast loading a specified person. personGroupId is created in PersonGroup -
1233
+ # Create. Parameter personGroupId and largePersonGroupId should not be provided
1234
+ # at the same time.
1235
+ # @param large_person_group_id [String] Using existing largePersonGroupId and
1236
+ # personId for fast loading a specified person. largePersonGroupId is created
1237
+ # in LargePersonGroup - Create. Parameter personGroupId and largePersonGroupId
1238
+ # should not be provided at the same time.
1239
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
1240
+ # will be added to the HTTP request.
1241
+ #
1242
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
1243
+ #
1244
+ def verify_face_to_person_with_http_info(face_id, person_id, person_group_id:nil, large_person_group_id:nil, custom_headers:nil)
1245
+ verify_face_to_person_async(face_id, person_id, person_group_id:person_group_id, large_person_group_id:large_person_group_id, custom_headers:custom_headers).value!
1246
+ end
1247
+
1248
+ #
1249
+ # Verify whether two faces belong to the same person. Compares a face Id with a
1250
+ # Person Id
1251
+ #
1252
+ # @param face_id [String] FaceId of the face, returned by Face - Detect.
1253
+ # @param person_id [String] Specify a certain person in a person group or a large person
1254
+ # group. personId is created in PersonGroup Person - Create or LargePersonGroup
1255
+ # Person - Create.
1256
+ # @param person_group_id [String] Using existing personGroupId and personId for
1257
+ # fast loading a specified person. personGroupId is created in PersonGroup -
1258
+ # Create. Parameter personGroupId and largePersonGroupId should not be provided
1259
+ # at the same time.
1260
+ # @param large_person_group_id [String] Using existing largePersonGroupId and
1261
+ # personId for fast loading a specified person. largePersonGroupId is created
1262
+ # in LargePersonGroup - Create. Parameter personGroupId and largePersonGroupId
1263
+ # should not be provided at the same time.
1264
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
1265
+ # to the HTTP request.
1266
+ #
1267
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
1268
+ #
1269
+ def verify_face_to_person_async(face_id, person_id, person_group_id:nil, large_person_group_id:nil, custom_headers:nil)
1270
+ fail ArgumentError, '@client.endpoint is nil' if @client.endpoint.nil?
1271
+ fail ArgumentError, 'face_id is nil' if face_id.nil?
1272
+ fail ArgumentError, "'person_group_id' should satisfy the constraint - 'MaxLength': '64'" if !person_group_id.nil? && person_group_id.length > 64
1273
+ fail ArgumentError, "'person_group_id' should satisfy the constraint - 'Pattern': '^[a-z0-9-_]+$'" if !person_group_id.nil? && person_group_id.match(Regexp.new('^^[a-z0-9-_]+$$')).nil?
1274
+ fail ArgumentError, "'large_person_group_id' should satisfy the constraint - 'MaxLength': '64'" if !large_person_group_id.nil? && large_person_group_id.length > 64
1275
+ fail ArgumentError, "'large_person_group_id' should satisfy the constraint - 'Pattern': '^[a-z0-9-_]+$'" if !large_person_group_id.nil? && large_person_group_id.match(Regexp.new('^^[a-z0-9-_]+$$')).nil?
1276
+ fail ArgumentError, 'person_id is nil' if person_id.nil?
1277
+
1278
+ body = Azure::CognitiveServices::Face::V1_0::Models::VerifyFaceToPersonRequest.new
1279
+ unless face_id.nil? && person_group_id.nil? && large_person_group_id.nil? && person_id.nil?
1280
+ body.face_id = face_id
1281
+ body.person_group_id = person_group_id
1282
+ body.large_person_group_id = large_person_group_id
1283
+ body.person_id = person_id
1284
+ end
1285
+
1286
+ request_headers = {}
1287
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
1288
+
1289
+ # Set Headers
1290
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
1291
+ request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
1292
+
1293
+ # Serialize Request
1294
+ request_mapper = Azure::CognitiveServices::Face::V1_0::Models::VerifyFaceToPersonRequest.mapper()
1295
+ request_content = @client.serialize(request_mapper, body)
1296
+ request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
1297
+
1298
+ path_template = 'verify'
1299
+
1300
+ request_url = @base_url || @client.base_url
1301
+ request_url = request_url.gsub('{Endpoint}', @client.endpoint)
1302
+
1303
+ options = {
1304
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
1305
+ body: request_content,
1306
+ headers: request_headers.merge(custom_headers || {}),
1307
+ base_url: request_url
1308
+ }
1309
+ promise = @client.make_request_async(:post, path_template, options)
1310
+
1311
+ promise = promise.then do |result|
1312
+ http_response = result.response
1313
+ status_code = http_response.status
1314
+ response_content = http_response.body
1315
+ unless status_code == 200
1316
+ error_model = JSON.load(response_content)
1317
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
1318
+ end
1319
+
1320
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
1321
+ result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
1322
+ result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
1323
+ # Deserialize Response
1324
+ if status_code == 200
1325
+ begin
1326
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
1327
+ result_mapper = Azure::CognitiveServices::Face::V1_0::Models::VerifyResult.mapper()
1328
+ result.body = @client.deserialize(result_mapper, parsed_response)
1329
+ rescue Exception => e
1330
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
1331
+ end
1332
+ end
1333
+
1334
+ result
1335
+ end
1336
+
1337
+ promise.execute
1338
+ end
1339
+
1340
+ #
1341
+ # Detect human faces in an image, return face rectangles, and optionally with
1342
+ # faceIds, landmarks, and attributes.<br />
1343
+ # * No image will be stored. Only the extracted face feature will be stored on
1344
+ # server. The faceId is an identifier of the face feature and will be used in
1345
+ # [Face -
1346
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1347
+ # [Face -
1348
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1349
+ # and [Face - Find
1350
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
1351
+ # The stored face feature(s) will expire and be deleted 24 hours after the
1352
+ # original detection call.
1353
+ # * Optional parameters include faceId, landmarks, and attributes. Attributes
1354
+ # include age, gender, headPose, smile, facialHair, glasses, emotion, hair,
1355
+ # makeup, occlusion, accessories, blur, exposure and noise. Some of the results
1356
+ # returned for specific attributes may not be highly accurate.
1357
+ # * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed
1358
+ # image file size is from 1KB to 6MB.
1359
+ # * Up to 100 faces can be returned for an image. Faces are ranked by face
1360
+ # rectangle size from large to small.
1361
+ # * For optimal results when querying [Face -
1362
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1363
+ # [Face -
1364
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1365
+ # and [Face - Find
1366
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
1367
+ # ('returnFaceId' is true), please use faces that are: frontal, clear, and with
1368
+ # a minimum size of 200x200 pixels (100 pixels between eyes).
1369
+ # * The minimum detectable face size is 36x36 pixels in an image no larger than
1370
+ # 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will
1371
+ # need a proportionally larger minimum face size.
1372
+ # * Different 'detectionModel' values can be provided. To use and compare
1373
+ # different detection models, please refer to [How to specify a detection
1374
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
1375
+ # | Model | Recommended use-case(s) |
1376
+ # | ---------- | -------- |
1377
+ # | 'detection_01': | The default detection model for [Face -
1378
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1379
+ # Recommended for near-frontal face detection. For scenarios with exceptionally
1380
+ # large angle (head-pose) faces, occluded faces or wrong image orientation, the
1381
+ # faces in such cases may not be detected. |
1382
+ # | 'detection_02': | Detection model released in May 2019 with improved
1383
+ # accuracy especially on small, side and blurry faces. |
1384
+ #
1385
+ # * Different 'recognitionModel' values are provided. If follow-up operations
1386
+ # like Verify, Identify, Find Similar are needed, please specify the
1387
+ # recognition model with 'recognitionModel' parameter. The default value for
1388
+ # 'recognitionModel' is 'recognition_01'; if the latest model is needed, please
1389
+ # explicitly specify the model you need in this parameter. Once specified, the
1390
+ # detected faceIds will be associated with the specified recognition model.
1391
+ # For more details, please refer to [How to specify a recognition
1392
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
1393
+ # | Model | Recommended use-case(s) |
1394
+ # | ---------- | -------- |
1395
+ # | 'recognition_01': | The default recognition model for [Face -
1396
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1397
+ # All faceIds created before March 2019 are bound to this recognition
1398
+ # model. |
1399
+ # | 'recognition_02': | Recognition model released in March 2019.
1400
+ # 'recognition_02' is recommended since its overall accuracy is improved
1401
+ # compared with 'recognition_01'. |
1402
+ #
1403
+ # @param image An image stream.
1404
+ # @param return_face_id [Boolean] A value indicating whether the operation
1405
+ # should return faceIds of detected faces.
1406
+ # @param return_face_landmarks [Boolean] A value indicating whether the
1407
+ # operation should return landmarks of the detected faces.
1408
+ # @param return_face_attributes [Array<FaceAttributeType>] Analyze and return
1409
+ # the one or more specified face attributes in the comma-separated string like
1410
+ # "returnFaceAttributes=age,gender". Supported face attributes include age,
1411
+ # gender, headPose, smile, facialHair, glasses and emotion. Note that each face
1412
+ # attribute analysis has additional computational and time cost.
1413
+ # @param recognition_model [RecognitionModel] Name of recognition model.
1414
+ # Recognition model is used when the face features are extracted and associated
1415
+ # with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition
1416
+ # model name can be provided when performing Face - Detect or (Large)FaceList -
1417
+ # Create or (Large)PersonGroup - Create. The default value is 'recognition_01',
1418
+ # if latest model needed, please explicitly specify the model you need.
1419
+ # Possible values include: 'recognition_01', 'recognition_02'
1420
+ # @param return_recognition_model [Boolean] A value indicating whether the
1421
+ # operation should return 'recognitionModel' in response.
1422
+ # @param detection_model [DetectionModel] Name of detection model. Detection
1423
+ # model is used to detect faces in the submitted image. A detection model name
1424
+ # can be provided when performing Face - Detect or (Large)FaceList - Add Face
1425
+ # or (Large)PersonGroup - Add Face. The default value is 'detection_01', if
1426
+ # another model is needed, please explicitly specify it. Possible values
1427
+ # include: 'detection_01', 'detection_02'
1428
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
1429
+ # will be added to the HTTP request.
1430
+ #
1431
+ # @return [Array] operation results.
1432
+ #
1433
+ def detect_with_stream(image, return_face_id:true, return_face_landmarks:false, return_face_attributes:nil, recognition_model:nil, return_recognition_model:false, detection_model:nil, custom_headers:nil)
1434
+ response = detect_with_stream_async(image, return_face_id:return_face_id, return_face_landmarks:return_face_landmarks, return_face_attributes:return_face_attributes, recognition_model:recognition_model, return_recognition_model:return_recognition_model, detection_model:detection_model, custom_headers:custom_headers).value!
1435
+ response.body unless response.nil?
1436
+ end
1437
+
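`detect_with_stream` posts the raw image bytes as application/octet-stream instead of a URL. A minimal sketch, assuming a local JPEG and the hypothetical `client` above; the path is a placeholder.

    # Open in binary mode; the IO object is sent as the request body.
    faces = File.open('photo.jpg', 'rb') do |image|
      client.face.detect_with_stream(image, return_face_attributes: ['age', 'glasses'])
    end
    puts "#{faces.length} face(s) detected"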
1438
+ #
1439
+ # Detect human faces in an image, return face rectangles, and optionally with
1440
+ # faceIds, landmarks, and attributes.<br />
1441
+ # * No image will be stored. Only the extracted face feature will be stored on
1442
+ # server. The faceId is an identifier of the face feature and will be used in
1443
+ # [Face -
1444
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1445
+ # [Face -
1446
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1447
+ # and [Face - Find
1448
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
1449
+ # The stored face feature(s) will expire and be deleted 24 hours after the
1450
+ # original detection call.
1451
+ # * Optional parameters include faceId, landmarks, and attributes. Attributes
1452
+ # include age, gender, headPose, smile, facialHair, glasses, emotion, hair,
1453
+ # makeup, occlusion, accessories, blur, exposure and noise. Some of the results
1454
+ # returned for specific attributes may not be highly accurate.
1455
+ # * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed
1456
+ # image file size is from 1KB to 6MB.
1457
+ # * Up to 100 faces can be returned for an image. Faces are ranked by face
1458
+ # rectangle size from large to small.
1459
+ # * For optimal results when querying [Face -
1460
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1461
+ # [Face -
1462
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1463
+ # and [Face - Find
1464
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
1465
+ # ('returnFaceId' is true), please use faces that are: frontal, clear, and with
1466
+ # a minimum size of 200x200 pixels (100 pixels between eyes).
1467
+ # * The minimum detectable face size is 36x36 pixels in an image no larger than
1468
+ # 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will
1469
+ # need a proportionally larger minimum face size.
1470
+ # * Different 'detectionModel' values can be provided. To use and compare
1471
+ # different detection models, please refer to [How to specify a detection
1472
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
1473
+ # | Model | Recommended use-case(s) |
1474
+ # | ---------- | -------- |
1475
+ # | 'detection_01': | The default detection model for [Face -
1476
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1477
+ # Recommended for near-frontal face detection. For scenarios with exceptionally
1478
+ # large angle (head-pose) faces, occluded faces or wrong image orientation, the
1479
+ # faces in such cases may not be detected. |
1480
+ # | 'detection_02': | Detection model released in May 2019 with improved
1481
+ # accuracy especially on small, side and blurry faces. |
1482
+ #
1483
+ # * Different 'recognitionModel' values are provided. If follow-up operations
1484
+ # like Verify, Identify, Find Similar are needed, please specify the
1485
+ # recognition model with 'recognitionModel' parameter. The default value for
1486
+ # 'recognitionModel' is 'recognition_01'; if the latest model is needed, please
1487
+ # explicitly specify the model you need in this parameter. Once specified, the
1488
+ # detected faceIds will be associated with the specified recognition model.
1489
+ # For more details, please refer to [How to specify a recognition
1490
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
1491
+ # | Model | Recommended use-case(s) |
1492
+ # | ---------- | -------- |
1493
+ # | 'recognition_01': | The default recognition model for [Face -
1494
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1495
+ # All faceIds created before March 2019 are bound to this recognition
1496
+ # model. |
1497
+ # | 'recognition_02': | Recognition model released in March 2019.
1498
+ # 'recognition_02' is recommended since its overall accuracy is improved
1499
+ # compared with 'recognition_01'. |
1500
+ #
1501
+ # @param image An image stream.
1502
+ # @param return_face_id [Boolean] A value indicating whether the operation
1503
+ # should return faceIds of detected faces.
1504
+ # @param return_face_landmarks [Boolean] A value indicating whether the
1505
+ # operation should return landmarks of the detected faces.
1506
+ # @param return_face_attributes [Array<FaceAttributeType>] Analyze and return
1507
+ # the one or more specified face attributes in the comma-separated string like
1508
+ # "returnFaceAttributes=age,gender". Supported face attributes include age,
1509
+ # gender, headPose, smile, facialHair, glasses and emotion. Note that each face
1510
+ # attribute analysis has additional computational and time cost.
1511
+ # @param recognition_model [RecognitionModel] Name of recognition model.
1512
+ # Recognition model is used when the face features are extracted and associated
1513
+ # with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition
1514
+ # model name can be provided when performing Face - Detect or (Large)FaceList -
1515
+ # Create or (Large)PersonGroup - Create. The default value is 'recognition_01',
1516
+ # but if the latest model is needed, please explicitly specify the model you need.
1517
+ # Possible values include: 'recognition_01', 'recognition_02'
1518
+ # @param return_recognition_model [Boolean] A value indicating whether the
1519
+ # operation should return 'recognitionModel' in response.
1520
+ # @param detection_model [DetectionModel] Name of detection model. Detection
1521
+ # model is used to detect faces in the submitted image. A detection model name
1522
+ # can be provided when performing Face - Detect or (Large)FaceList - Add Face
1523
+ # or (Large)PersonGroup - Add Face. The default value is 'detection_01', if
1524
+ # another model is needed, please explicitly specify it. Possible values
1525
+ # include: 'detection_01', 'detection_02'
1526
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
1527
+ # will be added to the HTTP request.
1528
+ #
1529
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
1530
+ #
1531
+ def detect_with_stream_with_http_info(image, return_face_id:true, return_face_landmarks:false, return_face_attributes:nil, recognition_model:nil, return_recognition_model:false, detection_model:nil, custom_headers:nil)
1532
+ detect_with_stream_async(image, return_face_id:return_face_id, return_face_landmarks:return_face_landmarks, return_face_attributes:return_face_attributes, recognition_model:recognition_model, return_recognition_model:return_recognition_model, detection_model:detection_model, custom_headers:custom_headers).value!
1533
+ end
1534
+
1535
+ #
1536
+ # Detect human faces in an image, return face rectangles, and optionally with
1537
+ # faceIds, landmarks, and attributes.<br />
1538
+ # * No image will be stored. Only the extracted face feature will be stored on
1539
+ # server. The faceId is an identifier of the face feature and will be used in
1540
+ # [Face -
1541
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1542
+ # [Face -
1543
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1544
+ # and [Face - Find
1545
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237).
1546
+ # The stored face feature(s) will expire and be deleted 24 hours after the
1547
+ # original detection call.
1548
+ # * Optional parameters include faceId, landmarks, and attributes. Attributes
1549
+ # include age, gender, headPose, smile, facialHair, glasses, emotion, hair,
1550
+ # makeup, occlusion, accessories, blur, exposure and noise. Some of the results
1551
+ # returned for specific attributes may not be highly accurate.
1552
+ # * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed
1553
+ # image file size is from 1KB to 6MB.
1554
+ # * Up to 100 faces can be returned for an image. Faces are ranked by face
1555
+ # rectangle size from large to small.
1556
+ # * For optimal results when querying [Face -
1557
+ # Identify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395239),
1558
+ # [Face -
1559
+ # Verify](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f3039523a),
1560
+ # and [Face - Find
1561
+ # Similar](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395237)
1562
+ # ('returnFaceId' is true), please use faces that are: frontal, clear, and with
1563
+ # a minimum size of 200x200 pixels (100 pixels between eyes).
1564
+ # * The minimum detectable face size is 36x36 pixels in an image no larger than
1565
+ # 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will
1566
+ # need a proportionally larger minimum face size.
1567
+ # * Different 'detectionModel' values can be provided. To use and compare
1568
+ # different detection models, please refer to [How to specify a detection
1569
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
1570
+ # | Model | Recommended use-case(s) |
1571
+ # | ---------- | -------- |
1572
+ # | 'detection_01': | The default detection model for [Face -
1573
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1574
+ # Recommended for near-frontal face detection. For scenarios with exceptionally
1575
+ # large angle (head-pose) faces, occluded faces or wrong image orientation, the
1576
+ # faces in such cases may not be detected. |
1577
+ # | 'detection_02': | Detection model released in May 2019 with improved
1578
+ # accuracy especially on small, side and blurry faces. |
1579
+ #
1580
+ # * Different 'recognitionModel' values are provided. If follow-up operations
1581
+ # like Verify, Identify, Find Similar are needed, please specify the
1582
+ # recognition model with 'recognitionModel' parameter. The default value for
1583
+ # 'recognitionModel' is 'recognition_01'; if the latest model is needed, please
1584
+ # explicitly specify the model you need in this parameter. Once specified, the
1585
+ # detected faceIds will be associated with the specified recognition model.
1586
+ # For more details, please refer to [How to specify a recognition
1587
+ # model](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
1588
+ # | Model | Recommended use-case(s) |
1589
+ # | ---------- | -------- |
1590
+ # | 'recognition_01': | The default recognition model for [Face -
1591
+ # Detect](/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236).
1592
+ # All faceIds created before March 2019 are bound to this recognition
1593
+ # model. |
1594
+ # | 'recognition_02': | Recognition model released in March 2019.
1595
+ # 'recognition_02' is recommended since its overall accuracy is improved
1596
+ # compared with 'recognition_01'. |
1597
+ #
1598
+ # @param image An image stream.
1599
+ # @param return_face_id [Boolean] A value indicating whether the operation
1600
+ # should return faceIds of detected faces.
1601
+ # @param return_face_landmarks [Boolean] A value indicating whether the
1602
+ # operation should return landmarks of the detected faces.
1603
+ # @param return_face_attributes [Array<FaceAttributeType>] Analyze and return
1604
+ # the one or more specified face attributes in the comma-separated string like
1605
+ # "returnFaceAttributes=age,gender". Supported face attributes include age,
1606
+ # gender, headPose, smile, facialHair, glasses and emotion. Note that each face
1607
+ # attribute analysis has additional computational and time cost.
1608
+ # @param recognition_model [RecognitionModel] Name of recognition model.
1609
+ # Recognition model is used when the face features are extracted and associated
1610
+ # with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition
1611
+ # model name can be provided when performing Face - Detect or (Large)FaceList -
1612
+ # Create or (Large)PersonGroup - Create. The default value is 'recognition_01',
1613
+ # but if the latest model is needed, please explicitly specify the model you need.
1614
+ # Possible values include: 'recognition_01', 'recognition_02'
1615
+ # @param return_recognition_model [Boolean] A value indicating whether the
1616
+ # operation should return 'recognitionModel' in response.
1617
+ # @param detection_model [DetectionModel] Name of detection model. Detection
1618
+ # model is used to detect faces in the submitted image. A detection model name
1619
+ # can be provided when performing Face - Detect or (Large)FaceList - Add Face
1620
+ # or (Large)PersonGroup - Add Face. The default value is 'detection_01', if
1621
+ # another model is needed, please explicitly specify it. Possible values
1622
+ # include: 'detection_01', 'detection_02'
1623
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that will be added
1624
+ # to the HTTP request.
1625
+ #
1626
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
1627
+ #
1628
+ def detect_with_stream_async(image, return_face_id:true, return_face_landmarks:false, return_face_attributes:nil, recognition_model:nil, return_recognition_model:false, detection_model:nil, custom_headers:nil)
1629
+ fail ArgumentError, '@client.endpoint is nil' if @client.endpoint.nil?
1630
+ fail ArgumentError, 'image is nil' if image.nil?
1631
+
1632
+
1633
+ request_headers = {}
1634
+ request_headers['Content-Type'] = 'application/octet-stream'
1635
+
1636
+ # Set Headers
1637
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
1638
+ request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
1639
+
1640
+ # Serialize Request
1641
+ request_mapper = {
1642
+ client_side_validation: true,
1643
+ required: true,
1644
+ serialized_name: 'Image',
1645
+ type: {
1646
+ name: 'Stream'
1647
+ }
1648
+ }
1649
+ request_content = @client.serialize(request_mapper, image)
1650
+
1651
+ path_template = 'detect'
1652
+
1653
+ request_url = @base_url || @client.base_url
1654
+ request_url = request_url.gsub('{Endpoint}', @client.endpoint)
1655
+
1656
+ options = {
1657
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
1658
+ query_params: {'returnFaceId' => return_face_id,'returnFaceLandmarks' => return_face_landmarks,'returnFaceAttributes' => return_face_attributes.nil? ? nil : return_face_attributes.join(','),'recognitionModel' => recognition_model,'returnRecognitionModel' => return_recognition_model,'detectionModel' => detection_model},
1659
+ body: request_content,
1660
+ headers: request_headers.merge(custom_headers || {}),
1661
+ base_url: request_url
1662
+ }
1663
+ promise = @client.make_request_async(:post, path_template, options)
1664
+
1665
+ promise = promise.then do |result|
1666
+ http_response = result.response
1667
+ status_code = http_response.status
1668
+ response_content = http_response.body
1669
+ unless status_code == 200
1670
+ error_model = JSON.load(response_content)
1671
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
1672
+ end
1673
+
1674
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
1675
+ result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
1676
+ result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
1677
+ # Deserialize Response
1678
+ if status_code == 200
1679
+ begin
1680
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
1681
+ result_mapper = {
1682
+ client_side_validation: true,
1683
+ required: false,
1684
+ serialized_name: 'parsed_response',
1685
+ type: {
1686
+ name: 'Sequence',
1687
+ element: {
1688
+ client_side_validation: true,
1689
+ required: false,
1690
+ serialized_name: 'DetectedFaceElementType',
1691
+ type: {
1692
+ name: 'Composite',
1693
+ class_name: 'DetectedFace'
1694
+ }
1695
+ }
1696
+ }
1697
+ }
1698
+ result.body = @client.deserialize(result_mapper, parsed_response)
1699
+ rescue Exception => e
1700
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
1701
+ end
1702
+ end
1703
+
1704
+ result
1705
+ end
1706
+
1707
+ promise.execute
1708
+ end
1709
+
1710
+ end
1711
+ end