aws-sdk-rekognition 1.9.0 → 1.10.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 3e0615e468443cc3292514031d09a9de486acc63
- data.tar.gz: 51ea7ec4153e8b23a5613ddff5a0e858a49cc1ba
+ metadata.gz: 2d3fa98f7cf90ae98ab69e6a5ff00cd0f761fa77
+ data.tar.gz: 56ab01ad98223353ce59a4d9aff1a9a2be4f495f
  SHA512:
- metadata.gz: 63c95533980ec3354aaf38719841d931bd00001ded444094547d26e50acaa912f0d0ce776fdb35de6e0642ce3ba3e4425a726697db2f77c1cd993b874da655f0
- data.tar.gz: d18bc9fc1c96aeffea5158c58f788ddca944e2a86cdab834719299fed2c1efd6a5c6cb0cd660adb598e708bb4763874085ac0e1d93803eea95568e9838b403cc
+ metadata.gz: 36053a204711eb24d5f0197ebead895c1d55a1f6e3bcd6ac2b758837fcb5683934a9af26c9880ddf5b49ca4f7f506d61abd455efa55d28186df567419fdf7ebd
+ data.tar.gz: c53dcd5b2a2e70d02977007b159155bb653eaca964c239b48c6f76fc1b6179700401aee098ead40258ef9050042d2c25432b5d8cf58813d690db502974710220
@@ -42,6 +42,6 @@ require_relative 'aws-sdk-rekognition/customizations'
  # @service
  module Aws::Rekognition

- GEM_VERSION = '1.9.0'
+ GEM_VERSION = '1.10.0'

  end
@@ -1997,18 +1997,53 @@ module Aws::Rekognition
  # client-side index to associate the faces with each image. You can then
  # use the index to find all faces in an image.
  #
+ # You can specify the maximum number of faces to index with the
+ # `MaxFaces` input parameter. This is useful when you want to index the
+ # largest faces in an image, and you don't want to index other faces
+ # detected in the image.
+ #
+ # The `QualityFilter` input parameter allows you to filter out detected
+ # faces that don’t meet the required quality bar chosen by Amazon
+ # Rekognition. The quality bar is based on a variety of common use
+ # cases.
+ #
  # In response, the operation returns an array of metadata for all
- # detected faces. This includes, the bounding box of the detected face,
- # confidence value (indicating the bounding box contains a face), a face
- # ID assigned by the service for each face that is detected and stored,
- # and an image ID assigned by the service for the input image. If you
- # request all facial attributes (using the `detectionAttributes`
- # parameter, Amazon Rekognition returns detailed facial attributes such
+ # detected faces, `FaceRecords`. This includes:
+ #
+ # * The bounding box, `BoundingBox`, of the detected face.
+ #
+ # * A confidence value, `Confidence`, indicating the confidence that the
+ # bounding box contains a face.
+ #
+ # * A face ID, `faceId`, assigned by the service for each face that is
+ # detected and stored.
+ #
+ # * An image ID, `ImageId`, assigned by the service for the input image.
+ #
+ # If you request all facial attributes (using the `detectionAttributes`
+ # parameter), Amazon Rekognition returns detailed facial attributes such
  # as facial landmarks (for example, location of eye and mouth) and other
  # facial attributes such gender. If you provide the same image, specify
  # the same collection, and use the same external ID in the `IndexFaces`
  # operation, Amazon Rekognition doesn't save duplicate face metadata.
  #
+ # Information about faces detected in an image, but not indexed, is
+ # returned in an array of objects, `UnindexedFaces`. Faces are not
+ # indexed for reasons such as:
+ #
+ # * The face is too blurry.
+ #
+ # * The image is too dark.
+ #
+ # * The face has an extreme pose.
+ #
+ # * The face is too small.
+ #
+ # * The number of faces detected exceeds the value of the `MaxFaces`
+ # request parameter.
+ #
+ #
+ #
  # For more information, see Adding Faces to a Collection in the Amazon
  # Rekognition Developer Guide.
  #
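For context, here is a minimal sketch of how a caller might pass the two parameters this release adds to `IndexFaces`. It is not an example shipped with the gem; the region, collection name, bucket, and object key are hypothetical placeholders.

    require 'aws-sdk-rekognition'

    client = Aws::Rekognition::Client.new(region: 'us-east-1')

    # Index at most 5 of the largest faces and let Rekognition drop
    # low-quality detections (quality_filter defaults to "NONE").
    resp = client.index_faces(
      collection_id: 'my-collection',   # hypothetical collection
      image: { s3_object: { bucket: 'my-bucket', name: 'group-photo.jpg' } },
      detection_attributes: ['DEFAULT'],
      max_faces: 5,
      quality_filter: 'AUTO'
    )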
@@ -2045,11 +2080,39 @@ module Aws::Rekognition
  # AND operator to determine which attributes to return (in this case,
  # all attributes).
  #
+ # @option params [Integer] :max_faces
+ # The maximum number of faces to index. The value of `MaxFaces` must be
+ # greater than or equal to 1. `IndexFaces` returns no more that 100
+ # detected faces in an image, even if you specify a larger value for
+ # `MaxFaces`.
+ #
+ # If `IndexFaces` detects more faces than the value of `MaxFaces`, the
+ # faces with the lowest quality are filtered out first. If there are
+ # still more faces than the value of `MaxFaces`, the faces with the
+ # smallest bounding boxes are filtered out (up to the number needed to
+ # satisfy the value of `MaxFaces`). Information about the unindexed
+ # faces is available in the `UnindexedFaces` array.
+ #
+ # The faces returned by `IndexFaces` are sorted, in descending order, by
+ # the largest face bounding box size, to the smallest.
+ #
+ # @option params [String] :quality_filter
+ # Specifies how much filtering is done to identify faces detected with
+ # low quality. Filtered faces are not indexed. If you specify `AUTO`,
+ # filtering prioritizes the identification of faces that don’t meet the
+ # required quality bar chosen by Amazon Rekognition. The quality bar is
+ # based on a variety of common use cases. Low quality detections can
+ # arise for a number of reasons. For example, an object misidentified as
+ # a face, a face that is too blurry, or a face with a pose that is too
+ # extreme to use. If you specify `NONE`, no filtering is performed. The
+ # default value is NONE.
+ #
  # @return [Types::IndexFacesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
  #
  # * {Types::IndexFacesResponse#face_records #face_records} => Array&lt;Types::FaceRecord&gt;
  # * {Types::IndexFacesResponse#orientation_correction #orientation_correction} => String
  # * {Types::IndexFacesResponse#face_model_version #face_model_version} => String
+ # * {Types::IndexFacesResponse#unindexed_faces #unindexed_faces} => Array&lt;Types::UnindexedFace&gt;
  #
  #
  # @example Example: To add a face to a collection
@@ -2206,6 +2269,8 @@ module Aws::Rekognition
  # },
  # external_image_id: "ExternalImageId",
  # detection_attributes: ["DEFAULT"], # accepts DEFAULT, ALL
+ # max_faces: 1,
+ # quality_filter: "NONE", # accepts NONE, AUTO
  # })
  #
  # @example Response structure
@@ -2256,6 +2321,44 @@ module Aws::Rekognition
  # resp.face_records[0].face_detail.confidence #=> Float
  # resp.orientation_correction #=> String, one of "ROTATE_0", "ROTATE_90", "ROTATE_180", "ROTATE_270"
  # resp.face_model_version #=> String
+ # resp.unindexed_faces #=> Array
+ # resp.unindexed_faces[0].reasons #=> Array
+ # resp.unindexed_faces[0].reasons[0] #=> String, one of "EXCEEDS_MAX_FACES", "EXTREME_POSE", "LOW_BRIGHTNESS", "LOW_SHARPNESS", "LOW_CONFIDENCE", "SMALL_BOUNDING_BOX"
+ # resp.unindexed_faces[0].face_detail.bounding_box.width #=> Float
+ # resp.unindexed_faces[0].face_detail.bounding_box.height #=> Float
+ # resp.unindexed_faces[0].face_detail.bounding_box.left #=> Float
+ # resp.unindexed_faces[0].face_detail.bounding_box.top #=> Float
+ # resp.unindexed_faces[0].face_detail.age_range.low #=> Integer
+ # resp.unindexed_faces[0].face_detail.age_range.high #=> Integer
+ # resp.unindexed_faces[0].face_detail.smile.value #=> Boolean
+ # resp.unindexed_faces[0].face_detail.smile.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.eyeglasses.value #=> Boolean
+ # resp.unindexed_faces[0].face_detail.eyeglasses.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.sunglasses.value #=> Boolean
+ # resp.unindexed_faces[0].face_detail.sunglasses.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.gender.value #=> String, one of "Male", "Female"
+ # resp.unindexed_faces[0].face_detail.gender.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.beard.value #=> Boolean
+ # resp.unindexed_faces[0].face_detail.beard.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.mustache.value #=> Boolean
+ # resp.unindexed_faces[0].face_detail.mustache.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.eyes_open.value #=> Boolean
+ # resp.unindexed_faces[0].face_detail.eyes_open.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.mouth_open.value #=> Boolean
+ # resp.unindexed_faces[0].face_detail.mouth_open.confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.emotions #=> Array
+ # resp.unindexed_faces[0].face_detail.emotions[0].type #=> String, one of "HAPPY", "SAD", "ANGRY", "CONFUSED", "DISGUSTED", "SURPRISED", "CALM", "UNKNOWN"
+ # resp.unindexed_faces[0].face_detail.emotions[0].confidence #=> Float
+ # resp.unindexed_faces[0].face_detail.landmarks #=> Array
+ # resp.unindexed_faces[0].face_detail.landmarks[0].type #=> String, one of "eyeLeft", "eyeRight", "nose", "mouthLeft", "mouthRight", "leftEyeBrowLeft", "leftEyeBrowRight", "leftEyeBrowUp", "rightEyeBrowLeft", "rightEyeBrowRight", "rightEyeBrowUp", "leftEyeLeft", "leftEyeRight", "leftEyeUp", "leftEyeDown", "rightEyeLeft", "rightEyeRight", "rightEyeUp", "rightEyeDown", "noseLeft", "noseRight", "mouthUp", "mouthDown", "leftPupil", "rightPupil"
+ # resp.unindexed_faces[0].face_detail.landmarks[0].x #=> Float
+ # resp.unindexed_faces[0].face_detail.landmarks[0].y #=> Float
+ # resp.unindexed_faces[0].face_detail.pose.roll #=> Float
+ # resp.unindexed_faces[0].face_detail.pose.yaw #=> Float
+ # resp.unindexed_faces[0].face_detail.pose.pitch #=> Float
+ # resp.unindexed_faces[0].face_detail.quality.brightness #=> Float
+ # resp.unindexed_faces[0].face_detail.quality.sharpness #=> Float
+ # resp.unindexed_faces[0].face_detail.confidence #=> Float
  #
  # @overload index_faces(params = {})
  # @param [Hash] params ({})
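Building on the response structure above, a brief sketch of reading back both indexed and unindexed faces from the same call. It assumes `resp` is the `IndexFacesResponse` returned by the earlier sketch; the printed messages are illustrative only.

    resp.face_records.each do |record|
      face = record.face
      puts "Indexed #{face.face_id} (confidence #{face.confidence.round(1)})"
    end

    # Faces that were detected but skipped, together with the reasons.
    resp.unindexed_faces.each do |unindexed|
      box = unindexed.face_detail.bounding_box
      puts "Skipped face at left=#{box.left}, top=#{box.top}: " \
           "#{unindexed.reasons.join(', ')}"
    end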
@@ -3446,7 +3549,7 @@ module Aws::Rekognition
  params: params,
  config: config)
  context[:gem_name] = 'aws-sdk-rekognition'
- context[:gem_version] = '1.9.0'
+ context[:gem_version] = '1.10.0'
  Seahorse::Client::Request.new(handlers, context)
  end

@@ -137,6 +137,7 @@ module Aws::Rekognition
  ListStreamProcessorsRequest = Shapes::StructureShape.new(name: 'ListStreamProcessorsRequest')
  ListStreamProcessorsResponse = Shapes::StructureShape.new(name: 'ListStreamProcessorsResponse')
  MaxFaces = Shapes::IntegerShape.new(name: 'MaxFaces')
+ MaxFacesToIndex = Shapes::IntegerShape.new(name: 'MaxFacesToIndex')
  MaxResults = Shapes::IntegerShape.new(name: 'MaxResults')
  ModerationLabel = Shapes::StructureShape.new(name: 'ModerationLabel')
  ModerationLabels = Shapes::ListShape.new(name: 'ModerationLabels')
@@ -158,6 +159,9 @@ module Aws::Rekognition
  Polygon = Shapes::ListShape.new(name: 'Polygon')
  Pose = Shapes::StructureShape.new(name: 'Pose')
  ProvisionedThroughputExceededException = Shapes::StructureShape.new(name: 'ProvisionedThroughputExceededException')
+ QualityFilter = Shapes::StringShape.new(name: 'QualityFilter')
+ Reason = Shapes::StringShape.new(name: 'Reason')
+ Reasons = Shapes::ListShape.new(name: 'Reasons')
  RecognizeCelebritiesRequest = Shapes::StructureShape.new(name: 'RecognizeCelebritiesRequest')
  RecognizeCelebritiesResponse = Shapes::StructureShape.new(name: 'RecognizeCelebritiesResponse')
  RekognitionUniqueId = Shapes::StringShape.new(name: 'RekognitionUniqueId')
@@ -209,6 +213,8 @@ module Aws::Rekognition
  Timestamp = Shapes::IntegerShape.new(name: 'Timestamp')
  UInteger = Shapes::IntegerShape.new(name: 'UInteger')
  ULong = Shapes::IntegerShape.new(name: 'ULong')
+ UnindexedFace = Shapes::StructureShape.new(name: 'UnindexedFace')
+ UnindexedFaces = Shapes::ListShape.new(name: 'UnindexedFaces')
  Url = Shapes::StringShape.new(name: 'Url')
  Urls = Shapes::ListShape.new(name: 'Urls')
  Video = Shapes::StructureShape.new(name: 'Video')
@@ -558,11 +564,14 @@ module Aws::Rekognition
  IndexFacesRequest.add_member(:image, Shapes::ShapeRef.new(shape: Image, required: true, location_name: "Image"))
  IndexFacesRequest.add_member(:external_image_id, Shapes::ShapeRef.new(shape: ExternalImageId, location_name: "ExternalImageId"))
  IndexFacesRequest.add_member(:detection_attributes, Shapes::ShapeRef.new(shape: Attributes, location_name: "DetectionAttributes"))
+ IndexFacesRequest.add_member(:max_faces, Shapes::ShapeRef.new(shape: MaxFacesToIndex, location_name: "MaxFaces"))
+ IndexFacesRequest.add_member(:quality_filter, Shapes::ShapeRef.new(shape: QualityFilter, location_name: "QualityFilter"))
  IndexFacesRequest.struct_class = Types::IndexFacesRequest

  IndexFacesResponse.add_member(:face_records, Shapes::ShapeRef.new(shape: FaceRecordList, location_name: "FaceRecords"))
  IndexFacesResponse.add_member(:orientation_correction, Shapes::ShapeRef.new(shape: OrientationCorrection, location_name: "OrientationCorrection"))
  IndexFacesResponse.add_member(:face_model_version, Shapes::ShapeRef.new(shape: String, location_name: "FaceModelVersion"))
+ IndexFacesResponse.add_member(:unindexed_faces, Shapes::ShapeRef.new(shape: UnindexedFaces, location_name: "UnindexedFaces"))
  IndexFacesResponse.struct_class = Types::IndexFacesResponse

  KinesisDataStream.add_member(:arn, Shapes::ShapeRef.new(shape: KinesisDataArn, location_name: "Arn"))
@@ -665,6 +674,8 @@ module Aws::Rekognition
  Pose.add_member(:pitch, Shapes::ShapeRef.new(shape: Degree, location_name: "Pitch"))
  Pose.struct_class = Types::Pose

+ Reasons.member = Shapes::ShapeRef.new(shape: Reason)
+
  RecognizeCelebritiesRequest.add_member(:image, Shapes::ShapeRef.new(shape: Image, required: true, location_name: "Image"))
  RecognizeCelebritiesRequest.struct_class = Types::RecognizeCelebritiesRequest

@@ -803,6 +814,12 @@ module Aws::Rekognition

  TextDetectionList.member = Shapes::ShapeRef.new(shape: TextDetection)

+ UnindexedFace.add_member(:reasons, Shapes::ShapeRef.new(shape: Reasons, location_name: "Reasons"))
+ UnindexedFace.add_member(:face_detail, Shapes::ShapeRef.new(shape: FaceDetail, location_name: "FaceDetail"))
+ UnindexedFace.struct_class = Types::UnindexedFace
+
+ UnindexedFaces.member = Shapes::ShapeRef.new(shape: UnindexedFace)
+
  Urls.member = Shapes::ShapeRef.new(shape: Url)

  Video.add_member(:s3_object, Shapes::ShapeRef.new(shape: S3Object, location_name: "S3Object"))
@@ -1823,6 +1823,8 @@ module Aws::Rekognition
  # },
  # external_image_id: "ExternalImageId",
  # detection_attributes: ["DEFAULT"], # accepts DEFAULT, ALL
+ # max_faces: 1,
+ # quality_filter: "NONE", # accepts NONE, AUTO
  # }
  #
  # @!attribute [rw] collection_id
@@ -1854,11 +1856,42 @@ module Aws::Rekognition
  # this case, all attributes).
  # @return [Array<String>]
  #
+ # @!attribute [rw] max_faces
+ # The maximum number of faces to index. The value of `MaxFaces` must
+ # be greater than or equal to 1. `IndexFaces` returns no more that 100
+ # detected faces in an image, even if you specify a larger value for
+ # `MaxFaces`.
+ #
+ # If `IndexFaces` detects more faces than the value of `MaxFaces`, the
+ # faces with the lowest quality are filtered out first. If there are
+ # still more faces than the value of `MaxFaces`, the faces with the
+ # smallest bounding boxes are filtered out (up to the number needed to
+ # satisfy the value of `MaxFaces`). Information about the unindexed
+ # faces is available in the `UnindexedFaces` array.
+ #
+ # The faces returned by `IndexFaces` are sorted, in descending order,
+ # by the largest face bounding box size, to the smallest.
+ # @return [Integer]
+ #
+ # @!attribute [rw] quality_filter
+ # Specifies how much filtering is done to identify faces detected with
+ # low quality. Filtered faces are not indexed. If you specify `AUTO`,
+ # filtering prioritizes the identification of faces that don’t meet
+ # the required quality bar chosen by Amazon Rekognition. The quality
+ # bar is based on a variety of common use cases. Low quality
+ # detections can arise for a number of reasons. For example, an object
+ # misidentified as a face, a face that is too blurry, or a face with a
+ # pose that is too extreme to use. If you specify `NONE`, no filtering
+ # is performed. The default value is NONE.
+ # @return [String]
+ #
  class IndexFacesRequest < Struct.new(
  :collection_id,
  :image,
  :external_image_id,
- :detection_attributes)
+ :detection_attributes,
+ :max_faces,
+ :quality_filter)
  include Aws::Structure
  end

@@ -1890,10 +1923,18 @@ module Aws::Rekognition
  # collection (`CollectionId`).
  # @return [String]
  #
+ # @!attribute [rw] unindexed_faces
+ # An array of faces that detected in the image but not indexed either
+ # because the quality filter deemed them to be of low-quality or the
+ # `MaxFaces` request parameter filtered them out. To use the quality
+ # filter, you specify the `QualityFilter` request parameter.
+ # @return [Array<Types::UnindexedFace>]
+ #
  class IndexFacesResponse < Struct.new(
  :face_records,
  :orientation_correction,
- :face_model_version)
+ :face_model_version,
+ :unindexed_faces)
  include Aws::Structure
  end

@@ -3202,6 +3243,40 @@ module Aws::Rekognition
  include Aws::Structure
  end

+ # A face detected by but not indexed. Use the `Reasons` response
+ # attribute to determine why a face is not indexed.
+ #
+ # @!attribute [rw] reasons
+ # An array of reasons specifying why a face was not indexed.
+ #
+ # * EXTREME\_POSE - The face is at a pose that can't be detected. For
+ # example, the head is turned too far away from the camera.
+ #
+ # * EXCEEDS\_MAX\_FACES - The number of faces detected is already
+ # higher than that specified by the `MaxFaces` input parameter for
+ # `IndexFaces`.
+ #
+ # * LOW\_BRIGHTNESS - The image is too dark.
+ #
+ # * LOW\_SHARPNESS - The image is too blurry.
+ #
+ # * LOW\_CONFIDENCE - The face was detected with a low confidence.
+ #
+ # * SMALL\_BOUNDING\_BOX - The bounding box around the face is too
+ # small.
+ # @return [Array<String>]
+ #
+ # @!attribute [rw] face_detail
+ # Structure containing attributes of a face that was detected, but not
+ # indexed, by `IndexFaces`.
+ # @return [Types::FaceDetail]
+ #
+ class UnindexedFace < Struct.new(
+ :reasons,
+ :face_detail)
+ include Aws::Structure
+ end
+
  # Video file stored in an Amazon S3 bucket. Amazon Rekognition video
  # start operations such as use `Video` to specify a video for analysis.
  # The supported file formats are .mp4, .mov and .avi.
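A usage note on the `Reasons` values listed above: faces rejected only because the `MaxFaces` cap was reached are not low quality, so a caller may want to treat them differently from quality rejections. Below is a small sketch of that distinction, again assuming `resp` comes from an `index_faces` call like the one sketched earlier in this diff.

    # Split skipped faces into "only over the MaxFaces limit" vs. "poor quality".
    over_limit, poor_quality = resp.unindexed_faces.partition do |f|
      f.reasons == ['EXCEEDS_MAX_FACES']
    end

    puts "#{over_limit.size} face(s) skipped solely by the MaxFaces limit"
    puts "#{poor_quality.size} face(s) skipped for other reasons: " \
         "#{poor_quality.flat_map(&:reasons).uniq.sort.join(', ')}"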
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: aws-sdk-rekognition
  version: !ruby/object:Gem::Version
- version: 1.9.0
+ version: 1.10.0
  platform: ruby
  authors:
  - Amazon Web Services
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-09-06 00:00:00.000000000 Z
+ date: 2018-09-18 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-core