azure_cognitiveservices_computervision 0.18.1 → 0.19.0

Files changed (18)
  1. checksums.yaml +4 -4
  2. data/lib/1.0/generated/azure_cognitiveservices_computervision/computer_vision_client.rb +1 -1
  3. data/lib/2.0/generated/azure_cognitiveservices_computervision.rb +15 -11
  4. data/lib/2.0/generated/azure_cognitiveservices_computervision/computer_vision_client.rb +335 -7
  5. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/detected_brand.rb +73 -0
  6. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/image_analysis.rb +20 -0
  7. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/line.rb +4 -5
  8. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/read_operation_result.rb +70 -0
  9. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_operation_result.rb +4 -4
  10. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result.rb +114 -0
  11. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_confidence_class.rb +16 -0
  12. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_dimension_unit.rb +16 -0
  13. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/visual_feature_types.rb +1 -0
  14. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/word.rb +18 -6
  15. data/lib/profiles/latest/modules/computervision_profile_module.rb +49 -33
  16. data/lib/version.rb +1 -1
  17. metadata +6 -2
  18. data/lib/2.0/generated/azure_cognitiveservices_computervision/models/recognition_result.rb +0 -56
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 057a9acb8039f7b2e3ca04b42702a7d0c61f5e08858798b8a34bb373bc289edd
- data.tar.gz: 983b4bb7eecace6ff23898fe8bc47f48e716b8ee8f65654aa99da4a16bf4760e
+ metadata.gz: ae50a37335f7d65db4ca6b1bcb25a5900121dde306dcc5908fc79cb35ccde09c
+ data.tar.gz: 37453a13b9a2acc22fc974d366ab41efc50c86db5c665b5cb98a48ac7090e20c
  SHA512:
- metadata.gz: f93ca1816494e183d600db91b7bdb41e031162661351b92396d1891fa2e1ebdf496bbe4b4bd06cc3e69ff9832802c271ea6586f7812dc8e264a2135a4d7fff0e
- data.tar.gz: 1afc0d8d4fc18ea3668760f8ac35ea914bd9f4de67a4d9e6fd67ac4ced3ffc9cb7163cf62cd022318976c99d900835798dbc9e4ce1eb0bea85d3272c00077a45
+ metadata.gz: 97e70b4af6cba069770dca5a599a4ba4414ec8a0a72a71a6edf23fdba9f88282b66dfc0ee33dd52a7bc37a134a937b20dce81e1ae5cc41b15efebbeb5e0f4665
+ data.tar.gz: 43f4102201fdc4a30857dacd51ffd3c83a8fb611d5839f145abf30995f87feb4131faa11d969e5211a1dbe08ae2a0bd7eda29528df4886dd9b8bfcaa82fe7aca
data/lib/1.0/generated/azure_cognitiveservices_computervision/computer_vision_client.rb CHANGED
@@ -2254,7 +2254,7 @@ module Azure::CognitiveServices::ComputerVision::V1_0
  #
  def add_telemetry
  sdk_information = 'azure_cognitiveservices_computervision'
- sdk_information = "#{sdk_information}/0.18.1"
+ sdk_information = "#{sdk_information}/0.19.0"
  add_user_agent_information(sdk_information)
  end
  end
data/lib/2.0/generated/azure_cognitiveservices_computervision.rb CHANGED
@@ -40,27 +40,31 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  autoload :OcrResult, '2.0/generated/azure_cognitiveservices_computervision/models/ocr_result.rb'
  autoload :DetectedObject, '2.0/generated/azure_cognitiveservices_computervision/models/detected_object.rb'
  autoload :TagResult, '2.0/generated/azure_cognitiveservices_computervision/models/tag_result.rb'
- autoload :ImageAnalysis, '2.0/generated/azure_cognitiveservices_computervision/models/image_analysis.rb'
+ autoload :ImageMetadata, '2.0/generated/azure_cognitiveservices_computervision/models/image_metadata.rb'
  autoload :AreaOfInterestResult, '2.0/generated/azure_cognitiveservices_computervision/models/area_of_interest_result.rb'
- autoload :CelebritiesModel, '2.0/generated/azure_cognitiveservices_computervision/models/celebrities_model.rb'
+ autoload :ImageDescription, '2.0/generated/azure_cognitiveservices_computervision/models/image_description.rb'
  autoload :ImageUrl, '2.0/generated/azure_cognitiveservices_computervision/models/image_url.rb'
- autoload :AdultInfo, '2.0/generated/azure_cognitiveservices_computervision/models/adult_info.rb'
+ autoload :CategoryDetail, '2.0/generated/azure_cognitiveservices_computervision/models/category_detail.rb'
  autoload :ComputerVisionError, '2.0/generated/azure_cognitiveservices_computervision/models/computer_vision_error.rb'
- autoload :ImageCaption, '2.0/generated/azure_cognitiveservices_computervision/models/image_caption.rb'
+ autoload :ImageType, '2.0/generated/azure_cognitiveservices_computervision/models/image_type.rb'
  autoload :LandmarkResults, '2.0/generated/azure_cognitiveservices_computervision/models/landmark_results.rb'
- autoload :ObjectHierarchy, '2.0/generated/azure_cognitiveservices_computervision/models/object_hierarchy.rb'
+ autoload :FaceDescription, '2.0/generated/azure_cognitiveservices_computervision/models/face_description.rb'
  autoload :CelebrityResults, '2.0/generated/azure_cognitiveservices_computervision/models/celebrity_results.rb'
- autoload :ImageDescription, '2.0/generated/azure_cognitiveservices_computervision/models/image_description.rb'
+ autoload :DetectedBrand, '2.0/generated/azure_cognitiveservices_computervision/models/detected_brand.rb'
  autoload :Word, '2.0/generated/azure_cognitiveservices_computervision/models/word.rb'
- autoload :ImageType, '2.0/generated/azure_cognitiveservices_computervision/models/image_type.rb'
+ autoload :CelebritiesModel, '2.0/generated/azure_cognitiveservices_computervision/models/celebrities_model.rb'
  autoload :Line, '2.0/generated/azure_cognitiveservices_computervision/models/line.rb'
- autoload :ImageMetadata, '2.0/generated/azure_cognitiveservices_computervision/models/image_metadata.rb'
- autoload :RecognitionResult, '2.0/generated/azure_cognitiveservices_computervision/models/recognition_result.rb'
- autoload :FaceDescription, '2.0/generated/azure_cognitiveservices_computervision/models/face_description.rb'
+ autoload :ImageCaption, '2.0/generated/azure_cognitiveservices_computervision/models/image_caption.rb'
+ autoload :TextRecognitionResult, '2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result.rb'
+ autoload :ImageAnalysis, '2.0/generated/azure_cognitiveservices_computervision/models/image_analysis.rb'
  autoload :TextOperationResult, '2.0/generated/azure_cognitiveservices_computervision/models/text_operation_result.rb'
- autoload :CategoryDetail, '2.0/generated/azure_cognitiveservices_computervision/models/category_detail.rb'
+ autoload :ObjectHierarchy, '2.0/generated/azure_cognitiveservices_computervision/models/object_hierarchy.rb'
+ autoload :ReadOperationResult, '2.0/generated/azure_cognitiveservices_computervision/models/read_operation_result.rb'
+ autoload :AdultInfo, '2.0/generated/azure_cognitiveservices_computervision/models/adult_info.rb'
  autoload :Gender, '2.0/generated/azure_cognitiveservices_computervision/models/gender.rb'
  autoload :TextOperationStatusCodes, '2.0/generated/azure_cognitiveservices_computervision/models/text_operation_status_codes.rb'
+ autoload :TextRecognitionResultDimensionUnit, '2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_dimension_unit.rb'
+ autoload :TextRecognitionResultConfidenceClass, '2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_confidence_class.rb'
  autoload :OcrLanguages, '2.0/generated/azure_cognitiveservices_computervision/models/ocr_languages.rb'
  autoload :VisualFeatureTypes, '2.0/generated/azure_cognitiveservices_computervision/models/visual_feature_types.rb'
  autoload :TextRecognitionMode, '2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_mode.rb'
data/lib/2.0/generated/azure_cognitiveservices_computervision/computer_vision_client.rb CHANGED
@@ -136,7 +136,9 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # - detects if the image is pornographic in nature (depicts nudity or a sex
  # act). Sexually suggestive content is also detected. Objects - detects
  # various objects within an image, including the approximate location. The
- # Objects argument is only available in English.
+ # Objects argument is only available in English. Brands - detects various
+ # brands within an image, including the approximate location. The Brands
+ # argument is only available in English.
  # @param details [Array<Details>] A string indicating which domain-specific
  # details to return. Multiple values should be comma-separated. Valid visual
  # feature types include: Celebrities - identifies celebrities if detected in
@@ -180,7 +182,9 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # - detects if the image is pornographic in nature (depicts nudity or a sex
  # act). Sexually suggestive content is also detected. Objects - detects
  # various objects within an image, including the approximate location. The
- # Objects argument is only available in English.
+ # Objects argument is only available in English. Brands - detects various
+ # brands within an image, including the approximate location. The Brands
+ # argument is only available in English.
  # @param details [Array<Details>] A string indicating which domain-specific
  # details to return. Multiple values should be comma-separated. Valid visual
  # feature types include: Celebrities - identifies celebrities if detected in
@@ -223,7 +227,9 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # - detects if the image is pornographic in nature (depicts nudity or a sex
  # act). Sexually suggestive content is also detected. Objects - detects
  # various objects within an image, including the approximate location. The
- # Objects argument is only available in English.
+ # Objects argument is only available in English. Brands - detects various
+ # brands within an image, including the approximate location. The Brands
+ # argument is only available in English.
  # @param details [Array<Details>] A string indicating which domain-specific
  # details to return. Multiple values should be comma-separated. Valid visual
  # feature types include: Celebrities - identifies celebrities if detected in
@@ -1566,6 +1572,211 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  promise.execute
  end

+ #
+ # Use this interface to get the result of a Read operation, employing the
+ # state-of-the-art Optical Character Recognition (OCR) algorithms optimized for
+ # text-heavy documents. When you use the Read File interface, the response
+ # contains a field called "Operation-Location". The "Operation-Location" field
+ # contains the URL that you must use for your "Read Operation Result" operation
+ # to access OCR results.
+ #
+ # @param mode [TextRecognitionMode] Type of text to recognize. Possible values
+ # include: 'Handwritten', 'Printed'
+ # @param url [String] Publicly reachable URL of an image.
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
+ # will be added to the HTTP request.
+ #
+ #
+ def batch_read_file(url, mode, custom_headers:nil)
+ response = batch_read_file_async(url, mode, custom_headers:custom_headers).value!
+ nil
+ end
+
+ #
+ # Use this interface to get the result of a Read operation, employing the
+ # state-of-the-art Optical Character Recognition (OCR) algorithms optimized for
+ # text-heavy documents. When you use the Read File interface, the response
+ # contains a field called "Operation-Location". The "Operation-Location" field
+ # contains the URL that you must use for your "Read Operation Result" operation
+ # to access OCR results.
+ #
+ # @param mode [TextRecognitionMode] Type of text to recognize. Possible values
+ # include: 'Handwritten', 'Printed'
+ # @param url [String] Publicly reachable URL of an image.
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
+ # will be added to the HTTP request.
+ #
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
+ #
+ def batch_read_file_with_http_info(url, mode, custom_headers:nil)
+ batch_read_file_async(url, mode, custom_headers:custom_headers).value!
+ end
+
+ #
+ # Use this interface to get the result of a Read operation, employing the
+ # state-of-the-art Optical Character Recognition (OCR) algorithms optimized for
+ # text-heavy documents. When you use the Read File interface, the response
+ # contains a field called "Operation-Location". The "Operation-Location" field
+ # contains the URL that you must use for your "Read Operation Result" operation
+ # to access OCR results.
+ #
+ # @param mode [TextRecognitionMode] Type of text to recognize. Possible values
+ # include: 'Handwritten', 'Printed'
+ # @param url [String] Publicly reachable URL of an image.
+ # @param [Hash{String => String}] A hash of custom headers that will be added
+ # to the HTTP request.
+ #
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
+ #
+ def batch_read_file_async(url, mode, custom_headers:nil)
+ fail ArgumentError, 'endpoint is nil' if endpoint.nil?
+ fail ArgumentError, 'mode is nil' if mode.nil?
+ fail ArgumentError, 'url is nil' if url.nil?
+
+ image_url = ImageUrl.new
+ unless url.nil?
+ image_url.url = url
+ end
+
+ request_headers = {}
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
+
+ # Set Headers
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
+ request_headers['accept-language'] = accept_language unless accept_language.nil?
+
+ # Serialize Request
+ request_mapper = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageUrl.mapper()
+ request_content = self.serialize(request_mapper, image_url)
+ request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
+
+ path_template = 'read/core/asyncBatchAnalyze'
+
+ request_url = @base_url || self.base_url
+ request_url = request_url.gsub('{Endpoint}', endpoint)
+
+ options = {
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
+ query_params: {'mode' => mode},
+ body: request_content,
+ headers: request_headers.merge(custom_headers || {}),
+ base_url: request_url
+ }
+ promise = self.make_request_async(:post, path_template, options)
+
+ promise = promise.then do |result|
+ http_response = result.response
+ status_code = http_response.status
+ response_content = http_response.body
+ unless status_code == 202
+ error_model = JSON.load(response_content)
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
+ end
+
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
+
+ result
+ end
+
+ promise.execute
+ end
+
+ #
+ # This interface is used for getting OCR results of Read operation. The URL to
+ # this interface should be retrieved from "Operation-Location" field returned
+ # from Batch Read File interface.
+ #
+ # @param operation_id [String] Id of read operation returned in the response of
+ # the "Batch Read File" interface.
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
+ # will be added to the HTTP request.
+ #
+ # @return [ReadOperationResult] operation results.
+ #
+ def get_read_operation_result(operation_id, custom_headers:nil)
+ response = get_read_operation_result_async(operation_id, custom_headers:custom_headers).value!
+ response.body unless response.nil?
+ end
+
+ #
+ # This interface is used for getting OCR results of Read operation. The URL to
+ # this interface should be retrieved from "Operation-Location" field returned
+ # from Batch Read File interface.
+ #
+ # @param operation_id [String] Id of read operation returned in the response of
+ # the "Batch Read File" interface.
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
+ # will be added to the HTTP request.
+ #
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
+ #
+ def get_read_operation_result_with_http_info(operation_id, custom_headers:nil)
+ get_read_operation_result_async(operation_id, custom_headers:custom_headers).value!
+ end
+
+ #
+ # This interface is used for getting OCR results of Read operation. The URL to
+ # this interface should be retrieved from "Operation-Location" field returned
+ # from Batch Read File interface.
+ #
+ # @param operation_id [String] Id of read operation returned in the response of
+ # the "Batch Read File" interface.
+ # @param [Hash{String => String}] A hash of custom headers that will be added
+ # to the HTTP request.
+ #
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
+ #
+ def get_read_operation_result_async(operation_id, custom_headers:nil)
+ fail ArgumentError, 'endpoint is nil' if endpoint.nil?
+ fail ArgumentError, 'operation_id is nil' if operation_id.nil?
+
+
+ request_headers = {}
+ request_headers['Content-Type'] = 'application/json; charset=utf-8'
+
+ # Set Headers
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
+ request_headers['accept-language'] = accept_language unless accept_language.nil?
+ path_template = 'read/operations/{operationId}'
+
+ request_url = @base_url || self.base_url
+ request_url = request_url.gsub('{Endpoint}', endpoint)
+
+ options = {
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
+ path_params: {'operationId' => operation_id},
+ headers: request_headers.merge(custom_headers || {}),
+ base_url: request_url
+ }
+ promise = self.make_request_async(:get, path_template, options)
+
+ promise = promise.then do |result|
+ http_response = result.response
+ status_code = http_response.status
+ response_content = http_response.body
+ unless status_code == 200
+ error_model = JSON.load(response_content)
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
+ end
+
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
+ # Deserialize Response
+ if status_code == 200
+ begin
+ parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
+ result_mapper = Azure::CognitiveServices::ComputerVision::V2_0::Models::ReadOperationResult.mapper()
+ result.body = self.deserialize(result_mapper, parsed_response)
+ rescue Exception => e
+ fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
+ end
+ end
+
+ result
+ end
+
+ promise.execute
+ end
+
  #
  # This operation extracts a rich set of visual features based on the image
  # content.
@@ -1590,7 +1801,9 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # - detects if the image is pornographic in nature (depicts nudity or a sex
  # act). Sexually suggestive content is also detected. Objects - detects
  # various objects within an image, including the approximate location. The
- # Objects argument is only available in English.
+ # Objects argument is only available in English. Brands - detects various
+ # brands within an image, including the approximate location. The Brands
+ # argument is only available in English.
  # @param details [Array<Details>] A string indicating which domain-specific
  # details to return. Multiple values should be comma-separated. Valid visual
  # feature types include: Celebrities - identifies celebrities if detected in
@@ -1634,7 +1847,9 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # - detects if the image is pornographic in nature (depicts nudity or a sex
  # act). Sexually suggestive content is also detected. Objects - detects
  # various objects within an image, including the approximate location. The
- # Objects argument is only available in English.
+ # Objects argument is only available in English. Brands - detects various
+ # brands within an image, including the approximate location. The Brands
+ # argument is only available in English.
  # @param details [Array<Details>] A string indicating which domain-specific
  # details to return. Multiple values should be comma-separated. Valid visual
  # feature types include: Celebrities - identifies celebrities if detected in
@@ -1677,7 +1892,9 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # - detects if the image is pornographic in nature (depicts nudity or a sex
  # act). Sexually suggestive content is also detected. Objects - detects
  # various objects within an image, including the approximate location. The
- # Objects argument is only available in English.
+ # Objects argument is only available in English. Brands - detects various
+ # brands within an image, including the approximate location. The Brands
+ # argument is only available in English.
  # @param details [Array<Details>] A string indicating which domain-specific
  # details to return. Multiple values should be comma-separated. Valid visual
  # feature types include: Celebrities - identifies celebrities if detected in
@@ -2845,6 +3062,117 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  promise.execute
  end

+ #
+ # Use this interface to get the result of a Read Document operation, employing
+ # the state-of-the-art Optical Character Recognition (OCR) algorithms optimized
+ # for text-heavy documents. When you use the Read Document interface, the
+ # response contains a field called "Operation-Location". The
+ # "Operation-Location" field contains the URL that you must use for your "Get
+ # Read Result operation" to access OCR results.
+ #
+ # @param image An image stream.
+ # @param mode [TextRecognitionMode] Type of text to recognize. Possible values
+ # include: 'Handwritten', 'Printed'
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
+ # will be added to the HTTP request.
+ #
+ #
+ def batch_read_file_in_stream(image, mode, custom_headers:nil)
+ response = batch_read_file_in_stream_async(image, mode, custom_headers:custom_headers).value!
+ nil
+ end
+
+ #
+ # Use this interface to get the result of a Read Document operation, employing
+ # the state-of-the-art Optical Character Recognition (OCR) algorithms optimized
+ # for text-heavy documents. When you use the Read Document interface, the
+ # response contains a field called "Operation-Location". The
+ # "Operation-Location" field contains the URL that you must use for your "Get
+ # Read Result operation" to access OCR results.
+ #
+ # @param image An image stream.
+ # @param mode [TextRecognitionMode] Type of text to recognize. Possible values
+ # include: 'Handwritten', 'Printed'
+ # @param custom_headers [Hash{String => String}] A hash of custom headers that
+ # will be added to the HTTP request.
+ #
+ # @return [MsRestAzure::AzureOperationResponse] HTTP response information.
+ #
+ def batch_read_file_in_stream_with_http_info(image, mode, custom_headers:nil)
+ batch_read_file_in_stream_async(image, mode, custom_headers:custom_headers).value!
+ end
+
+ #
+ # Use this interface to get the result of a Read Document operation, employing
+ # the state-of-the-art Optical Character Recognition (OCR) algorithms optimized
+ # for text-heavy documents. When you use the Read Document interface, the
+ # response contains a field called "Operation-Location". The
+ # "Operation-Location" field contains the URL that you must use for your "Get
+ # Read Result operation" to access OCR results.
+ #
+ # @param image An image stream.
+ # @param mode [TextRecognitionMode] Type of text to recognize. Possible values
+ # include: 'Handwritten', 'Printed'
+ # @param [Hash{String => String}] A hash of custom headers that will be added
+ # to the HTTP request.
+ #
+ # @return [Concurrent::Promise] Promise object which holds the HTTP response.
+ #
+ def batch_read_file_in_stream_async(image, mode, custom_headers:nil)
+ fail ArgumentError, 'endpoint is nil' if endpoint.nil?
+ fail ArgumentError, 'image is nil' if image.nil?
+ fail ArgumentError, 'mode is nil' if mode.nil?
+
+
+ request_headers = {}
+ request_headers['Content-Type'] = 'application/octet-stream'
+
+ # Set Headers
+ request_headers['x-ms-client-request-id'] = SecureRandom.uuid
+ request_headers['accept-language'] = accept_language unless accept_language.nil?
+
+ # Serialize Request
+ request_mapper = {
+ client_side_validation: true,
+ required: true,
+ serialized_name: 'Image',
+ type: {
+ name: 'Stream'
+ }
+ }
+ request_content = self.serialize(request_mapper, image)
+
+ path_template = 'read/core/asyncBatchAnalyze'
+
+ request_url = @base_url || self.base_url
+ request_url = request_url.gsub('{Endpoint}', endpoint)
+
+ options = {
+ middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
+ query_params: {'mode' => mode},
+ body: request_content,
+ headers: request_headers.merge(custom_headers || {}),
+ base_url: request_url
+ }
+ promise = self.make_request_async(:post, path_template, options)
+
+ promise = promise.then do |result|
+ http_response = result.response
+ status_code = http_response.status
+ response_content = http_response.body
+ unless status_code == 202
+ error_model = JSON.load(response_content)
+ fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
+ end
+
+ result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
+
+ result
+ end
+
+ promise.execute
+ end
+

  private
  #
@@ -2852,7 +3180,7 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  #
  def add_telemetry
  sdk_information = 'azure_cognitiveservices_computervision'
- sdk_information = "#{sdk_information}/0.18.1"
+ sdk_information = "#{sdk_information}/0.19.0"
  add_user_agent_information(sdk_information)
  end
  end
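
The new Read operations are fire-and-poll: batch_read_file submits the job (the service replies 202 and hands back an Operation-Location header), and get_read_operation_result fetches the OCR output later. A minimal usage sketch follows, assuming a V2.0 client built with MsRestAzure::CognitiveServicesCredentials; the environment-variable names and image URL are placeholders, not part of this gem, and the lowercase header lookup relies on the underlying HTTP client's case-insensitive headers.

require 'azure_cognitiveservices_computervision'

# Hypothetical configuration; substitute your own key and regional endpoint.
credentials = MsRestAzure::CognitiveServicesCredentials.new(ENV['COMPUTER_VISION_KEY'])
client = Azure::CognitiveServices::ComputerVision::V2_0::ComputerVisionClient.new(credentials)
client.endpoint = ENV['COMPUTER_VISION_ENDPOINT']

# Submit the image URL. The operation id comes back in the Operation-Location
# header, so use the *_with_http_info variant to reach the raw response.
op = client.batch_read_file_with_http_info('https://example.com/scan.png', 'Printed')
operation_location = op.response.headers['operation-location']
operation_id = operation_location.split('/').last

# Poll until the service reports a terminal status ('Succeeded' or 'Failed').
result = client.get_read_operation_result(operation_id)
until %w[Succeeded Failed].include?(result.status)
  sleep 1
  result = client.get_read_operation_result(operation_id)
end

# Each recognition result corresponds to one page of the submitted document.
result.recognition_results.each do |page|
  page.lines.each { |line| puts line.text }
end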
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/detected_brand.rb ADDED
@@ -0,0 +1,73 @@
+ # encoding: utf-8
+ # Code generated by Microsoft (R) AutoRest Code Generator.
+ # Changes may cause incorrect behavior and will be lost if the code is
+ # regenerated.
+
+ module Azure::CognitiveServices::ComputerVision::V2_0
+ module Models
+ #
+ # A brand detected in an image.
+ #
+ class DetectedBrand
+
+ include MsRestAzure
+
+ # @return [String] Label for the brand.
+ attr_accessor :name
+
+ # @return [Float] Confidence score of having observed the brand in the
+ # image, as a value ranging from 0 to 1.
+ attr_accessor :confidence
+
+ # @return [BoundingRect] Approximate location of the detected brand.
+ attr_accessor :rectangle
+
+
+ #
+ # Mapper for DetectedBrand class as Ruby Hash.
+ # This will be used for serialization/deserialization.
+ #
+ def self.mapper()
+ {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'DetectedBrand',
+ type: {
+ name: 'Composite',
+ class_name: 'DetectedBrand',
+ model_properties: {
+ name: {
+ client_side_validation: true,
+ required: false,
+ read_only: true,
+ serialized_name: 'name',
+ type: {
+ name: 'String'
+ }
+ },
+ confidence: {
+ client_side_validation: true,
+ required: false,
+ read_only: true,
+ serialized_name: 'confidence',
+ type: {
+ name: 'Double'
+ }
+ },
+ rectangle: {
+ client_side_validation: true,
+ required: false,
+ read_only: true,
+ serialized_name: 'rectangle',
+ type: {
+ name: 'Composite',
+ class_name: 'BoundingRect'
+ }
+ }
+ }
+ }
+ }
+ end
+ end
+ end
+ end
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/image_analysis.rb CHANGED
@@ -42,6 +42,9 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # detected in the image.
  attr_accessor :objects

+ # @return [Array<DetectedBrand>] Array of brands detected in the image.
+ attr_accessor :brands
+
  # @return [String] Id of the REST API request.
  attr_accessor :request_id

@@ -166,6 +169,23 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  }
  }
  },
+ brands: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'brands',
+ type: {
+ name: 'Sequence',
+ element: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'DetectedBrandElementType',
+ type: {
+ name: 'Composite',
+ class_name: 'DetectedBrand'
+ }
+ }
+ }
+ },
  request_id: {
  client_side_validation: true,
  required: false,
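
With Brands added to VisualFeatureTypes and the brands collection on ImageAnalysis, brand hits can be read straight off an analyze_image call. A sketch, reusing the client configured above; the image URL is illustrative, and it assumes BoundingRect exposes the x, y, w, h readers it has in the existing 2.0 models.

features = [Azure::CognitiveServices::ComputerVision::V2_0::Models::VisualFeatureTypes::Brands]
analysis = client.analyze_image('https://example.com/storefront.jpg', visual_features: features)

# brands may be nil when the feature was not requested or nothing was found.
(analysis.brands || []).each do |brand|
  r = brand.rectangle
  puts format('%s (%.2f) at [%d, %d, %dx%d]', brand.name, brand.confidence, r.x, r.y, r.w, r.h)
end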
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/line.rb CHANGED
@@ -6,20 +6,19 @@
  module Azure::CognitiveServices::ComputerVision::V2_0
  module Models
  #
- # Model object.
- #
+ # Json object representing a recognized text line.
  #
  class Line

  include MsRestAzure

- # @return [Array<Integer>]
+ # @return [Array<Integer>] Bounding box of a recognized line.
  attr_accessor :bounding_box

- # @return [String]
+ # @return [String] The text content of the line.
  attr_accessor :text

- # @return [Array<Word>]
+ # @return [Array<Word>] List of words in the text line.
  attr_accessor :words


data/lib/2.0/generated/azure_cognitiveservices_computervision/models/read_operation_result.rb ADDED
@@ -0,0 +1,70 @@
+ # encoding: utf-8
+ # Code generated by Microsoft (R) AutoRest Code Generator.
+ # Changes may cause incorrect behavior and will be lost if the code is
+ # regenerated.
+
+ module Azure::CognitiveServices::ComputerVision::V2_0
+ module Models
+ #
+ # OCR result of the read operation.
+ #
+ class ReadOperationResult
+
+ include MsRestAzure
+
+ # @return [TextOperationStatusCodes] Status of the read operation.
+ # Possible values include: 'Not Started', 'Running', 'Failed',
+ # 'Succeeded'
+ attr_accessor :status
+
+ # @return [Array<TextRecognitionResult>] A array of text recognition
+ # result of the read operation.
+ attr_accessor :recognition_results
+
+
+ #
+ # Mapper for ReadOperationResult class as Ruby Hash.
+ # This will be used for serialization/deserialization.
+ #
+ def self.mapper()
+ {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'ReadOperationResult',
+ type: {
+ name: 'Composite',
+ class_name: 'ReadOperationResult',
+ model_properties: {
+ status: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'status',
+ type: {
+ name: 'Enum',
+ module: 'TextOperationStatusCodes'
+ }
+ },
+ recognition_results: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'recognitionResults',
+ type: {
+ name: 'Sequence',
+ element: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'TextRecognitionResultElementType',
+ type: {
+ name: 'Composite',
+ class_name: 'TextRecognitionResult'
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ end
+ end
+ end
+ end
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_operation_result.rb CHANGED
@@ -6,8 +6,7 @@
  module Azure::CognitiveServices::ComputerVision::V2_0
  module Models
  #
- # Model object.
- #
+ # Result of recognition text operation.
  #
  class TextOperationResult

@@ -18,7 +17,8 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  # 'Succeeded'
  attr_accessor :status

- # @return [RecognitionResult]
+ # @return [TextRecognitionResult] Text recognition result of the text
+ # operation.
  attr_accessor :recognition_result


@@ -50,7 +50,7 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  serialized_name: 'recognitionResult',
  type: {
  name: 'Composite',
- class_name: 'RecognitionResult'
+ class_name: 'TextRecognitionResult'
  }
  }
  }
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result.rb ADDED
@@ -0,0 +1,114 @@
+ # encoding: utf-8
+ # Code generated by Microsoft (R) AutoRest Code Generator.
+ # Changes may cause incorrect behavior and will be lost if the code is
+ # regenerated.
+
+ module Azure::CognitiveServices::ComputerVision::V2_0
+ module Models
+ #
+ # Json object representing a recognized text region
+ #
+ class TextRecognitionResult
+
+ include MsRestAzure
+
+ # @return [Integer] The 1-based page number of the recognition result.
+ attr_accessor :page
+
+ # @return [Float] The orientation of the image in degrees in the
+ # clockwise direction. Range between [0, 360).
+ attr_accessor :clockwise_orientation
+
+ # @return [Float] The width of the image in pixels or the PDF in inches.
+ attr_accessor :width
+
+ # @return [Float] The height of the image in pixels or the PDF in inches.
+ attr_accessor :height
+
+ # @return [TextRecognitionResultDimensionUnit] The unit used in the
+ # Width, Height and BoundingBox. For images, the unit is "pixel". For
+ # PDF, the unit is "inch". Possible values include: 'pixel', 'inch'
+ attr_accessor :unit
+
+ # @return [Array<Line>] A list of recognized text lines.
+ attr_accessor :lines
+
+
+ #
+ # Mapper for TextRecognitionResult class as Ruby Hash.
+ # This will be used for serialization/deserialization.
+ #
+ def self.mapper()
+ {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'TextRecognitionResult',
+ type: {
+ name: 'Composite',
+ class_name: 'TextRecognitionResult',
+ model_properties: {
+ page: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'page',
+ type: {
+ name: 'Number'
+ }
+ },
+ clockwise_orientation: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'clockwiseOrientation',
+ type: {
+ name: 'Double'
+ }
+ },
+ width: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'width',
+ type: {
+ name: 'Double'
+ }
+ },
+ height: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'height',
+ type: {
+ name: 'Double'
+ }
+ },
+ unit: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'unit',
+ type: {
+ name: 'Enum',
+ module: 'TextRecognitionResultDimensionUnit'
+ }
+ },
+ lines: {
+ client_side_validation: true,
+ required: true,
+ serialized_name: 'lines',
+ type: {
+ name: 'Sequence',
+ element: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'LineElementType',
+ type: {
+ name: 'Composite',
+ class_name: 'Line'
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ end
+ end
+ end
+ end
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_confidence_class.rb ADDED
@@ -0,0 +1,16 @@
+ # encoding: utf-8
+ # Code generated by Microsoft (R) AutoRest Code Generator.
+ # Changes may cause incorrect behavior and will be lost if the code is
+ # regenerated.
+
+ module Azure::CognitiveServices::ComputerVision::V2_0
+ module Models
+ #
+ # Defines values for TextRecognitionResultConfidenceClass
+ #
+ module TextRecognitionResultConfidenceClass
+ High = "High"
+ Low = "Low"
+ end
+ end
+ end
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_dimension_unit.rb ADDED
@@ -0,0 +1,16 @@
+ # encoding: utf-8
+ # Code generated by Microsoft (R) AutoRest Code Generator.
+ # Changes may cause incorrect behavior and will be lost if the code is
+ # regenerated.
+
+ module Azure::CognitiveServices::ComputerVision::V2_0
+ module Models
+ #
+ # Defines values for TextRecognitionResultDimensionUnit
+ #
+ module TextRecognitionResultDimensionUnit
+ Pixel = "pixel"
+ Inch = "inch"
+ end
+ end
+ end
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/visual_feature_types.rb CHANGED
@@ -17,6 +17,7 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  Tags = "Tags"
  Description = "Description"
  Objects = "Objects"
+ Brands = "Brands"
  end
  end
  end
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/word.rb CHANGED
@@ -6,19 +6,22 @@
  module Azure::CognitiveServices::ComputerVision::V2_0
  module Models
  #
- # Model object.
- #
+ # Json object representing a recognized word.
  #
  class Word

  include MsRestAzure

- # @return [Array<Integer>]
+ # @return [Array<Integer>] Bounding box of a recognized word.
  attr_accessor :bounding_box

- # @return [String]
+ # @return [String] The text content of the word.
  attr_accessor :text

+ # @return [TextRecognitionResultConfidenceClass] Qualitative confidence
+ # measure. Possible values include: 'High', 'Low'
+ attr_accessor :confidence
+

  #
  # Mapper for Word class as Ruby Hash.
@@ -35,7 +38,7 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  model_properties: {
  bounding_box: {
  client_side_validation: true,
- required: false,
+ required: true,
  serialized_name: 'boundingBox',
  type: {
  name: 'Sequence',
@@ -51,11 +54,20 @@ module Azure::CognitiveServices::ComputerVision::V2_0
  },
  text: {
  client_side_validation: true,
- required: false,
+ required: true,
  serialized_name: 'text',
  type: {
  name: 'String'
  }
+ },
+ confidence: {
+ client_side_validation: true,
+ required: false,
+ serialized_name: 'confidence',
+ type: {
+ name: 'Enum',
+ module: 'TextRecognitionResultConfidenceClass'
+ }
  }
  }
  }
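
Word now carries a qualitative confidence ('High'/'Low'), which makes it easy to flag shaky OCR while walking a ReadOperationResult. A sketch, reusing the polled `result` from the earlier example; note that confidence may be nil when the service omits it, and the enum values deserialize to the plain strings shown above.

result.recognition_results.each do |page|
  puts "Page #{page.page}: #{page.width}x#{page.height} #{page.unit}"
  page.lines.each do |line|
    shaky = line.words.select { |w| w.confidence == 'Low' }
    puts line.text
    puts "  low-confidence words: #{shaky.map(&:text).join(', ')}" unless shaky.empty?
  end
end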
data/lib/profiles/latest/modules/computervision_profile_module.rb CHANGED
@@ -24,27 +24,31 @@ module Azure::ComputerVision::Profiles::Latest
  OcrResult = Azure::CognitiveServices::ComputerVision::V2_0::Models::OcrResult
  DetectedObject = Azure::CognitiveServices::ComputerVision::V2_0::Models::DetectedObject
  TagResult = Azure::CognitiveServices::ComputerVision::V2_0::Models::TagResult
- ImageAnalysis = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageAnalysis
+ ImageMetadata = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageMetadata
  AreaOfInterestResult = Azure::CognitiveServices::ComputerVision::V2_0::Models::AreaOfInterestResult
- CelebritiesModel = Azure::CognitiveServices::ComputerVision::V2_0::Models::CelebritiesModel
+ ImageDescription = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageDescription
  ImageUrl = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageUrl
- AdultInfo = Azure::CognitiveServices::ComputerVision::V2_0::Models::AdultInfo
+ CategoryDetail = Azure::CognitiveServices::ComputerVision::V2_0::Models::CategoryDetail
  ComputerVisionError = Azure::CognitiveServices::ComputerVision::V2_0::Models::ComputerVisionError
- ImageCaption = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageCaption
+ ImageType = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageType
  LandmarkResults = Azure::CognitiveServices::ComputerVision::V2_0::Models::LandmarkResults
- ObjectHierarchy = Azure::CognitiveServices::ComputerVision::V2_0::Models::ObjectHierarchy
+ FaceDescription = Azure::CognitiveServices::ComputerVision::V2_0::Models::FaceDescription
  CelebrityResults = Azure::CognitiveServices::ComputerVision::V2_0::Models::CelebrityResults
- ImageDescription = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageDescription
+ DetectedBrand = Azure::CognitiveServices::ComputerVision::V2_0::Models::DetectedBrand
  Word = Azure::CognitiveServices::ComputerVision::V2_0::Models::Word
- ImageType = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageType
+ CelebritiesModel = Azure::CognitiveServices::ComputerVision::V2_0::Models::CelebritiesModel
  Line = Azure::CognitiveServices::ComputerVision::V2_0::Models::Line
- ImageMetadata = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageMetadata
- RecognitionResult = Azure::CognitiveServices::ComputerVision::V2_0::Models::RecognitionResult
- FaceDescription = Azure::CognitiveServices::ComputerVision::V2_0::Models::FaceDescription
+ ImageCaption = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageCaption
+ TextRecognitionResult = Azure::CognitiveServices::ComputerVision::V2_0::Models::TextRecognitionResult
+ ImageAnalysis = Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageAnalysis
  TextOperationResult = Azure::CognitiveServices::ComputerVision::V2_0::Models::TextOperationResult
- CategoryDetail = Azure::CognitiveServices::ComputerVision::V2_0::Models::CategoryDetail
+ ObjectHierarchy = Azure::CognitiveServices::ComputerVision::V2_0::Models::ObjectHierarchy
+ ReadOperationResult = Azure::CognitiveServices::ComputerVision::V2_0::Models::ReadOperationResult
+ AdultInfo = Azure::CognitiveServices::ComputerVision::V2_0::Models::AdultInfo
  Gender = Azure::CognitiveServices::ComputerVision::V2_0::Models::Gender
  TextOperationStatusCodes = Azure::CognitiveServices::ComputerVision::V2_0::Models::TextOperationStatusCodes
+ TextRecognitionResultDimensionUnit = Azure::CognitiveServices::ComputerVision::V2_0::Models::TextRecognitionResultDimensionUnit
+ TextRecognitionResultConfidenceClass = Azure::CognitiveServices::ComputerVision::V2_0::Models::TextRecognitionResultConfidenceClass
  OcrLanguages = Azure::CognitiveServices::ComputerVision::V2_0::Models::OcrLanguages
  VisualFeatureTypes = Azure::CognitiveServices::ComputerVision::V2_0::Models::VisualFeatureTypes
  TextRecognitionMode = Azure::CognitiveServices::ComputerVision::V2_0::Models::TextRecognitionMode
@@ -146,62 +150,68 @@ module Azure::ComputerVision::Profiles::Latest
  def tag_result
  Azure::CognitiveServices::ComputerVision::V2_0::Models::TagResult
  end
- def image_analysis
- Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageAnalysis
+ def image_metadata
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageMetadata
  end
  def area_of_interest_result
  Azure::CognitiveServices::ComputerVision::V2_0::Models::AreaOfInterestResult
  end
- def celebrities_model
- Azure::CognitiveServices::ComputerVision::V2_0::Models::CelebritiesModel
+ def image_description
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageDescription
  end
  def image_url
  Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageUrl
  end
- def adult_info
- Azure::CognitiveServices::ComputerVision::V2_0::Models::AdultInfo
+ def category_detail
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::CategoryDetail
  end
  def computer_vision_error
  Azure::CognitiveServices::ComputerVision::V2_0::Models::ComputerVisionError
  end
- def image_caption
- Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageCaption
+ def image_type
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageType
  end
  def landmark_results
  Azure::CognitiveServices::ComputerVision::V2_0::Models::LandmarkResults
  end
- def object_hierarchy
- Azure::CognitiveServices::ComputerVision::V2_0::Models::ObjectHierarchy
+ def face_description
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::FaceDescription
  end
  def celebrity_results
  Azure::CognitiveServices::ComputerVision::V2_0::Models::CelebrityResults
  end
- def image_description
- Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageDescription
+ def detected_brand
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::DetectedBrand
  end
  def word
  Azure::CognitiveServices::ComputerVision::V2_0::Models::Word
  end
- def image_type
- Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageType
+ def celebrities_model
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::CelebritiesModel
  end
  def line
  Azure::CognitiveServices::ComputerVision::V2_0::Models::Line
  end
- def image_metadata
- Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageMetadata
+ def image_caption
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageCaption
  end
- def recognition_result
- Azure::CognitiveServices::ComputerVision::V2_0::Models::RecognitionResult
+ def text_recognition_result
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::TextRecognitionResult
  end
- def face_description
- Azure::CognitiveServices::ComputerVision::V2_0::Models::FaceDescription
+ def image_analysis
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::ImageAnalysis
  end
  def text_operation_result
  Azure::CognitiveServices::ComputerVision::V2_0::Models::TextOperationResult
  end
- def category_detail
- Azure::CognitiveServices::ComputerVision::V2_0::Models::CategoryDetail
+ def object_hierarchy
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::ObjectHierarchy
+ end
+ def read_operation_result
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::ReadOperationResult
+ end
+ def adult_info
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::AdultInfo
  end
  def gender
  Azure::CognitiveServices::ComputerVision::V2_0::Models::Gender
@@ -209,6 +219,12 @@ module Azure::ComputerVision::Profiles::Latest
  def text_operation_status_codes
  Azure::CognitiveServices::ComputerVision::V2_0::Models::TextOperationStatusCodes
  end
+ def text_recognition_result_dimension_unit
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::TextRecognitionResultDimensionUnit
+ end
+ def text_recognition_result_confidence_class
+ Azure::CognitiveServices::ComputerVision::V2_0::Models::TextRecognitionResultConfidenceClass
+ end
  def ocr_languages
  Azure::CognitiveServices::ComputerVision::V2_0::Models::OcrLanguages
  end
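
The Latest profile now re-exports the Read models and enums and no longer exposes RecognitionResult. A small sketch of referencing them through the profile aliases; it assumes the usual generated layout in which these constants live under the profile's Models namespace, which is not visible in the hunk above.

latest = Azure::ComputerVision::Profiles::Latest::Models
latest::ReadOperationResult                        # resolves to the V2_0 ReadOperationResult
latest::TextRecognitionResultDimensionUnit::Inch   # => "inch"
latest::TextRecognitionResultConfidenceClass::Low  # => "Low"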
data/lib/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Licensed under the MIT License. See License.txt in the project root for license information.

  module Azure::CognitiveServices::ComputerVision
- VERSION = '0.18.1'
+ VERSION = '0.19.0'
  end
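
To pick up the new APIs, bump the gem dependency; the constraint below is illustrative:

# Gemfile
gem 'azure_cognitiveservices_computervision', '~> 0.19.0'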
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: azure_cognitiveservices_computervision
  version: !ruby/object:Gem::Version
- version: 0.18.1
+ version: 0.19.0
  platform: ruby
  authors:
  - Microsoft Corporation
@@ -142,6 +142,7 @@ files:
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/computer_vision_error.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/details.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/detect_result.rb
+ - lib/2.0/generated/azure_cognitiveservices_computervision/models/detected_brand.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/detected_object.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/domain_model_results.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/face_description.rb
@@ -166,11 +167,14 @@ files:
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/ocr_region.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/ocr_result.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/ocr_word.rb
- - lib/2.0/generated/azure_cognitiveservices_computervision/models/recognition_result.rb
+ - lib/2.0/generated/azure_cognitiveservices_computervision/models/read_operation_result.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/tag_result.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/text_operation_result.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/text_operation_status_codes.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_mode.rb
+ - lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result.rb
+ - lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_confidence_class.rb
+ - lib/2.0/generated/azure_cognitiveservices_computervision/models/text_recognition_result_dimension_unit.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/visual_feature_types.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/models/word.rb
  - lib/2.0/generated/azure_cognitiveservices_computervision/module_definition.rb
data/lib/2.0/generated/azure_cognitiveservices_computervision/models/recognition_result.rb DELETED
@@ -1,56 +0,0 @@
- # encoding: utf-8
- # Code generated by Microsoft (R) AutoRest Code Generator.
- # Changes may cause incorrect behavior and will be lost if the code is
- # regenerated.
-
- module Azure::CognitiveServices::ComputerVision::V2_0
- module Models
- #
- # Model object.
- #
- #
- class RecognitionResult
-
- include MsRestAzure
-
- # @return [Array<Line>]
- attr_accessor :lines
-
-
- #
- # Mapper for RecognitionResult class as Ruby Hash.
- # This will be used for serialization/deserialization.
- #
- def self.mapper()
- {
- client_side_validation: true,
- required: false,
- serialized_name: 'RecognitionResult',
- type: {
- name: 'Composite',
- class_name: 'RecognitionResult',
- model_properties: {
- lines: {
- client_side_validation: true,
- required: false,
- serialized_name: 'lines',
- type: {
- name: 'Sequence',
- element: {
- client_side_validation: true,
- required: false,
- serialized_name: 'LineElementType',
- type: {
- name: 'Composite',
- class_name: 'Line'
- }
- }
- }
- }
- }
- }
- }
- end
- end
- end
- end
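
Removing RecognitionResult is the one breaking change in this release: the class is gone from the V2_0 models and from the Latest profile, and TextOperationResult#recognition_result now deserializes into TextRecognitionResult. Code that referenced the old class should switch to the new one; the traversal itself is unchanged, as in this sketch (operation id obtained from the recognize_text Operation-Location header, as before):

op_result = client.get_text_operation_result(operation_id)
op_result.recognition_result.lines.each { |line| puts line.text }  # now a TextRecognitionResult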