ibm_watson 0.1.0

Files changed (48)
  1. checksums.yaml +7 -0
  2. data/README.md +258 -0
  3. data/bin/console +14 -0
  4. data/bin/setup +8 -0
  5. data/lib/ibm_watson.rb +16 -0
  6. data/lib/ibm_watson/assistant_v1.rb +1997 -0
  7. data/lib/ibm_watson/detailed_response.rb +21 -0
  8. data/lib/ibm_watson/discovery_v1.rb +2039 -0
  9. data/lib/ibm_watson/iam_token_manager.rb +166 -0
  10. data/lib/ibm_watson/language_translator_v3.rb +411 -0
  11. data/lib/ibm_watson/natural_language_classifier_v1.rb +309 -0
  12. data/lib/ibm_watson/natural_language_understanding_v1.rb +297 -0
  13. data/lib/ibm_watson/personality_insights_v3.rb +260 -0
  14. data/lib/ibm_watson/speech_to_text_v1.rb +2153 -0
  15. data/lib/ibm_watson/text_to_speech_v1.rb +716 -0
  16. data/lib/ibm_watson/tone_analyzer_v3.rb +287 -0
  17. data/lib/ibm_watson/version.rb +3 -0
  18. data/lib/ibm_watson/visual_recognition_v3.rb +579 -0
  19. data/lib/ibm_watson/watson_api_exception.rb +41 -0
  20. data/lib/ibm_watson/watson_service.rb +180 -0
  21. data/lib/ibm_watson/websocket/recognize_callback.rb +32 -0
  22. data/lib/ibm_watson/websocket/speech_to_text_websocket_listener.rb +162 -0
  23. data/rakefile +45 -0
  24. data/test/integration/test_assistant_v1.rb +645 -0
  25. data/test/integration/test_discovery_v1.rb +200 -0
  26. data/test/integration/test_iam_assistant_v1.rb +707 -0
  27. data/test/integration/test_language_translator_v3.rb +81 -0
  28. data/test/integration/test_natural_language_classifier_v1.rb +69 -0
  29. data/test/integration/test_natural_language_understanding_v1.rb +98 -0
  30. data/test/integration/test_personality_insights_v3.rb +95 -0
  31. data/test/integration/test_speech_to_text_v1.rb +187 -0
  32. data/test/integration/test_text_to_speech_v1.rb +81 -0
  33. data/test/integration/test_tone_analyzer_v3.rb +72 -0
  34. data/test/integration/test_visual_recognition_v3.rb +64 -0
  35. data/test/test_helper.rb +22 -0
  36. data/test/unit/test_assistant_v1.rb +1598 -0
  37. data/test/unit/test_discovery_v1.rb +1144 -0
  38. data/test/unit/test_iam_token_manager.rb +165 -0
  39. data/test/unit/test_language_translator_v3.rb +461 -0
  40. data/test/unit/test_natural_language_classifier_v1.rb +187 -0
  41. data/test/unit/test_natural_language_understanding_v1.rb +132 -0
  42. data/test/unit/test_personality_insights_v3.rb +172 -0
  43. data/test/unit/test_speech_to_text_v1.rb +755 -0
  44. data/test/unit/test_text_to_speech_v1.rb +336 -0
  45. data/test/unit/test_tone_analyzer_v3.rb +200 -0
  46. data/test/unit/test_vcap_using_personality_insights.rb +150 -0
  47. data/test/unit/test_visual_recognition_v3.rb +345 -0
  48. metadata +302 -0
data/lib/ibm_watson/tone_analyzer_v3.rb
@@ -0,0 +1,287 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2018 IBM All Rights Reserved.
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # The IBM Watson™ Tone Analyzer service uses linguistic analysis to detect
18
+ # emotional and language tones in written text. The service can analyze tone at both the
19
+ # document and sentence levels. You can use the service to understand how your written
20
+ # communications are perceived and then to improve the tone of your communications.
21
+ # Businesses can use the service to learn the tone of their customers' communications and
22
+ # to respond to each customer appropriately, or to understand and improve their customer
23
+ # conversations.
24
+ #
25
+ # **Note:** Request logging is disabled for the Tone Analyzer service. The service neither
26
+ # logs nor retains data from requests and responses, regardless of whether the
27
+ # `X-Watson-Learning-Opt-Out` request header is set.
28
+
29
+ require "concurrent"
30
+ require "erb"
31
+ require "json"
32
+ require_relative "./detailed_response"
33
+
34
+ require_relative "./watson_service"
35
+
36
+ module IBMWatson
37
+ ##
38
+ # The Tone Analyzer V3 service.
39
+ class ToneAnalyzerV3
40
+ include Concurrent::Async
41
+ ##
42
+ # @!method initialize(args)
43
+ # Construct a new client for the Tone Analyzer service.
44
+ #
45
+ # @param args [Hash] The args to initialize with
46
+ # @option args version [String] The API version date to use with the service, in
47
+ # "YYYY-MM-DD" format. Whenever the API is changed in a backwards
48
+ # incompatible way, a new minor version of the API is released.
49
+ # The service uses the API version for the date you specify, or
50
+ # the most recent version before that date. Note that you should
51
+ # not programmatically specify the current date at runtime, in
52
+ # case the API has been updated since your application's release.
53
+ # Instead, specify a version date that is compatible with your
54
+ # application, and don't change it until your application is
55
+ # ready for a later version.
56
+ # @option args url [String] The base url to use when contacting the service (e.g.
57
+ # "https://gateway.watsonplatform.net/tone-analyzer/api").
58
+ # The base url may differ between Bluemix regions.
59
+ # @option args username [String] The username used to authenticate with the service.
60
+ # Username and password credentials are only required to run your
61
+ # application locally or outside of Bluemix. When running on
62
+ # Bluemix, the credentials will be automatically loaded from the
63
+ # `VCAP_SERVICES` environment variable.
64
+ # @option args password [String] The password used to authenticate with the service.
65
+ # Username and password credentials are only required to run your
66
+ # application locally or outside of Bluemix. When running on
67
+ # Bluemix, the credentials will be automatically loaded from the
68
+ # `VCAP_SERVICES` environment variable.
69
+ # @option args iam_api_key [String] An API key that can be used to request IAM tokens. If
70
+ # this API key is provided, the SDK will manage the token and handle the
71
+ # refreshing.
72
+ # @option args iam_access_token [String] An IAM access token is fully managed by the application.
73
+ # Responsibility falls on the application to refresh the token, either before
74
+ # it expires or reactively upon receiving a 401 from the service as any requests
75
+ # made with an expired token will fail.
76
+ # @option args iam_url [String] An optional URL for the IAM service API. Defaults to
77
+ # 'https://iam.ng.bluemix.net/identity/token'.
78
+ def initialize(args = {})
79
+ @__async_initialized__ = false
80
+ super()
81
+ defaults = {}
82
+ defaults[:version] = nil
83
+ defaults[:url] = "https://gateway.watsonplatform.net/tone-analyzer/api"
84
+ defaults[:username] = nil
85
+ defaults[:password] = nil
86
+ defaults[:iam_api_key] = nil
87
+ defaults[:iam_access_token] = nil
88
+ defaults[:iam_url] = nil
89
+ args = defaults.merge(args)
90
+ @watson_service = WatsonService.new(
91
+ vcap_services_name: "tone_analyzer",
92
+ url: args[:url],
93
+ username: args[:username],
94
+ password: args[:password],
95
+ iam_api_key: args[:iam_api_key],
96
+ iam_access_token: args[:iam_access_token],
97
+ iam_url: args[:iam_url],
98
+ use_vcap_services: true
99
+ )
100
+ @version = args[:version]
101
+ end
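A minimal construction sketch based on the options above; the credentials are placeholders, and 2017-09-21 is the most recent version date mentioned in these docs:

require "ibm_watson/tone_analyzer_v3"

# Placeholder credentials; when running on Bluemix they are loaded from VCAP_SERVICES instead.
tone_analyzer = IBMWatson::ToneAnalyzerV3.new(
  version: "2017-09-21",
  username: "{username}",
  password: "{password}"
)
# With IAM instead of username/password, pass iam_api_key: "{iam_api_key}" and the SDK manages
# the token, or pass iam_access_token: "{token}" to manage refreshing yourself.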
102
+
103
+ # :nocov:
104
+ def add_default_headers(headers: {})
105
+ @watson_service.add_default_headers(headers: headers)
106
+ end
107
+
108
+ def _iam_access_token(iam_access_token:)
109
+ @watson_service._iam_access_token(iam_access_token: iam_access_token)
110
+ end
111
+
112
+ def _iam_api_key(iam_api_key:)
113
+ @watson_service._iam_api_key(iam_api_key: iam_api_key)
114
+ end
115
+
116
+ # @return [DetailedResponse]
117
+ def request(args)
118
+ @watson_service.request(args)
119
+ end
120
+
121
+ # @note Chainable
122
+ # @param headers [Hash] Custom headers to be sent with the request
123
+ # @return [self]
124
+ def headers(headers)
125
+ @watson_service.headers(headers)
126
+ self
127
+ end
128
+
129
+ def password=(password)
130
+ @watson_service.password = password
131
+ end
132
+
133
+ def password
134
+ @watson_service.password
135
+ end
136
+
137
+ def username=(username)
138
+ @watson_service.username = username
139
+ end
140
+
141
+ def username
142
+ @watson_service.username
143
+ end
144
+
145
+ def url=(url)
146
+ @watson_service.url = url
147
+ end
148
+
149
+ def url
150
+ @watson_service.url
151
+ end
152
+ # :nocov:
153
+ #########################
154
+ # Methods
155
+ #########################
156
+
157
+ ##
158
+ # @!method tone(tone_input:, content_type:, sentences: nil, tones: nil, content_language: nil, accept_language: nil)
159
+ # Analyze general tone.
160
+ # Use the general purpose endpoint to analyze the tone of your input content. The
161
+ # service analyzes the content for emotional and language tones. The method always
162
+ # analyzes the tone of the full document; by default, it also analyzes the tone of
163
+ # each individual sentence of the content.
164
+ #
165
+ # You can submit no more than 128 KB of total input content and no more than 1000
166
+ # individual sentences in JSON, plain text, or HTML format. The service analyzes the
167
+ # first 1000 sentences for document-level analysis and only the first 100 sentences
168
+ # for sentence-level analysis.
169
+ #
170
+ # Per the JSON specification, the default character encoding for JSON content is
171
+ # effectively always UTF-8; per the HTTP specification, the default encoding for
172
+ # plain text and HTML is ISO-8859-1 (effectively, the ASCII character set). When
173
+ # specifying a content type of plain text or HTML, include the `charset` parameter
174
+ # to indicate the character encoding of the input text; for example: `Content-Type:
175
+ # text/plain;charset=utf-8`. For `text/html`, the service removes HTML tags and
176
+ # analyzes only the textual content.
177
+ # @param tone_input [ToneInput] JSON, plain text, or HTML input that contains the content to be analyzed. For JSON
178
+ # input, provide an object of type `ToneInput`.
179
+ # @param content_type [String] The type of the input. A character encoding can be specified by including a
180
+ # `charset` parameter. For example, 'text/plain;charset=utf-8'.
181
+ # @param sentences [Boolean] Indicates whether the service is to return an analysis of each individual sentence
182
+ # in addition to its analysis of the full document. If `true` (the default), the
183
+ # service returns results for each sentence.
184
+ # @param tones [Array[String]] **`2017-09-21`:** Deprecated. The service continues to accept the parameter for
185
+ # backward-compatibility, but the parameter no longer affects the response.
186
+ #
187
+ # **`2016-05-19`:** A comma-separated list of tones for which the service is to
188
+ # return its analysis of the input; the indicated tones apply both to the full
189
+ # document and to individual sentences of the document. You can specify one or more
190
+ # of the valid values. Omit the parameter to request results for all three tones.
191
+ # @param content_language [String] The language of the input text for the request: English or French. Regional
192
+ # variants are treated as their parent language; for example, `en-US` is interpreted
193
+ # as `en`. The input content must match the specified language. Do not submit
194
+ # content that contains both languages. You can use different languages for
195
+ # **Content-Language** and **Accept-Language**.
196
+ # * **`2017-09-21`:** Accepts `en` or `fr`.
197
+ # * **`2016-05-19`:** Accepts only `en`.
198
+ # @param accept_language [String] The desired language of the response. For two-character arguments, regional
199
+ # variants are treated as their parent language; for example, `en-US` is interpreted
200
+ # as `en`. You can use different languages for **Content-Language** and
201
+ # **Accept-Language**.
202
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
203
+ def tone(tone_input:, content_type:, sentences: nil, tones: nil, content_language: nil, accept_language: nil)
204
+ raise ArgumentError("tone_input must be provided") if tone_input.nil?
205
+ raise ArgumentError("content_type must be provided") if content_type.nil?
206
+ headers = {
207
+ "Content-Type" => content_type,
208
+ "Content-Language" => content_language,
209
+ "Accept-Language" => accept_language
210
+ }
211
+ params = {
212
+ "version" => @version,
213
+ "sentences" => sentences,
214
+ "tones" => tones.to_a
215
+ }
216
+ if content_type == "application/json" && tone_input.instance_of?(Hash)
217
+ data = tone_input.to_json
218
+ else
219
+ data = tone_input
220
+ end
221
+ method_url = "/v3/tone"
222
+ response = request(
223
+ method: "POST",
224
+ url: method_url,
225
+ headers: headers,
226
+ params: params,
227
+ data: data,
228
+ accept_json: true
229
+ )
230
+ response
231
+ end
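An illustrative call sketch, assuming the client constructed above; the input text is a placeholder, and the hash form relies on the JSON conversion shown in the method body:

response = tone_analyzer.tone(
  tone_input: { "text" => "This is a placeholder sentence to analyze." },
  content_type: "application/json",
  sentences: true
)
# response is the DetailedResponse described above; it wraps the HTTP status, headers,
# and the tone analysis returned by the service.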
232
+
233
+ ##
234
+ # @!method tone_chat(utterances:, content_language: nil, accept_language: nil)
235
+ # Analyze customer engagement tone.
236
+ # Use the customer engagement endpoint to analyze the tone of customer service and
237
+ # customer support conversations. For each utterance of a conversation, the method
238
+ # reports the most prevalent subset of the following seven tones: sad, frustrated,
239
+ # satisfied, excited, polite, impolite, and sympathetic.
240
+ #
241
+ # If you submit more than 50 utterances, the service returns a warning for the
242
+ # overall content and analyzes only the first 50 utterances. If you submit a single
243
+ # utterance that contains more than 500 characters, the service returns an error for
244
+ # that utterance and does not analyze the utterance. The request fails if all
245
+ # utterances have more than 500 characters.
246
+ #
247
+ # Per the JSON specification, the default character encoding for JSON content is
248
+ # effectively always UTF-8.
249
+ # @param utterances [Array[Utterance]] An array of `Utterance` objects that provides the input content that the service
250
+ # is to analyze.
251
+ # @param content_language [String] The language of the input text for the request: English or French. Regional
252
+ # variants are treated as their parent language; for example, `en-US` is interpreted
253
+ # as `en`. The input content must match the specified language. Do not submit
254
+ # content that contains both languages. You can use different languages for
255
+ # **Content-Language** and **Accept-Language**.
256
+ # * **`2017-09-21`:** Accepts `en` or `fr`.
257
+ # * **`2016-05-19`:** Accepts only `en`.
258
+ # @param accept_language [String] The desired language of the response. For two-character arguments, regional
259
+ # variants are treated as their parent language; for example, `en-US` is interpreted
260
+ # as `en`. You can use different languages for **Content-Language** and
261
+ # **Accept-Language**.
262
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
263
+ def tone_chat(utterances:, content_language: nil, accept_language: nil)
264
+ raise ArgumentError("utterances must be provided") if utterances.nil?
265
+ headers = {
266
+ "Content-Language" => content_language,
267
+ "Accept-Language" => accept_language
268
+ }
269
+ params = {
270
+ "version" => @version
271
+ }
272
+ data = {
273
+ "utterances" => utterances
274
+ }
275
+ method_url = "/v3/tone_chat"
276
+ response = request(
277
+ method: "POST",
278
+ url: method_url,
279
+ headers: headers,
280
+ params: params,
281
+ json: data,
282
+ accept_json: true
283
+ )
284
+ response
285
+ end
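A usage sketch for the customer engagement endpoint; the utterances are placeholders in the text/user shape the service expects:

utterances = [
  { "text" => "Hello, how can I help you today?", "user" => "agent" },
  { "text" => "My order still has not arrived.", "user" => "customer" }
]
response = tone_analyzer.tone_chat(utterances: utterances)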
286
+ end
287
+ end
data/lib/ibm_watson/version.rb
@@ -0,0 +1,3 @@
1
+ module IBMWatson
2
+ VERSION = "0.1.0".freeze
3
+ end
data/lib/ibm_watson/visual_recognition_v3.rb
@@ -0,0 +1,579 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2018 IBM All Rights Reserved.
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # The IBM Watson™ Visual Recognition service uses deep learning algorithms to
18
+ # identify scenes, objects, and faces in images you upload to the service. You can create
19
+ # and train a custom classifier to identify subjects that suit your needs.
20
+
21
+ require "concurrent"
22
+ require "erb"
23
+ require "json"
24
+ require_relative "./detailed_response"
25
+
26
+ require_relative "./watson_service"
27
+
28
+ module IBMWatson
29
+ ##
30
+ # The Visual Recognition V3 service.
31
+ class VisualRecognitionV3
32
+ include Concurrent::Async
33
+ ##
34
+ # @!method initialize(args)
35
+ # Construct a new client for the Visual Recognition service.
36
+ #
37
+ # @param args [Hash] The args to initialize with
38
+ # @option args version [String] The API version date to use with the service, in
39
+ # "YYYY-MM-DD" format. Whenever the API is changed in a backwards
40
+ # incompatible way, a new minor version of the API is released.
41
+ # The service uses the API version for the date you specify, or
42
+ # the most recent version before that date. Note that you should
43
+ # not programmatically specify the current date at runtime, in
44
+ # case the API has been updated since your application's release.
45
+ # Instead, specify a version date that is compatible with your
46
+ # application, and don't change it until your application is
47
+ # ready for a later version.
48
+ # @option args url [String] The base url to use when contacting the service (e.g.
49
+ # "https://gateway.watsonplatform.net/visual-recognition/api").
50
+ # The base url may differ between Bluemix regions.
51
+ # @option args api_key [String] The API Key used to authenticate.
52
+ # @option args iam_api_key [String] An API key that can be used to request IAM tokens. If
53
+ # this API key is provided, the SDK will manage the token and handle the
54
+ # refreshing.
55
+ # @option args iam_access_token [String] An IAM access token is fully managed by the application.
56
+ # Responsibility falls on the application to refresh the token, either before
57
+ # it expires or reactively upon receiving a 401 from the service as any requests
58
+ # made with an expired token will fail.
59
+ # @option args iam_url [String] An optional URL for the IAM service API. Defaults to
60
+ # 'https://iam.ng.bluemix.net/identity/token'.
61
+ def initialize(args = {})
62
+ @__async_initialized__ = false
63
+ super()
64
+ defaults = {}
65
+ defaults[:version] = nil
66
+ defaults[:url] = "https://gateway.watsonplatform.net/visual-recognition/api"
67
+ defaults[:api_key] = nil
68
+ defaults[:iam_api_key] = nil
69
+ defaults[:iam_access_token] = nil
70
+ defaults[:iam_url] = nil
71
+ args = defaults.merge(args)
72
+ @watson_service = WatsonService.new(
73
+ vcap_services_name: "watson_vision_combined",
74
+ url: args[:url],
75
+ api_key: args[:api_key],
76
+ iam_api_key: args[:iam_api_key],
77
+ iam_access_token: args[:iam_access_token],
78
+ iam_url: args[:iam_url],
79
+ use_vcap_services: true
80
+ )
81
+ @version = args[:version]
82
+ end
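A minimal construction sketch; the key is a placeholder and the version date is an assumption rather than a value taken from this file:

require "ibm_watson/visual_recognition_v3"

visual_recognition = IBMWatson::VisualRecognitionV3.new(
  version: "2018-03-19",        # assumed version date; pin one your application was tested against
  iam_api_key: "{iam_api_key}"  # or api_key: "{api_key}" for a legacy service key
)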
83
+
84
+ # :nocov:
85
+ def add_default_headers(headers: {})
86
+ @watson_service.add_default_headers(headers: headers)
87
+ end
88
+
89
+ def _iam_access_token(iam_access_token:)
90
+ @watson_service._iam_access_token(iam_access_token: iam_access_token)
91
+ end
92
+
93
+ def _iam_api_key(iam_api_key:)
94
+ @watson_service._iam_api_key(iam_api_key: iam_api_key)
95
+ end
96
+
97
+ # @return [DetailedResponse]
98
+ def request(args)
99
+ @watson_service.request(args)
100
+ end
101
+
102
+ # @note Chainable
103
+ # @param headers [Hash] Custom headers to be sent with the request
104
+ # @return [self]
105
+ def headers(headers)
106
+ @watson_service.headers(headers)
107
+ self
108
+ end
109
+
110
+ def password=(password)
111
+ @watson_service.password = password
112
+ end
113
+
114
+ def password
115
+ @watson_service.password
116
+ end
117
+
118
+ def username=(username)
119
+ @watson_service.username = username
120
+ end
121
+
122
+ def username
123
+ @watson_service.username
124
+ end
125
+
126
+ def url=(url)
127
+ @watson_service.url = url
128
+ end
129
+
130
+ def url
131
+ @watson_service.url
132
+ end
133
+ # :nocov:
134
+ #########################
135
+ # General
136
+ #########################
137
+
138
+ ##
139
+ # @!method classify(images_file: nil, accept_language: nil, url: nil, threshold: nil, owners: nil, classifier_ids: nil, images_file_content_type: nil, images_filename: nil)
140
+ # Classify images.
141
+ # Classify images with built-in or custom classifiers.
142
+ # @param images_file [File] An image file (.jpg, .png) or .zip file with images. Maximum image size is 10 MB.
143
+ # Include no more than 20 images and limit the .zip file to 100 MB. Encode the image
144
+ # and .zip file names in UTF-8 if they contain non-ASCII characters. The service
145
+ # assumes UTF-8 encoding if it encounters non-ASCII characters.
146
+ #
147
+ # You can also include an image with the **url** parameter.
148
+ # @param accept_language [String] The language of the output class names. The full set of languages is supported for
149
+ # the built-in classifier IDs: `default`, `food`, and `explicit`. The class names of
150
+ # custom classifiers are not translated.
151
+ #
152
+ # The response might not be in the specified language when the requested language is
153
+ # not supported or when there is no translation for the class name.
154
+ # @param url [String] The URL of an image to analyze. Must be in .jpg or .png format. The minimum
155
+ # recommended pixel density is 32X32 pixels per inch, and the maximum image size is
156
+ # 10 MB.
157
+ #
158
+ # You can also include images with the **images_file** parameter.
159
+ # @param threshold [Float] The minimum score a class must have to be displayed in the response. Set the
160
+ # threshold to `0.0` to ignore the classification score and return all values.
161
+ # @param owners [Array[String]] The categories of classifiers to apply. Use `IBM` to classify against the
162
+ # `default` general classifier, and use `me` to classify against your custom
163
+ # classifiers. To analyze the image against both classifier categories, set the
164
+ # value to both `IBM` and `me`.
165
+ #
166
+ # The built-in `default` classifier is used if both **classifier_ids** and
167
+ # **owners** parameters are empty.
168
+ #
169
+ # The **classifier_ids** parameter overrides **owners**, so make sure that
170
+ # **classifier_ids** is empty.
171
+ # @param classifier_ids [Array[String]] Which classifiers to apply. Overrides the **owners** parameter. You can specify
172
+ # both custom and built-in classifier IDs. The built-in `default` classifier is used
173
+ # if both **classifier_ids** and **owners** parameters are empty.
174
+ #
175
+ # The following built-in classifier IDs require no training:
176
+ # - `default`: Returns classes from thousands of general tags.
177
+ # - `food`: (Beta) Enhances specificity and accuracy for images of food items.
178
+ # - `explicit`: (Beta) Evaluates whether the image might be pornographic.
179
+ # @param images_file_content_type [String] The content type of images_file.
180
+ # @param images_filename [String] The filename for images_file.
181
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
182
+ def classify(images_file: nil, accept_language: nil, url: nil, threshold: nil, owners: nil, classifier_ids: nil, images_file_content_type: nil, images_filename: nil)
183
+ headers = {
184
+ "Accept-Language" => accept_language
185
+ }
186
+ params = {
187
+ "version" => @version
188
+ }
189
+ unless images_file.nil?
190
+ mime_type = images_file_content_type.nil? ? "application/octet-stream" : images_file_content_type
191
+ unless images_file.instance_of?(StringIO) || images_file.instance_of?(File)
192
+ images_file = images_file.respond_to?(:to_json) ? StringIO.new(images_file.to_json) : StringIO.new(images_file)
193
+ end
194
+ if images_filename
195
+ images_file = images_file.instance_of?(StringIO) ? HTTP::FormData::File.new(images_file, content_type: mime_type, filename: images_filename) : HTTP::FormData::File.new(images_file.path, content_type: mime_type, filename: images_filename)
196
+ else
197
+ images_file = images_file.instance_of?(StringIO) ? HTTP::FormData::File.new(images_file, content_type: mime_type) : HTTP::FormData::File.new(images_file.path, content_type: mime_type)
198
+ end
199
+ end
200
+ threshold = HTTP::FormData::Part.new(threshold, content_type: "text/plain") unless threshold.nil?
201
+ owners = HTTP::FormData::Part.new(owners, content_type: "text/plain") unless owners.nil?
202
+ classifier_ids = HTTP::FormData::Part.new(classifier_ids, content_type: "text/plain") unless classifier_ids.nil?
203
+ method_url = "/v3/classify"
204
+ response = request(
205
+ method: "POST",
206
+ url: method_url,
207
+ headers: headers,
208
+ params: params,
209
+ form: {
210
+ images_file: images_file,
211
+ url: url,
212
+ threshold: threshold,
213
+ owners: owners,
214
+ classifier_ids: classifier_ids
215
+ },
216
+ accept_json: true
217
+ )
218
+ response
219
+ end
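An illustrative classify call; the file name is hypothetical, and the scalar form values are passed as strings because the method forwards them directly as text/plain form parts:

images_file = File.open("fruitbowl.jpg")   # hypothetical local image
response = visual_recognition.classify(
  images_file: images_file,
  images_filename: "fruitbowl.jpg",
  threshold: "0.6",                        # string, since it becomes a text/plain form part
  classifier_ids: "default"
)
images_file.close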
220
+ #########################
221
+ # Face
222
+ #########################
223
+
224
+ ##
225
+ # @!method detect_faces(images_file: nil, url: nil, images_file_content_type: nil, images_filename: nil)
226
+ # Detect faces in images.
227
+ # **Important:** On April 2, 2018, the identity information in the response to calls
228
+ # to the Face model was removed. The identity information refers to the `name` of
229
+ # the person, `score`, and `type_hierarchy` knowledge graph. For details about the
230
+ # enhanced Face model, see the [Release
231
+ # notes](https://console.bluemix.net/docs/services/visual-recognition/release-notes.html#2april2018).
232
+ #
233
+ # Analyze and get data about faces in images. Responses can include estimated age
234
+ # and gender. This feature uses a built-in model, so no training is necessary. The
235
+ # Detect faces method does not support general biometric facial recognition.
236
+ #
237
+ # Supported image formats include .gif, .jpg, .png, and .tif. The maximum image size
238
+ # is 10 MB. The minimum recommended pixel density is 32X32 pixels per inch.
239
+ # @param images_file [File] An image file (.gif, .jpg, .png, .tif) or .zip file with images. Limit the .zip
240
+ # file to 100 MB. You can include a maximum of 15 images in a request.
241
+ #
242
+ # Encode the image and .zip file names in UTF-8 if they contain non-ASCII
243
+ # characters. The service assumes UTF-8 encoding if it encounters non-ASCII
244
+ # characters.
245
+ #
246
+ # You can also include an image with the **url** parameter.
247
+ # @param url [String] The URL of an image to analyze. Must be in .gif, .jpg, .png, or .tif format. The
248
+ # minimum recommended pixel density is 32X32 pixels per inch, and the maximum image
249
+ # size is 10 MB. Redirects are followed, so you can use a shortened URL.
250
+ #
251
+ # You can also include images with the **images_file** parameter.
252
+ # @param images_file_content_type [String] The content type of images_file.
253
+ # @param images_filename [String] The filename for images_file.
254
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
255
+ def detect_faces(images_file: nil, url: nil, images_file_content_type: nil, images_filename: nil)
256
+ headers = {
257
+ }
258
+ params = {
259
+ "version" => @version
260
+ }
261
+ unless images_file.nil?
262
+ mime_type = images_file_content_type.nil? ? "application/octet-stream" : images_file_content_type
263
+ unless images_file.instance_of?(StringIO) || images_file.instance_of?(File)
264
+ images_file = images_file.respond_to?(:to_json) ? StringIO.new(images_file.to_json) : StringIO.new(images_file)
265
+ end
266
+ if images_filename
267
+ images_file = images_file.instance_of?(StringIO) ? HTTP::FormData::File.new(images_file, content_type: mime_type, filename: images_filename) : HTTP::FormData::File.new(images_file.path, content_type: mime_type, filename: images_filename)
268
+ else
269
+ images_file = images_file.instance_of?(StringIO) ? HTTP::FormData::File.new(images_file, content_type: mime_type) : HTTP::FormData::File.new(images_file.path, content_type: mime_type)
270
+ end
271
+ end
272
+ method_url = "/v3/detect_faces"
273
+ response = request(
274
+ method: "POST",
275
+ url: method_url,
276
+ headers: headers,
277
+ params: params,
278
+ form: {
279
+ images_file: images_file,
280
+ url: url
281
+ },
282
+ accept_json: true
283
+ )
284
+ response
285
+ end
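A usage sketch with the url parameter only; the image URL is hypothetical:

response = visual_recognition.detect_faces(
  url: "https://example.com/prominent-faces.jpg"
)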
286
+ #########################
287
+ # Custom
288
+ #########################
289
+
290
+ ##
291
+ # @!method create_classifier(name:, classname_positive_examples:, negative_examples: nil, classname_positive_examples_filename: nil, negative_examples_filename: nil)
292
+ # Create a classifier.
293
+ # Train a new multi-faceted classifier on the uploaded image data. Create your
294
+ # custom classifier with positive or negative examples. Include at least two sets of
295
+ # examples, either two positive example files or one positive and one negative file.
296
+ # You can upload a maximum of 256 MB per call.
297
+ #
298
+ # Encode all names in UTF-8 if they contain non-ASCII characters (.zip and image
299
+ # file names, and classifier and class names). The service assumes UTF-8 encoding if
300
+ # it encounters non-ASCII characters.
301
+ # @param name [String] The name of the new classifier. Encode special characters in UTF-8.
302
+ # @param classname_positive_examples [File] A .zip file of images that depict the visual subject of a class in the new
303
+ # classifier. You can include more than one positive example file in a call.
304
+ #
305
+ # Specify the parameter name by appending `_positive_examples` to the class name.
306
+ # For example, `goldenretriever_positive_examples` creates the class
307
+ # **goldenretriever**.
308
+ #
309
+ # Include at least 10 images in .jpg or .png format. The minimum recommended image
310
+ # resolution is 32X32 pixels. The maximum number of images is 10,000 images or 100
311
+ # MB per .zip file.
312
+ #
313
+ # Encode special characters in the file name in UTF-8.
314
+ # @param negative_examples [File] A .zip file of images that do not depict the visual subject of any of the classes
315
+ # of the new classifier. Must contain a minimum of 10 images.
316
+ #
317
+ # Encode special characters in the file name in UTF-8.
318
+ # @param classname_positive_examples_filename [String] The filename for classname_positive_examples.
319
+ # @param negative_examples_filename [String] The filename for negative_examples.
320
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
321
+ def create_classifier(name:, classname_positive_examples:, negative_examples: nil, classname_positive_examples_filename: nil, negative_examples_filename: nil)
322
+ raise ArgumentError("name must be provided") if name.nil?
323
+ raise ArgumentError("classname_positive_examples must be provided") if classname_positive_examples.nil?
324
+ headers = {
325
+ }
326
+ params = {
327
+ "version" => @version
328
+ }
329
+ mime_type = "application/octet-stream"
330
+ unless classname_positive_examples.instance_of?(StringIO) || classname_positive_examples.instance_of?(File)
331
+ classname_positive_examples = classname_positive_examples.respond_to?(:to_json) ? StringIO.new(classname_positive_examples.to_json) : StringIO.new(classname_positive_examples)
332
+ end
333
+ if classname_positive_examples_filename
334
+ classname_positive_examples = classname_positive_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(classname_positive_examples, content_type: mime_type, filename: classname_positive_examples_filename) : HTTP::FormData::File.new(classname_positive_examples.path, content_type: mime_type, filename: classname_positive_examples_filename)
335
+ else
336
+ classname_positive_examples = classname_positive_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(classname_positive_examples, content_type: mime_type) : HTTP::FormData::File.new(classname_positive_examples.path, content_type: mime_type)
337
+ end
338
+ unless negative_examples.nil?
339
+ mime_type = "application/octet-stream"
340
+ unless negative_examples.instance_of?(StringIO) || negative_examples.instance_of?(File)
341
+ negative_examples = negative_examples.respond_to?(:to_json) ? StringIO.new(negative_examples.to_json) : StringIO.new(negative_examples)
342
+ end
343
+ if negative_examples_filename
344
+ negative_examples = negative_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(negative_examples, content_type: mime_type, filename: negative_examples_filename) : HTTP::FormData::File.new(negative_examples.path, content_type: mime_type, filename: negative_examples_filename)
345
+ else
346
+ negative_examples = negative_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(negative_examples, content_type: mime_type) : HTTP::FormData::File.new(negative_examples.path, content_type: mime_type)
347
+ end
348
+ end
349
+ method_url = "/v3/classifiers"
350
+ response = request(
351
+ method: "POST",
352
+ url: method_url,
353
+ headers: headers,
354
+ params: params,
355
+ form: {
356
+ name: name,
357
+ classname_positive_examples: classname_positive_examples,
358
+ negative_examples: negative_examples
359
+ },
360
+ accept_json: true
361
+ )
362
+ response
363
+ end
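A training sketch under the signature above, which accepts exactly one positive set via the fixed classname_positive_examples keyword; the .zip archives are hypothetical and each needs at least 10 images:

positive = File.open("beagle_positive_examples.zip")
negative = File.open("non_dogs.zip")
response = visual_recognition.create_classifier(
  name: "dogs",
  classname_positive_examples: positive,
  classname_positive_examples_filename: "beagle_positive_examples.zip",
  negative_examples: negative,
  negative_examples_filename: "non_dogs.zip"
)
positive.close
negative.close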
364
+
365
+ ##
366
+ # @!method list_classifiers(verbose: nil)
367
+ # Retrieve a list of classifiers.
368
+ # @param verbose [Boolean] Specify `true` to return details about the classifiers. Omit this parameter to
369
+ # return a brief list of classifiers.
370
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
371
+ def list_classifiers(verbose: nil)
372
+ headers = {
373
+ }
374
+ params = {
375
+ "version" => @version,
376
+ "verbose" => verbose
377
+ }
378
+ method_url = "/v3/classifiers"
379
+ response = request(
380
+ method: "GET",
381
+ url: method_url,
382
+ headers: headers,
383
+ params: params,
384
+ accept_json: true
385
+ )
386
+ response
387
+ end
388
+
389
+ ##
390
+ # @!method get_classifier(classifier_id:)
391
+ # Retrieve classifier details.
392
+ # Retrieve information about a custom classifier.
393
+ # @param classifier_id [String] The ID of the classifier.
394
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
395
+ def get_classifier(classifier_id:)
396
+ raise ArgumentError("classifier_id must be provided") if classifier_id.nil?
397
+ headers = {
398
+ }
399
+ params = {
400
+ "version" => @version
401
+ }
402
+ method_url = "/v3/classifiers/%s" % [ERB::Util.url_encode(classifier_id)]
403
+ response = request(
404
+ method: "GET",
405
+ url: method_url,
406
+ headers: headers,
407
+ params: params,
408
+ accept_json: true
409
+ )
410
+ response
411
+ end
412
+
413
+ ##
414
+ # @!method update_classifier(classifier_id:, classname_positive_examples: nil, negative_examples: nil, classname_positive_examples_filename: nil, negative_examples_filename: nil)
415
+ # Update a classifier.
416
+ # Update a custom classifier by adding new positive or negative classes (examples)
417
+ # or by adding new images to existing classes. You must supply at least one set of
418
+ # positive or negative examples. For details, see [Updating custom
419
+ # classifiers](https://console.bluemix.net/docs/services/visual-recognition/customizing.html#updating-custom-classifiers).
420
+ #
421
+ # Encode all names in UTF-8 if they contain non-ASCII characters (.zip and image
422
+ # file names, and classifier and class names). The service assumes UTF-8 encoding if
423
+ # it encounters non-ASCII characters.
424
+ #
425
+ # **Tip:** Don't make retraining calls on a classifier until the status is ready.
426
+ # When you submit retraining requests in parallel, the last request overwrites the
427
+ # previous requests. The retrained property shows the last time the classifier
428
+ # retraining finished.
429
+ # @param classifier_id [String] The ID of the classifier.
430
+ # @param classname_positive_examples [File] A .zip file of images that depict the visual subject of a class in the classifier.
431
+ # The positive examples create or update classes in the classifier. You can include
432
+ # more than one positive example file in a call.
433
+ #
434
+ # Specify the parameter name by appending `_positive_examples` to the class name.
435
+ # For example, `goldenretriever_positive_examples` creates the class
436
+ # `goldenretriever`.
437
+ #
438
+ # Include at least 10 images in .jpg or .png format. The minimum recommended image
439
+ # resolution is 32X32 pixels. The maximum number of images is 10,000 images or 100
440
+ # MB per .zip file.
441
+ #
442
+ # Encode special characters in the file name in UTF-8.
443
+ # @param negative_examples [File] A .zip file of images that do not depict the visual subject of any of the classes
444
+ # of the new classifier. Must contain a minimum of 10 images.
445
+ #
446
+ # Encode special characters in the file name in UTF-8.
447
+ # @param classname_positive_examples_filename [String] The filename for classname_positive_examples.
448
+ # @param negative_examples_filename [String] The filename for negative_examples.
449
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
450
+ def update_classifier(classifier_id:, classname_positive_examples: nil, negative_examples: nil, classname_positive_examples_filename: nil, negative_examples_filename: nil)
451
+ raise ArgumentError("classifier_id must be provided") if classifier_id.nil?
452
+ headers = {
453
+ }
454
+ params = {
455
+ "version" => @version
456
+ }
457
+ unless classname_positive_examples.nil?
458
+ mime_type = "application/octet-stream"
459
+ unless classname_positive_examples.instance_of?(StringIO) || classname_positive_examples.instance_of?(File)
460
+ classname_positive_examples = classname_positive_examples.respond_to?(:to_json) ? StringIO.new(classname_positive_examples.to_json) : StringIO.new(classname_positive_examples)
461
+ end
462
+ if classname_positive_examples_filename
463
+ classname_positive_examples = classname_positive_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(classname_positive_examples, content_type: mime_type, filename: classname_positive_examples_filename) : HTTP::FormData::File.new(classname_positive_examples.path, content_type: mime_type, filename: classname_positive_examples_filename)
464
+ else
465
+ classname_positive_examples = classname_positive_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(classname_positive_examples, content_type: mime_type) : HTTP::FormData::File.new(classname_positive_examples.path, content_type: mime_type)
466
+ end
467
+ end
468
+ unless negative_examples.nil?
469
+ mime_type = "application/octet-stream"
470
+ unless negative_examples.instance_of?(StringIO) || negative_examples.instance_of?(File)
471
+ negative_examples = negative_examples.respond_to?(:to_json) ? StringIO.new(negative_examples.to_json) : StringIO.new(negative_examples)
472
+ end
473
+ if negative_examples_filename
474
+ negative_examples = negative_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(negative_examples, content_type: mime_type, filename: negative_examples_filename) : HTTP::FormData::File.new(negative_examples.path, content_type: mime_type, filename: negative_examples_filename)
475
+ else
476
+ negative_examples = negative_examples.instance_of?(StringIO) ? HTTP::FormData::File.new(negative_examples, content_type: mime_type) : HTTP::FormData::File.new(negative_examples.path, content_type: mime_type)
477
+ end
478
+ end
479
+ method_url = "/v3/classifiers/%s" % [ERB::Util.url_encode(classifier_id)]
480
+ response = request(
481
+ method: "POST",
482
+ url: method_url,
483
+ headers: headers,
484
+ params: params,
485
+ form: {
486
+ classname_positive_examples: classname_positive_examples,
487
+ negative_examples: negative_examples
488
+ },
489
+ accept_json: true
490
+ )
491
+ response
492
+ end
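A retraining sketch that supplies only negative examples, which the method permits since either set may be given on its own; the classifier ID and archive are hypothetical:

negative = File.open("more_non_dogs.zip")
response = visual_recognition.update_classifier(
  classifier_id: "dogs_1477088859",        # hypothetical classifier ID
  negative_examples: negative,
  negative_examples_filename: "more_non_dogs.zip"
)
negative.close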
493
+
494
+ ##
495
+ # @!method delete_classifier(classifier_id:)
496
+ # Delete a classifier.
497
+ # @param classifier_id [String] The ID of the classifier.
498
+ # @return [nil]
499
+ def delete_classifier(classifier_id:)
500
+ raise ArgumentError("classifier_id must be provided") if classifier_id.nil?
501
+ headers = {
502
+ }
503
+ params = {
504
+ "version" => @version
505
+ }
506
+ method_url = "/v3/classifiers/%s" % [ERB::Util.url_encode(classifier_id)]
507
+ request(
508
+ method: "DELETE",
509
+ url: method_url,
510
+ headers: headers,
511
+ params: params,
512
+ accept_json: true
513
+ )
514
+ nil
515
+ end
516
+ #########################
517
+ # Core ML
518
+ #########################
519
+
520
+ ##
521
+ # @!method get_core_ml_model(classifier_id:)
522
+ # Retrieve a Core ML model of a classifier.
523
+ # Download a Core ML model file (.mlmodel) of a custom classifier that returns
524
+ # <tt>\"core_ml_enabled\": true</tt> in the classifier details.
525
+ # @param classifier_id [String] The ID of the classifier.
526
+ # @return [DetailedResponse] A `DetailedResponse` object representing the response.
527
+ def get_core_ml_model(classifier_id:)
528
+ raise ArgumentError("classifier_id must be provided") if classifier_id.nil?
529
+ headers = {
530
+ }
531
+ params = {
532
+ "version" => @version
533
+ }
534
+ method_url = "/v3/classifiers/%s/core_ml_model" % [ERB::Util.url_encode(classifier_id)]
535
+ response = request(
536
+ method: "GET",
537
+ url: method_url,
538
+ headers: headers,
539
+ params: params,
540
+ accept_json: false
541
+ )
542
+ response
543
+ end
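A download sketch; the classifier ID and output path are hypothetical, and it assumes the DetailedResponse exposes the raw response body:

response = visual_recognition.get_core_ml_model(classifier_id: "dogs_1477088859")
File.open("dogs.mlmodel", "wb") do |file|
  file.write(response.body)  # assumed accessor for the raw .mlmodel bytes
end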
544
+ #########################
545
+ # User data
546
+ #########################
547
+
548
+ ##
549
+ # @!method delete_user_data(customer_id:)
550
+ # Delete labeled data.
551
+ # Deletes all data associated with a specified customer ID. The method has no effect
552
+ # if no data is associated with the customer ID.
553
+ #
554
+ # You associate a customer ID with data by passing the `X-Watson-Metadata` header
555
+ # with a request that passes data. For more information about personal data and
556
+ # customer IDs, see [Information
557
+ # security](https://console.bluemix.net/docs/services/visual-recognition/information-security.html).
558
+ # @param customer_id [String] The customer ID for which all data is to be deleted.
559
+ # @return [nil]
560
+ def delete_user_data(customer_id:)
561
+ raise ArgumentError("customer_id must be provided") if customer_id.nil?
562
+ headers = {
563
+ }
564
+ params = {
565
+ "version" => @version,
566
+ "customer_id" => customer_id
567
+ }
568
+ method_url = "/v3/user_data"
569
+ request(
570
+ method: "DELETE",
571
+ url: method_url,
572
+ headers: headers,
573
+ params: params,
574
+ accept_json: true
575
+ )
576
+ nil
577
+ end
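A deletion sketch; the customer ID is hypothetical and should match a value previously sent in the X-Watson-Metadata header:

visual_recognition.delete_user_data(customer_id: "my_customer_ID")
# Returns nil; per the docs above, the call has no effect if no data is associated with the ID.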
578
+ end
579
+ end