aws-sdk-personalize 1.7.0 → 1.8.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: ba039375af63cb4543ace6efb8b88a697bf1eacc
- data.tar.gz: d284796db16f4d705ed1a00a12939dbcbd3ead72
+ metadata.gz: b196ccf4986516a8f46f5a7faa06e234e28d46ff
+ data.tar.gz: 5275e41401ce4a37f0285eb86898aec1b726ba78
  SHA512:
- metadata.gz: e09bc30db52b5f442affbfec4cf48816fa4963cdaf6dcd8b9084f75928a1a039cf8ea4b12a58929ef27681a1ba1e867b20d77ad632ea69b02e633060d03d8aea
- data.tar.gz: ff58a356beda3ab9ef9898d24e01affebd796f1363d60241fa8f9a4d3dafc30c40d579caae1ebd7c0ed3a019be3a9f8b2d2b0a6535f0548c77f974fd44dd70b3
+ metadata.gz: f98345f7b5937f212252cf1e984873d4bb9724191ea3fe0ae3c90c9127b8be1a15d58081284ad93b3bb9acafd35ec38c19affbb11c52482bc2379e5ba178323c
+ data.tar.gz: ae5eec4fba2d788f2284fe64dfa19b5722fe4589eacb1068d07f0fcd3a5a7ea2275ceda2e4b09498d66d928fbb247692dacede8610c9aca48ed1396ee36b7f9c
@@ -42,6 +42,6 @@ require_relative 'aws-sdk-personalize/customizations'
  # @service
  module Aws::Personalize

- GEM_VERSION = '1.7.0'
+ GEM_VERSION = '1.8.0'

  end
@@ -264,6 +264,71 @@ module Aws::Personalize

  # @!group API Operations

+ # Creates a batch inference job. The operation can handle up to 50
+ # million records and the input file must be in JSON format. For more
+ # information, see recommendations-batch.
+ #
+ # @option params [required, String] :job_name
+ # The name of the batch inference job to create.
+ #
+ # @option params [required, String] :solution_version_arn
+ # The Amazon Resource Name (ARN) of the solution version that will be
+ # used to generate the batch inference recommendations.
+ #
+ # @option params [Integer] :num_results
+ # The number of recommendations to retrieve.
+ #
+ # @option params [required, Types::BatchInferenceJobInput] :job_input
+ # The Amazon S3 path that leads to the input file to base your
+ # recommendations on. The input material must be in JSON format.
+ #
+ # @option params [required, Types::BatchInferenceJobOutput] :job_output
+ # The path to the Amazon S3 bucket where the job's output will be
+ # stored.
+ #
+ # @option params [required, String] :role_arn
+ # The ARN of the Amazon Identity and Access Management role that has
+ # permissions to read and write to your input and output Amazon S3
+ # buckets, respectively.
+ #
+ # @return [Types::CreateBatchInferenceJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
+ #
+ # * {Types::CreateBatchInferenceJobResponse#batch_inference_job_arn #batch_inference_job_arn} => String
+ #
+ # @example Request syntax with placeholder values
+ #
+ # resp = client.create_batch_inference_job({
+ # job_name: "Name", # required
+ # solution_version_arn: "Arn", # required
+ # num_results: 1,
+ # job_input: { # required
+ # s3_data_source: { # required
+ # path: "S3Location", # required
+ # kms_key_arn: "KmsKeyArn",
+ # },
+ # },
+ # job_output: { # required
+ # s3_data_destination: { # required
+ # path: "S3Location", # required
+ # kms_key_arn: "KmsKeyArn",
+ # },
+ # },
+ # role_arn: "RoleArn", # required
+ # })
+ #
+ # @example Response structure
+ #
+ # resp.batch_inference_job_arn #=> String
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateBatchInferenceJob AWS API Documentation
+ #
+ # @overload create_batch_inference_job(params = {})
+ # @param [Hash] params ({})
+ def create_batch_inference_job(params = {}, options = {})
+ req = build_request(:create_batch_inference_job, params)
+ req.send_request(options)
+ end
+
  # Creates a campaign by deploying a solution version. When a client
  # calls the [GetRecommendations][1] and [GetPersonalizedRanking][2]
  # APIs, a campaign is specified in the request.
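
The hunk above adds the CreateBatchInferenceJob operation to the client. A minimal usage sketch follows; the region, job name, ARNs, bucket paths, and role below are placeholders, not values from this gem.

    require 'aws-sdk-personalize'

    client = Aws::Personalize::Client.new(region: 'us-east-1')

    # Submit a batch inference job against an existing solution version.
    resp = client.create_batch_inference_job(
      job_name: 'batch-recs-demo',                                  # hypothetical name
      solution_version_arn: 'arn:aws:personalize:us-east-1:123456789012:solution/demo/1', # placeholder ARN
      num_results: 25,
      job_input: { s3_data_source: { path: 's3://demo-bucket/input/users.json' } },
      job_output: { s3_data_destination: { path: 's3://demo-bucket/output/' } },
      role_arn: 'arn:aws:iam::123456789012:role/PersonalizeBatchRole' # placeholder role
    )
    puts resp.batch_inference_job_arn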
@@ -1168,6 +1233,48 @@ module Aws::Personalize
  req.send_request(options)
  end

+ # Gets the properties of a batch inference job including name, Amazon
+ # Resource Name (ARN), status, input and output configurations, and the
+ # ARN of the solution version used to generate the recommendations.
+ #
+ # @option params [required, String] :batch_inference_job_arn
+ # The ARN of the batch inference job to describe.
+ #
+ # @return [Types::DescribeBatchInferenceJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
+ #
+ # * {Types::DescribeBatchInferenceJobResponse#batch_inference_job #batch_inference_job} => Types::BatchInferenceJob
+ #
+ # @example Request syntax with placeholder values
+ #
+ # resp = client.describe_batch_inference_job({
+ # batch_inference_job_arn: "Arn", # required
+ # })
+ #
+ # @example Response structure
+ #
+ # resp.batch_inference_job.job_name #=> String
+ # resp.batch_inference_job.batch_inference_job_arn #=> String
+ # resp.batch_inference_job.failure_reason #=> String
+ # resp.batch_inference_job.solution_version_arn #=> String
+ # resp.batch_inference_job.num_results #=> Integer
+ # resp.batch_inference_job.job_input.s3_data_source.path #=> String
+ # resp.batch_inference_job.job_input.s3_data_source.kms_key_arn #=> String
+ # resp.batch_inference_job.job_output.s3_data_destination.path #=> String
+ # resp.batch_inference_job.job_output.s3_data_destination.kms_key_arn #=> String
+ # resp.batch_inference_job.role_arn #=> String
+ # resp.batch_inference_job.status #=> String
+ # resp.batch_inference_job.creation_date_time #=> Time
+ # resp.batch_inference_job.last_updated_date_time #=> Time
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeBatchInferenceJob AWS API Documentation
+ #
+ # @overload describe_batch_inference_job(params = {})
+ # @param [Hash] params ({})
+ def describe_batch_inference_job(params = {}, options = {})
+ req = build_request(:describe_batch_inference_job, params)
+ req.send_request(options)
+ end
+
  # Describes the given campaign, including its status.
  #
  # A campaign can be in one of the following states:
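
A sketch of polling the new DescribeBatchInferenceJob operation until the job leaves its transient states; the status strings come from the docstrings added in this release, and the 60-second interval is an arbitrary choice.

    job_arn = resp.batch_inference_job_arn

    job = nil
    loop do
      job = client.describe_batch_inference_job(
        batch_inference_job_arn: job_arn
      ).batch_inference_job
      # Stop once the job is ACTIVE or has failed.
      break if job.status == 'ACTIVE' || job.status.include?('FAILED')
      sleep 60 # arbitrary polling interval
    end
    puts job.failure_reason unless job.status == 'ACTIVE'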
@@ -1657,6 +1764,53 @@ module Aws::Personalize
  req.send_request(options)
  end

+ # Gets a list of the batch inference jobs that have been performed off
+ # of a solution version.
+ #
+ # @option params [String] :solution_version_arn
+ # The Amazon Resource Name (ARN) of the solution version from which the
+ # batch inference jobs were created.
+ #
+ # @option params [String] :next_token
+ # The token to request the next page of results.
+ #
+ # @option params [Integer] :max_results
+ # The maximum number of batch inference job results to return in each
+ # page. The default value is 100.
+ #
+ # @return [Types::ListBatchInferenceJobsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
+ #
+ # * {Types::ListBatchInferenceJobsResponse#batch_inference_jobs #batch_inference_jobs} => Array<Types::BatchInferenceJobSummary>
+ # * {Types::ListBatchInferenceJobsResponse#next_token #next_token} => String
+ #
+ # @example Request syntax with placeholder values
+ #
+ # resp = client.list_batch_inference_jobs({
+ # solution_version_arn: "Arn",
+ # next_token: "NextToken",
+ # max_results: 1,
+ # })
+ #
+ # @example Response structure
+ #
+ # resp.batch_inference_jobs #=> Array
+ # resp.batch_inference_jobs[0].batch_inference_job_arn #=> String
+ # resp.batch_inference_jobs[0].job_name #=> String
+ # resp.batch_inference_jobs[0].status #=> String
+ # resp.batch_inference_jobs[0].creation_date_time #=> Time
+ # resp.batch_inference_jobs[0].last_updated_date_time #=> Time
+ # resp.batch_inference_jobs[0].failure_reason #=> String
+ # resp.next_token #=> String
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListBatchInferenceJobs AWS API Documentation
+ #
+ # @overload list_batch_inference_jobs(params = {})
+ # @param [Hash] params ({})
+ def list_batch_inference_jobs(params = {}, options = {})
+ req = build_request(:list_batch_inference_jobs, params)
+ req.send_request(options)
+ end
+
  # Returns a list of campaigns that use the given solution. When a
  # solution is not specified, all the campaigns associated with the
  # account are listed. The response provides the properties for each
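
A sketch of listing the jobs created from one solution version with the new ListBatchInferenceJobs operation; the ARN and page size are placeholders.

    resp = client.list_batch_inference_jobs(
      solution_version_arn: 'arn:aws:personalize:us-east-1:123456789012:solution/demo/1', # placeholder ARN
      max_results: 10
    )
    resp.batch_inference_jobs.each do |summary|
      puts "#{summary.job_name}: #{summary.status}"
    end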
@@ -2142,7 +2296,7 @@ module Aws::Personalize
  params: params,
  config: config)
  context[:gem_name] = 'aws-sdk-personalize'
- context[:gem_version] = '1.7.0'
+ context[:gem_version] = '1.8.0'
  Seahorse::Client::Request.new(handlers, context)
  end

@@ -19,6 +19,11 @@ module Aws::Personalize
  AutoMLConfig = Shapes::StructureShape.new(name: 'AutoMLConfig')
  AutoMLResult = Shapes::StructureShape.new(name: 'AutoMLResult')
  AvroSchema = Shapes::StringShape.new(name: 'AvroSchema')
+ BatchInferenceJob = Shapes::StructureShape.new(name: 'BatchInferenceJob')
+ BatchInferenceJobInput = Shapes::StructureShape.new(name: 'BatchInferenceJobInput')
+ BatchInferenceJobOutput = Shapes::StructureShape.new(name: 'BatchInferenceJobOutput')
+ BatchInferenceJobSummary = Shapes::StructureShape.new(name: 'BatchInferenceJobSummary')
+ BatchInferenceJobs = Shapes::ListShape.new(name: 'BatchInferenceJobs')
  Boolean = Shapes::BooleanShape.new(name: 'Boolean')
  Campaign = Shapes::StructureShape.new(name: 'Campaign')
  CampaignSummary = Shapes::StructureShape.new(name: 'CampaignSummary')
@@ -32,6 +37,8 @@ module Aws::Personalize
  ContinuousHyperParameterRanges = Shapes::ListShape.new(name: 'ContinuousHyperParameterRanges')
  ContinuousMaxValue = Shapes::FloatShape.new(name: 'ContinuousMaxValue')
  ContinuousMinValue = Shapes::FloatShape.new(name: 'ContinuousMinValue')
+ CreateBatchInferenceJobRequest = Shapes::StructureShape.new(name: 'CreateBatchInferenceJobRequest')
+ CreateBatchInferenceJobResponse = Shapes::StructureShape.new(name: 'CreateBatchInferenceJobResponse')
  CreateCampaignRequest = Shapes::StructureShape.new(name: 'CreateCampaignRequest')
  CreateCampaignResponse = Shapes::StructureShape.new(name: 'CreateCampaignResponse')
  CreateDatasetGroupRequest = Shapes::StructureShape.new(name: 'CreateDatasetGroupRequest')
@@ -77,6 +84,8 @@ module Aws::Personalize
  DeleteSolutionRequest = Shapes::StructureShape.new(name: 'DeleteSolutionRequest')
  DescribeAlgorithmRequest = Shapes::StructureShape.new(name: 'DescribeAlgorithmRequest')
  DescribeAlgorithmResponse = Shapes::StructureShape.new(name: 'DescribeAlgorithmResponse')
+ DescribeBatchInferenceJobRequest = Shapes::StructureShape.new(name: 'DescribeBatchInferenceJobRequest')
+ DescribeBatchInferenceJobResponse = Shapes::StructureShape.new(name: 'DescribeBatchInferenceJobResponse')
  DescribeCampaignRequest = Shapes::StructureShape.new(name: 'DescribeCampaignRequest')
  DescribeCampaignResponse = Shapes::StructureShape.new(name: 'DescribeCampaignResponse')
  DescribeDatasetGroupRequest = Shapes::StructureShape.new(name: 'DescribeDatasetGroupRequest')
@@ -126,6 +135,8 @@ module Aws::Personalize
  InvalidNextTokenException = Shapes::StructureShape.new(name: 'InvalidNextTokenException')
  KmsKeyArn = Shapes::StringShape.new(name: 'KmsKeyArn')
  LimitExceededException = Shapes::StructureShape.new(name: 'LimitExceededException')
+ ListBatchInferenceJobsRequest = Shapes::StructureShape.new(name: 'ListBatchInferenceJobsRequest')
+ ListBatchInferenceJobsResponse = Shapes::StructureShape.new(name: 'ListBatchInferenceJobsResponse')
  ListCampaignsRequest = Shapes::StructureShape.new(name: 'ListCampaignsRequest')
  ListCampaignsResponse = Shapes::StructureShape.new(name: 'ListCampaignsResponse')
  ListDatasetGroupsRequest = Shapes::StructureShape.new(name: 'ListDatasetGroupsRequest')
@@ -151,6 +162,7 @@ module Aws::Personalize
  Metrics = Shapes::MapShape.new(name: 'Metrics')
  Name = Shapes::StringShape.new(name: 'Name')
  NextToken = Shapes::StringShape.new(name: 'NextToken')
+ NumBatchResults = Shapes::IntegerShape.new(name: 'NumBatchResults')
  ParameterName = Shapes::StringShape.new(name: 'ParameterName')
  ParameterValue = Shapes::StringShape.new(name: 'ParameterValue')
  PerformAutoML = Shapes::BooleanShape.new(name: 'PerformAutoML')
@@ -165,6 +177,7 @@ module Aws::Personalize
  ResourceInUseException = Shapes::StructureShape.new(name: 'ResourceInUseException')
  ResourceNotFoundException = Shapes::StructureShape.new(name: 'ResourceNotFoundException')
  RoleArn = Shapes::StringShape.new(name: 'RoleArn')
+ S3DataConfig = Shapes::StructureShape.new(name: 'S3DataConfig')
  S3Location = Shapes::StringShape.new(name: 'S3Location')
  Schemas = Shapes::ListShape.new(name: 'Schemas')
  Solution = Shapes::StructureShape.new(name: 'Solution')
@@ -209,6 +222,35 @@ module Aws::Personalize
  AutoMLResult.add_member(:best_recipe_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "bestRecipeArn"))
  AutoMLResult.struct_class = Types::AutoMLResult

+ BatchInferenceJob.add_member(:job_name, Shapes::ShapeRef.new(shape: Name, location_name: "jobName"))
+ BatchInferenceJob.add_member(:batch_inference_job_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "batchInferenceJobArn"))
+ BatchInferenceJob.add_member(:failure_reason, Shapes::ShapeRef.new(shape: FailureReason, location_name: "failureReason"))
+ BatchInferenceJob.add_member(:solution_version_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "solutionVersionArn"))
+ BatchInferenceJob.add_member(:num_results, Shapes::ShapeRef.new(shape: NumBatchResults, location_name: "numResults"))
+ BatchInferenceJob.add_member(:job_input, Shapes::ShapeRef.new(shape: BatchInferenceJobInput, location_name: "jobInput"))
+ BatchInferenceJob.add_member(:job_output, Shapes::ShapeRef.new(shape: BatchInferenceJobOutput, location_name: "jobOutput"))
+ BatchInferenceJob.add_member(:role_arn, Shapes::ShapeRef.new(shape: RoleArn, location_name: "roleArn"))
+ BatchInferenceJob.add_member(:status, Shapes::ShapeRef.new(shape: Status, location_name: "status"))
+ BatchInferenceJob.add_member(:creation_date_time, Shapes::ShapeRef.new(shape: Date, location_name: "creationDateTime"))
+ BatchInferenceJob.add_member(:last_updated_date_time, Shapes::ShapeRef.new(shape: Date, location_name: "lastUpdatedDateTime"))
+ BatchInferenceJob.struct_class = Types::BatchInferenceJob
+
+ BatchInferenceJobInput.add_member(:s3_data_source, Shapes::ShapeRef.new(shape: S3DataConfig, required: true, location_name: "s3DataSource"))
+ BatchInferenceJobInput.struct_class = Types::BatchInferenceJobInput
+
+ BatchInferenceJobOutput.add_member(:s3_data_destination, Shapes::ShapeRef.new(shape: S3DataConfig, required: true, location_name: "s3DataDestination"))
+ BatchInferenceJobOutput.struct_class = Types::BatchInferenceJobOutput
+
+ BatchInferenceJobSummary.add_member(:batch_inference_job_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "batchInferenceJobArn"))
+ BatchInferenceJobSummary.add_member(:job_name, Shapes::ShapeRef.new(shape: Name, location_name: "jobName"))
+ BatchInferenceJobSummary.add_member(:status, Shapes::ShapeRef.new(shape: Status, location_name: "status"))
+ BatchInferenceJobSummary.add_member(:creation_date_time, Shapes::ShapeRef.new(shape: Date, location_name: "creationDateTime"))
+ BatchInferenceJobSummary.add_member(:last_updated_date_time, Shapes::ShapeRef.new(shape: Date, location_name: "lastUpdatedDateTime"))
+ BatchInferenceJobSummary.add_member(:failure_reason, Shapes::ShapeRef.new(shape: FailureReason, location_name: "failureReason"))
+ BatchInferenceJobSummary.struct_class = Types::BatchInferenceJobSummary
+
+ BatchInferenceJobs.member = Shapes::ShapeRef.new(shape: BatchInferenceJobSummary)
+
  Campaign.add_member(:name, Shapes::ShapeRef.new(shape: Name, location_name: "name"))
  Campaign.add_member(:campaign_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "campaignArn"))
  Campaign.add_member(:solution_version_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "solutionVersionArn"))
@@ -253,6 +295,17 @@ module Aws::Personalize

  ContinuousHyperParameterRanges.member = Shapes::ShapeRef.new(shape: ContinuousHyperParameterRange)

+ CreateBatchInferenceJobRequest.add_member(:job_name, Shapes::ShapeRef.new(shape: Name, required: true, location_name: "jobName"))
+ CreateBatchInferenceJobRequest.add_member(:solution_version_arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "solutionVersionArn"))
+ CreateBatchInferenceJobRequest.add_member(:num_results, Shapes::ShapeRef.new(shape: NumBatchResults, location_name: "numResults"))
+ CreateBatchInferenceJobRequest.add_member(:job_input, Shapes::ShapeRef.new(shape: BatchInferenceJobInput, required: true, location_name: "jobInput"))
+ CreateBatchInferenceJobRequest.add_member(:job_output, Shapes::ShapeRef.new(shape: BatchInferenceJobOutput, required: true, location_name: "jobOutput"))
+ CreateBatchInferenceJobRequest.add_member(:role_arn, Shapes::ShapeRef.new(shape: RoleArn, required: true, location_name: "roleArn"))
+ CreateBatchInferenceJobRequest.struct_class = Types::CreateBatchInferenceJobRequest
+
+ CreateBatchInferenceJobResponse.add_member(:batch_inference_job_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "batchInferenceJobArn"))
+ CreateBatchInferenceJobResponse.struct_class = Types::CreateBatchInferenceJobResponse
+
  CreateCampaignRequest.add_member(:name, Shapes::ShapeRef.new(shape: Name, required: true, location_name: "name"))
  CreateCampaignRequest.add_member(:solution_version_arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "solutionVersionArn"))
  CreateCampaignRequest.add_member(:min_provisioned_tps, Shapes::ShapeRef.new(shape: TransactionsPerSecond, required: true, location_name: "minProvisionedTPS"))
@@ -450,6 +503,12 @@ module Aws::Personalize
  DescribeAlgorithmResponse.add_member(:algorithm, Shapes::ShapeRef.new(shape: Algorithm, location_name: "algorithm"))
  DescribeAlgorithmResponse.struct_class = Types::DescribeAlgorithmResponse

+ DescribeBatchInferenceJobRequest.add_member(:batch_inference_job_arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "batchInferenceJobArn"))
+ DescribeBatchInferenceJobRequest.struct_class = Types::DescribeBatchInferenceJobRequest
+
+ DescribeBatchInferenceJobResponse.add_member(:batch_inference_job, Shapes::ShapeRef.new(shape: BatchInferenceJob, location_name: "batchInferenceJob"))
+ DescribeBatchInferenceJobResponse.struct_class = Types::DescribeBatchInferenceJobResponse
+
  DescribeCampaignRequest.add_member(:campaign_arn, Shapes::ShapeRef.new(shape: Arn, required: true, location_name: "campaignArn"))
  DescribeCampaignRequest.struct_class = Types::DescribeCampaignRequest

@@ -588,6 +647,15 @@ module Aws::Personalize
  LimitExceededException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
  LimitExceededException.struct_class = Types::LimitExceededException

+ ListBatchInferenceJobsRequest.add_member(:solution_version_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "solutionVersionArn"))
+ ListBatchInferenceJobsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "nextToken"))
+ ListBatchInferenceJobsRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "maxResults"))
+ ListBatchInferenceJobsRequest.struct_class = Types::ListBatchInferenceJobsRequest
+
+ ListBatchInferenceJobsResponse.add_member(:batch_inference_jobs, Shapes::ShapeRef.new(shape: BatchInferenceJobs, location_name: "batchInferenceJobs"))
+ ListBatchInferenceJobsResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "nextToken"))
+ ListBatchInferenceJobsResponse.struct_class = Types::ListBatchInferenceJobsResponse
+
  ListCampaignsRequest.add_member(:solution_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "solutionArn"))
  ListCampaignsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: NextToken, location_name: "nextToken"))
  ListCampaignsRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "maxResults"))
@@ -702,6 +770,10 @@ module Aws::Personalize
  ResourceNotFoundException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
  ResourceNotFoundException.struct_class = Types::ResourceNotFoundException

+ S3DataConfig.add_member(:path, Shapes::ShapeRef.new(shape: S3Location, required: true, location_name: "path"))
+ S3DataConfig.add_member(:kms_key_arn, Shapes::ShapeRef.new(shape: KmsKeyArn, location_name: "kmsKeyArn"))
+ S3DataConfig.struct_class = Types::S3DataConfig
+
  Schemas.member = Shapes::ShapeRef.new(shape: DatasetSchemaSummary)

  Solution.add_member(:name, Shapes::ShapeRef.new(shape: Name, location_name: "name"))
@@ -787,6 +859,19 @@ module Aws::Personalize
  "uid" => "personalize-2018-05-22",
  }

+ api.add_operation(:create_batch_inference_job, Seahorse::Model::Operation.new.tap do |o|
+ o.name = "CreateBatchInferenceJob"
+ o.http_method = "POST"
+ o.http_request_uri = "/"
+ o.input = Shapes::ShapeRef.new(shape: CreateBatchInferenceJobRequest)
+ o.output = Shapes::ShapeRef.new(shape: CreateBatchInferenceJobResponse)
+ o.errors << Shapes::ShapeRef.new(shape: InvalidInputException)
+ o.errors << Shapes::ShapeRef.new(shape: ResourceAlreadyExistsException)
+ o.errors << Shapes::ShapeRef.new(shape: LimitExceededException)
+ o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
+ o.errors << Shapes::ShapeRef.new(shape: ResourceInUseException)
+ end)
+
  api.add_operation(:create_campaign, Seahorse::Model::Operation.new.tap do |o|
  o.name = "CreateCampaign"
  o.http_method = "POST"
@@ -961,6 +1046,16 @@ module Aws::Personalize
  o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
  end)

+ api.add_operation(:describe_batch_inference_job, Seahorse::Model::Operation.new.tap do |o|
+ o.name = "DescribeBatchInferenceJob"
+ o.http_method = "POST"
+ o.http_request_uri = "/"
+ o.input = Shapes::ShapeRef.new(shape: DescribeBatchInferenceJobRequest)
+ o.output = Shapes::ShapeRef.new(shape: DescribeBatchInferenceJobResponse)
+ o.errors << Shapes::ShapeRef.new(shape: InvalidInputException)
+ o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
+ end)
+
  api.add_operation(:describe_campaign, Seahorse::Model::Operation.new.tap do |o|
  o.name = "DescribeCampaign"
  o.http_method = "POST"
@@ -1072,6 +1167,22 @@ module Aws::Personalize
  o.errors << Shapes::ShapeRef.new(shape: ResourceInUseException)
  end)

+ api.add_operation(:list_batch_inference_jobs, Seahorse::Model::Operation.new.tap do |o|
+ o.name = "ListBatchInferenceJobs"
+ o.http_method = "POST"
+ o.http_request_uri = "/"
+ o.input = Shapes::ShapeRef.new(shape: ListBatchInferenceJobsRequest)
+ o.output = Shapes::ShapeRef.new(shape: ListBatchInferenceJobsResponse)
+ o.errors << Shapes::ShapeRef.new(shape: InvalidInputException)
+ o.errors << Shapes::ShapeRef.new(shape: InvalidNextTokenException)
+ o[:pager] = Aws::Pager.new(
+ limit_key: "max_results",
+ tokens: {
+ "next_token" => "next_token"
+ }
+ )
+ end)
+
  api.add_operation(:list_campaigns, Seahorse::Model::Operation.new.tap do |o|
  o.name = "ListCampaigns"
  o.http_method = "POST"
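
Because the hunk above registers a pager for list_batch_inference_jobs (limit_key "max_results", next_token mapping), the SDK's standard response pagination should apply to the new operation. A sketch, assuming solution_version_arn holds a valid ARN:

    # Iterate every page of results, letting the SDK follow next_token.
    client.list_batch_inference_jobs(solution_version_arn: solution_version_arn).each_page do |page|
      page.batch_inference_jobs.each { |summary| puts summary.batch_inference_job_arn }
    end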
@@ -132,6 +132,182 @@ module Aws::Personalize
  include Aws::Structure
  end

+ # Contains information on a batch inference job.
+ #
+ # @!attribute [rw] job_name
+ # The name of the batch inference job.
+ # @return [String]
+ #
+ # @!attribute [rw] batch_inference_job_arn
+ # The Amazon Resource Name (ARN) of the batch inference job.
+ # @return [String]
+ #
+ # @!attribute [rw] failure_reason
+ # If the batch inference job failed, the reason for the failure.
+ # @return [String]
+ #
+ # @!attribute [rw] solution_version_arn
+ # The Amazon Resource Name (ARN) of the solution version from which
+ # the batch inference job was created.
+ # @return [String]
+ #
+ # @!attribute [rw] num_results
+ # The number of recommendations generated by the batch inference job.
+ # This number includes the error messages generated for failed input
+ # records.
+ # @return [Integer]
+ #
+ # @!attribute [rw] job_input
+ # The Amazon S3 path that leads to the input data used to generate the
+ # batch inference job.
+ # @return [Types::BatchInferenceJobInput]
+ #
+ # @!attribute [rw] job_output
+ # The Amazon S3 bucket that contains the output data generated by the
+ # batch inference job.
+ # @return [Types::BatchInferenceJobOutput]
+ #
+ # @!attribute [rw] role_arn
+ # The ARN of the Amazon Identity and Access Management (IAM) role that
+ # requested the batch inference job.
+ # @return [String]
+ #
+ # @!attribute [rw] status
+ # The status of the batch inference job. The status is one of the
+ # following values:
+ #
+ # * PENDING
+ #
+ # * IN PROGRESS
+ #
+ # * ACTIVE
+ #
+ # * CREATE FAILED
+ # @return [String]
+ #
+ # @!attribute [rw] creation_date_time
+ # The time at which the batch inference job was created.
+ # @return [Time]
+ #
+ # @!attribute [rw] last_updated_date_time
+ # The time at which the batch inference job was last updated.
+ # @return [Time]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJob AWS API Documentation
+ #
+ class BatchInferenceJob < Struct.new(
+ :job_name,
+ :batch_inference_job_arn,
+ :failure_reason,
+ :solution_version_arn,
+ :num_results,
+ :job_input,
+ :job_output,
+ :role_arn,
+ :status,
+ :creation_date_time,
+ :last_updated_date_time)
+ include Aws::Structure
+ end
+
+ # The input configuration of a batch inference job.
+ #
+ # @note When making an API call, you may pass BatchInferenceJobInput
+ # data as a hash:
+ #
+ # {
+ # s3_data_source: { # required
+ # path: "S3Location", # required
+ # kms_key_arn: "KmsKeyArn",
+ # },
+ # }
+ #
+ # @!attribute [rw] s3_data_source
+ # The URI of the Amazon S3 location that contains your input data. The
+ # Amazon S3 bucket must be in the same region as the API endpoint you
+ # are calling.
+ # @return [Types::S3DataConfig]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJobInput AWS API Documentation
+ #
+ class BatchInferenceJobInput < Struct.new(
+ :s3_data_source)
+ include Aws::Structure
+ end
+
+ # The output configuration parameters of a batch inference job.
+ #
+ # @note When making an API call, you may pass BatchInferenceJobOutput
+ # data as a hash:
+ #
+ # {
+ # s3_data_destination: { # required
+ # path: "S3Location", # required
+ # kms_key_arn: "KmsKeyArn",
+ # },
+ # }
+ #
+ # @!attribute [rw] s3_data_destination
+ # Information on the Amazon S3 bucket in which the batch inference
+ # job's output is stored.
+ # @return [Types::S3DataConfig]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJobOutput AWS API Documentation
+ #
+ class BatchInferenceJobOutput < Struct.new(
+ :s3_data_destination)
+ include Aws::Structure
+ end
+
+ # A truncated version of the BatchInferenceJob datatype. The
+ # ListBatchInferenceJobs operation returns a list of batch inference job
+ # summaries.
+ #
+ # @!attribute [rw] batch_inference_job_arn
+ # The Amazon Resource Name (ARN) of the batch inference job.
+ # @return [String]
+ #
+ # @!attribute [rw] job_name
+ # The name of the batch inference job.
+ # @return [String]
+ #
+ # @!attribute [rw] status
+ # The status of the batch inference job. The status is one of the
+ # following values:
+ #
+ # * PENDING
+ #
+ # * IN PROGRESS
+ #
+ # * ACTIVE
+ #
+ # * CREATE FAILED
+ # @return [String]
+ #
+ # @!attribute [rw] creation_date_time
+ # The time at which the batch inference job was created.
+ # @return [Time]
+ #
+ # @!attribute [rw] last_updated_date_time
+ # The time at which the batch inference job was last updated.
+ # @return [Time]
+ #
+ # @!attribute [rw] failure_reason
+ # If the batch inference job failed, the reason for the failure.
+ # @return [String]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/BatchInferenceJobSummary AWS API Documentation
+ #
+ class BatchInferenceJobSummary < Struct.new(
+ :batch_inference_job_arn,
+ :job_name,
+ :status,
+ :creation_date_time,
+ :last_updated_date_time,
+ :failure_reason)
+ include Aws::Structure
+ end
+
  # Describes a deployed solution version, otherwise known as a campaign.
  # For more information on campaigns, see CreateCampaign.
  #
@@ -350,6 +526,80 @@ module Aws::Personalize
  include Aws::Structure
  end

+ # @note When making an API call, you may pass CreateBatchInferenceJobRequest
+ # data as a hash:
+ #
+ # {
+ # job_name: "Name", # required
+ # solution_version_arn: "Arn", # required
+ # num_results: 1,
+ # job_input: { # required
+ # s3_data_source: { # required
+ # path: "S3Location", # required
+ # kms_key_arn: "KmsKeyArn",
+ # },
+ # },
+ # job_output: { # required
+ # s3_data_destination: { # required
+ # path: "S3Location", # required
+ # kms_key_arn: "KmsKeyArn",
+ # },
+ # },
+ # role_arn: "RoleArn", # required
+ # }
+ #
+ # @!attribute [rw] job_name
+ # The name of the batch inference job to create.
+ # @return [String]
+ #
+ # @!attribute [rw] solution_version_arn
+ # The Amazon Resource Name (ARN) of the solution version that will be
+ # used to generate the batch inference recommendations.
+ # @return [String]
+ #
+ # @!attribute [rw] num_results
+ # The number of recommendations to retrieve.
+ # @return [Integer]
+ #
+ # @!attribute [rw] job_input
+ # The Amazon S3 path that leads to the input file to base your
+ # recommendations on. The input material must be in JSON format.
+ # @return [Types::BatchInferenceJobInput]
+ #
+ # @!attribute [rw] job_output
+ # The path to the Amazon S3 bucket where the job's output will be
+ # stored.
+ # @return [Types::BatchInferenceJobOutput]
+ #
+ # @!attribute [rw] role_arn
+ # The ARN of the Amazon Identity and Access Management role that has
+ # permissions to read and write to your input and output Amazon S3
+ # buckets, respectively.
+ # @return [String]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateBatchInferenceJobRequest AWS API Documentation
+ #
+ class CreateBatchInferenceJobRequest < Struct.new(
+ :job_name,
+ :solution_version_arn,
+ :num_results,
+ :job_input,
+ :job_output,
+ :role_arn)
+ include Aws::Structure
+ end
+
+ # @!attribute [rw] batch_inference_job_arn
+ # The ARN of the batch inference job.
+ # @return [String]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CreateBatchInferenceJobResponse AWS API Documentation
+ #
+ class CreateBatchInferenceJobResponse < Struct.new(
+ :batch_inference_job_arn)
+ include Aws::Structure
+ end
+
  # @note When making an API call, you may pass CreateCampaignRequest
  # data as a hash:
  #
@@ -1480,6 +1730,35 @@ module Aws::Personalize
  include Aws::Structure
  end

+ # @note When making an API call, you may pass DescribeBatchInferenceJobRequest
+ # data as a hash:
+ #
+ # {
+ # batch_inference_job_arn: "Arn", # required
+ # }
+ #
+ # @!attribute [rw] batch_inference_job_arn
+ # The ARN of the batch inference job to describe.
+ # @return [String]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeBatchInferenceJobRequest AWS API Documentation
+ #
+ class DescribeBatchInferenceJobRequest < Struct.new(
+ :batch_inference_job_arn)
+ include Aws::Structure
+ end
+
+ # @!attribute [rw] batch_inference_job
+ # Information on the specified batch inference job.
+ # @return [Types::BatchInferenceJob]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/DescribeBatchInferenceJobResponse AWS API Documentation
+ #
+ class DescribeBatchInferenceJobResponse < Struct.new(
+ :batch_inference_job)
+ include Aws::Structure
+ end
+
  # @note When making an API call, you may pass DescribeCampaignRequest
  # data as a hash:
  #
@@ -2214,6 +2493,55 @@ module Aws::Personalize
  include Aws::Structure
  end

+ # @note When making an API call, you may pass ListBatchInferenceJobsRequest
+ # data as a hash:
+ #
+ # {
+ # solution_version_arn: "Arn",
+ # next_token: "NextToken",
+ # max_results: 1,
+ # }
+ #
+ # @!attribute [rw] solution_version_arn
+ # The Amazon Resource Name (ARN) of the solution version from which
+ # the batch inference jobs were created.
+ # @return [String]
+ #
+ # @!attribute [rw] next_token
+ # The token to request the next page of results.
+ # @return [String]
+ #
+ # @!attribute [rw] max_results
+ # The maximum number of batch inference job results to return in each
+ # page. The default value is 100.
+ # @return [Integer]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListBatchInferenceJobsRequest AWS API Documentation
+ #
+ class ListBatchInferenceJobsRequest < Struct.new(
+ :solution_version_arn,
+ :next_token,
+ :max_results)
+ include Aws::Structure
+ end
+
+ # @!attribute [rw] batch_inference_jobs
+ # A list containing information on each job that is returned.
+ # @return [Array<Types::BatchInferenceJobSummary>]
+ #
+ # @!attribute [rw] next_token
+ # The token to use to retrieve the next page of results. The value is
+ # `null` when there are no more results to return.
+ # @return [String]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListBatchInferenceJobsResponse AWS API Documentation
+ #
+ class ListBatchInferenceJobsResponse < Struct.new(
+ :batch_inference_jobs,
+ :next_token)
+ include Aws::Structure
+ end
+
  # @note When making an API call, you may pass ListCampaignsRequest
  # data as a hash:
  #
@@ -2764,6 +3092,34 @@ module Aws::Personalize
  include Aws::Structure
  end

+ # The configuration details of an Amazon S3 input or output bucket.
+ #
+ # @note When making an API call, you may pass S3DataConfig
+ # data as a hash:
+ #
+ # {
+ # path: "S3Location", # required
+ # kms_key_arn: "KmsKeyArn",
+ # }
+ #
+ # @!attribute [rw] path
+ # The file path of the Amazon S3 bucket.
+ # @return [String]
+ #
+ # @!attribute [rw] kms_key_arn
+ # The Amazon Resource Name (ARN) of the Amazon Key Management Service
+ # (KMS) key that Amazon Personalize uses to encrypt or decrypt the
+ # input and output files of a batch inference job.
+ # @return [String]
+ #
+ # @see http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/S3DataConfig AWS API Documentation
+ #
+ class S3DataConfig < Struct.new(
+ :path,
+ :kms_key_arn)
+ include Aws::Structure
+ end
+
  # An object that provides information about a solution. A solution is a
  # trained model that can be deployed as a campaign.
  #
@@ -2910,9 +3266,7 @@ module Aws::Personalize
  # @return [String]
  #
  # @!attribute [rw] hpo_config
- # Describes the properties for hyperparameter optimization (HPO). For
- # use with the bring-your-own-recipe feature. Not used with Amazon
- # Personalize predefined recipes.
+ # Describes the properties for hyperparameter optimization (HPO).
  # @return [Types::HPOConfig]
  #
  # @!attribute [rw] algorithm_hyper_parameters
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: aws-sdk-personalize
  version: !ruby/object:Gem::Version
- version: 1.7.0
+ version: 1.8.0
  platform: ruby
  authors:
  - Amazon Web Services
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-10-23 00:00:00.000000000 Z
+ date: 2019-11-14 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-core