aws-sdk-gluedatabrew 1.3.0 → 1.8.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: bcb26d454bbc89f5adcad0a7856d2f13dfe93c99451e3085f7cd907ac549d7e3
- data.tar.gz: c93d5f51cdab992104d9aa9b6411946de2c09723a8a6ab8bba54bfd63a0cc090
+ metadata.gz: 85e9c68625b348efe52a6f9f3de795b06a2a81aa19f45317376e0d951f635ef4
+ data.tar.gz: f35b4be64f4035ec7ce3e2d85422503b00d22930399fb352239b33223c2b8b32
  SHA512:
- metadata.gz: 2e7f977bde7938c7d7e2dd032b0ec164ac8f42d5b95752cebe8c07c87202026956bc92375fabcd8fa9ad04cdc2a77aa52d2f606c0ee76ecaa734b0edbf0f14ea
- data.tar.gz: cf41aaf857708c41b0834149e0d5237ca10d7f51ecfc3440cd67e0258698e4c6c394588673e702ecc48792c95f5d90d3404995f1665f9a1b53821fbecf3b6b13
+ metadata.gz: 3d4b2a98550351998d4ee75e9a1ae7a0207de299c90d3cec09cd0cbfe0d9e5aea4b69ccdabc44c2076829f75241c88e07a4b465cae7c209c5bdfee594017f4dd
+ data.tar.gz: 280d6dde1839e64851752fa590acbeaf91544f834ccab845c4a00c1b84a5b42dc9c79c93374e4decc25a1ced9f51a7bbe64aa12d928a4e8f48a111661dbdcacf
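The SHA256 and SHA512 values above are the digests RubyGems records in the gem's checksums.yaml for the packaged metadata.gz and data.tar.gz. A minimal Ruby sketch of checking the new data.tar.gz SHA256 against a locally downloaded 1.8.0 gem (the local file path is a placeholder, not part of this diff):

    require 'digest'
    require 'rubygems/package'

    # Expected value taken from the new checksums.yaml entry above.
    expected = 'f35b4be64f4035ec7ce3e2d85422503b00d22930399fb352239b33223c2b8b32'

    # Hypothetical local path to the downloaded gem package.
    File.open('aws-sdk-gluedatabrew-1.8.0.gem', 'rb') do |gem_file|
      Gem::Package::TarReader.new(gem_file) do |tar|
        tar.each do |entry|
          next unless entry.full_name == 'data.tar.gz'
          actual = Digest::SHA256.hexdigest(entry.read)
          puts(actual == expected ? 'data.tar.gz checksum OK' : 'checksum mismatch')
        end
      end
    end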
data/CHANGELOG.md ADDED
@@ -0,0 +1,48 @@
+ Unreleased Changes
+ ------------------
+
+ 1.8.0 (2021-06-30)
+ ------------------
+
+ * Feature - Adds support for the output of job results to the AWS Glue Data Catalog.
+
+ 1.7.0 (2021-03-30)
+ ------------------
+
+ * Feature - This SDK release adds two new dataset features: 1) support for specifying a database connection as a dataset input 2) support for dynamic datasets that accept configurable parameters in S3 path.
+
+ 1.6.0 (2021-03-10)
+ ------------------
+
+ * Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
+ 1.5.0 (2021-02-25)
+ ------------------
+
+ * Feature - This SDK release adds two new dataset features: 1) support for specifying the file format for a dataset, and 2) support for specifying whether the first row of a CSV or Excel file contains a header.
+
+ 1.4.0 (2021-02-11)
+ ------------------
+
+ * Feature - This release adds support for profile job sampling, which determines the number of rows on which the profile job will be executed.
+
+ 1.3.0 (2021-02-03)
+ ------------------
+
+ * Feature - This release adds the DescribeJobRun API to allow customers retrieve details of a given job run
+
+ 1.2.0 (2021-02-02)
+ ------------------
+
+ * Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
+ 1.1.0 (2021-01-28)
+ ------------------
+
+ * Feature - This SDK release adds support for specifying a custom delimiter for input CSV datasets and for CSV job outputs.
+
+ 1.0.0 (2020-11-11)
+ ------------------
+
+ * Feature - Initial release of `aws-sdk-gluedatabrew`.
+
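The changelog entries above map onto new client parameters whose generated request syntax appears verbatim in the client diff further down. As a hedged sketch only (region, bucket, dataset, job, and role names are illustrative placeholders, not taken from this gem), the 1.5.0 dataset format/header options and the 1.4.0 profile-job sampling might be exercised like this:

    require 'aws-sdk-gluedatabrew'

    # Placeholder region and credentials resolved from the environment.
    client = Aws::GlueDataBrew::Client.new(region: 'us-east-1')

    # 1.5.0: declare the file format and a header row for a CSV dataset.
    client.create_dataset(
      name: 'sales-data',
      format: 'CSV',
      format_options: { csv: { delimiter: ',', header_row: true } },
      input: { s3_input_definition: { bucket: 'my-databrew-bucket', key: 'raw/sales.csv' } }
    )

    # 1.4.0: limit a profile job to a sample of rows via :job_sample.
    client.create_profile_job(
      dataset_name: 'sales-data',
      name: 'sales-profile',
      output_location: { bucket: 'my-databrew-bucket', key: 'profile-output/' },
      role_arn: 'arn:aws:iam::123456789012:role/DataBrewRole',
      job_sample: { mode: 'CUSTOM_ROWS', size: 20_000 }
    )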
data/LICENSE.txt ADDED
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/VERSION ADDED
@@ -0,0 +1 @@
+ 1.8.0
@@ -3,7 +3,7 @@
  # WARNING ABOUT GENERATED CODE
  #
  # This file is generated. See the contributing guide for more information:
- # https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
  #
  # WARNING ABOUT GENERATED CODE

@@ -48,6 +48,6 @@ require_relative 'aws-sdk-gluedatabrew/customizations'
  # @!group service
  module Aws::GlueDataBrew

- GEM_VERSION = '1.3.0'
+ GEM_VERSION = '1.8.0'

  end
@@ -3,7 +3,7 @@
  # WARNING ABOUT GENERATED CODE
  #
  # This file is generated. See the contributing guide for more information:
- # https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
  #
  # WARNING ABOUT GENERATED CODE

@@ -335,11 +335,11 @@ module Aws::GlueDataBrew
  #
  # * There is an invalid version identifier in the list of versions.
  #
- # * The verision list is empty.
+ # * The version list is empty.
  #
  # * The version list size exceeds 50.
  #
- # * The verison list contains duplicate entries.
+ # * The version list contains duplicate entries.
  #
  # The request will complete successfully, but with partial failures, if:
  #
@@ -399,12 +399,21 @@ module Aws::GlueDataBrew
  # The name of the dataset to be created. Valid characters are
  # alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.), and space.
  #
+ # @option params [String] :format
+ # The file format of a dataset that is created from an Amazon S3 file or
+ # folder.
+ #
  # @option params [Types::FormatOptions] :format_options
- # Options that define the structure of either Csv, Excel, or JSON input.
+ # Represents a set of options that define the structure of either
+ # comma-separated value (CSV), Excel, or JSON input.
  #
  # @option params [required, Types::Input] :input
- # Information on how DataBrew can find data, in either the AWS Glue Data
- # Catalog or Amazon S3.
+ # Represents information on how DataBrew can find data, in either the
+ # Glue Data Catalog or Amazon S3.
+ #
+ # @option params [Types::PathOptions] :path_options
+ # A set of options that defines how DataBrew interprets an Amazon S3
+ # path of the dataset.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this dataset.
@@ -417,6 +426,7 @@ module Aws::GlueDataBrew
  #
  # resp = client.create_dataset({
  # name: "DatasetName", # required
+ # format: "CSV", # accepts CSV, JSON, PARQUET, EXCEL
  # format_options: {
  # json: {
  # multi_line: false,
@@ -424,9 +434,11 @@ module Aws::GlueDataBrew
  # excel: {
  # sheet_names: ["SheetName"],
  # sheet_indexes: [1],
+ # header_row: false,
  # },
  # csv: {
  # delimiter: "Delimiter",
+ # header_row: false,
  # },
  # },
  # input: { # required
@@ -443,6 +455,45 @@ module Aws::GlueDataBrew
  # key: "Key",
  # },
  # },
+ # database_input_definition: {
+ # glue_connection_name: "GlueConnectionName", # required
+ # database_table_name: "DatabaseTableName", # required
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # },
+ # path_options: {
+ # last_modified_date_condition: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # files_limit: {
+ # max_files: 1, # required
+ # ordered_by: "LAST_MODIFIED_DATE", # accepts LAST_MODIFIED_DATE
+ # order: "DESCENDING", # accepts DESCENDING, ASCENDING
+ # },
+ # parameters: {
+ # "PathParameterName" => {
+ # name: "PathParameterName", # required
+ # type: "Datetime", # required, accepts Datetime, Number, String
+ # datetime_options: {
+ # format: "DatetimeFormat", # required
+ # timezone_offset: "TimezoneOffset",
+ # locale_code: "LocaleCode",
+ # },
+ # create_column: false,
+ # filter: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # },
+ # },
  # },
  # tags: {
  # "TagKey" => "TagValue",
@@ -474,8 +525,8 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - para&gt;`SSE-KMS` - server-side encryption with AWS
- # KMS-managed keys.
+ # * `SSE-KMS` - `SSE-KMS` - Server-side encryption with KMS-managed
+ # keys.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
  #
@@ -495,12 +546,12 @@ module Aws::GlueDataBrew
  # The maximum number of times to retry the job after a job run fails.
  #
  # @option params [required, Types::S3Location] :output_location
- # An Amazon S3 location (bucket name an object key) where DataBrew can
- # read input data, or write output from a job.
+ # Represents an Amazon S3 location (bucket name and object key) where
+ # DataBrew can read input data, or write output from a job.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this job.
@@ -509,6 +560,12 @@ module Aws::GlueDataBrew
  # The job's timeout in minutes. A job that attempts to run longer than
  # this timeout period ends with a status of `TIMEOUT`.
  #
+ # @option params [Types::JobSample] :job_sample
+ # Sample configuration for profile jobs only. Determines the number of
+ # rows on which the profile job will be executed. If a JobSample value
+ # is not provided, the default value will be used. The default value is
+ # CUSTOM\_ROWS for the mode parameter and 20000 for the size parameter.
+ #
  # @return [Types::CreateProfileJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
  #
  # * {Types::CreateProfileJobResponse#name #name} => String
@@ -532,6 +589,10 @@ module Aws::GlueDataBrew
  # "TagKey" => "TagValue",
  # },
  # timeout: 1,
+ # job_sample: {
+ # mode: "FULL_DATASET", # accepts FULL_DATASET, CUSTOM_ROWS
+ # size: 1,
+ # },
  # })
  #
  # @example Response structure
@@ -564,8 +625,8 @@ module Aws::GlueDataBrew
  # interactive data analysis.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed for this request.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed for this request.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this project.
@@ -665,7 +726,7 @@ module Aws::GlueDataBrew
  end

  # Creates a new job to transform input data, using steps defined in an
- # existing AWS Glue DataBrew recipe
+ # existing Glue DataBrew recipe
  #
  # @option params [String] :dataset_name
  # The name of the dataset that this job processes.
@@ -677,7 +738,7 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - Server-side encryption with AWS KMS-managed keys.
+ # * `SSE-KMS` - Server-side encryption with keys managed by KMS.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
  #
@@ -696,9 +757,13 @@ module Aws::GlueDataBrew
  # @option params [Integer] :max_retries
  # The maximum number of times to retry the job after a job run fails.
  #
- # @option params [required, Array<Types::Output>] :outputs
+ # @option params [Array<Types::Output>] :outputs
  # One or more artifacts that represent the output from running the job.
  #
+ # @option params [Array<Types::DataCatalogOutput>] :data_catalog_outputs
+ # One or more artifacts that represent the AWS Glue Data Catalog output
+ # from running the job.
+ #
  # @option params [String] :project_name
  # Either the name of an existing project, or a combination of a recipe
  # and a dataset to associate with the recipe.
@@ -707,8 +772,8 @@ module Aws::GlueDataBrew
  # Represents the name and version of a DataBrew recipe.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this job.
@@ -731,7 +796,7 @@ module Aws::GlueDataBrew
  # log_subscription: "ENABLE", # accepts ENABLE, DISABLE
  # max_capacity: 1,
  # max_retries: 1,
- # outputs: [ # required
+ # outputs: [
  # {
  # compression_format: "GZIP", # accepts GZIP, LZ4, SNAPPY, BZIP2, DEFLATE, LZO, BROTLI, ZSTD, ZLIB
  # format: "CSV", # accepts CSV, JSON, PARQUET, GLUEPARQUET, AVRO, ORC, XML
@@ -748,6 +813,27 @@ module Aws::GlueDataBrew
  # },
  # },
  # ],
+ # data_catalog_outputs: [
+ # {
+ # catalog_id: "CatalogId",
+ # database_name: "DatabaseName", # required
+ # table_name: "TableName", # required
+ # s3_options: {
+ # location: { # required
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # database_options: {
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # table_name: "DatabaseTableName", # required
+ # },
+ # overwrite: false,
+ # },
+ # ],
  # project_name: "ProjectName",
  # recipe_reference: {
  # name: "RecipeName", # required
@@ -781,7 +867,7 @@ module Aws::GlueDataBrew
  #
  # @option params [required, String] :cron_expression
  # The date or dates and time or times when the jobs are to be run. For
- # more information, see [Cron expressions][1] in the *AWS Glue DataBrew
+ # more information, see [Cron expressions][1] in the *Glue DataBrew
  # Developer Guide*.
  #
  #
@@ -981,11 +1067,13 @@ module Aws::GlueDataBrew
  # * {Types::DescribeDatasetResponse#created_by #created_by} => String
  # * {Types::DescribeDatasetResponse#create_date #create_date} => Time
  # * {Types::DescribeDatasetResponse#name #name} => String
+ # * {Types::DescribeDatasetResponse#format #format} => String
  # * {Types::DescribeDatasetResponse#format_options #format_options} => Types::FormatOptions
  # * {Types::DescribeDatasetResponse#input #input} => Types::Input
  # * {Types::DescribeDatasetResponse#last_modified_date #last_modified_date} => Time
  # * {Types::DescribeDatasetResponse#last_modified_by #last_modified_by} => String
  # * {Types::DescribeDatasetResponse#source #source} => String
+ # * {Types::DescribeDatasetResponse#path_options #path_options} => Types::PathOptions
  # * {Types::DescribeDatasetResponse#tags #tags} => Hash&lt;String,String&gt;
  # * {Types::DescribeDatasetResponse#resource_arn #resource_arn} => String
  #
@@ -1000,12 +1088,15 @@ module Aws::GlueDataBrew
  # resp.created_by #=> String
  # resp.create_date #=> Time
  # resp.name #=> String
+ # resp.format #=> String, one of "CSV", "JSON", "PARQUET", "EXCEL"
  # resp.format_options.json.multi_line #=> Boolean
  # resp.format_options.excel.sheet_names #=> Array
  # resp.format_options.excel.sheet_names[0] #=> String
  # resp.format_options.excel.sheet_indexes #=> Array
  # resp.format_options.excel.sheet_indexes[0] #=> Integer
+ # resp.format_options.excel.header_row #=> Boolean
  # resp.format_options.csv.delimiter #=> String
+ # resp.format_options.csv.header_row #=> Boolean
  # resp.input.s3_input_definition.bucket #=> String
  # resp.input.s3_input_definition.key #=> String
  # resp.input.data_catalog_input_definition.catalog_id #=> String
@@ -1013,9 +1104,29 @@ module Aws::GlueDataBrew
  # resp.input.data_catalog_input_definition.table_name #=> String
  # resp.input.data_catalog_input_definition.temp_directory.bucket #=> String
  # resp.input.data_catalog_input_definition.temp_directory.key #=> String
+ # resp.input.database_input_definition.glue_connection_name #=> String
+ # resp.input.database_input_definition.database_table_name #=> String
+ # resp.input.database_input_definition.temp_directory.bucket #=> String
+ # resp.input.database_input_definition.temp_directory.key #=> String
  # resp.last_modified_date #=> Time
  # resp.last_modified_by #=> String
- # resp.source #=> String, one of "S3", "DATA-CATALOG"
+ # resp.source #=> String, one of "S3", "DATA-CATALOG", "DATABASE"
+ # resp.path_options.last_modified_date_condition.expression #=> String
+ # resp.path_options.last_modified_date_condition.values_map #=> Hash
+ # resp.path_options.last_modified_date_condition.values_map["ValueReference"] #=> String
+ # resp.path_options.files_limit.max_files #=> Integer
+ # resp.path_options.files_limit.ordered_by #=> String, one of "LAST_MODIFIED_DATE"
+ # resp.path_options.files_limit.order #=> String, one of "DESCENDING", "ASCENDING"
+ # resp.path_options.parameters #=> Hash
+ # resp.path_options.parameters["PathParameterName"].name #=> String
+ # resp.path_options.parameters["PathParameterName"].type #=> String, one of "Datetime", "Number", "String"
+ # resp.path_options.parameters["PathParameterName"].datetime_options.format #=> String
+ # resp.path_options.parameters["PathParameterName"].datetime_options.timezone_offset #=> String
+ # resp.path_options.parameters["PathParameterName"].datetime_options.locale_code #=> String
+ # resp.path_options.parameters["PathParameterName"].create_column #=> Boolean
+ # resp.path_options.parameters["PathParameterName"].filter.expression #=> String
+ # resp.path_options.parameters["PathParameterName"].filter.values_map #=> Hash
+ # resp.path_options.parameters["PathParameterName"].filter.values_map["ValueReference"] #=> String
  # resp.tags #=> Hash
  # resp.tags["TagKey"] #=> String
  # resp.resource_arn #=> String
@@ -1049,12 +1160,14 @@ module Aws::GlueDataBrew
  # * {Types::DescribeJobResponse#max_capacity #max_capacity} => Integer
  # * {Types::DescribeJobResponse#max_retries #max_retries} => Integer
  # * {Types::DescribeJobResponse#outputs #outputs} => Array&lt;Types::Output&gt;
+ # * {Types::DescribeJobResponse#data_catalog_outputs #data_catalog_outputs} => Array&lt;Types::DataCatalogOutput&gt;
  # * {Types::DescribeJobResponse#project_name #project_name} => String
  # * {Types::DescribeJobResponse#recipe_reference #recipe_reference} => Types::RecipeReference
  # * {Types::DescribeJobResponse#resource_arn #resource_arn} => String
  # * {Types::DescribeJobResponse#role_arn #role_arn} => String
  # * {Types::DescribeJobResponse#tags #tags} => Hash&lt;String,String&gt;
  # * {Types::DescribeJobResponse#timeout #timeout} => Integer
+ # * {Types::DescribeJobResponse#job_sample #job_sample} => Types::JobSample
  #
  # @example Request syntax with placeholder values
  #
@@ -1085,6 +1198,16 @@ module Aws::GlueDataBrew
  # resp.outputs[0].location.key #=> String
  # resp.outputs[0].overwrite #=> Boolean
  # resp.outputs[0].format_options.csv.delimiter #=> String
+ # resp.data_catalog_outputs #=> Array
+ # resp.data_catalog_outputs[0].catalog_id #=> String
+ # resp.data_catalog_outputs[0].database_name #=> String
+ # resp.data_catalog_outputs[0].table_name #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.data_catalog_outputs[0].overwrite #=> Boolean
  # resp.project_name #=> String
  # resp.recipe_reference.name #=> String
  # resp.recipe_reference.recipe_version #=> String
@@ -1093,6 +1216,8 @@ module Aws::GlueDataBrew
  # resp.tags #=> Hash
  # resp.tags["TagKey"] #=> String
  # resp.timeout #=> Integer
+ # resp.job_sample.mode #=> String, one of "FULL_DATASET", "CUSTOM_ROWS"
+ # resp.job_sample.size #=> Integer
  #
  # @see http://docs.aws.amazon.com/goto/WebAPI/databrew-2017-07-25/DescribeJob AWS API Documentation
  #
@@ -1124,9 +1249,11 @@ module Aws::GlueDataBrew
  # * {Types::DescribeJobRunResponse#log_subscription #log_subscription} => String
  # * {Types::DescribeJobRunResponse#log_group_name #log_group_name} => String
  # * {Types::DescribeJobRunResponse#outputs #outputs} => Array&lt;Types::Output&gt;
+ # * {Types::DescribeJobRunResponse#data_catalog_outputs #data_catalog_outputs} => Array&lt;Types::DataCatalogOutput&gt;
  # * {Types::DescribeJobRunResponse#recipe_reference #recipe_reference} => Types::RecipeReference
  # * {Types::DescribeJobRunResponse#started_by #started_by} => String
  # * {Types::DescribeJobRunResponse#started_on #started_on} => Time
+ # * {Types::DescribeJobRunResponse#job_sample #job_sample} => Types::JobSample
  #
  # @example Request syntax with placeholder values
  #
@@ -1156,10 +1283,22 @@ module Aws::GlueDataBrew
  # resp.outputs[0].location.key #=> String
  # resp.outputs[0].overwrite #=> Boolean
  # resp.outputs[0].format_options.csv.delimiter #=> String
+ # resp.data_catalog_outputs #=> Array
+ # resp.data_catalog_outputs[0].catalog_id #=> String
+ # resp.data_catalog_outputs[0].database_name #=> String
+ # resp.data_catalog_outputs[0].table_name #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.data_catalog_outputs[0].overwrite #=> Boolean
  # resp.recipe_reference.name #=> String
  # resp.recipe_reference.recipe_version #=> String
  # resp.started_by #=> String
  # resp.started_on #=> Time
+ # resp.job_sample.mode #=> String, one of "FULL_DATASET", "CUSTOM_ROWS"
+ # resp.job_sample.size #=> Integer
  #
  # @see http://docs.aws.amazon.com/goto/WebAPI/databrew-2017-07-25/DescribeJobRun AWS API Documentation
  #
@@ -1368,12 +1507,15 @@ module Aws::GlueDataBrew
  # resp.datasets[0].created_by #=> String
  # resp.datasets[0].create_date #=> Time
  # resp.datasets[0].name #=> String
+ # resp.datasets[0].format #=> String, one of "CSV", "JSON", "PARQUET", "EXCEL"
  # resp.datasets[0].format_options.json.multi_line #=> Boolean
  # resp.datasets[0].format_options.excel.sheet_names #=> Array
  # resp.datasets[0].format_options.excel.sheet_names[0] #=> String
  # resp.datasets[0].format_options.excel.sheet_indexes #=> Array
  # resp.datasets[0].format_options.excel.sheet_indexes[0] #=> Integer
+ # resp.datasets[0].format_options.excel.header_row #=> Boolean
  # resp.datasets[0].format_options.csv.delimiter #=> String
+ # resp.datasets[0].format_options.csv.header_row #=> Boolean
  # resp.datasets[0].input.s3_input_definition.bucket #=> String
  # resp.datasets[0].input.s3_input_definition.key #=> String
  # resp.datasets[0].input.data_catalog_input_definition.catalog_id #=> String
@@ -1381,9 +1523,29 @@ module Aws::GlueDataBrew
  # resp.datasets[0].input.data_catalog_input_definition.table_name #=> String
  # resp.datasets[0].input.data_catalog_input_definition.temp_directory.bucket #=> String
  # resp.datasets[0].input.data_catalog_input_definition.temp_directory.key #=> String
+ # resp.datasets[0].input.database_input_definition.glue_connection_name #=> String
+ # resp.datasets[0].input.database_input_definition.database_table_name #=> String
+ # resp.datasets[0].input.database_input_definition.temp_directory.bucket #=> String
+ # resp.datasets[0].input.database_input_definition.temp_directory.key #=> String
  # resp.datasets[0].last_modified_date #=> Time
  # resp.datasets[0].last_modified_by #=> String
- # resp.datasets[0].source #=> String, one of "S3", "DATA-CATALOG"
+ # resp.datasets[0].source #=> String, one of "S3", "DATA-CATALOG", "DATABASE"
+ # resp.datasets[0].path_options.last_modified_date_condition.expression #=> String
+ # resp.datasets[0].path_options.last_modified_date_condition.values_map #=> Hash
+ # resp.datasets[0].path_options.last_modified_date_condition.values_map["ValueReference"] #=> String
+ # resp.datasets[0].path_options.files_limit.max_files #=> Integer
+ # resp.datasets[0].path_options.files_limit.ordered_by #=> String, one of "LAST_MODIFIED_DATE"
+ # resp.datasets[0].path_options.files_limit.order #=> String, one of "DESCENDING", "ASCENDING"
+ # resp.datasets[0].path_options.parameters #=> Hash
+ # resp.datasets[0].path_options.parameters["PathParameterName"].name #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].type #=> String, one of "Datetime", "Number", "String"
+ # resp.datasets[0].path_options.parameters["PathParameterName"].datetime_options.format #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].datetime_options.timezone_offset #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].datetime_options.locale_code #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].create_column #=> Boolean
+ # resp.datasets[0].path_options.parameters["PathParameterName"].filter.expression #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].filter.values_map #=> Hash
+ # resp.datasets[0].path_options.parameters["PathParameterName"].filter.values_map["ValueReference"] #=> String
  # resp.datasets[0].tags #=> Hash
  # resp.datasets[0].tags["TagKey"] #=> String
  # resp.datasets[0].resource_arn #=> String
@@ -1447,10 +1609,22 @@ module Aws::GlueDataBrew
  # resp.job_runs[0].outputs[0].location.key #=> String
  # resp.job_runs[0].outputs[0].overwrite #=> Boolean
  # resp.job_runs[0].outputs[0].format_options.csv.delimiter #=> String
+ # resp.job_runs[0].data_catalog_outputs #=> Array
+ # resp.job_runs[0].data_catalog_outputs[0].catalog_id #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_name #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].table_name #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].overwrite #=> Boolean
  # resp.job_runs[0].recipe_reference.name #=> String
  # resp.job_runs[0].recipe_reference.recipe_version #=> String
  # resp.job_runs[0].started_by #=> String
  # resp.job_runs[0].started_on #=> Time
+ # resp.job_runs[0].job_sample.mode #=> String, one of "FULL_DATASET", "CUSTOM_ROWS"
+ # resp.job_runs[0].job_sample.size #=> Integer
  # resp.next_token #=> String
  #
  # @see http://docs.aws.amazon.com/goto/WebAPI/databrew-2017-07-25/ListJobRuns AWS API Documentation
@@ -1522,6 +1696,16 @@ module Aws::GlueDataBrew
  # resp.jobs[0].outputs[0].location.key #=> String
  # resp.jobs[0].outputs[0].overwrite #=> Boolean
  # resp.jobs[0].outputs[0].format_options.csv.delimiter #=> String
+ # resp.jobs[0].data_catalog_outputs #=> Array
+ # resp.jobs[0].data_catalog_outputs[0].catalog_id #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_name #=> String
+ # resp.jobs[0].data_catalog_outputs[0].table_name #=> String
+ # resp.jobs[0].data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.jobs[0].data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.jobs[0].data_catalog_outputs[0].overwrite #=> Boolean
  # resp.jobs[0].project_name #=> String
  # resp.jobs[0].recipe_reference.name #=> String
  # resp.jobs[0].recipe_reference.recipe_version #=> String
@@ -1530,6 +1714,8 @@ module Aws::GlueDataBrew
  # resp.jobs[0].timeout #=> Integer
  # resp.jobs[0].tags #=> Hash
  # resp.jobs[0].tags["TagKey"] #=> String
+ # resp.jobs[0].job_sample.mode #=> String, one of "FULL_DATASET", "CUSTOM_ROWS"
+ # resp.jobs[0].job_sample.size #=> Integer
  # resp.next_token #=> String
  #
  # @see http://docs.aws.amazon.com/goto/WebAPI/databrew-2017-07-25/ListJobs AWS API Documentation
@@ -1862,7 +2048,7 @@ module Aws::GlueDataBrew
  # and ready for work. The action will be performed on this session.
  #
  # @option params [Types::ViewFrame] :view_frame
- # Represents the data being being transformed during an action.
+ # Represents the data being transformed during an action.
  #
  # @return [Types::SendProjectSessionActionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
  #
@@ -2073,12 +2259,21 @@ module Aws::GlueDataBrew
  # @option params [required, String] :name
  # The name of the dataset to be updated.
  #
+ # @option params [String] :format
+ # The file format of a dataset that is created from an Amazon S3 file or
+ # folder.
+ #
  # @option params [Types::FormatOptions] :format_options
- # Options that define the structure of either Csv, Excel, or JSON input.
+ # Represents a set of options that define the structure of either
+ # comma-separated value (CSV), Excel, or JSON input.
  #
  # @option params [required, Types::Input] :input
- # Information on how DataBrew can find data, in either the AWS Glue Data
- # Catalog or Amazon S3.
+ # Represents information on how DataBrew can find data, in either the
+ # Glue Data Catalog or Amazon S3.
+ #
+ # @option params [Types::PathOptions] :path_options
+ # A set of options that defines how DataBrew interprets an Amazon S3
+ # path of the dataset.
  #
  # @return [Types::UpdateDatasetResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
  #
@@ -2088,6 +2283,7 @@ module Aws::GlueDataBrew
  #
  # resp = client.update_dataset({
  # name: "DatasetName", # required
+ # format: "CSV", # accepts CSV, JSON, PARQUET, EXCEL
  # format_options: {
  # json: {
  # multi_line: false,
@@ -2095,9 +2291,11 @@ module Aws::GlueDataBrew
  # excel: {
  # sheet_names: ["SheetName"],
  # sheet_indexes: [1],
+ # header_row: false,
  # },
  # csv: {
  # delimiter: "Delimiter",
+ # header_row: false,
  # },
  # },
  # input: { # required
@@ -2114,6 +2312,45 @@ module Aws::GlueDataBrew
  # key: "Key",
  # },
  # },
+ # database_input_definition: {
+ # glue_connection_name: "GlueConnectionName", # required
+ # database_table_name: "DatabaseTableName", # required
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # },
+ # path_options: {
+ # last_modified_date_condition: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # files_limit: {
+ # max_files: 1, # required
+ # ordered_by: "LAST_MODIFIED_DATE", # accepts LAST_MODIFIED_DATE
+ # order: "DESCENDING", # accepts DESCENDING, ASCENDING
+ # },
+ # parameters: {
+ # "PathParameterName" => {
+ # name: "PathParameterName", # required
+ # type: "Datetime", # required, accepts Datetime, Number, String
+ # datetime_options: {
+ # format: "DatetimeFormat", # required
+ # timezone_offset: "TimezoneOffset",
+ # locale_code: "LocaleCode",
+ # },
+ # create_column: false,
+ # filter: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # },
+ # },
  # },
  # })
  #
@@ -2139,7 +2376,7 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - Server-side encryption with AWS KMS-managed keys.
+ # * `SSE-KMS` - Server-side encryption with keys managed by KMS.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
  #
@@ -2158,17 +2395,24 @@ module Aws::GlueDataBrew
  # The maximum number of times to retry the job after a job run fails.
  #
  # @option params [required, Types::S3Location] :output_location
- # An Amazon S3 location (bucket name an object key) where DataBrew can
- # read input data, or write output from a job.
+ # Represents an Amazon S3 location (bucket name and object key) where
+ # DataBrew can read input data, or write output from a job.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Integer] :timeout
  # The job's timeout in minutes. A job that attempts to run longer than
  # this timeout period ends with a status of `TIMEOUT`.
  #
+ # @option params [Types::JobSample] :job_sample
+ # Sample configuration for Profile Jobs only. Determines the number of
+ # rows on which the Profile job will be executed. If a JobSample value
+ # is not provided for profile jobs, the default value will be used. The
+ # default value is CUSTOM\_ROWS for the mode parameter and 20000 for the
+ # size parameter.
+ #
  # @return [Types::UpdateProfileJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
  #
  # * {Types::UpdateProfileJobResponse#name #name} => String
@@ -2188,6 +2432,10 @@ module Aws::GlueDataBrew
  # },
  # role_arn: "Arn", # required
  # timeout: 1,
+ # job_sample: {
+ # mode: "FULL_DATASET", # accepts FULL_DATASET, CUSTOM_ROWS
+ # size: 1,
+ # },
  # })
  #
  # @example Response structure
@@ -2309,7 +2557,7 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - Server-side encryption with AWS KMS-managed keys.
+ # * `SSE-KMS` - Server-side encryption with keys managed by KMS.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
  #
@@ -2327,12 +2575,16 @@ module Aws::GlueDataBrew
  # @option params [Integer] :max_retries
  # The maximum number of times to retry the job after a job run fails.
  #
- # @option params [required, Array<Types::Output>] :outputs
+ # @option params [Array<Types::Output>] :outputs
  # One or more artifacts that represent the output from running the job.
  #
+ # @option params [Array<Types::DataCatalogOutput>] :data_catalog_outputs
+ # One or more artifacts that represent the AWS Glue Data Catalog output
+ # from running the job.
+ #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Integer] :timeout
  # The job's timeout in minutes. A job that attempts to run longer than
@@ -2351,7 +2603,7 @@ module Aws::GlueDataBrew
  # log_subscription: "ENABLE", # accepts ENABLE, DISABLE
  # max_capacity: 1,
  # max_retries: 1,
- # outputs: [ # required
+ # outputs: [
  # {
  # compression_format: "GZIP", # accepts GZIP, LZ4, SNAPPY, BZIP2, DEFLATE, LZO, BROTLI, ZSTD, ZLIB
  # format: "CSV", # accepts CSV, JSON, PARQUET, GLUEPARQUET, AVRO, ORC, XML
@@ -2368,6 +2620,27 @@ module Aws::GlueDataBrew
  # },
  # },
  # ],
+ # data_catalog_outputs: [
+ # {
+ # catalog_id: "CatalogId",
+ # database_name: "DatabaseName", # required
+ # table_name: "TableName", # required
+ # s3_options: {
+ # location: { # required
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # database_options: {
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # table_name: "DatabaseTableName", # required
+ # },
+ # overwrite: false,
+ # },
+ # ],
  # role_arn: "Arn", # required
  # timeout: 1,
  # })
@@ -2392,7 +2665,7 @@ module Aws::GlueDataBrew
  #
  # @option params [required, String] :cron_expression
  # The date or dates and time or times when the jobs are to be run. For
- # more information, see [Cron expressions][1] in the *AWS Glue DataBrew
+ # more information, see [Cron expressions][1] in the *Glue DataBrew
  # Developer Guide*.
  #
  #
@@ -2440,7 +2713,7 @@ module Aws::GlueDataBrew
  params: params,
  config: config)
  context[:gem_name] = 'aws-sdk-gluedatabrew'
- context[:gem_version] = '1.3.0'
+ context[:gem_version] = '1.8.0'
  Seahorse::Client::Request.new(handlers, context)
  end
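For the headline 1.8.0 feature, a short hedged example of writing recipe-job results to the AWS Glue Data Catalog, mirroring the create_recipe_job request syntax shown in the hunks above (all resource names are placeholders, not taken from this gem):

    require 'aws-sdk-gluedatabrew'

    # Placeholder region; credentials resolved from the environment.
    client = Aws::GlueDataBrew::Client.new(region: 'us-east-1')

    client.create_recipe_job(
      name: 'sales-cleanup-job',
      dataset_name: 'sales-data',
      recipe_reference: { name: 'sales-cleanup-recipe', recipe_version: '1.0' },
      role_arn: 'arn:aws:iam::123456789012:role/DataBrewRole',
      # 1.8.0: write job results to the Glue Data Catalog; plain S3
      # :outputs are now optional rather than required.
      data_catalog_outputs: [
        {
          database_name: 'analytics_db',
          table_name: 'sales_clean',
          s3_options: { location: { bucket: 'my-databrew-bucket', key: 'catalog-output/' } },
          overwrite: true
        }
      ]
    )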