aws-sdk-gluedatabrew 1.5.0 → 1.9.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 511e98419a5208c4a5b26a5e8a15e3003616ef9d397d8e0020bfcf5c068c8819
- data.tar.gz: 56225935d6dc9549438a0c876867411ef4282613567ea919b1f94e0a0254ea6d
+ metadata.gz: 97b0f8e55b3db7d09c04e29a290942d848be07288f239418d0c3542bb7b28ce8
+ data.tar.gz: 2f78725963736670edb295acbb1c49cb0a76a9e7d86540f16f974faa999c5c78
  SHA512:
- metadata.gz: 82c04d5d87a729404100300ce95b6daee496ae8409cbf47bb1e389c2e6f4603ce4a923b1a75655b08e847334efe316ba981ba4f55171c92c893a2f83747a42b0
- data.tar.gz: 18e03cf8ed7061bf6284049d868374b4bf9c9a0257f57fbf4f6a368be11b2b8ec9229e6124565714921b72c2d913e38a74a8309b60fed1f990aef96ef8136204
+ metadata.gz: '09dcdb81f2f6a6d9b68bccf8007c074f204a05d3d07b08dd5d476969768b0ec90f473b72eb63fc5edeb2e49d1e0611af24c51728e2fa026542117e5c181187a5'
+ data.tar.gz: eb9ae3cf20197ea283856989c8cc80dd84c3deeb720b5f6f670531a7cbb3e59f6daba338fd970a5192f9c3ddb6028c65ee545c073dabad44797eaf1f615300c4
data/CHANGELOG.md ADDED
@@ -0,0 +1,53 @@
+ Unreleased Changes
+ ------------------
+
+ 1.9.0 (2021-07-22)
+ ------------------
+
+ * Feature - This SDK release adds two new features: 1) output to native JDBC destinations, and 2) configurations for profile jobs.
+
+ 1.8.0 (2021-06-30)
+ ------------------
+
+ * Feature - Adds support for the output of job results to the AWS Glue Data Catalog.
+
+ 1.7.0 (2021-03-30)
+ ------------------
+
+ * Feature - This SDK release adds two new dataset features: 1) support for specifying a database connection as a dataset input, and 2) support for dynamic datasets that accept configurable parameters in the S3 path.
+
+ 1.6.0 (2021-03-10)
+ ------------------
+
+ * Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
+ 1.5.0 (2021-02-25)
+ ------------------
+
+ * Feature - This SDK release adds two new dataset features: 1) support for specifying the file format for a dataset, and 2) support for specifying whether the first row of a CSV or Excel file contains a header.
+
+ 1.4.0 (2021-02-11)
+ ------------------
+
+ * Feature - This release adds support for profile job sampling, which determines the number of rows on which the profile job will be executed.
+
+ 1.3.0 (2021-02-03)
+ ------------------
+
+ * Feature - This release adds the DescribeJobRun API to allow customers to retrieve the details of a given job run.
+
+ 1.2.0 (2021-02-02)
+ ------------------
+
+ * Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
+ 1.1.0 (2021-01-28)
+ ------------------
+
+ * Feature - This SDK release adds support for specifying a custom delimiter for input CSV datasets and for CSV job outputs.
+
+ 1.0.0 (2020-11-11)
+ ------------------
+
+ * Feature - Initial release of `aws-sdk-gluedatabrew`.
+
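
To pick up these releases, a Gemfile can use an ordinary Bundler pin; the constraint below is the usual convention, not something this gem requires:

    # Gemfile -- require at least 1.9 while staying on the 1.x series
    gem 'aws-sdk-gluedatabrew', '~> 1.9'
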
data/LICENSE.txt ADDED
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/VERSION ADDED
@@ -0,0 +1 @@
+ 1.9.0
@@ -3,7 +3,7 @@
  # WARNING ABOUT GENERATED CODE
  #
  # This file is generated. See the contributing guide for more information:
- # https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
  #
  # WARNING ABOUT GENERATED CODE

@@ -48,6 +48,6 @@ require_relative 'aws-sdk-gluedatabrew/customizations'
  # @!group service
  module Aws::GlueDataBrew

- GEM_VERSION = '1.5.0'
+ GEM_VERSION = '1.9.0'

  end
@@ -3,7 +3,7 @@
  # WARNING ABOUT GENERATED CODE
  #
  # This file is generated. See the contributing guide for more information:
- # https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
+ # https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
  #
  # WARNING ABOUT GENERATED CODE

@@ -400,15 +400,20 @@ module Aws::GlueDataBrew
  # alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.), and space.
  #
  # @option params [String] :format
- # Specifies the file format of a dataset created from an S3 file or
+ # The file format of a dataset that is created from an Amazon S3 file or
  # folder.
  #
  # @option params [Types::FormatOptions] :format_options
- # Options that define the structure of either Csv, Excel, or JSON input.
+ # Represents a set of options that define the structure of either
+ # comma-separated value (CSV), Excel, or JSON input.
  #
  # @option params [required, Types::Input] :input
- # Information on how DataBrew can find data, in either the AWS Glue Data
- # Catalog or Amazon S3.
+ # Represents information on how DataBrew can find data, in either the
+ # Glue Data Catalog or Amazon S3.
+ #
+ # @option params [Types::PathOptions] :path_options
+ # A set of options that defines how DataBrew interprets an Amazon S3
+ # path of the dataset.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this dataset.
@@ -450,6 +455,45 @@ module Aws::GlueDataBrew
  # key: "Key",
  # },
  # },
+ # database_input_definition: {
+ # glue_connection_name: "GlueConnectionName", # required
+ # database_table_name: "DatabaseTableName", # required
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # },
+ # path_options: {
+ # last_modified_date_condition: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # files_limit: {
+ # max_files: 1, # required
+ # ordered_by: "LAST_MODIFIED_DATE", # accepts LAST_MODIFIED_DATE
+ # order: "DESCENDING", # accepts DESCENDING, ASCENDING
+ # },
+ # parameters: {
+ # "PathParameterName" => {
+ # name: "PathParameterName", # required
+ # type: "Datetime", # required, accepts Datetime, Number, String
+ # datetime_options: {
+ # format: "DatetimeFormat", # required
+ # timezone_offset: "TimezoneOffset",
+ # locale_code: "LocaleCode",
+ # },
+ # create_column: false,
+ # filter: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # },
+ # },
  # },
  # tags: {
  # "TagKey" => "TagValue",
@@ -481,7 +525,7 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - `SSE-KMS` - Server-side encryption with AWS KMS-managed
+ # * `SSE-KMS` - `SSE-KMS` - Server-side encryption with KMS-managed
  # keys.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
@@ -502,12 +546,17 @@ module Aws::GlueDataBrew
  # The maximum number of times to retry the job after a job run fails.
  #
  # @option params [required, Types::S3Location] :output_location
- # An Amazon S3 location (bucket name an object key) where DataBrew can
- # read input data, or write output from a job.
+ # Represents an Amazon S3 location (bucket name and object key) where
+ # DataBrew can read input data, or write output from a job.
+ #
+ # @option params [Types::ProfileConfiguration] :configuration
+ # Configuration for profile jobs. Used to select columns, do
+ # evaluations, and override default parameters of evaluations. When
+ # configuration is null, the profile job will run with default settings.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this job.
@@ -540,6 +589,46 @@ module Aws::GlueDataBrew
  # bucket: "Bucket", # required
  # key: "Key",
  # },
+ # configuration: {
+ # dataset_statistics_configuration: {
+ # included_statistics: ["Statistic"],
+ # overrides: [
+ # {
+ # statistic: "Statistic", # required
+ # parameters: { # required
+ # "ParameterName" => "ParameterValue",
+ # },
+ # },
+ # ],
+ # },
+ # profile_columns: [
+ # {
+ # regex: "ColumnName",
+ # name: "ColumnName",
+ # },
+ # ],
+ # column_statistics_configurations: [
+ # {
+ # selectors: [
+ # {
+ # regex: "ColumnName",
+ # name: "ColumnName",
+ # },
+ # ],
+ # statistics: { # required
+ # included_statistics: ["Statistic"],
+ # overrides: [
+ # {
+ # statistic: "Statistic", # required
+ # parameters: { # required
+ # "ParameterName" => "ParameterValue",
+ # },
+ # },
+ # ],
+ # },
+ # },
+ # ],
+ # },
  # role_arn: "Arn", # required
  # tags: {
  # "TagKey" => "TagValue",
@@ -581,8 +670,8 @@ module Aws::GlueDataBrew
  # interactive data analysis.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed for this request.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed for this request.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this project.
@@ -682,7 +771,7 @@ module Aws::GlueDataBrew
  end

  # Creates a new job to transform input data, using steps defined in an
- # existing AWS Glue DataBrew recipe
+ # existing Glue DataBrew recipe
  #
  # @option params [String] :dataset_name
  # The name of the dataset that this job processes.
@@ -694,7 +783,7 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - Server-side encryption with keys managed by AWS KMS.
+ # * `SSE-KMS` - Server-side encryption with keys managed by KMS.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
  #
@@ -713,9 +802,17 @@ module Aws::GlueDataBrew
  # @option params [Integer] :max_retries
  # The maximum number of times to retry the job after a job run fails.
  #
- # @option params [required, Array<Types::Output>] :outputs
+ # @option params [Array<Types::Output>] :outputs
  # One or more artifacts that represent the output from running the job.
  #
+ # @option params [Array<Types::DataCatalogOutput>] :data_catalog_outputs
+ # One or more artifacts that represent the Glue Data Catalog output from
+ # running the job.
+ #
+ # @option params [Array<Types::DatabaseOutput>] :database_outputs
+ # Represents a list of JDBC database output objects which defines the
+ # output destination for a DataBrew recipe job to write to.
+ #
  # @option params [String] :project_name
  # Either the name of an existing project, or a combination of a recipe
  # and a dataset to associate with the recipe.
@@ -724,8 +821,8 @@ module Aws::GlueDataBrew
  # Represents the name and version of a DataBrew recipe.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Hash<String,String>] :tags
  # Metadata tags to apply to this job.
@@ -748,7 +845,7 @@ module Aws::GlueDataBrew
  # log_subscription: "ENABLE", # accepts ENABLE, DISABLE
  # max_capacity: 1,
  # max_retries: 1,
- # outputs: [ # required
+ # outputs: [
  # {
  # compression_format: "GZIP", # accepts GZIP, LZ4, SNAPPY, BZIP2, DEFLATE, LZO, BROTLI, ZSTD, ZLIB
  # format: "CSV", # accepts CSV, JSON, PARQUET, GLUEPARQUET, AVRO, ORC, XML
@@ -765,6 +862,40 @@ module Aws::GlueDataBrew
  # },
  # },
  # ],
+ # data_catalog_outputs: [
+ # {
+ # catalog_id: "CatalogId",
+ # database_name: "DatabaseName", # required
+ # table_name: "TableName", # required
+ # s3_options: {
+ # location: { # required
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # database_options: {
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # table_name: "DatabaseTableName", # required
+ # },
+ # overwrite: false,
+ # },
+ # ],
+ # database_outputs: [
+ # {
+ # glue_connection_name: "GlueConnectionName", # required
+ # database_options: { # required
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # table_name: "DatabaseTableName", # required
+ # },
+ # database_output_mode: "NEW_TABLE", # accepts NEW_TABLE
+ # },
+ # ],
  # project_name: "ProjectName",
  # recipe_reference: {
  # name: "RecipeName", # required
@@ -798,7 +929,7 @@ module Aws::GlueDataBrew
  #
  # @option params [required, String] :cron_expression
  # The date or dates and time or times when the jobs are to be run. For
- # more information, see [Cron expressions][1] in the *AWS Glue DataBrew
+ # more information, see [Cron expressions][1] in the *Glue DataBrew
  # Developer Guide*.
  #
  #
@@ -1004,6 +1135,7 @@ module Aws::GlueDataBrew
  # * {Types::DescribeDatasetResponse#last_modified_date #last_modified_date} => Time
  # * {Types::DescribeDatasetResponse#last_modified_by #last_modified_by} => String
  # * {Types::DescribeDatasetResponse#source #source} => String
+ # * {Types::DescribeDatasetResponse#path_options #path_options} => Types::PathOptions
  # * {Types::DescribeDatasetResponse#tags #tags} => Hash&lt;String,String&gt;
  # * {Types::DescribeDatasetResponse#resource_arn #resource_arn} => String
  #
@@ -1034,9 +1166,29 @@ module Aws::GlueDataBrew
  # resp.input.data_catalog_input_definition.table_name #=> String
  # resp.input.data_catalog_input_definition.temp_directory.bucket #=> String
  # resp.input.data_catalog_input_definition.temp_directory.key #=> String
+ # resp.input.database_input_definition.glue_connection_name #=> String
+ # resp.input.database_input_definition.database_table_name #=> String
+ # resp.input.database_input_definition.temp_directory.bucket #=> String
+ # resp.input.database_input_definition.temp_directory.key #=> String
  # resp.last_modified_date #=> Time
  # resp.last_modified_by #=> String
- # resp.source #=> String, one of "S3", "DATA-CATALOG"
+ # resp.source #=> String, one of "S3", "DATA-CATALOG", "DATABASE"
+ # resp.path_options.last_modified_date_condition.expression #=> String
+ # resp.path_options.last_modified_date_condition.values_map #=> Hash
+ # resp.path_options.last_modified_date_condition.values_map["ValueReference"] #=> String
+ # resp.path_options.files_limit.max_files #=> Integer
+ # resp.path_options.files_limit.ordered_by #=> String, one of "LAST_MODIFIED_DATE"
+ # resp.path_options.files_limit.order #=> String, one of "DESCENDING", "ASCENDING"
+ # resp.path_options.parameters #=> Hash
+ # resp.path_options.parameters["PathParameterName"].name #=> String
+ # resp.path_options.parameters["PathParameterName"].type #=> String, one of "Datetime", "Number", "String"
+ # resp.path_options.parameters["PathParameterName"].datetime_options.format #=> String
+ # resp.path_options.parameters["PathParameterName"].datetime_options.timezone_offset #=> String
+ # resp.path_options.parameters["PathParameterName"].datetime_options.locale_code #=> String
+ # resp.path_options.parameters["PathParameterName"].create_column #=> Boolean
+ # resp.path_options.parameters["PathParameterName"].filter.expression #=> String
+ # resp.path_options.parameters["PathParameterName"].filter.values_map #=> Hash
+ # resp.path_options.parameters["PathParameterName"].filter.values_map["ValueReference"] #=> String
  # resp.tags #=> Hash
  # resp.tags["TagKey"] #=> String
  # resp.resource_arn #=> String
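
The response now reports "DATABASE" as a possible `source` and echoes any path options back. A short sketch of reading the new fields (the dataset name is hypothetical):

    resp = client.describe_dataset(name: 'daily-orders')  # hypothetical name
    if resp.source == 'DATABASE'
      puts resp.input.database_input_definition.glue_connection_name
    end
    resp.path_options&.parameters&.each do |key, param|
      puts "#{key} (#{param.type})"
    end
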
@@ -1070,7 +1222,10 @@ module Aws::GlueDataBrew
  # * {Types::DescribeJobResponse#max_capacity #max_capacity} => Integer
  # * {Types::DescribeJobResponse#max_retries #max_retries} => Integer
  # * {Types::DescribeJobResponse#outputs #outputs} => Array&lt;Types::Output&gt;
+ # * {Types::DescribeJobResponse#data_catalog_outputs #data_catalog_outputs} => Array&lt;Types::DataCatalogOutput&gt;
+ # * {Types::DescribeJobResponse#database_outputs #database_outputs} => Array&lt;Types::DatabaseOutput&gt;
  # * {Types::DescribeJobResponse#project_name #project_name} => String
+ # * {Types::DescribeJobResponse#profile_configuration #profile_configuration} => Types::ProfileConfiguration
  # * {Types::DescribeJobResponse#recipe_reference #recipe_reference} => Types::RecipeReference
  # * {Types::DescribeJobResponse#resource_arn #resource_arn} => String
  # * {Types::DescribeJobResponse#role_arn #role_arn} => String
@@ -1107,7 +1262,42 @@ module Aws::GlueDataBrew
  # resp.outputs[0].location.key #=> String
  # resp.outputs[0].overwrite #=> Boolean
  # resp.outputs[0].format_options.csv.delimiter #=> String
+ # resp.data_catalog_outputs #=> Array
+ # resp.data_catalog_outputs[0].catalog_id #=> String
+ # resp.data_catalog_outputs[0].database_name #=> String
+ # resp.data_catalog_outputs[0].table_name #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.data_catalog_outputs[0].overwrite #=> Boolean
+ # resp.database_outputs #=> Array
+ # resp.database_outputs[0].glue_connection_name #=> String
+ # resp.database_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.database_outputs[0].database_options.temp_directory.key #=> String
+ # resp.database_outputs[0].database_options.table_name #=> String
+ # resp.database_outputs[0].database_output_mode #=> String, one of "NEW_TABLE"
  # resp.project_name #=> String
+ # resp.profile_configuration.dataset_statistics_configuration.included_statistics #=> Array
+ # resp.profile_configuration.dataset_statistics_configuration.included_statistics[0] #=> String
+ # resp.profile_configuration.dataset_statistics_configuration.overrides #=> Array
+ # resp.profile_configuration.dataset_statistics_configuration.overrides[0].statistic #=> String
+ # resp.profile_configuration.dataset_statistics_configuration.overrides[0].parameters #=> Hash
+ # resp.profile_configuration.dataset_statistics_configuration.overrides[0].parameters["ParameterName"] #=> String
+ # resp.profile_configuration.profile_columns #=> Array
+ # resp.profile_configuration.profile_columns[0].regex #=> String
+ # resp.profile_configuration.profile_columns[0].name #=> String
+ # resp.profile_configuration.column_statistics_configurations #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].selectors #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].selectors[0].regex #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].selectors[0].name #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.included_statistics #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.included_statistics[0] #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides[0].statistic #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides[0].parameters #=> Hash
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides[0].parameters["ParameterName"] #=> String
  # resp.recipe_reference.name #=> String
  # resp.recipe_reference.recipe_version #=> String
  # resp.resource_arn #=> String
@@ -1143,11 +1333,14 @@ module Aws::GlueDataBrew
  # * {Types::DescribeJobRunResponse#error_message #error_message} => String
  # * {Types::DescribeJobRunResponse#execution_time #execution_time} => Integer
  # * {Types::DescribeJobRunResponse#job_name #job_name} => String
+ # * {Types::DescribeJobRunResponse#profile_configuration #profile_configuration} => Types::ProfileConfiguration
  # * {Types::DescribeJobRunResponse#run_id #run_id} => String
  # * {Types::DescribeJobRunResponse#state #state} => String
  # * {Types::DescribeJobRunResponse#log_subscription #log_subscription} => String
  # * {Types::DescribeJobRunResponse#log_group_name #log_group_name} => String
  # * {Types::DescribeJobRunResponse#outputs #outputs} => Array&lt;Types::Output&gt;
+ # * {Types::DescribeJobRunResponse#data_catalog_outputs #data_catalog_outputs} => Array&lt;Types::DataCatalogOutput&gt;
+ # * {Types::DescribeJobRunResponse#database_outputs #database_outputs} => Array&lt;Types::DatabaseOutput&gt;
  # * {Types::DescribeJobRunResponse#recipe_reference #recipe_reference} => Types::RecipeReference
  # * {Types::DescribeJobRunResponse#started_by #started_by} => String
  # * {Types::DescribeJobRunResponse#started_on #started_on} => Time
@@ -1168,6 +1361,25 @@ module Aws::GlueDataBrew
  # resp.error_message #=> String
  # resp.execution_time #=> Integer
  # resp.job_name #=> String
+ # resp.profile_configuration.dataset_statistics_configuration.included_statistics #=> Array
+ # resp.profile_configuration.dataset_statistics_configuration.included_statistics[0] #=> String
+ # resp.profile_configuration.dataset_statistics_configuration.overrides #=> Array
+ # resp.profile_configuration.dataset_statistics_configuration.overrides[0].statistic #=> String
+ # resp.profile_configuration.dataset_statistics_configuration.overrides[0].parameters #=> Hash
+ # resp.profile_configuration.dataset_statistics_configuration.overrides[0].parameters["ParameterName"] #=> String
+ # resp.profile_configuration.profile_columns #=> Array
+ # resp.profile_configuration.profile_columns[0].regex #=> String
+ # resp.profile_configuration.profile_columns[0].name #=> String
+ # resp.profile_configuration.column_statistics_configurations #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].selectors #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].selectors[0].regex #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].selectors[0].name #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.included_statistics #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.included_statistics[0] #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides #=> Array
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides[0].statistic #=> String
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides[0].parameters #=> Hash
+ # resp.profile_configuration.column_statistics_configurations[0].statistics.overrides[0].parameters["ParameterName"] #=> String
  # resp.run_id #=> String
  # resp.state #=> String, one of "STARTING", "RUNNING", "STOPPING", "STOPPED", "SUCCEEDED", "FAILED", "TIMEOUT"
  # resp.log_subscription #=> String, one of "ENABLE", "DISABLE"
@@ -1181,6 +1393,22 @@ module Aws::GlueDataBrew
  # resp.outputs[0].location.key #=> String
  # resp.outputs[0].overwrite #=> Boolean
  # resp.outputs[0].format_options.csv.delimiter #=> String
+ # resp.data_catalog_outputs #=> Array
+ # resp.data_catalog_outputs[0].catalog_id #=> String
+ # resp.data_catalog_outputs[0].database_name #=> String
+ # resp.data_catalog_outputs[0].table_name #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.data_catalog_outputs[0].overwrite #=> Boolean
+ # resp.database_outputs #=> Array
+ # resp.database_outputs[0].glue_connection_name #=> String
+ # resp.database_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.database_outputs[0].database_options.temp_directory.key #=> String
+ # resp.database_outputs[0].database_options.table_name #=> String
+ # resp.database_outputs[0].database_output_mode #=> String, one of "NEW_TABLE"
  # resp.recipe_reference.name #=> String
  # resp.recipe_reference.recipe_version #=> String
  # resp.started_by #=> String
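
Since `describe_job_run` reports a terminal `state`, a caller can poll a run started with `start_job_run`. A hand-rolled sketch (the job name is hypothetical; this diff shows no SDK waiter for job runs, so the loop is manual):

    run_id = client.start_job_run(name: 'orders-to-warehouse').run_id  # hypothetical job
    loop do
      run = client.describe_job_run(name: 'orders-to-warehouse', run_id: run_id)
      break unless %w[STARTING RUNNING STOPPING].include?(run.state)
      sleep 30
    end
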
@@ -1411,9 +1639,29 @@ module Aws::GlueDataBrew
  # resp.datasets[0].input.data_catalog_input_definition.table_name #=> String
  # resp.datasets[0].input.data_catalog_input_definition.temp_directory.bucket #=> String
  # resp.datasets[0].input.data_catalog_input_definition.temp_directory.key #=> String
+ # resp.datasets[0].input.database_input_definition.glue_connection_name #=> String
+ # resp.datasets[0].input.database_input_definition.database_table_name #=> String
+ # resp.datasets[0].input.database_input_definition.temp_directory.bucket #=> String
+ # resp.datasets[0].input.database_input_definition.temp_directory.key #=> String
  # resp.datasets[0].last_modified_date #=> Time
  # resp.datasets[0].last_modified_by #=> String
- # resp.datasets[0].source #=> String, one of "S3", "DATA-CATALOG"
+ # resp.datasets[0].source #=> String, one of "S3", "DATA-CATALOG", "DATABASE"
+ # resp.datasets[0].path_options.last_modified_date_condition.expression #=> String
+ # resp.datasets[0].path_options.last_modified_date_condition.values_map #=> Hash
+ # resp.datasets[0].path_options.last_modified_date_condition.values_map["ValueReference"] #=> String
+ # resp.datasets[0].path_options.files_limit.max_files #=> Integer
+ # resp.datasets[0].path_options.files_limit.ordered_by #=> String, one of "LAST_MODIFIED_DATE"
+ # resp.datasets[0].path_options.files_limit.order #=> String, one of "DESCENDING", "ASCENDING"
+ # resp.datasets[0].path_options.parameters #=> Hash
+ # resp.datasets[0].path_options.parameters["PathParameterName"].name #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].type #=> String, one of "Datetime", "Number", "String"
+ # resp.datasets[0].path_options.parameters["PathParameterName"].datetime_options.format #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].datetime_options.timezone_offset #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].datetime_options.locale_code #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].create_column #=> Boolean
+ # resp.datasets[0].path_options.parameters["PathParameterName"].filter.expression #=> String
+ # resp.datasets[0].path_options.parameters["PathParameterName"].filter.values_map #=> Hash
+ # resp.datasets[0].path_options.parameters["PathParameterName"].filter.values_map["ValueReference"] #=> String
  # resp.datasets[0].tags #=> Hash
  # resp.datasets[0].tags["TagKey"] #=> String
  # resp.datasets[0].resource_arn #=> String
@@ -1477,6 +1725,22 @@ module Aws::GlueDataBrew
  # resp.job_runs[0].outputs[0].location.key #=> String
  # resp.job_runs[0].outputs[0].overwrite #=> Boolean
  # resp.job_runs[0].outputs[0].format_options.csv.delimiter #=> String
+ # resp.job_runs[0].data_catalog_outputs #=> Array
+ # resp.job_runs[0].data_catalog_outputs[0].catalog_id #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_name #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].table_name #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.job_runs[0].data_catalog_outputs[0].overwrite #=> Boolean
+ # resp.job_runs[0].database_outputs #=> Array
+ # resp.job_runs[0].database_outputs[0].glue_connection_name #=> String
+ # resp.job_runs[0].database_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.job_runs[0].database_outputs[0].database_options.temp_directory.key #=> String
+ # resp.job_runs[0].database_outputs[0].database_options.table_name #=> String
+ # resp.job_runs[0].database_outputs[0].database_output_mode #=> String, one of "NEW_TABLE"
  # resp.job_runs[0].recipe_reference.name #=> String
  # resp.job_runs[0].recipe_reference.recipe_version #=> String
  # resp.job_runs[0].started_by #=> String
@@ -1554,6 +1818,22 @@ module Aws::GlueDataBrew
  # resp.jobs[0].outputs[0].location.key #=> String
  # resp.jobs[0].outputs[0].overwrite #=> Boolean
  # resp.jobs[0].outputs[0].format_options.csv.delimiter #=> String
+ # resp.jobs[0].data_catalog_outputs #=> Array
+ # resp.jobs[0].data_catalog_outputs[0].catalog_id #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_name #=> String
+ # resp.jobs[0].data_catalog_outputs[0].table_name #=> String
+ # resp.jobs[0].data_catalog_outputs[0].s3_options.location.bucket #=> String
+ # resp.jobs[0].data_catalog_outputs[0].s3_options.location.key #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_options.temp_directory.key #=> String
+ # resp.jobs[0].data_catalog_outputs[0].database_options.table_name #=> String
+ # resp.jobs[0].data_catalog_outputs[0].overwrite #=> Boolean
+ # resp.jobs[0].database_outputs #=> Array
+ # resp.jobs[0].database_outputs[0].glue_connection_name #=> String
+ # resp.jobs[0].database_outputs[0].database_options.temp_directory.bucket #=> String
+ # resp.jobs[0].database_outputs[0].database_options.temp_directory.key #=> String
+ # resp.jobs[0].database_outputs[0].database_options.table_name #=> String
+ # resp.jobs[0].database_outputs[0].database_output_mode #=> String, one of "NEW_TABLE"
  # resp.jobs[0].project_name #=> String
  # resp.jobs[0].recipe_reference.name #=> String
  # resp.jobs[0].recipe_reference.recipe_version #=> String
@@ -1896,7 +2176,7 @@ module Aws::GlueDataBrew
  # and ready for work. The action will be performed on this session.
  #
  # @option params [Types::ViewFrame] :view_frame
- # Represents the data being being transformed during an action.
+ # Represents the data being transformed during an action.
  #
  # @return [Types::SendProjectSessionActionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
  #
@@ -2108,15 +2388,20 @@ module Aws::GlueDataBrew
  # The name of the dataset to be updated.
  #
  # @option params [String] :format
- # Specifies the file format of a dataset created from an S3 file or
+ # The file format of a dataset that is created from an Amazon S3 file or
  # folder.
  #
  # @option params [Types::FormatOptions] :format_options
- # Options that define the structure of either Csv, Excel, or JSON input.
+ # Represents a set of options that define the structure of either
+ # comma-separated value (CSV), Excel, or JSON input.
  #
  # @option params [required, Types::Input] :input
- # Information on how DataBrew can find data, in either the AWS Glue Data
- # Catalog or Amazon S3.
+ # Represents information on how DataBrew can find data, in either the
+ # Glue Data Catalog or Amazon S3.
+ #
+ # @option params [Types::PathOptions] :path_options
+ # A set of options that defines how DataBrew interprets an Amazon S3
+ # path of the dataset.
  #
  # @return [Types::UpdateDatasetResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
  #
@@ -2155,6 +2440,45 @@ module Aws::GlueDataBrew
  # key: "Key",
  # },
  # },
+ # database_input_definition: {
+ # glue_connection_name: "GlueConnectionName", # required
+ # database_table_name: "DatabaseTableName", # required
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # },
+ # path_options: {
+ # last_modified_date_condition: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # files_limit: {
+ # max_files: 1, # required
+ # ordered_by: "LAST_MODIFIED_DATE", # accepts LAST_MODIFIED_DATE
+ # order: "DESCENDING", # accepts DESCENDING, ASCENDING
+ # },
+ # parameters: {
+ # "PathParameterName" => {
+ # name: "PathParameterName", # required
+ # type: "Datetime", # required, accepts Datetime, Number, String
+ # datetime_options: {
+ # format: "DatetimeFormat", # required
+ # timezone_offset: "TimezoneOffset",
+ # locale_code: "LocaleCode",
+ # },
+ # create_column: false,
+ # filter: {
+ # expression: "Expression", # required
+ # values_map: { # required
+ # "ValueReference" => "ConditionValue",
+ # },
+ # },
+ # },
+ # },
  # },
  # })
  #
@@ -2173,6 +2497,11 @@ module Aws::GlueDataBrew

  # Modifies the definition of an existing profile job.
  #
+ # @option params [Types::ProfileConfiguration] :configuration
+ # Configuration for profile jobs. Used to select columns, do
+ # evaluations, and override default parameters of evaluations. When
+ # configuration is null, the profile job will run with default settings.
+ #
  # @option params [String] :encryption_key_arn
  # The Amazon Resource Name (ARN) of an encryption key that is used to
  # protect the job.
@@ -2180,7 +2509,7 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - Server-side encryption with keys managed by AWS KMS.
+ # * `SSE-KMS` - Server-side encryption with keys managed by KMS.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
  #
@@ -2199,12 +2528,12 @@ module Aws::GlueDataBrew
  # The maximum number of times to retry the job after a job run fails.
  #
  # @option params [required, Types::S3Location] :output_location
- # An Amazon S3 location (bucket name an object key) where DataBrew can
- # read input data, or write output from a job.
+ # Represents an Amazon S3 location (bucket name and object key) where
+ # DataBrew can read input data, or write output from a job.
  #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Integer] :timeout
  # The job's timeout in minutes. A job that attempts to run longer than
@@ -2224,6 +2553,46 @@ module Aws::GlueDataBrew
  # @example Request syntax with placeholder values
  #
  # resp = client.update_profile_job({
+ # configuration: {
+ # dataset_statistics_configuration: {
+ # included_statistics: ["Statistic"],
+ # overrides: [
+ # {
+ # statistic: "Statistic", # required
+ # parameters: { # required
+ # "ParameterName" => "ParameterValue",
+ # },
+ # },
+ # ],
+ # },
+ # profile_columns: [
+ # {
+ # regex: "ColumnName",
+ # name: "ColumnName",
+ # },
+ # ],
+ # column_statistics_configurations: [
+ # {
+ # selectors: [
+ # {
+ # regex: "ColumnName",
+ # name: "ColumnName",
+ # },
+ # ],
+ # statistics: { # required
+ # included_statistics: ["Statistic"],
+ # overrides: [
+ # {
+ # statistic: "Statistic", # required
+ # parameters: { # required
+ # "ParameterName" => "ParameterValue",
+ # },
+ # },
+ # ],
+ # },
+ # },
+ # ],
+ # },
  # encryption_key_arn: "EncryptionKeyArn",
  # encryption_mode: "SSE-KMS", # accepts SSE-KMS, SSE-S3
  # name: "JobName", # required
@@ -2361,7 +2730,7 @@ module Aws::GlueDataBrew
  # @option params [String] :encryption_mode
  # The encryption mode for the job, which can be one of the following:
  #
- # * `SSE-KMS` - Server-side encryption with keys managed by AWS KMS.
+ # * `SSE-KMS` - Server-side encryption with keys managed by KMS.
  #
  # * `SSE-S3` - Server-side encryption with keys managed by Amazon S3.
  #
@@ -2379,12 +2748,20 @@ module Aws::GlueDataBrew
  # @option params [Integer] :max_retries
  # The maximum number of times to retry the job after a job run fails.
  #
- # @option params [required, Array<Types::Output>] :outputs
+ # @option params [Array<Types::Output>] :outputs
  # One or more artifacts that represent the output from running the job.
  #
+ # @option params [Array<Types::DataCatalogOutput>] :data_catalog_outputs
+ # One or more artifacts that represent the Glue Data Catalog output from
+ # running the job.
+ #
+ # @option params [Array<Types::DatabaseOutput>] :database_outputs
+ # Represents a list of JDBC database output objects which defines the
+ # output destination for a DataBrew recipe job to write into.
+ #
  # @option params [required, String] :role_arn
- # The Amazon Resource Name (ARN) of the AWS Identity and Access
- # Management (IAM) role to be assumed when DataBrew runs the job.
+ # The Amazon Resource Name (ARN) of the Identity and Access Management
+ # (IAM) role to be assumed when DataBrew runs the job.
  #
  # @option params [Integer] :timeout
  # The job's timeout in minutes. A job that attempts to run longer than
@@ -2403,7 +2780,7 @@ module Aws::GlueDataBrew
  # log_subscription: "ENABLE", # accepts ENABLE, DISABLE
  # max_capacity: 1,
  # max_retries: 1,
- # outputs: [ # required
+ # outputs: [
  # {
  # compression_format: "GZIP", # accepts GZIP, LZ4, SNAPPY, BZIP2, DEFLATE, LZO, BROTLI, ZSTD, ZLIB
  # format: "CSV", # accepts CSV, JSON, PARQUET, GLUEPARQUET, AVRO, ORC, XML
@@ -2420,6 +2797,40 @@ module Aws::GlueDataBrew
  # },
  # },
  # ],
+ # data_catalog_outputs: [
+ # {
+ # catalog_id: "CatalogId",
+ # database_name: "DatabaseName", # required
+ # table_name: "TableName", # required
+ # s3_options: {
+ # location: { # required
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # },
+ # database_options: {
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # table_name: "DatabaseTableName", # required
+ # },
+ # overwrite: false,
+ # },
+ # ],
+ # database_outputs: [
+ # {
+ # glue_connection_name: "GlueConnectionName", # required
+ # database_options: { # required
+ # temp_directory: {
+ # bucket: "Bucket", # required
+ # key: "Key",
+ # },
+ # table_name: "DatabaseTableName", # required
+ # },
+ # database_output_mode: "NEW_TABLE", # accepts NEW_TABLE
+ # },
+ # ],
  # role_arn: "Arn", # required
  # timeout: 1,
  # })
@@ -2444,7 +2855,7 @@ module Aws::GlueDataBrew
  #
  # @option params [required, String] :cron_expression
  # The date or dates and time or times when the jobs are to be run. For
- # more information, see [Cron expressions][1] in the *AWS Glue DataBrew
+ # more information, see [Cron expressions][1] in the *Glue DataBrew
  # Developer Guide*.
  #
  #
@@ -2492,7 +2903,7 @@ module Aws::GlueDataBrew
2492
2903
  params: params,
2493
2904
  config: config)
2494
2905
  context[:gem_name] = 'aws-sdk-gluedatabrew'
2495
- context[:gem_version] = '1.5.0'
2906
+ context[:gem_version] = '1.9.0'
2496
2907
  Seahorse::Client::Request.new(handlers, context)
2497
2908
  end
2498
2909