aws-sdk-glue 1.141.0 → 1.143.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5c9a48d7a23c5d40810b6a4d302f5fa0e878871d0acfeed3925bc6df9520b253
- data.tar.gz: fca19798435e80aad216e8fa1e34718aad722c22212623c85394c6077ee16bc3
+ metadata.gz: 2766516ed5f6fc83466404d34b9e6adc2c7055c22c862f62ae9ff9406b1052f4
+ data.tar.gz: 3e8a53e53bed12c6b07fa593b429ada7d6a5888b4b06738a11f0524389ff3b7e
  SHA512:
- metadata.gz: 9c078bd8ed133f32e982746e4db2734f5fb422432422577171e57a1bced1c4eac8c2a4cb8f42b8d2d243e4eee35633311e1b61d9670c954c9739b4467ffdb53a
- data.tar.gz: 00c1136b2389179e02d792afdbb245f3648966c1e465cf706ae3e30ed9f271f7c3a509b915775f18be2105b24422b1fada6940ed2d4aefab3725370076457dbe
+ metadata.gz: 45c4b5d637631dda70ad8b9317408cfa4325e89746b237facecd3bd3a1c05b314ff3d7961acf544d0232615b13ba5c59027d5611e933a264bb14acf8acaa3e42
+ data.tar.gz: 5b254a6bf6019d3288ee1191066d4bde8602becd7bde52678dec1285ee2accc5c5a7c7fa67d6b3697231a7818912c40f4f314e6c3020cbfaef923fefdc606bfa
data/CHANGELOG.md CHANGED
@@ -1,6 +1,16 @@
  Unreleased Changes
  ------------------
 
+ 1.143.0 (2023-06-26)
+ ------------------
+
+ * Feature - Timestamp Starting Position For Kinesis and Kafka Data Sources in a Glue Streaming Job
+
+ 1.142.0 (2023-06-19)
+ ------------------
+
+ * Feature - This release adds support for creating cross region table/database resource links
+
  1.141.0 (2023-06-15)
  ------------------
 
data/VERSION CHANGED
@@ -1 +1 @@
- 1.141.0
+ 1.143.0
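
The 1.143.0 changelog entry corresponds to the new "timestamp" value for starting_position and the new starting_timestamp streaming option that appear in the hunks below. A minimal sketch of how a caller might combine them on a visual streaming job's Kinesis source; update_job is shown only as one operation that accepts code_gen_configuration_nodes, and the job name, role, script location, node id and stream name are placeholder values, not part of this diff:

require "aws-sdk-glue"

glue = Aws::Glue::Client.new(region: "us-east-1")

# Sketch only: point an existing visual streaming job's Kinesis source at a
# fixed start time instead of "latest"/"trim_horizon"/"earliest".
glue.update_job(
  job_name: "my-streaming-job",                                        # placeholder
  job_update: {
    role: "arn:aws:iam::123456789012:role/GlueJobRole",                # placeholder
    command: { name: "gluestreaming", script_location: "s3://my-bucket/job.py" },
    glue_version: "4.0",
    code_gen_configuration_nodes: {
      "node-1" => {
        direct_kinesis_source: {
          name: "kinesis-source",
          streaming_options: {
            stream_name: "my-stream",                      # placeholder
            classification: "json",
            starting_position: "timestamp",                # new enum value in 1.143.0
            starting_timestamp: Time.utc(2023, 4, 4, 8, 0) # new option, sent as ISO-8601
          }
        }
      }
    }
  }
)
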
data/lib/aws-sdk-glue/client.rb CHANGED
@@ -1347,7 +1347,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.stream_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.classification #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.delimiter #=> String
- # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest"
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest", "timestamp"
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_fetch_time_in_ms #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_fetch_records_per_shard #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_record_per_read #=> Integer
@@ -1363,6 +1363,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.role_session_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.data_preview_options.polling_time #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.data_preview_options.record_polling_limit #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.name #=> String
@@ -1384,6 +1385,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.include_headers #=> Boolean
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.window_size #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.detect_schema #=> Boolean
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.data_preview_options.polling_time #=> Integer
@@ -1397,7 +1399,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.stream_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.classification #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.delimiter #=> String
- # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest"
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest", "timestamp"
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_fetch_time_in_ms #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_fetch_records_per_shard #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_record_per_read #=> Integer
@@ -1413,6 +1415,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.role_session_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.data_preview_options.polling_time #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.data_preview_options.record_polling_limit #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.name #=> String
@@ -1438,6 +1441,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.include_headers #=> Boolean
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.data_preview_options.polling_time #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.data_preview_options.record_polling_limit #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].drop_null_fields.name #=> String
@@ -2946,6 +2950,7 @@ module Aws::Glue
  # target_database: {
  # catalog_id: "CatalogIdString",
  # database_name: "NameString",
+ # region: "NameString",
  # },
  # federated_database: {
  # identifier: "FederationIdentifier",
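
The hunk above adds region to the target_database request syntax for create_database, which is the operation the 1.142.0 cross-region resource-link entry refers to. A hedged sketch of creating such a link; the database names, account id and Regions are placeholders:

require "aws-sdk-glue"

glue = Aws::Glue::Client.new(region: "us-east-1")

# Sketch only: a database resource link that points at a database owned by
# another account and Region.
glue.create_database(
  database_input: {
    name: "sales_link",
    target_database: {
      catalog_id: "123456789012", # owning account (placeholder)
      database_name: "sales",     # database in the target catalog (placeholder)
      region: "eu-west-1"         # new member in this release
    }
  }
)
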
@@ -4280,6 +4285,7 @@ module Aws::Glue
  # catalog_id: "CatalogIdString",
  # database_name: "NameString",
  # name: "NameString",
+ # region: "NameString",
  # },
  # },
  # partition_indexes: [
@@ -6503,6 +6509,7 @@ module Aws::Glue
  # resp.database.create_table_default_permissions[0].permissions[0] #=> String, one of "ALL", "SELECT", "ALTER", "DROP", "DELETE", "INSERT", "CREATE_DATABASE", "CREATE_TABLE", "DATA_LOCATION_ACCESS"
  # resp.database.target_database.catalog_id #=> String
  # resp.database.target_database.database_name #=> String
+ # resp.database.target_database.region #=> String
  # resp.database.catalog_id #=> String
  # resp.database.federated_database.identifier #=> String
  # resp.database.federated_database.connection_name #=> String
@@ -6573,6 +6580,7 @@ module Aws::Glue
  # resp.database_list[0].create_table_default_permissions[0].permissions[0] #=> String, one of "ALL", "SELECT", "ALTER", "DROP", "DELETE", "INSERT", "CREATE_DATABASE", "CREATE_TABLE", "DATA_LOCATION_ACCESS"
  # resp.database_list[0].target_database.catalog_id #=> String
  # resp.database_list[0].target_database.database_name #=> String
+ # resp.database_list[0].target_database.region #=> String
  # resp.database_list[0].catalog_id #=> String
  # resp.database_list[0].federated_database.identifier #=> String
  # resp.database_list[0].federated_database.connection_name #=> String
@@ -7106,7 +7114,7 @@ module Aws::Glue
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.stream_name #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.classification #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.delimiter #=> String
- # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest"
+ # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest", "timestamp"
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_fetch_time_in_ms #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_fetch_records_per_shard #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_record_per_read #=> Integer
@@ -7122,6 +7130,7 @@ module Aws::Glue
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.role_session_name #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.add_record_timestamp #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_timestamp #=> Time
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.data_preview_options.polling_time #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kinesis_source.data_preview_options.record_polling_limit #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.name #=> String
@@ -7143,6 +7152,7 @@ module Aws::Glue
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.include_headers #=> Boolean
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.add_record_timestamp #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.starting_timestamp #=> Time
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.window_size #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.detect_schema #=> Boolean
  # resp.job.code_gen_configuration_nodes["NodeId"].direct_kafka_source.data_preview_options.polling_time #=> Integer
@@ -7156,7 +7166,7 @@ module Aws::Glue
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.stream_name #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.classification #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.delimiter #=> String
- # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest"
+ # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest", "timestamp"
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_fetch_time_in_ms #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_fetch_records_per_shard #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_record_per_read #=> Integer
@@ -7172,6 +7182,7 @@ module Aws::Glue
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.role_session_name #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.add_record_timestamp #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_timestamp #=> Time
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.data_preview_options.polling_time #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.data_preview_options.record_polling_limit #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kafka_source.name #=> String
@@ -7197,6 +7208,7 @@ module Aws::Glue
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.include_headers #=> Boolean
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.add_record_timestamp #=> String
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.starting_timestamp #=> Time
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kafka_source.data_preview_options.polling_time #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].catalog_kafka_source.data_preview_options.record_polling_limit #=> Integer
  # resp.job.code_gen_configuration_nodes["NodeId"].drop_null_fields.name #=> String
@@ -8108,7 +8120,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.stream_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.classification #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.delimiter #=> String
- # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest"
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest", "timestamp"
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_fetch_time_in_ms #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_fetch_records_per_shard #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.max_record_per_read #=> Integer
@@ -8124,6 +8136,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.role_session_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.data_preview_options.polling_time #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kinesis_source.data_preview_options.record_polling_limit #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.name #=> String
@@ -8145,6 +8158,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.include_headers #=> Boolean
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.window_size #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.detect_schema #=> Boolean
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].direct_kafka_source.data_preview_options.polling_time #=> Integer
@@ -8158,7 +8172,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.stream_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.classification #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.delimiter #=> String
- # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest"
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_position #=> String, one of "latest", "trim_horizon", "earliest", "timestamp"
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_fetch_time_in_ms #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_fetch_records_per_shard #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.max_record_per_read #=> Integer
@@ -8174,6 +8188,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.role_session_name #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.data_preview_options.polling_time #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kinesis_source.data_preview_options.record_polling_limit #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.name #=> String
@@ -8199,6 +8214,7 @@ module Aws::Glue
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.include_headers #=> Boolean
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.add_record_timestamp #=> String
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.emit_consumer_lag_metrics #=> String
+ # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.streaming_options.starting_timestamp #=> Time
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.data_preview_options.polling_time #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].catalog_kafka_source.data_preview_options.record_polling_limit #=> Integer
  # resp.jobs[0].code_gen_configuration_nodes["NodeId"].drop_null_fields.name #=> String
@@ -10108,6 +10124,7 @@ module Aws::Glue
  # resp.table.target_table.catalog_id #=> String
  # resp.table.target_table.database_name #=> String
  # resp.table.target_table.name #=> String
+ # resp.table.target_table.region #=> String
  # resp.table.catalog_id #=> String
  # resp.table.version_id #=> String
  # resp.table.federated_table.identifier #=> String
@@ -10217,6 +10234,7 @@ module Aws::Glue
  # resp.table_version.table.target_table.catalog_id #=> String
  # resp.table_version.table.target_table.database_name #=> String
  # resp.table_version.table.target_table.name #=> String
+ # resp.table_version.table.target_table.region #=> String
  # resp.table_version.table.catalog_id #=> String
  # resp.table_version.table.version_id #=> String
  # resp.table_version.table.federated_table.identifier #=> String
@@ -10335,6 +10353,7 @@ module Aws::Glue
  # resp.table_versions[0].table.target_table.catalog_id #=> String
  # resp.table_versions[0].table.target_table.database_name #=> String
  # resp.table_versions[0].table.target_table.name #=> String
+ # resp.table_versions[0].table.target_table.region #=> String
  # resp.table_versions[0].table.catalog_id #=> String
  # resp.table_versions[0].table.version_id #=> String
  # resp.table_versions[0].table.federated_table.identifier #=> String
@@ -10464,6 +10483,7 @@ module Aws::Glue
  # resp.table_list[0].target_table.catalog_id #=> String
  # resp.table_list[0].target_table.database_name #=> String
  # resp.table_list[0].target_table.name #=> String
+ # resp.table_list[0].target_table.region #=> String
  # resp.table_list[0].catalog_id #=> String
  # resp.table_list[0].version_id #=> String
  # resp.table_list[0].federated_table.identifier #=> String
@@ -11050,6 +11070,7 @@ module Aws::Glue
  # resp.table.target_table.catalog_id #=> String
  # resp.table.target_table.database_name #=> String
  # resp.table.target_table.name #=> String
+ # resp.table.target_table.region #=> String
  # resp.table.catalog_id #=> String
  # resp.table.version_id #=> String
  # resp.table.federated_table.identifier #=> String
@@ -13391,6 +13412,7 @@ module Aws::Glue
  # resp.table_list[0].target_table.catalog_id #=> String
  # resp.table_list[0].target_table.database_name #=> String
  # resp.table_list[0].target_table.name #=> String
+ # resp.table_list[0].target_table.region #=> String
  # resp.table_list[0].catalog_id #=> String
  # resp.table_list[0].version_id #=> String
  # resp.table_list[0].federated_table.identifier #=> String
@@ -14959,6 +14981,7 @@ module Aws::Glue
  # target_database: {
  # catalog_id: "CatalogIdString",
  # database_name: "NameString",
+ # region: "NameString",
  # },
  # federated_database: {
  # identifier: "FederationIdentifier",
@@ -15654,6 +15677,7 @@ module Aws::Glue
  # catalog_id: "CatalogIdString",
  # database_name: "NameString",
  # name: "NameString",
+ # region: "NameString",
  # },
  # },
  # skip_archive: false,
@@ -15868,7 +15892,7 @@ module Aws::Glue
  params: params,
  config: config)
  context[:gem_name] = 'aws-sdk-glue'
- context[:gem_version] = '1.141.0'
+ context[:gem_version] = '1.143.0'
  Seahorse::Client::Request.new(handlers, context)
  end
 
data/lib/aws-sdk-glue/client_api.rb CHANGED
@@ -625,6 +625,7 @@ module Aws::Glue
  InvalidInputException = Shapes::StructureShape.new(name: 'InvalidInputException')
  InvalidStateException = Shapes::StructureShape.new(name: 'InvalidStateException')
  IsVersionValid = Shapes::BooleanShape.new(name: 'IsVersionValid')
+ Iso8601DateTime = Shapes::TimestampShape.new(name: 'Iso8601DateTime', timestampFormat: "iso8601")
  JDBCConnectionType = Shapes::StringShape.new(name: 'JDBCConnectionType')
  JDBCConnectorOptions = Shapes::StructureShape.new(name: 'JDBCConnectorOptions')
  JDBCConnectorSource = Shapes::StructureShape.new(name: 'JDBCConnectorSource')
@@ -2361,6 +2362,7 @@ module Aws::Glue
 
  DatabaseIdentifier.add_member(:catalog_id, Shapes::ShapeRef.new(shape: CatalogIdString, location_name: "CatalogId"))
  DatabaseIdentifier.add_member(:database_name, Shapes::ShapeRef.new(shape: NameString, location_name: "DatabaseName"))
+ DatabaseIdentifier.add_member(:region, Shapes::ShapeRef.new(shape: NameString, location_name: "Region"))
  DatabaseIdentifier.struct_class = Types::DatabaseIdentifier
 
  DatabaseInput.add_member(:name, Shapes::ShapeRef.new(shape: NameString, required: true, location_name: "Name"))
@@ -3737,6 +3739,7 @@ module Aws::Glue
  KafkaStreamingSourceOptions.add_member(:include_headers, Shapes::ShapeRef.new(shape: BoxedBoolean, location_name: "IncludeHeaders"))
  KafkaStreamingSourceOptions.add_member(:add_record_timestamp, Shapes::ShapeRef.new(shape: EnclosedInStringProperty, location_name: "AddRecordTimestamp"))
  KafkaStreamingSourceOptions.add_member(:emit_consumer_lag_metrics, Shapes::ShapeRef.new(shape: EnclosedInStringProperty, location_name: "EmitConsumerLagMetrics"))
+ KafkaStreamingSourceOptions.add_member(:starting_timestamp, Shapes::ShapeRef.new(shape: Iso8601DateTime, location_name: "StartingTimestamp"))
  KafkaStreamingSourceOptions.struct_class = Types::KafkaStreamingSourceOptions
 
  KeyList.member = Shapes::ShapeRef.new(shape: NameString)
@@ -3767,6 +3770,7 @@ module Aws::Glue
  KinesisStreamingSourceOptions.add_member(:role_session_name, Shapes::ShapeRef.new(shape: EnclosedInStringProperty, location_name: "RoleSessionName"))
  KinesisStreamingSourceOptions.add_member(:add_record_timestamp, Shapes::ShapeRef.new(shape: EnclosedInStringProperty, location_name: "AddRecordTimestamp"))
  KinesisStreamingSourceOptions.add_member(:emit_consumer_lag_metrics, Shapes::ShapeRef.new(shape: EnclosedInStringProperty, location_name: "EmitConsumerLagMetrics"))
+ KinesisStreamingSourceOptions.add_member(:starting_timestamp, Shapes::ShapeRef.new(shape: Iso8601DateTime, location_name: "StartingTimestamp"))
  KinesisStreamingSourceOptions.struct_class = Types::KinesisStreamingSourceOptions
 
  LabelingSetGenerationTaskRunProperties.add_member(:output_s3_path, Shapes::ShapeRef.new(shape: UriString, location_name: "OutputS3Path"))
@@ -4998,6 +5002,7 @@ module Aws::Glue
  TableIdentifier.add_member(:catalog_id, Shapes::ShapeRef.new(shape: CatalogIdString, location_name: "CatalogId"))
  TableIdentifier.add_member(:database_name, Shapes::ShapeRef.new(shape: NameString, location_name: "DatabaseName"))
  TableIdentifier.add_member(:name, Shapes::ShapeRef.new(shape: NameString, location_name: "Name"))
+ TableIdentifier.add_member(:region, Shapes::ShapeRef.new(shape: NameString, location_name: "Region"))
  TableIdentifier.struct_class = Types::TableIdentifier
 
  TableInput.add_member(:name, Shapes::ShapeRef.new(shape: NameString, required: true, location_name: "Name"))
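
The Iso8601DateTime shape registered above determines how the SDK serializes the new StartingTimestamp members. A tiny illustration with an arbitrary value:

require "time"

# Iso8601DateTime declares timestampFormat: "iso8601", so a Ruby Time passed as
# starting_timestamp goes over the wire in the yyyy-mm-ddTHH:MM:SS(+/-offset) form.
t = Time.utc(2023, 4, 4, 8, 0, 0)
puts t.iso8601 # => "2023-04-04T08:00:00Z"
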
data/lib/aws-sdk-glue/types.rb CHANGED
@@ -6142,11 +6142,16 @@ module Aws::Glue
  # The name of the catalog database.
  # @return [String]
  #
+ # @!attribute [rw] region
+ # Region of the target database.
+ # @return [String]
+ #
  # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/DatabaseIdentifier AWS API Documentation
  #
  class DatabaseIdentifier < Struct.new(
  :catalog_id,
- :database_name)
+ :database_name,
+ :region)
  SENSITIVE = []
  include Aws::Structure
  end
@@ -13250,6 +13255,16 @@ module Aws::Glue
  # 4.0 or later.
  # @return [String]
  #
+ # @!attribute [rw] starting_timestamp
+ # The timestamp of the record in the Kafka topic to start reading data
+ # from. The possible values are a timestamp string in UTC format of
+ # the pattern `yyyy-mm-ddTHH:MM:SSZ` (where Z represents a UTC
+ # timezone offset with a +/-. For example:
+ # "2023-04-04T08:00:00+08:00").
+ #
+ # Only one of `StartingTimestamp` or `StartingOffsets` must be set.
+ # @return [Time]
+ #
  # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/KafkaStreamingSourceOptions AWS API Documentation
  #
  class KafkaStreamingSourceOptions < Struct.new(
@@ -13270,7 +13285,8 @@ module Aws::Glue
  :min_partitions,
  :include_headers,
  :add_record_timestamp,
- :emit_consumer_lag_metrics)
+ :emit_consumer_lag_metrics,
+ :starting_timestamp)
  SENSITIVE = []
  include Aws::Structure
  end
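
The attribute documented above is the Kafka half of the timestamp feature. A small illustrative options fragment; connection_name and topic_name are placeholder values, and the hash would sit under a direct_kafka_source node's streaming_options in a create_job or update_job request:

require "time"

# Fragment only: Kafka streaming options for a direct_kafka_source node.
# Per the documentation above, set either starting_offsets or starting_timestamp,
# not both.
kafka_streaming_options = {
  connection_name: "my-kafka-connection",                      # placeholder
  topic_name: "orders",                                        # placeholder
  starting_timestamp: Time.parse("2023-04-04T08:00:00+08:00")  # serialized via Iso8601DateTime
}

# This hash would be nested under
# code_gen_configuration_nodes["<node-id>"][:direct_kafka_source][:streaming_options].
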
@@ -13314,8 +13330,15 @@ module Aws::Glue
  #
  # @!attribute [rw] starting_position
  # The starting position in the Kinesis data stream to read data from.
- # The possible values are `"latest"`, `"trim_horizon"`, or
- # `"earliest"`. The default value is `"latest"`.
+ # The possible values are `"latest"`, `"trim_horizon"`, `"earliest"`,
+ # or a timestamp string in UTC format in the pattern
+ # `yyyy-mm-ddTHH:MM:SSZ` (where `Z` represents a UTC timezone offset
+ # with a +/-. For example: "2023-04-04T08:00:00-04:00"). The default
+ # value is `"latest"`.
+ #
+ # Note: Using a value that is a timestamp string in UTC format for
+ # "startingPosition" is supported only for Glue version 4.0 or
+ # later.
  # @return [String]
  #
  # @!attribute [rw] max_fetch_time_in_ms
@@ -13408,6 +13431,14 @@ module Aws::Glue
  # 4.0 or later.
  # @return [String]
  #
+ # @!attribute [rw] starting_timestamp
+ # The timestamp of the record in the Kinesis data stream to start
+ # reading data from. The possible values are a timestamp string in UTC
+ # format of the pattern `yyyy-mm-ddTHH:MM:SSZ` (where Z represents a
+ # UTC timezone offset with a +/-. For example:
+ # "2023-04-04T08:00:00+08:00").
+ # @return [Time]
+ #
  # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/KinesisStreamingSourceOptions AWS API Documentation
  #
  class KinesisStreamingSourceOptions < Struct.new(
@@ -13430,7 +13461,8 @@ module Aws::Glue
  :role_arn,
  :role_session_name,
  :add_record_timestamp,
- :emit_consumer_lag_metrics)
+ :emit_consumer_lag_metrics,
+ :starting_timestamp)
  SENSITIVE = []
  include Aws::Structure
  end
@@ -19274,12 +19306,17 @@ module Aws::Glue
  # The name of the target table.
  # @return [String]
  #
+ # @!attribute [rw] region
+ # Region of the target table.
+ # @return [String]
+ #
  # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/TableIdentifier AWS API Documentation
  #
  class TableIdentifier < Struct.new(
  :catalog_id,
  :database_name,
- :name)
+ :name,
+ :region)
  SENSITIVE = []
  include Aws::Structure
  end
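
TableIdentifier gaining region is the table-level counterpart of the cross-region resource links. A hedged sketch of creating such a link through this gem; the database, table, account and Region values are placeholders:

require "aws-sdk-glue"

glue = Aws::Glue::Client.new(region: "us-east-1")

# Sketch only: a table resource link whose target lives in another account and
# Region, using the new TableIdentifier region member.
glue.create_table(
  database_name: "local_db",
  table_input: {
    name: "orders_link",
    target_table: {
      catalog_id: "123456789012",
      database_name: "sales",
      name: "orders",
      region: "eu-west-1" # new member in this release
    }
  }
)
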
data/lib/aws-sdk-glue.rb CHANGED
@@ -52,6 +52,6 @@ require_relative 'aws-sdk-glue/customizations'
  # @!group service
  module Aws::Glue
 
- GEM_VERSION = '1.141.0'
+ GEM_VERSION = '1.143.0'
 
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: aws-sdk-glue
  version: !ruby/object:Gem::Version
- version: 1.141.0
+ version: 1.143.0
  platform: ruby
  authors:
  - Amazon Web Services
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-06-15 00:00:00.000000000 Z
+ date: 2023-06-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-core