aws-sdk-glue 1.72.0 → 1.77.0
- checksums.yaml +4 -4
- data/lib/aws-sdk-glue.rb +1 -1
- data/lib/aws-sdk-glue/client.rb +59 -1
- data/lib/aws-sdk-glue/client_api.rb +23 -0
- data/lib/aws-sdk-glue/types.rb +145 -1
- metadata +2 -2
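
All of the changes below are additive (new crawler target and policy shapes plus a new `GetPlan` option), so picking them up is just a version bump. A minimal Gemfile sketch, assuming Bundler is used:

# Gemfile: pull in the 1.77.x release, which adds MongoDB/DocumentDB crawler
# targets, RecrawlPolicy, and GetPlan's AdditionalPlanOptionsMap.
gem 'aws-sdk-glue', '~> 1.77'
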
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1a9d36d08a0fe7de74da1d798ffa9a75b4f2aecbfff986843e9c9b9881dfdb4a
+  data.tar.gz: 7ebca7a5ad15d43b34b113a8831b58c07449b303d217cccc919171f46d2d4301
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: dc8f91cf898d03561a75954d761615eaae1531bfa89396407eb3862436b5ea8bfbeb65e18908e0071868c840d968a0e1a1dc8bcd78ed6a4449a8b866dcfac713
+  data.tar.gz: 89245361281954f2922057e3af2e68c7fbf3bc84d10384b2614086ce31278fa2d413453f951949e3ea59e1fce9632d3b557684f5c66d217d5aee24d8c6162b07
data/lib/aws-sdk-glue.rb
CHANGED
data/lib/aws-sdk-glue/client.rb
CHANGED
@@ -663,6 +663,10 @@ module Aws::Glue
     #   resp.crawlers[0].targets.jdbc_targets[0].path #=> String
     #   resp.crawlers[0].targets.jdbc_targets[0].exclusions #=> Array
     #   resp.crawlers[0].targets.jdbc_targets[0].exclusions[0] #=> String
+    #   resp.crawlers[0].targets.mongo_db_targets #=> Array
+    #   resp.crawlers[0].targets.mongo_db_targets[0].connection_name #=> String
+    #   resp.crawlers[0].targets.mongo_db_targets[0].path #=> String
+    #   resp.crawlers[0].targets.mongo_db_targets[0].scan_all #=> Boolean
     #   resp.crawlers[0].targets.dynamo_db_targets #=> Array
     #   resp.crawlers[0].targets.dynamo_db_targets[0].path #=> String
     #   resp.crawlers[0].targets.dynamo_db_targets[0].scan_all #=> Boolean
@@ -675,6 +679,7 @@ module Aws::Glue
     #   resp.crawlers[0].description #=> String
     #   resp.crawlers[0].classifiers #=> Array
     #   resp.crawlers[0].classifiers[0] #=> String
+    #   resp.crawlers[0].recrawl_policy.recrawl_behavior #=> String, one of "CRAWL_EVERYTHING", "CRAWL_NEW_FOLDERS_ONLY"
     #   resp.crawlers[0].schema_change_policy.update_behavior #=> String, one of "LOG", "UPDATE_IN_DATABASE"
     #   resp.crawlers[0].schema_change_policy.delete_behavior #=> String, one of "LOG", "DELETE_FROM_DATABASE", "DEPRECATE_IN_DATABASE"
     #   resp.crawlers[0].state #=> String, one of "READY", "RUNNING", "STOPPING"
@@ -1485,6 +1490,10 @@ module Aws::Glue
     # @option params [Types::SchemaChangePolicy] :schema_change_policy
     #   The policy for the crawler's update and deletion behavior.
     #
+    # @option params [Types::RecrawlPolicy] :recrawl_policy
+    #   A policy that specifies whether to crawl the entire dataset again, or
+    #   to crawl only folders that were added since the last crawler run.
+    #
     # @option params [String] :configuration
     #   Crawler configuration information. This versioned JSON string allows
     #   users to specify aspects of a crawler's behavior. For more
@@ -1531,6 +1540,13 @@ module Aws::Glue
     #         exclusions: ["Path"],
     #       },
     #     ],
+    #     mongo_db_targets: [
+    #       {
+    #         connection_name: "ConnectionName",
+    #         path: "Path",
+    #         scan_all: false,
+    #       },
+    #     ],
     #     dynamo_db_targets: [
     #       {
     #         path: "Path",
@@ -1552,6 +1568,9 @@ module Aws::Glue
     #       update_behavior: "LOG", # accepts LOG, UPDATE_IN_DATABASE
     #       delete_behavior: "LOG", # accepts LOG, DELETE_FROM_DATABASE, DEPRECATE_IN_DATABASE
     #     },
+    #     recrawl_policy: {
+    #       recrawl_behavior: "CRAWL_EVERYTHING", # accepts CRAWL_EVERYTHING, CRAWL_NEW_FOLDERS_ONLY
+    #     },
     #     configuration: "CrawlerConfiguration",
     #     crawler_security_configuration: "CrawlerSecurityConfiguration",
     #     tags: {
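
Taken together, the new `create_crawler` fields above let a crawler be pointed at an Amazon DocumentDB or MongoDB collection through an existing Glue connection. A minimal usage sketch, assuming credentials are configured; the crawler name, role, database, and connection name are hypothetical placeholders rather than values from this diff:

require 'aws-sdk-glue'

glue = Aws::Glue::Client.new(region: 'us-east-1')

# Crawl a DocumentDB/MongoDB collection, sampling rows instead of scanning every record.
glue.create_crawler(
  name: 'orders-crawler',                      # hypothetical crawler name
  role: 'GlueCrawlerRole',                     # hypothetical IAM role for the crawler
  database_name: 'orders_catalog',             # hypothetical Glue catalog database
  targets: {
    mongo_db_targets: [
      {
        connection_name: 'orders-docdb-conn',  # hypothetical Glue connection
        path: 'orders/line_items',             # database/collection
        scan_all: false                        # sample rows rather than scan all records
      }
    ]
  }
)
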
@@ -3727,6 +3746,10 @@ module Aws::Glue
     #   resp.crawler.targets.jdbc_targets[0].path #=> String
     #   resp.crawler.targets.jdbc_targets[0].exclusions #=> Array
     #   resp.crawler.targets.jdbc_targets[0].exclusions[0] #=> String
+    #   resp.crawler.targets.mongo_db_targets #=> Array
+    #   resp.crawler.targets.mongo_db_targets[0].connection_name #=> String
+    #   resp.crawler.targets.mongo_db_targets[0].path #=> String
+    #   resp.crawler.targets.mongo_db_targets[0].scan_all #=> Boolean
     #   resp.crawler.targets.dynamo_db_targets #=> Array
     #   resp.crawler.targets.dynamo_db_targets[0].path #=> String
     #   resp.crawler.targets.dynamo_db_targets[0].scan_all #=> Boolean
@@ -3739,6 +3762,7 @@ module Aws::Glue
     #   resp.crawler.description #=> String
     #   resp.crawler.classifiers #=> Array
     #   resp.crawler.classifiers[0] #=> String
+    #   resp.crawler.recrawl_policy.recrawl_behavior #=> String, one of "CRAWL_EVERYTHING", "CRAWL_NEW_FOLDERS_ONLY"
     #   resp.crawler.schema_change_policy.update_behavior #=> String, one of "LOG", "UPDATE_IN_DATABASE"
     #   resp.crawler.schema_change_policy.delete_behavior #=> String, one of "LOG", "DELETE_FROM_DATABASE", "DEPRECATE_IN_DATABASE"
     #   resp.crawler.state #=> String, one of "READY", "RUNNING", "STOPPING"
@@ -3852,6 +3876,10 @@ module Aws::Glue
     #   resp.crawlers[0].targets.jdbc_targets[0].path #=> String
     #   resp.crawlers[0].targets.jdbc_targets[0].exclusions #=> Array
     #   resp.crawlers[0].targets.jdbc_targets[0].exclusions[0] #=> String
+    #   resp.crawlers[0].targets.mongo_db_targets #=> Array
+    #   resp.crawlers[0].targets.mongo_db_targets[0].connection_name #=> String
+    #   resp.crawlers[0].targets.mongo_db_targets[0].path #=> String
+    #   resp.crawlers[0].targets.mongo_db_targets[0].scan_all #=> Boolean
     #   resp.crawlers[0].targets.dynamo_db_targets #=> Array
     #   resp.crawlers[0].targets.dynamo_db_targets[0].path #=> String
     #   resp.crawlers[0].targets.dynamo_db_targets[0].scan_all #=> Boolean
@@ -3864,6 +3892,7 @@ module Aws::Glue
     #   resp.crawlers[0].description #=> String
     #   resp.crawlers[0].classifiers #=> Array
     #   resp.crawlers[0].classifiers[0] #=> String
+    #   resp.crawlers[0].recrawl_policy.recrawl_behavior #=> String, one of "CRAWL_EVERYTHING", "CRAWL_NEW_FOLDERS_ONLY"
     #   resp.crawlers[0].schema_change_policy.update_behavior #=> String, one of "LOG", "UPDATE_IN_DATABASE"
     #   resp.crawlers[0].schema_change_policy.delete_behavior #=> String, one of "LOG", "DELETE_FROM_DATABASE", "DEPRECATE_IN_DATABASE"
     #   resp.crawlers[0].state #=> String, one of "READY", "RUNNING", "STOPPING"
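
The same additions show up on the read path, so existing code that walks crawler responses keeps working while new code can inspect the MongoDB targets and the recrawl policy. A hedged sketch, reusing a hypothetical crawler name:

glue = Aws::Glue::Client.new

resp = glue.get_crawler(name: 'orders-crawler')    # hypothetical crawler name
crawler = resp.crawler

# New in this release: MongoDB/DocumentDB targets and the recrawl policy.
crawler.targets.mongo_db_targets.to_a.each do |target|
  puts "#{target.connection_name} -> #{target.path} (scan_all=#{target.scan_all})"
end
puts crawler.recrawl_policy&.recrawl_behavior      # "CRAWL_EVERYTHING" or "CRAWL_NEW_FOLDERS_ONLY", nil if unset
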
@@ -5226,6 +5255,18 @@ module Aws::Glue
     # @option params [String] :language
     #   The programming language of the code to perform the mapping.
     #
+    # @option params [Hash<String,String>] :additional_plan_options_map
+    #   A map to hold additional optional key-value parameters.
+    #
+    #   Currently, these key-value pairs are supported:
+    #
+    #   * `inferSchema` — Specifies whether to set `inferSchema` to true or
+    #     false for the default script generated by an AWS Glue job. For
+    #     example, to set `inferSchema` to true, pass the following key value
+    #     pair:
+    #
+    #     `--additional-plan-options-map '\{"inferSchema":"true"\}'`
+    #
     # @return [Types::GetPlanResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
     #
     #   * {Types::GetPlanResponse#python_script #python_script} => String
@@ -5278,6 +5319,9 @@ module Aws::Glue
     #       ],
     #     },
     #     language: "PYTHON", # accepts PYTHON, SCALA
+    #     additional_plan_options_map: {
+    #       "GenericString" => "GenericString",
+    #     },
     #   })
     #
     # @example Response structure
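
The new `additional_plan_options_map` parameter threads extra flags into script generation; per the documentation above, `inferSchema` is the only key currently called out. A hedged sketch of passing it, with a made-up single-column mapping and catalog entry:

glue = Aws::Glue::Client.new

resp = glue.get_plan(
  mapping: [
    {
      source_table: 'src_table', source_path: 'col_a', source_type: 'string',  # hypothetical mapping entry
      target_table: 'dst_table', target_path: 'col_a', target_type: 'string'
    }
  ],
  source: { database_name: 'src_db', table_name: 'src_table' },                # hypothetical catalog entry
  language: 'PYTHON',
  additional_plan_options_map: { 'inferSchema' => 'true' }                     # new in this release
)
puts resp.python_script
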
@@ -8288,6 +8332,10 @@ module Aws::Glue
     # @option params [Types::SchemaChangePolicy] :schema_change_policy
     #   The policy for the crawler's update and deletion behavior.
     #
+    # @option params [Types::RecrawlPolicy] :recrawl_policy
+    #   A policy that specifies whether to crawl the entire dataset again, or
+    #   to crawl only folders that were added since the last crawler run.
+    #
     # @option params [String] :configuration
     #   Crawler configuration information. This versioned JSON string allows
     #   users to specify aspects of a crawler's behavior. For more
@@ -8325,6 +8373,13 @@ module Aws::Glue
     #         exclusions: ["Path"],
     #       },
     #     ],
+    #     mongo_db_targets: [
+    #       {
+    #         connection_name: "ConnectionName",
+    #         path: "Path",
+    #         scan_all: false,
+    #       },
+    #     ],
     #     dynamo_db_targets: [
     #       {
     #         path: "Path",
@@ -8346,6 +8401,9 @@ module Aws::Glue
     #       update_behavior: "LOG", # accepts LOG, UPDATE_IN_DATABASE
     #       delete_behavior: "LOG", # accepts LOG, DELETE_FROM_DATABASE, DEPRECATE_IN_DATABASE
     #     },
+    #     recrawl_policy: {
+    #       recrawl_behavior: "CRAWL_EVERYTHING", # accepts CRAWL_EVERYTHING, CRAWL_NEW_FOLDERS_ONLY
+    #     },
     #     configuration: "CrawlerConfiguration",
     #     crawler_security_configuration: "CrawlerSecurityConfiguration",
     #   })
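
`recrawl_policy` is most useful on an existing crawler over S3 data: switching it to `CRAWL_NEW_FOLDERS_ONLY` makes subsequent runs incremental. A hedged sketch of flipping the policy on an already-defined crawler (the name is a placeholder):

glue = Aws::Glue::Client.new

# Only crawl folders added since the last run; pass CRAWL_EVERYTHING to return to full crawls.
glue.update_crawler(
  name: 's3-logs-crawler',   # hypothetical existing crawler with S3 targets
  recrawl_policy: { recrawl_behavior: 'CRAWL_NEW_FOLDERS_ONLY' }
)
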
@@ -9090,7 +9148,7 @@ module Aws::Glue
         params: params,
         config: config)
       context[:gem_name] = 'aws-sdk-glue'
-      context[:gem_version] = '1.72.0'
+      context[:gem_version] = '1.77.0'
       Seahorse::Client::Request.new(handlers, context)
     end
 
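
A quick way to confirm which release is actually loaded at runtime, since the bump above only takes effect once the gem is updated:

require 'aws-sdk-glue'

puts Aws::Glue::GEM_VERSION   # => "1.77.0" once the upgrade is in place
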
data/lib/aws-sdk-glue/client_api.rb
CHANGED
@@ -16,6 +16,7 @@ module Aws::Glue
     AccessDeniedException = Shapes::StructureShape.new(name: 'AccessDeniedException')
     Action = Shapes::StructureShape.new(name: 'Action')
     ActionList = Shapes::ListShape.new(name: 'ActionList')
+    AdditionalPlanOptionsMap = Shapes::MapShape.new(name: 'AdditionalPlanOptionsMap')
     AlreadyExistsException = Shapes::StructureShape.new(name: 'AlreadyExistsException')
     AttemptCount = Shapes::IntegerShape.new(name: 'AttemptCount')
     BatchCreatePartitionRequest = Shapes::StructureShape.new(name: 'BatchCreatePartitionRequest')
@@ -436,6 +437,8 @@ module Aws::Glue
     MessagePrefix = Shapes::StringShape.new(name: 'MessagePrefix')
     MessageString = Shapes::StringShape.new(name: 'MessageString')
     MillisecondsCount = Shapes::IntegerShape.new(name: 'MillisecondsCount')
+    MongoDBTarget = Shapes::StructureShape.new(name: 'MongoDBTarget')
+    MongoDBTargetList = Shapes::ListShape.new(name: 'MongoDBTargetList')
     NameString = Shapes::StringShape.new(name: 'NameString')
     NameStringList = Shapes::ListShape.new(name: 'NameStringList')
     NoScheduleException = Shapes::StructureShape.new(name: 'NoScheduleException')
@@ -495,6 +498,8 @@ module Aws::Glue
     PythonScript = Shapes::StringShape.new(name: 'PythonScript')
     PythonVersionString = Shapes::StringShape.new(name: 'PythonVersionString')
     RecordsCount = Shapes::IntegerShape.new(name: 'RecordsCount')
+    RecrawlBehavior = Shapes::StringShape.new(name: 'RecrawlBehavior')
+    RecrawlPolicy = Shapes::StructureShape.new(name: 'RecrawlPolicy')
     ReplaceBoolean = Shapes::BooleanShape.new(name: 'ReplaceBoolean')
     ResetJobBookmarkRequest = Shapes::StructureShape.new(name: 'ResetJobBookmarkRequest')
     ResetJobBookmarkResponse = Shapes::StructureShape.new(name: 'ResetJobBookmarkResponse')
@@ -690,6 +695,9 @@ module Aws::Glue
 
     ActionList.member = Shapes::ShapeRef.new(shape: Action)
 
+    AdditionalPlanOptionsMap.key = Shapes::ShapeRef.new(shape: GenericString)
+    AdditionalPlanOptionsMap.value = Shapes::ShapeRef.new(shape: GenericString)
+
     AlreadyExistsException.add_member(:message, Shapes::ShapeRef.new(shape: MessageString, location_name: "Message"))
     AlreadyExistsException.struct_class = Types::AlreadyExistsException
 
@@ -1020,6 +1028,7 @@ module Aws::Glue
     Crawler.add_member(:database_name, Shapes::ShapeRef.new(shape: DatabaseName, location_name: "DatabaseName"))
     Crawler.add_member(:description, Shapes::ShapeRef.new(shape: DescriptionString, location_name: "Description"))
     Crawler.add_member(:classifiers, Shapes::ShapeRef.new(shape: ClassifierNameList, location_name: "Classifiers"))
+    Crawler.add_member(:recrawl_policy, Shapes::ShapeRef.new(shape: RecrawlPolicy, location_name: "RecrawlPolicy"))
     Crawler.add_member(:schema_change_policy, Shapes::ShapeRef.new(shape: SchemaChangePolicy, location_name: "SchemaChangePolicy"))
     Crawler.add_member(:state, Shapes::ShapeRef.new(shape: CrawlerState, location_name: "State"))
     Crawler.add_member(:table_prefix, Shapes::ShapeRef.new(shape: TablePrefix, location_name: "TablePrefix"))
@@ -1063,6 +1072,7 @@ module Aws::Glue
 
     CrawlerTargets.add_member(:s3_targets, Shapes::ShapeRef.new(shape: S3TargetList, location_name: "S3Targets"))
     CrawlerTargets.add_member(:jdbc_targets, Shapes::ShapeRef.new(shape: JdbcTargetList, location_name: "JdbcTargets"))
+    CrawlerTargets.add_member(:mongo_db_targets, Shapes::ShapeRef.new(shape: MongoDBTargetList, location_name: "MongoDBTargets"))
     CrawlerTargets.add_member(:dynamo_db_targets, Shapes::ShapeRef.new(shape: DynamoDBTargetList, location_name: "DynamoDBTargets"))
     CrawlerTargets.add_member(:catalog_targets, Shapes::ShapeRef.new(shape: CatalogTargetList, location_name: "CatalogTargets"))
     CrawlerTargets.struct_class = Types::CrawlerTargets
@@ -1090,6 +1100,7 @@ module Aws::Glue
     CreateCrawlerRequest.add_member(:classifiers, Shapes::ShapeRef.new(shape: ClassifierNameList, location_name: "Classifiers"))
     CreateCrawlerRequest.add_member(:table_prefix, Shapes::ShapeRef.new(shape: TablePrefix, location_name: "TablePrefix"))
     CreateCrawlerRequest.add_member(:schema_change_policy, Shapes::ShapeRef.new(shape: SchemaChangePolicy, location_name: "SchemaChangePolicy"))
+    CreateCrawlerRequest.add_member(:recrawl_policy, Shapes::ShapeRef.new(shape: RecrawlPolicy, location_name: "RecrawlPolicy"))
     CreateCrawlerRequest.add_member(:configuration, Shapes::ShapeRef.new(shape: CrawlerConfiguration, location_name: "Configuration"))
     CreateCrawlerRequest.add_member(:crawler_security_configuration, Shapes::ShapeRef.new(shape: CrawlerSecurityConfiguration, location_name: "CrawlerSecurityConfiguration"))
     CreateCrawlerRequest.add_member(:tags, Shapes::ShapeRef.new(shape: TagsMap, location_name: "Tags"))
@@ -1828,6 +1839,7 @@ module Aws::Glue
     GetPlanRequest.add_member(:sinks, Shapes::ShapeRef.new(shape: CatalogEntries, location_name: "Sinks"))
     GetPlanRequest.add_member(:location, Shapes::ShapeRef.new(shape: Location, location_name: "Location"))
     GetPlanRequest.add_member(:language, Shapes::ShapeRef.new(shape: Language, location_name: "Language"))
+    GetPlanRequest.add_member(:additional_plan_options_map, Shapes::ShapeRef.new(shape: AdditionalPlanOptionsMap, location_name: "AdditionalPlanOptionsMap"))
     GetPlanRequest.struct_class = Types::GetPlanRequest
 
     GetPlanResponse.add_member(:python_script, Shapes::ShapeRef.new(shape: PythonScript, location_name: "PythonScript"))
@@ -2260,6 +2272,13 @@ module Aws::Glue
 
     MatchCriteria.member = Shapes::ShapeRef.new(shape: NameString)
 
+    MongoDBTarget.add_member(:connection_name, Shapes::ShapeRef.new(shape: ConnectionName, location_name: "ConnectionName"))
+    MongoDBTarget.add_member(:path, Shapes::ShapeRef.new(shape: Path, location_name: "Path"))
+    MongoDBTarget.add_member(:scan_all, Shapes::ShapeRef.new(shape: NullableBoolean, location_name: "ScanAll"))
+    MongoDBTarget.struct_class = Types::MongoDBTarget
+
+    MongoDBTargetList.member = Shapes::ShapeRef.new(shape: MongoDBTarget)
+
     NameStringList.member = Shapes::ShapeRef.new(shape: NameString)
 
     NoScheduleException.add_member(:message, Shapes::ShapeRef.new(shape: MessageString, location_name: "Message"))
@@ -2393,6 +2412,9 @@ module Aws::Glue
 
     PutWorkflowRunPropertiesResponse.struct_class = Types::PutWorkflowRunPropertiesResponse
 
+    RecrawlPolicy.add_member(:recrawl_behavior, Shapes::ShapeRef.new(shape: RecrawlBehavior, location_name: "RecrawlBehavior"))
+    RecrawlPolicy.struct_class = Types::RecrawlPolicy
+
     ResetJobBookmarkRequest.add_member(:job_name, Shapes::ShapeRef.new(shape: JobName, required: true, location_name: "JobName"))
     ResetJobBookmarkRequest.add_member(:run_id, Shapes::ShapeRef.new(shape: RunId, location_name: "RunId"))
     ResetJobBookmarkRequest.struct_class = Types::ResetJobBookmarkRequest
@@ -2808,6 +2830,7 @@ module Aws::Glue
     UpdateCrawlerRequest.add_member(:classifiers, Shapes::ShapeRef.new(shape: ClassifierNameList, location_name: "Classifiers"))
     UpdateCrawlerRequest.add_member(:table_prefix, Shapes::ShapeRef.new(shape: TablePrefix, location_name: "TablePrefix"))
     UpdateCrawlerRequest.add_member(:schema_change_policy, Shapes::ShapeRef.new(shape: SchemaChangePolicy, location_name: "SchemaChangePolicy"))
+    UpdateCrawlerRequest.add_member(:recrawl_policy, Shapes::ShapeRef.new(shape: RecrawlPolicy, location_name: "RecrawlPolicy"))
     UpdateCrawlerRequest.add_member(:configuration, Shapes::ShapeRef.new(shape: CrawlerConfiguration, location_name: "Configuration"))
     UpdateCrawlerRequest.add_member(:crawler_security_configuration, Shapes::ShapeRef.new(shape: CrawlerSecurityConfiguration, location_name: "CrawlerSecurityConfiguration"))
     UpdateCrawlerRequest.struct_class = Types::UpdateCrawlerRequest
data/lib/aws-sdk-glue/types.rb
CHANGED
@@ -2103,6 +2103,11 @@ module Aws::Glue
     #   associated with the crawler.
     #   @return [Array<String>]
     #
+    # @!attribute [rw] recrawl_policy
+    #   A policy that specifies whether to crawl the entire dataset again,
+    #   or to crawl only folders that were added since the last crawler run.
+    #   @return [Types::RecrawlPolicy]
+    #
     # @!attribute [rw] schema_change_policy
     #   The policy that specifies update and delete behaviors for the
     #   crawler.
@@ -2167,6 +2172,7 @@ module Aws::Glue
       :database_name,
       :description,
       :classifiers,
+      :recrawl_policy,
       :schema_change_policy,
       :state,
       :table_prefix,
@@ -2309,6 +2315,13 @@ module Aws::Glue
    #         exclusions: ["Path"],
    #       },
    #     ],
+   #     mongo_db_targets: [
+   #       {
+   #         connection_name: "ConnectionName",
+   #         path: "Path",
+   #         scan_all: false,
+   #       },
+   #     ],
    #     dynamo_db_targets: [
    #       {
    #         path: "Path",
@@ -2332,6 +2345,10 @@ module Aws::Glue
    #   Specifies JDBC targets.
    #   @return [Array<Types::JdbcTarget>]
    #
+   # @!attribute [rw] mongo_db_targets
+   #   Specifies Amazon DocumentDB or MongoDB targets.
+   #   @return [Array<Types::MongoDBTarget>]
+   #
    # @!attribute [rw] dynamo_db_targets
    #   Specifies Amazon DynamoDB targets.
    #   @return [Array<Types::DynamoDBTarget>]
@@ -2345,6 +2362,7 @@ module Aws::Glue
    class CrawlerTargets < Struct.new(
      :s3_targets,
      :jdbc_targets,
+     :mongo_db_targets,
      :dynamo_db_targets,
      :catalog_targets)
      SENSITIVE = []
@@ -2478,6 +2496,13 @@ module Aws::Glue
    #         exclusions: ["Path"],
    #       },
    #     ],
+   #     mongo_db_targets: [
+   #       {
+   #         connection_name: "ConnectionName",
+   #         path: "Path",
+   #         scan_all: false,
+   #       },
+   #     ],
    #     dynamo_db_targets: [
    #       {
    #         path: "Path",
@@ -2499,6 +2524,9 @@ module Aws::Glue
    #       update_behavior: "LOG", # accepts LOG, UPDATE_IN_DATABASE
    #       delete_behavior: "LOG", # accepts LOG, DELETE_FROM_DATABASE, DEPRECATE_IN_DATABASE
    #     },
+   #     recrawl_policy: {
+   #       recrawl_behavior: "CRAWL_EVERYTHING", # accepts CRAWL_EVERYTHING, CRAWL_NEW_FOLDERS_ONLY
+   #     },
    #     configuration: "CrawlerConfiguration",
    #     crawler_security_configuration: "CrawlerSecurityConfiguration",
    #     tags: {
@@ -2553,6 +2581,11 @@ module Aws::Glue
    #   The policy for the crawler's update and deletion behavior.
    #   @return [Types::SchemaChangePolicy]
    #
+   # @!attribute [rw] recrawl_policy
+   #   A policy that specifies whether to crawl the entire dataset again,
+   #   or to crawl only folders that were added since the last crawler run.
+   #   @return [Types::RecrawlPolicy]
+   #
    # @!attribute [rw] configuration
    #   Crawler configuration information. This versioned JSON string allows
    #   users to specify aspects of a crawler's behavior. For more
@@ -2590,6 +2623,7 @@ module Aws::Glue
      :classifiers,
      :table_prefix,
      :schema_change_policy,
+     :recrawl_policy,
      :configuration,
      :crawler_security_configuration,
      :tags)
@@ -7527,6 +7561,9 @@ module Aws::Glue
    #       ],
    #     },
    #     language: "PYTHON", # accepts PYTHON, SCALA
+   #     additional_plan_options_map: {
+   #       "GenericString" => "GenericString",
+   #     },
    #   }
    #
    # @!attribute [rw] mapping
@@ -7549,6 +7586,19 @@ module Aws::Glue
    #   The programming language of the code to perform the mapping.
    #   @return [String]
    #
+   # @!attribute [rw] additional_plan_options_map
+   #   A map to hold additional optional key-value parameters.
+   #
+   #   Currently, these key-value pairs are supported:
+   #
+   #   * `inferSchema` — Specifies whether to set `inferSchema` to true
+   #     or false for the default script generated by an AWS Glue job. For
+   #     example, to set `inferSchema` to true, pass the following key
+   #     value pair:
+   #
+   #     `--additional-plan-options-map '\{"inferSchema":"true"\}'`
+   #   @return [Hash<String,String>]
+   #
    # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/GetPlanRequest AWS API Documentation
    #
    class GetPlanRequest < Struct.new(
@@ -7556,7 +7606,8 @@ module Aws::Glue
      :source,
      :sinks,
      :location,
-     :language)
+     :language,
+     :additional_plan_options_map)
      SENSITIVE = []
      include Aws::Structure
    end
@@ -10199,6 +10250,47 @@ module Aws::Glue
      include Aws::Structure
    end
 
+   # Specifies an Amazon DocumentDB or MongoDB data store to crawl.
+   #
+   # @note When making an API call, you may pass MongoDBTarget
+   #   data as a hash:
+   #
+   #       {
+   #         connection_name: "ConnectionName",
+   #         path: "Path",
+   #         scan_all: false,
+   #       }
+   #
+   # @!attribute [rw] connection_name
+   #   The name of the connection to use to connect to the Amazon
+   #   DocumentDB or MongoDB target.
+   #   @return [String]
+   #
+   # @!attribute [rw] path
+   #   The path of the Amazon DocumentDB or MongoDB target
+   #   (database/collection).
+   #   @return [String]
+   #
+   # @!attribute [rw] scan_all
+   #   Indicates whether to scan all the records, or to sample rows from
+   #   the table. Scanning all the records can take a long time when the
+   #   table is not a high throughput table.
+   #
+   #   A value of `true` means to scan all records, while a value of
+   #   `false` means to sample the records. If no value is specified, the
+   #   value defaults to `true`.
+   #   @return [Boolean]
+   #
+   # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/MongoDBTarget AWS API Documentation
+   #
+   class MongoDBTarget < Struct.new(
+     :connection_name,
+     :path,
+     :scan_all)
+     SENSITIVE = []
+     include Aws::Structure
+   end
+
    # There is no applicable schedule.
    #
    # @!attribute [rw] message
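
As documented in the MongoDBTarget hunk above, `path` is expressed as `database/collection` and `scan_all` defaults to `true` when omitted. A hedged hash literal matching that shape (names are placeholders):

# Accepted anywhere a MongoDBTarget is expected, e.g. inside targets.mongo_db_targets.
mongo_target = {
  connection_name: 'orders-docdb-conn',  # hypothetical Glue connection
  path: 'orders/line_items',             # database/collection
  scan_all: false                        # omit to default to a full scan (true)
}
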
@@ -10868,6 +10960,42 @@ module Aws::Glue
    #
    class PutWorkflowRunPropertiesResponse < Aws::EmptyStructure; end
 
+   # When crawling an Amazon S3 data source after the first crawl is
+   # complete, specifies whether to crawl the entire dataset again or to
+   # crawl only folders that were added since the last crawler run. For
+   # more information, see [Incremental Crawls in AWS Glue][1] in the
+   # developer guide.
+   #
+   #
+   #
+   # [1]: https://docs.aws.amazon.com/glue/latest/dg/incremental-crawls.html
+   #
+   # @note When making an API call, you may pass RecrawlPolicy
+   #   data as a hash:
+   #
+   #       {
+   #         recrawl_behavior: "CRAWL_EVERYTHING", # accepts CRAWL_EVERYTHING, CRAWL_NEW_FOLDERS_ONLY
+   #       }
+   #
+   # @!attribute [rw] recrawl_behavior
+   #   Specifies whether to crawl the entire dataset again or to crawl only
+   #   folders that were added since the last crawler run.
+   #
+   #   A value of `CRAWL_EVERYTHING` specifies crawling the entire dataset
+   #   again.
+   #
+   #   A value of `CRAWL_NEW_FOLDERS_ONLY` specifies crawling only folders
+   #   that were added since the last crawler run.
+   #   @return [String]
+   #
+   # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/RecrawlPolicy AWS API Documentation
+   #
+   class RecrawlPolicy < Struct.new(
+     :recrawl_behavior)
+     SENSITIVE = []
+     include Aws::Structure
+   end
+
    # @note When making an API call, you may pass ResetJobBookmarkRequest
    #   data as a hash:
    #
@@ -13409,6 +13537,13 @@ module Aws::Glue
    #         exclusions: ["Path"],
    #       },
    #     ],
+   #     mongo_db_targets: [
+   #       {
+   #         connection_name: "ConnectionName",
+   #         path: "Path",
+   #         scan_all: false,
+   #       },
+   #     ],
    #     dynamo_db_targets: [
    #       {
    #         path: "Path",
@@ -13430,6 +13565,9 @@ module Aws::Glue
    #       update_behavior: "LOG", # accepts LOG, UPDATE_IN_DATABASE
    #       delete_behavior: "LOG", # accepts LOG, DELETE_FROM_DATABASE, DEPRECATE_IN_DATABASE
    #     },
+   #     recrawl_policy: {
+   #       recrawl_behavior: "CRAWL_EVERYTHING", # accepts CRAWL_EVERYTHING, CRAWL_NEW_FOLDERS_ONLY
+   #     },
    #     configuration: "CrawlerConfiguration",
    #     crawler_security_configuration: "CrawlerSecurityConfiguration",
    #   }
@@ -13481,6 +13619,11 @@ module Aws::Glue
    #   The policy for the crawler's update and deletion behavior.
    #   @return [Types::SchemaChangePolicy]
    #
+   # @!attribute [rw] recrawl_policy
+   #   A policy that specifies whether to crawl the entire dataset again,
+   #   or to crawl only folders that were added since the last crawler run.
+   #   @return [Types::RecrawlPolicy]
+   #
    # @!attribute [rw] configuration
    #   Crawler configuration information. This versioned JSON string allows
    #   users to specify aspects of a crawler's behavior. For more
@@ -13508,6 +13651,7 @@ module Aws::Glue
      :classifiers,
      :table_prefix,
      :schema_change_policy,
+     :recrawl_policy,
      :configuration,
      :crawler_security_configuration)
      SENSITIVE = []
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: aws-sdk-glue
 version: !ruby/object:Gem::Version
-  version: 1.72.0
+  version: 1.77.0
 platform: ruby
 authors:
 - Amazon Web Services
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-
+date: 2020-10-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: aws-sdk-core