aws-sdk-datasync 1.35.0 → 1.39.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +20 -0
- data/VERSION +1 -1
- data/lib/aws-sdk-datasync/client.rb +307 -12
- data/lib/aws-sdk-datasync/client_api.rb +114 -0
- data/lib/aws-sdk-datasync/types.rb +470 -25
- data/lib/aws-sdk-datasync.rb +1 -1
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2251d1ba573073ab2feaa7a18e4d04fbd82a664d4055a88c5ca781768186a7f0
+  data.tar.gz: 7363752e7c4d3911781bbb05d6d9eb646fd4878ba767f5794a200da1d9119764
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 27a5420837ad5210d39e9e4ac1ba26ca0d5e685f1fc51812f49de1577414d3a93a29ecb264a913e0e004384314760164242ee722af13413611746bcc640bf580
+  data.tar.gz: 526d0a688e9c23aa2f1c0ac4084f463fc423b2dd76e8d00edbb52c7b3f088498c90a1183e75ed7429d42a987bbd089a52503af245b72730091beec8325ec2c4c
data/CHANGELOG.md
CHANGED
@@ -1,6 +1,26 @@
 Unreleased Changes
 ------------------
 
+1.39.0 (2021-11-04)
+------------------
+
+* Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
+1.38.0 (2021-11-03)
+------------------
+
+* Feature - AWS DataSync now supports Hadoop Distributed File System (HDFS) Locations
+
+1.37.0 (2021-10-18)
+------------------
+
+* Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
+1.36.0 (2021-09-01)
+------------------
+
+* Feature - Code Generated Changes, see `./build_tools` or `aws-sdk-core`'s CHANGELOG.md for details.
+
 1.35.0 (2021-08-25)
 ------------------
 
data/VERSION
CHANGED
@@ -1 +1 @@
-1.35.0
+1.39.0
data/lib/aws-sdk-datasync/client.rb
CHANGED
@@ -285,6 +285,15 @@ module Aws::DataSync
 # ** Please note ** When response stubbing is enabled, no HTTP
 # requests are made, and retries are disabled.
 #
+# @option options [Boolean] :use_dualstack_endpoint
+# When set to `true`, dualstack enabled endpoints (with `.aws` TLD)
+# will be used if available.
+#
+# @option options [Boolean] :use_fips_endpoint
+# When set to `true`, fips compatible endpoints will be used if available.
+# When a `fips` region is used, the region is normalized and this config
+# is set to `true`.
+#
 # @option options [Boolean] :validate_params (true)
 # When `true`, request parameters are validated before
 # sending the request.
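For orientation only (this note and sketch are not part of the diff), the two options added in this hunk are ordinary constructor options; the region below is an assumed placeholder and credentials are assumed to come from the environment:

  require 'aws-sdk-datasync'

  # Prefer dualstack (.aws TLD) endpoints where the region offers them;
  # flip use_fips_endpoint to true to resolve FIPS-compatible endpoints instead.
  client = Aws::DataSync::Client.new(
    region: 'us-west-2',          # placeholder region
    use_dualstack_endpoint: true,
    use_fips_endpoint: false
  )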
@@ -636,6 +645,141 @@ module Aws::DataSync
 req.send_request(options)
 end
 
+# Creates an endpoint for a Hadoop Distributed File System (HDFS).
+#
+# @option params [String] :subdirectory
+# A subdirectory in the HDFS cluster. This subdirectory is used to read
+# data from or write data to the HDFS cluster. If the subdirectory
+# isn't specified, it will default to `/`.
+#
+# @option params [required, Array<Types::HdfsNameNode>] :name_nodes
+# The NameNode that manages the HDFS namespace. The NameNode performs
+# operations such as opening, closing, and renaming files and
+# directories. The NameNode contains the information to map blocks of
+# data to the DataNodes. You can use only one NameNode.
+#
+# @option params [Integer] :block_size
+# The size of data blocks to write into the HDFS cluster. The block size
+# must be a multiple of 512 bytes. The default block size is 128
+# mebibytes (MiB).
+#
+# @option params [Integer] :replication_factor
+# The number of DataNodes to replicate the data to when writing to the
+# HDFS cluster. By default, data is replicated to three DataNodes.
+#
+# @option params [String] :kms_key_provider_uri
+# The URI of the HDFS cluster's Key Management Server (KMS).
+#
+# @option params [Types::QopConfiguration] :qop_configuration
+# The Quality of Protection (QOP) configuration specifies the Remote
+# Procedure Call (RPC) and data transfer protection settings configured
+# on the Hadoop Distributed File System (HDFS) cluster. If
+# `QopConfiguration` isn't specified, `RpcProtection` and
+# `DataTransferProtection` default to `PRIVACY`. If you set
+# `RpcProtection` or `DataTransferProtection`, the other parameter
+# assumes the same value.
+#
+# @option params [required, String] :authentication_type
+# The type of authentication used to determine the identity of the user.
+#
+# @option params [String] :simple_user
+# The user name used to identify the client on the host operating
+# system.
+#
+# <note markdown="1"> If `SIMPLE` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [String] :kerberos_principal
+# The Kerberos principal with access to the files and folders on the
+# HDFS cluster.
+#
+# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [String, StringIO, File] :kerberos_keytab
+# The Kerberos key table (keytab) that contains mappings between the
+# defined Kerberos principal and the encrypted keys. You can load the
+# keytab from a file by providing the file's address. If you're using
+# the CLI, it performs base64 encoding for you. Otherwise, provide the
+# base64-encoded text.
+#
+# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [String, StringIO, File] :kerberos_krb_5_conf
+# The `krb5.conf` file that contains the Kerberos configuration
+# information. You can load the `krb5.conf` file by providing the
+# file's address. If you're using the CLI, it performs the base64
+# encoding for you. Otherwise, provide the base64-encoded text.
+#
+# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [required, Array<String>] :agent_arns
+# The Amazon Resource Names (ARNs) of the agents that are used to
+# connect to the HDFS cluster.
+#
+# @option params [Array<Types::TagListEntry>] :tags
+# The key-value pair that represents the tag that you want to add to the
+# location. The value can be an empty string. We recommend using tags to
+# name your resources.
+#
+# @return [Types::CreateLocationHdfsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
+#
+# * {Types::CreateLocationHdfsResponse#location_arn #location_arn} => String
+#
+# @example Request syntax with placeholder values
+#
+# resp = client.create_location_hdfs({
+# subdirectory: "HdfsSubdirectory",
+# name_nodes: [ # required
+# {
+# hostname: "HdfsServerHostname", # required
+# port: 1, # required
+# },
+# ],
+# block_size: 1,
+# replication_factor: 1,
+# kms_key_provider_uri: "KmsKeyProviderUri",
+# qop_configuration: {
+# rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+# data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+# },
+# authentication_type: "SIMPLE", # required, accepts SIMPLE, KERBEROS
+# simple_user: "HdfsUser",
+# kerberos_principal: "KerberosPrincipal",
+# kerberos_keytab: "data",
+# kerberos_krb_5_conf: "data",
+# agent_arns: ["AgentArn"], # required
+# tags: [
+# {
+# key: "TagKey", # required
+# value: "TagValue",
+# },
+# ],
+# })
+#
+# @example Response structure
+#
+# resp.location_arn #=> String
+#
+# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateLocationHdfs AWS API Documentation
+#
+# @overload create_location_hdfs(params = {})
+# @param [Hash] params ({})
+def create_location_hdfs(params = {}, options = {})
+req = build_request(:create_location_hdfs, params)
+req.send_request(options)
+end
+
 # Defines a file system on a Network File System (NFS) server that can
 # be read from or written to.
 #
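Beyond the generated placeholder example above, a hedged end-to-end sketch of the Kerberos path may help; every hostname, ARN, and file path below is an invented placeholder, and the keytab and krb5.conf are passed as raw strings, which matches the `[String, StringIO, File]` types documented in this hunk:

  # Kerberos-authenticated HDFS location; the SIMPLE path would instead set
  # authentication_type: 'SIMPLE' plus simple_user.
  resp = client.create_location_hdfs(
    name_nodes: [{ hostname: 'namenode.example.internal', port: 8020 }], # 8020 = common NameNode RPC port
    authentication_type: 'KERBEROS',
    kerberos_principal: 'datasync/namenode.example.internal@EXAMPLE.COM',
    kerberos_keytab: File.binread('/tmp/datasync.keytab'),   # keytab is binary
    kerberos_krb_5_conf: File.read('/etc/krb5.conf'),
    agent_arns: ['arn:aws:datasync:us-west-2:111122223333:agent/agent-0123456789abcdef0']
  )
  hdfs_location_arn = resp.location_arn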
@@ -1103,9 +1247,9 @@ module Aws::DataSync
 #
 # @option params [Array<Types::FilterRule>] :includes
 # A list of filter rules that determines which files to include when
-# running a task. The pattern
+# running a task. The pattern contains a single filter string that
 # consists of the patterns to include. The patterns are delimited by
-# "\|" (that is, a pipe)
+# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
 #
 # @return [Types::CreateTaskResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
 #
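Since this corrected wording recurs in several hunks below, here is a small hedged sketch of what a pipe-delimited filter looks like in a call; the `filter_type`/`value` fields of `Types::FilterRule` and the location ARNs are assumptions not shown in this hunk:

  task = client.create_task(
    source_location_arn:      'arn:aws:datasync:us-west-2:111122223333:location/loc-0aaa1111bbbb2222c',
    destination_location_arn: 'arn:aws:datasync:us-west-2:111122223333:location/loc-0ddd3333eeee4444f',
    # One filter string; the individual patterns are separated by "|"
    includes: [{ filter_type: 'SIMPLE_PATTERN', value: '/folder1|/folder2' }]
  )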
@@ -1368,6 +1512,62 @@ module Aws::DataSync
 req.send_request(options)
 end
 
+# Returns metadata, such as the authentication information about the
+# Hadoop Distributed File System (HDFS) location.
+#
+# @option params [required, String] :location_arn
+# The Amazon Resource Name (ARN) of the HDFS cluster location to
+# describe.
+#
+# @return [Types::DescribeLocationHdfsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
+#
+# * {Types::DescribeLocationHdfsResponse#location_arn #location_arn} => String
+# * {Types::DescribeLocationHdfsResponse#location_uri #location_uri} => String
+# * {Types::DescribeLocationHdfsResponse#name_nodes #name_nodes} => Array<Types::HdfsNameNode>
+# * {Types::DescribeLocationHdfsResponse#block_size #block_size} => Integer
+# * {Types::DescribeLocationHdfsResponse#replication_factor #replication_factor} => Integer
+# * {Types::DescribeLocationHdfsResponse#kms_key_provider_uri #kms_key_provider_uri} => String
+# * {Types::DescribeLocationHdfsResponse#qop_configuration #qop_configuration} => Types::QopConfiguration
+# * {Types::DescribeLocationHdfsResponse#authentication_type #authentication_type} => String
+# * {Types::DescribeLocationHdfsResponse#simple_user #simple_user} => String
+# * {Types::DescribeLocationHdfsResponse#kerberos_principal #kerberos_principal} => String
+# * {Types::DescribeLocationHdfsResponse#agent_arns #agent_arns} => Array<String>
+# * {Types::DescribeLocationHdfsResponse#creation_time #creation_time} => Time
+#
+# @example Request syntax with placeholder values
+#
+# resp = client.describe_location_hdfs({
+# location_arn: "LocationArn", # required
+# })
+#
+# @example Response structure
+#
+# resp.location_arn #=> String
+# resp.location_uri #=> String
+# resp.name_nodes #=> Array
+# resp.name_nodes[0].hostname #=> String
+# resp.name_nodes[0].port #=> Integer
+# resp.block_size #=> Integer
+# resp.replication_factor #=> Integer
+# resp.kms_key_provider_uri #=> String
+# resp.qop_configuration.rpc_protection #=> String, one of "DISABLED", "AUTHENTICATION", "INTEGRITY", "PRIVACY"
+# resp.qop_configuration.data_transfer_protection #=> String, one of "DISABLED", "AUTHENTICATION", "INTEGRITY", "PRIVACY"
+# resp.authentication_type #=> String, one of "SIMPLE", "KERBEROS"
+# resp.simple_user #=> String
+# resp.kerberos_principal #=> String
+# resp.agent_arns #=> Array
+# resp.agent_arns[0] #=> String
+# resp.creation_time #=> Time
+#
+# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeLocationHdfs AWS API Documentation
+#
+# @overload describe_location_hdfs(params = {})
+# @param [Hash] params ({})
+def describe_location_hdfs(params = {}, options = {})
+req = build_request(:describe_location_hdfs, params)
+req.send_request(options)
+end
+
 # Returns metadata, such as the path information, about an NFS location.
 #
 # @option params [required, String] :location_arn
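A brief hedged sketch of reading the new response fields back; the location ARN variable is carried over from the earlier sketch, and whether `qop_configuration` is populated depends on how the location was created:

  info = client.describe_location_hdfs(location_arn: hdfs_location_arn)
  puts info.location_uri                        # URI of the HDFS location
  puts info.authentication_type                 # "SIMPLE" or "KERBEROS"
  puts info.qop_configuration&.rpc_protection   # guarded in case QOP was never configured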
@@ -1969,13 +2169,13 @@ module Aws::DataSync
 # A list of filter rules that determines which files to include when
 # running a task. The pattern should contain a single filter string that
 # consists of the patterns to include. The patterns are delimited by
-# "\|" (that is, a pipe)
+# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
 #
 # @option params [Array<Types::FilterRule>] :excludes
 # A list of filter rules that determines which files to exclude from a
-# task. The list
-#
-#
+# task. The list contains a single filter string that consists of the
+# patterns to exclude. The patterns are delimited by "\|" (that is, a
+# pipe), for example, `"/folder1|/folder2"`.
 #
 # @return [Types::StartTaskExecutionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
 #
@@ -2111,6 +2311,101 @@ module Aws::DataSync
 req.send_request(options)
 end
 
+# Updates some parameters of a previously created location for a Hadoop
+# Distributed File System cluster.
+#
+# @option params [required, String] :location_arn
+# The Amazon Resource Name (ARN) of the source HDFS cluster location.
+#
+# @option params [String] :subdirectory
+# A subdirectory in the HDFS cluster. This subdirectory is used to read
+# data from or write data to the HDFS cluster.
+#
+# @option params [Array<Types::HdfsNameNode>] :name_nodes
+# The NameNode that manages the HDFS namespace. The NameNode performs
+# operations such as opening, closing, and renaming files and
+# directories. The NameNode contains the information to map blocks of
+# data to the DataNodes. You can use only one NameNode.
+#
+# @option params [Integer] :block_size
+# The size of the data blocks to write into the HDFS cluster.
+#
+# @option params [Integer] :replication_factor
+# The number of DataNodes to replicate the data to when writing to the
+# HDFS cluster.
+#
+# @option params [String] :kms_key_provider_uri
+# The URI of the HDFS cluster's Key Management Server (KMS).
+#
+# @option params [Types::QopConfiguration] :qop_configuration
+# The Quality of Protection (QOP) configuration specifies the Remote
+# Procedure Call (RPC) and data transfer privacy settings configured on
+# the Hadoop Distributed File System (HDFS) cluster.
+#
+# @option params [String] :authentication_type
+# The type of authentication used to determine the identity of the user.
+#
+# @option params [String] :simple_user
+# The user name used to identify the client on the host operating
+# system.
+#
+# @option params [String] :kerberos_principal
+# The Kerberos principal with access to the files and folders on the
+# HDFS cluster.
+#
+# @option params [String, StringIO, File] :kerberos_keytab
+# The Kerberos key table (keytab) that contains mappings between the
+# defined Kerberos principal and the encrypted keys. You can load the
+# keytab from a file by providing the file's address. If you use the
+# AWS CLI, it performs base64 encoding for you. Otherwise, provide the
+# base64-encoded text.
+#
+# @option params [String, StringIO, File] :kerberos_krb_5_conf
+# The `krb5.conf` file that contains the Kerberos configuration
+# information. You can load the `krb5.conf` file by providing the
+# file's address. If you're using the AWS CLI, it performs the base64
+# encoding for you. Otherwise, provide the base64-encoded text.
+#
+# @option params [Array<String>] :agent_arns
+# The ARNs of the agents that are used to connect to the HDFS cluster.
+#
+# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
+#
+# @example Request syntax with placeholder values
+#
+# resp = client.update_location_hdfs({
+# location_arn: "LocationArn", # required
+# subdirectory: "HdfsSubdirectory",
+# name_nodes: [
+# {
+# hostname: "HdfsServerHostname", # required
+# port: 1, # required
+# },
+# ],
+# block_size: 1,
+# replication_factor: 1,
+# kms_key_provider_uri: "KmsKeyProviderUri",
+# qop_configuration: {
+# rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+# data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+# },
+# authentication_type: "SIMPLE", # accepts SIMPLE, KERBEROS
+# simple_user: "HdfsUser",
+# kerberos_principal: "KerberosPrincipal",
+# kerberos_keytab: "data",
+# kerberos_krb_5_conf: "data",
+# agent_arns: ["AgentArn"],
+# })
+#
+# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateLocationHdfs AWS API Documentation
+#
+# @overload update_location_hdfs(params = {})
+# @param [Hash] params ({})
+def update_location_hdfs(params = {}, options = {})
+req = build_request(:update_location_hdfs, params)
+req.send_request(options)
+end
+
 # Updates some of the parameters of a previously created location for
 # Network File System (NFS) access. For information about creating an
 # NFS location, see [Creating a location for NFS][1].
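And a matching hedged sketch for the update call above, for example swapping in a replacement agent; the ARN is a placeholder and the call returns an empty structure on success:

  client.update_location_hdfs(
    location_arn: hdfs_location_arn,
    agent_arns: ['arn:aws:datasync:us-west-2:111122223333:agent/agent-0fedcba9876543210']
  )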
@@ -2360,7 +2655,7 @@ module Aws::DataSync
 # A list of filter rules that determines which files to exclude from a
 # task. The list should contain a single filter string that consists of
 # the patterns to exclude. The patterns are delimited by "\|" (that
-# is, a pipe), for example
+# is, a pipe), for example, `"/folder1|/folder2"`.
 #
 # @option params [Types::TaskSchedule] :schedule
 # Specifies a schedule used to periodically transfer files from a source
@@ -2378,14 +2673,14 @@ module Aws::DataSync
 # The name of the task to update.
 #
 # @option params [String] :cloud_watch_log_group_arn
-# The Amazon Resource Name (ARN) of the resource name of the
-#
+# The Amazon Resource Name (ARN) of the resource name of the Amazon
+# CloudWatch log group.
 #
 # @option params [Array<Types::FilterRule>] :includes
 # A list of filter rules that determines which files to include when
-# running a task. The pattern
+# running a task. The pattern contains a single filter string that
 # consists of the patterns to include. The patterns are delimited by
-# "\|" (that is, a pipe)
+# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
 #
 # @return [Struct] Returns an empty {Seahorse::Client::Response response}.
 #
@@ -2518,7 +2813,7 @@ module Aws::DataSync
 params: params,
 config: config)
 context[:gem_name] = 'aws-sdk-datasync'
-context[:gem_version] = '1.35.0'
+context[:gem_version] = '1.39.0'
 Seahorse::Client::Request.new(handlers, context)
 end
 
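For consumers of the gem, picking up the HDFS operations above only requires moving to this release line, for example in a Gemfile (a sketch, not part of the package):

  # Gemfile
  gem 'aws-sdk-datasync', '~> 1.39'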
data/lib/aws-sdk-datasync/client_api.rb
CHANGED
@@ -29,6 +29,8 @@ module Aws::DataSync
 CreateLocationEfsResponse = Shapes::StructureShape.new(name: 'CreateLocationEfsResponse')
 CreateLocationFsxWindowsRequest = Shapes::StructureShape.new(name: 'CreateLocationFsxWindowsRequest')
 CreateLocationFsxWindowsResponse = Shapes::StructureShape.new(name: 'CreateLocationFsxWindowsResponse')
+CreateLocationHdfsRequest = Shapes::StructureShape.new(name: 'CreateLocationHdfsRequest')
+CreateLocationHdfsResponse = Shapes::StructureShape.new(name: 'CreateLocationHdfsResponse')
 CreateLocationNfsRequest = Shapes::StructureShape.new(name: 'CreateLocationNfsRequest')
 CreateLocationNfsResponse = Shapes::StructureShape.new(name: 'CreateLocationNfsResponse')
 CreateLocationObjectStorageRequest = Shapes::StructureShape.new(name: 'CreateLocationObjectStorageRequest')
@@ -51,6 +53,8 @@ module Aws::DataSync
 DescribeLocationEfsResponse = Shapes::StructureShape.new(name: 'DescribeLocationEfsResponse')
 DescribeLocationFsxWindowsRequest = Shapes::StructureShape.new(name: 'DescribeLocationFsxWindowsRequest')
 DescribeLocationFsxWindowsResponse = Shapes::StructureShape.new(name: 'DescribeLocationFsxWindowsResponse')
+DescribeLocationHdfsRequest = Shapes::StructureShape.new(name: 'DescribeLocationHdfsRequest')
+DescribeLocationHdfsResponse = Shapes::StructureShape.new(name: 'DescribeLocationHdfsResponse')
 DescribeLocationNfsRequest = Shapes::StructureShape.new(name: 'DescribeLocationNfsRequest')
 DescribeLocationNfsResponse = Shapes::StructureShape.new(name: 'DescribeLocationNfsResponse')
 DescribeLocationObjectStorageRequest = Shapes::StructureShape.new(name: 'DescribeLocationObjectStorageRequest')
@@ -82,10 +86,25 @@ module Aws::DataSync
 FsxFilesystemArn = Shapes::StringShape.new(name: 'FsxFilesystemArn')
 FsxWindowsSubdirectory = Shapes::StringShape.new(name: 'FsxWindowsSubdirectory')
 Gid = Shapes::StringShape.new(name: 'Gid')
+HdfsAuthenticationType = Shapes::StringShape.new(name: 'HdfsAuthenticationType')
+HdfsBlockSize = Shapes::IntegerShape.new(name: 'HdfsBlockSize')
+HdfsDataTransferProtection = Shapes::StringShape.new(name: 'HdfsDataTransferProtection')
+HdfsNameNode = Shapes::StructureShape.new(name: 'HdfsNameNode')
+HdfsNameNodeList = Shapes::ListShape.new(name: 'HdfsNameNodeList')
+HdfsReplicationFactor = Shapes::IntegerShape.new(name: 'HdfsReplicationFactor')
+HdfsRpcProtection = Shapes::StringShape.new(name: 'HdfsRpcProtection')
+HdfsServerHostname = Shapes::StringShape.new(name: 'HdfsServerHostname')
+HdfsServerPort = Shapes::IntegerShape.new(name: 'HdfsServerPort')
+HdfsSubdirectory = Shapes::StringShape.new(name: 'HdfsSubdirectory')
+HdfsUser = Shapes::StringShape.new(name: 'HdfsUser')
 IamRoleArn = Shapes::StringShape.new(name: 'IamRoleArn')
 InputTagList = Shapes::ListShape.new(name: 'InputTagList')
 InternalException = Shapes::StructureShape.new(name: 'InternalException')
 InvalidRequestException = Shapes::StructureShape.new(name: 'InvalidRequestException')
+KerberosKeytabFile = Shapes::BlobShape.new(name: 'KerberosKeytabFile')
+KerberosKrb5ConfFile = Shapes::BlobShape.new(name: 'KerberosKrb5ConfFile')
+KerberosPrincipal = Shapes::StringShape.new(name: 'KerberosPrincipal')
+KmsKeyProviderUri = Shapes::StringShape.new(name: 'KmsKeyProviderUri')
 ListAgentsRequest = Shapes::StructureShape.new(name: 'ListAgentsRequest')
 ListAgentsResponse = Shapes::StructureShape.new(name: 'ListAgentsResponse')
 ListLocationsRequest = Shapes::StructureShape.new(name: 'ListLocationsRequest')
@@ -129,6 +148,7 @@ module Aws::DataSync
 PreserveDeletedFiles = Shapes::StringShape.new(name: 'PreserveDeletedFiles')
 PreserveDevices = Shapes::StringShape.new(name: 'PreserveDevices')
 PrivateLinkConfig = Shapes::StructureShape.new(name: 'PrivateLinkConfig')
+QopConfiguration = Shapes::StructureShape.new(name: 'QopConfiguration')
 S3BucketArn = Shapes::StringShape.new(name: 'S3BucketArn')
 S3Config = Shapes::StructureShape.new(name: 'S3Config')
 S3StorageClass = Shapes::StringShape.new(name: 'S3StorageClass')
@@ -173,6 +193,8 @@ module Aws::DataSync
 UntagResourceResponse = Shapes::StructureShape.new(name: 'UntagResourceResponse')
 UpdateAgentRequest = Shapes::StructureShape.new(name: 'UpdateAgentRequest')
 UpdateAgentResponse = Shapes::StructureShape.new(name: 'UpdateAgentResponse')
+UpdateLocationHdfsRequest = Shapes::StructureShape.new(name: 'UpdateLocationHdfsRequest')
+UpdateLocationHdfsResponse = Shapes::StructureShape.new(name: 'UpdateLocationHdfsResponse')
 UpdateLocationNfsRequest = Shapes::StructureShape.new(name: 'UpdateLocationNfsRequest')
 UpdateLocationNfsResponse = Shapes::StructureShape.new(name: 'UpdateLocationNfsResponse')
 UpdateLocationObjectStorageRequest = Shapes::StructureShape.new(name: 'UpdateLocationObjectStorageRequest')
@@ -234,6 +256,24 @@ module Aws::DataSync
 CreateLocationFsxWindowsResponse.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, location_name: "LocationArn"))
 CreateLocationFsxWindowsResponse.struct_class = Types::CreateLocationFsxWindowsResponse
 
+CreateLocationHdfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: HdfsSubdirectory, location_name: "Subdirectory"))
+CreateLocationHdfsRequest.add_member(:name_nodes, Shapes::ShapeRef.new(shape: HdfsNameNodeList, required: true, location_name: "NameNodes"))
+CreateLocationHdfsRequest.add_member(:block_size, Shapes::ShapeRef.new(shape: HdfsBlockSize, location_name: "BlockSize"))
+CreateLocationHdfsRequest.add_member(:replication_factor, Shapes::ShapeRef.new(shape: HdfsReplicationFactor, location_name: "ReplicationFactor"))
+CreateLocationHdfsRequest.add_member(:kms_key_provider_uri, Shapes::ShapeRef.new(shape: KmsKeyProviderUri, location_name: "KmsKeyProviderUri"))
+CreateLocationHdfsRequest.add_member(:qop_configuration, Shapes::ShapeRef.new(shape: QopConfiguration, location_name: "QopConfiguration"))
+CreateLocationHdfsRequest.add_member(:authentication_type, Shapes::ShapeRef.new(shape: HdfsAuthenticationType, required: true, location_name: "AuthenticationType"))
+CreateLocationHdfsRequest.add_member(:simple_user, Shapes::ShapeRef.new(shape: HdfsUser, location_name: "SimpleUser"))
+CreateLocationHdfsRequest.add_member(:kerberos_principal, Shapes::ShapeRef.new(shape: KerberosPrincipal, location_name: "KerberosPrincipal"))
+CreateLocationHdfsRequest.add_member(:kerberos_keytab, Shapes::ShapeRef.new(shape: KerberosKeytabFile, location_name: "KerberosKeytab"))
+CreateLocationHdfsRequest.add_member(:kerberos_krb_5_conf, Shapes::ShapeRef.new(shape: KerberosKrb5ConfFile, location_name: "KerberosKrb5Conf"))
+CreateLocationHdfsRequest.add_member(:agent_arns, Shapes::ShapeRef.new(shape: AgentArnList, required: true, location_name: "AgentArns"))
+CreateLocationHdfsRequest.add_member(:tags, Shapes::ShapeRef.new(shape: InputTagList, location_name: "Tags"))
+CreateLocationHdfsRequest.struct_class = Types::CreateLocationHdfsRequest
+
+CreateLocationHdfsResponse.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, location_name: "LocationArn"))
+CreateLocationHdfsResponse.struct_class = Types::CreateLocationHdfsResponse
+
 CreateLocationNfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: NfsSubdirectory, required: true, location_name: "Subdirectory"))
 CreateLocationNfsRequest.add_member(:server_hostname, Shapes::ShapeRef.new(shape: ServerHostname, required: true, location_name: "ServerHostname"))
 CreateLocationNfsRequest.add_member(:on_prem_config, Shapes::ShapeRef.new(shape: OnPremConfig, required: true, location_name: "OnPremConfig"))
@@ -343,6 +383,23 @@ module Aws::DataSync
 DescribeLocationFsxWindowsResponse.add_member(:domain, Shapes::ShapeRef.new(shape: SmbDomain, location_name: "Domain"))
 DescribeLocationFsxWindowsResponse.struct_class = Types::DescribeLocationFsxWindowsResponse
 
+DescribeLocationHdfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
+DescribeLocationHdfsRequest.struct_class = Types::DescribeLocationHdfsRequest
+
+DescribeLocationHdfsResponse.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, location_name: "LocationArn"))
+DescribeLocationHdfsResponse.add_member(:location_uri, Shapes::ShapeRef.new(shape: LocationUri, location_name: "LocationUri"))
+DescribeLocationHdfsResponse.add_member(:name_nodes, Shapes::ShapeRef.new(shape: HdfsNameNodeList, location_name: "NameNodes"))
+DescribeLocationHdfsResponse.add_member(:block_size, Shapes::ShapeRef.new(shape: HdfsBlockSize, location_name: "BlockSize"))
+DescribeLocationHdfsResponse.add_member(:replication_factor, Shapes::ShapeRef.new(shape: HdfsReplicationFactor, location_name: "ReplicationFactor"))
+DescribeLocationHdfsResponse.add_member(:kms_key_provider_uri, Shapes::ShapeRef.new(shape: KmsKeyProviderUri, location_name: "KmsKeyProviderUri"))
+DescribeLocationHdfsResponse.add_member(:qop_configuration, Shapes::ShapeRef.new(shape: QopConfiguration, location_name: "QopConfiguration"))
+DescribeLocationHdfsResponse.add_member(:authentication_type, Shapes::ShapeRef.new(shape: HdfsAuthenticationType, location_name: "AuthenticationType"))
+DescribeLocationHdfsResponse.add_member(:simple_user, Shapes::ShapeRef.new(shape: HdfsUser, location_name: "SimpleUser"))
+DescribeLocationHdfsResponse.add_member(:kerberos_principal, Shapes::ShapeRef.new(shape: KerberosPrincipal, location_name: "KerberosPrincipal"))
+DescribeLocationHdfsResponse.add_member(:agent_arns, Shapes::ShapeRef.new(shape: AgentArnList, location_name: "AgentArns"))
+DescribeLocationHdfsResponse.add_member(:creation_time, Shapes::ShapeRef.new(shape: Time, location_name: "CreationTime"))
+DescribeLocationHdfsResponse.struct_class = Types::DescribeLocationHdfsResponse
+
 DescribeLocationNfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
 DescribeLocationNfsRequest.struct_class = Types::DescribeLocationNfsRequest
 
@@ -442,6 +499,12 @@ module Aws::DataSync
 
 FilterValues.member = Shapes::ShapeRef.new(shape: FilterAttributeValue)
 
+HdfsNameNode.add_member(:hostname, Shapes::ShapeRef.new(shape: HdfsServerHostname, required: true, location_name: "Hostname"))
+HdfsNameNode.add_member(:port, Shapes::ShapeRef.new(shape: HdfsServerPort, required: true, location_name: "Port"))
+HdfsNameNode.struct_class = Types::HdfsNameNode
+
+HdfsNameNodeList.member = Shapes::ShapeRef.new(shape: HdfsNameNode)
+
 InputTagList.member = Shapes::ShapeRef.new(shape: TagListEntry)
 
 InternalException.add_member(:message, Shapes::ShapeRef.new(shape: string, location_name: "message"))
@@ -543,6 +606,10 @@ module Aws::DataSync
 PrivateLinkConfig.add_member(:security_group_arns, Shapes::ShapeRef.new(shape: PLSecurityGroupArnList, location_name: "SecurityGroupArns"))
 PrivateLinkConfig.struct_class = Types::PrivateLinkConfig
 
+QopConfiguration.add_member(:rpc_protection, Shapes::ShapeRef.new(shape: HdfsRpcProtection, location_name: "RpcProtection"))
+QopConfiguration.add_member(:data_transfer_protection, Shapes::ShapeRef.new(shape: HdfsDataTransferProtection, location_name: "DataTransferProtection"))
+QopConfiguration.struct_class = Types::QopConfiguration
+
 S3Config.add_member(:bucket_access_role_arn, Shapes::ShapeRef.new(shape: IamRoleArn, required: true, location_name: "BucketAccessRoleArn"))
 S3Config.struct_class = Types::S3Config
 
@@ -618,6 +685,23 @@ module Aws::DataSync
 
 UpdateAgentResponse.struct_class = Types::UpdateAgentResponse
 
+UpdateLocationHdfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
+UpdateLocationHdfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: HdfsSubdirectory, location_name: "Subdirectory"))
+UpdateLocationHdfsRequest.add_member(:name_nodes, Shapes::ShapeRef.new(shape: HdfsNameNodeList, location_name: "NameNodes"))
+UpdateLocationHdfsRequest.add_member(:block_size, Shapes::ShapeRef.new(shape: HdfsBlockSize, location_name: "BlockSize"))
+UpdateLocationHdfsRequest.add_member(:replication_factor, Shapes::ShapeRef.new(shape: HdfsReplicationFactor, location_name: "ReplicationFactor"))
+UpdateLocationHdfsRequest.add_member(:kms_key_provider_uri, Shapes::ShapeRef.new(shape: KmsKeyProviderUri, location_name: "KmsKeyProviderUri"))
+UpdateLocationHdfsRequest.add_member(:qop_configuration, Shapes::ShapeRef.new(shape: QopConfiguration, location_name: "QopConfiguration"))
+UpdateLocationHdfsRequest.add_member(:authentication_type, Shapes::ShapeRef.new(shape: HdfsAuthenticationType, location_name: "AuthenticationType"))
+UpdateLocationHdfsRequest.add_member(:simple_user, Shapes::ShapeRef.new(shape: HdfsUser, location_name: "SimpleUser"))
+UpdateLocationHdfsRequest.add_member(:kerberos_principal, Shapes::ShapeRef.new(shape: KerberosPrincipal, location_name: "KerberosPrincipal"))
+UpdateLocationHdfsRequest.add_member(:kerberos_keytab, Shapes::ShapeRef.new(shape: KerberosKeytabFile, location_name: "KerberosKeytab"))
+UpdateLocationHdfsRequest.add_member(:kerberos_krb_5_conf, Shapes::ShapeRef.new(shape: KerberosKrb5ConfFile, location_name: "KerberosKrb5Conf"))
+UpdateLocationHdfsRequest.add_member(:agent_arns, Shapes::ShapeRef.new(shape: AgentArnList, location_name: "AgentArns"))
+UpdateLocationHdfsRequest.struct_class = Types::UpdateLocationHdfsRequest
+
+UpdateLocationHdfsResponse.struct_class = Types::UpdateLocationHdfsResponse
+
 UpdateLocationNfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
 UpdateLocationNfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: NfsSubdirectory, location_name: "Subdirectory"))
 UpdateLocationNfsRequest.add_member(:on_prem_config, Shapes::ShapeRef.new(shape: OnPremConfig, location_name: "OnPremConfig"))
@@ -725,6 +809,16 @@ module Aws::DataSync
 o.errors << Shapes::ShapeRef.new(shape: InternalException)
 end)
 
+api.add_operation(:create_location_hdfs, Seahorse::Model::Operation.new.tap do |o|
+o.name = "CreateLocationHdfs"
+o.http_method = "POST"
+o.http_request_uri = "/"
+o.input = Shapes::ShapeRef.new(shape: CreateLocationHdfsRequest)
+o.output = Shapes::ShapeRef.new(shape: CreateLocationHdfsResponse)
+o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
+o.errors << Shapes::ShapeRef.new(shape: InternalException)
+end)
+
 api.add_operation(:create_location_nfs, Seahorse::Model::Operation.new.tap do |o|
 o.name = "CreateLocationNfs"
 o.http_method = "POST"
@@ -835,6 +929,16 @@ module Aws::DataSync
 o.errors << Shapes::ShapeRef.new(shape: InternalException)
 end)
 
+api.add_operation(:describe_location_hdfs, Seahorse::Model::Operation.new.tap do |o|
+o.name = "DescribeLocationHdfs"
+o.http_method = "POST"
+o.http_request_uri = "/"
+o.input = Shapes::ShapeRef.new(shape: DescribeLocationHdfsRequest)
+o.output = Shapes::ShapeRef.new(shape: DescribeLocationHdfsResponse)
+o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
+o.errors << Shapes::ShapeRef.new(shape: InternalException)
+end)
+
 api.add_operation(:describe_location_nfs, Seahorse::Model::Operation.new.tap do |o|
 o.name = "DescribeLocationNfs"
 o.http_method = "POST"
@@ -1015,6 +1119,16 @@ module Aws::DataSync
 o.errors << Shapes::ShapeRef.new(shape: InternalException)
 end)
 
+api.add_operation(:update_location_hdfs, Seahorse::Model::Operation.new.tap do |o|
+o.name = "UpdateLocationHdfs"
+o.http_method = "POST"
+o.http_request_uri = "/"
+o.input = Shapes::ShapeRef.new(shape: UpdateLocationHdfsRequest)
+o.output = Shapes::ShapeRef.new(shape: UpdateLocationHdfsResponse)
+o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
+o.errors << Shapes::ShapeRef.new(shape: InternalException)
+end)
+
 api.add_operation(:update_location_nfs, Seahorse::Model::Operation.new.tap do |o|
 o.name = "UpdateLocationNfs"
 o.http_method = "POST"
@@ -363,6 +363,169 @@ module Aws::DataSync
|
|
363
363
|
include Aws::Structure
|
364
364
|
end
|
365
365
|
|
366
|
+
# @note When making an API call, you may pass CreateLocationHdfsRequest
|
367
|
+
# data as a hash:
|
368
|
+
#
|
369
|
+
# {
|
370
|
+
# subdirectory: "HdfsSubdirectory",
|
371
|
+
# name_nodes: [ # required
|
372
|
+
# {
|
373
|
+
# hostname: "HdfsServerHostname", # required
|
374
|
+
# port: 1, # required
|
375
|
+
# },
|
376
|
+
# ],
|
377
|
+
# block_size: 1,
|
378
|
+
# replication_factor: 1,
|
379
|
+
# kms_key_provider_uri: "KmsKeyProviderUri",
|
380
|
+
# qop_configuration: {
|
381
|
+
# rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
|
382
|
+
# data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
|
383
|
+
# },
|
384
|
+
# authentication_type: "SIMPLE", # required, accepts SIMPLE, KERBEROS
|
385
|
+
# simple_user: "HdfsUser",
|
386
|
+
# kerberos_principal: "KerberosPrincipal",
|
387
|
+
# kerberos_keytab: "data",
|
388
|
+
# kerberos_krb_5_conf: "data",
|
389
|
+
# agent_arns: ["AgentArn"], # required
|
390
|
+
# tags: [
|
391
|
+
# {
|
392
|
+
# key: "TagKey", # required
|
393
|
+
# value: "TagValue",
|
394
|
+
# },
|
395
|
+
# ],
|
396
|
+
# }
|
397
|
+
#
|
398
|
+
# @!attribute [rw] subdirectory
|
399
|
+
# A subdirectory in the HDFS cluster. This subdirectory is used to
|
400
|
+
# read data from or write data to the HDFS cluster. If the
|
401
|
+
# subdirectory isn't specified, it will default to `/`.
|
402
|
+
# @return [String]
|
403
|
+
#
|
404
|
+
# @!attribute [rw] name_nodes
|
405
|
+
# The NameNode that manages the HDFS namespace. The NameNode performs
|
406
|
+
# operations such as opening, closing, and renaming files and
|
407
|
+
# directories. The NameNode contains the information to map blocks of
|
408
|
+
# data to the DataNodes. You can use only one NameNode.
|
409
|
+
# @return [Array<Types::HdfsNameNode>]
|
410
|
+
#
|
411
|
+
# @!attribute [rw] block_size
|
412
|
+
# The size of data blocks to write into the HDFS cluster. The block
|
413
|
+
# size must be a multiple of 512 bytes. The default block size is 128
|
414
|
+
# mebibytes (MiB).
|
415
|
+
# @return [Integer]
|
416
|
+
#
|
417
|
+
# @!attribute [rw] replication_factor
|
418
|
+
# The number of DataNodes to replicate the data to when writing to the
|
419
|
+
# HDFS cluster. By default, data is replicated to three DataNodes.
|
420
|
+
# @return [Integer]
|
421
|
+
#
|
422
|
+
# @!attribute [rw] kms_key_provider_uri
|
423
|
+
# The URI of the HDFS cluster's Key Management Server (KMS).
|
424
|
+
# @return [String]
|
425
|
+
#
|
426
|
+
# @!attribute [rw] qop_configuration
|
427
|
+
# The Quality of Protection (QOP) configuration specifies the Remote
|
428
|
+
# Procedure Call (RPC) and data transfer protection settings
|
429
|
+
# configured on the Hadoop Distributed File System (HDFS) cluster. If
|
430
|
+
# `QopConfiguration` isn't specified, `RpcProtection` and
|
431
|
+
# `DataTransferProtection` default to `PRIVACY`. If you set
|
432
|
+
# `RpcProtection` or `DataTransferProtection`, the other parameter
|
433
|
+
# assumes the same value.
|
434
|
+
# @return [Types::QopConfiguration]
|
435
|
+
#
|
436
|
+
# @!attribute [rw] authentication_type
|
437
|
+
# The type of authentication used to determine the identity of the
|
438
|
+
# user.
|
439
|
+
# @return [String]
|
440
|
+
#
|
441
|
+
# @!attribute [rw] simple_user
|
442
|
+
# The user name used to identify the client on the host operating
|
443
|
+
# system.
|
444
|
+
#
|
445
|
+
# <note markdown="1"> If `SIMPLE` is specified for `AuthenticationType`, this parameter is
|
446
|
+
# required.
|
447
|
+
#
|
448
|
+
# </note>
|
449
|
+
# @return [String]
|
450
|
+
#
|
451
|
+
# @!attribute [rw] kerberos_principal
|
452
|
+
# The Kerberos principal with access to the files and folders on the
|
453
|
+
# HDFS cluster.
|
454
|
+
#
|
455
|
+
# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter
|
456
|
+
# is required.
|
457
|
+
#
|
458
|
+
# </note>
|
459
|
+
# @return [String]
|
460
|
+
#
|
461
|
+
# @!attribute [rw] kerberos_keytab
|
462
|
+
# The Kerberos key table (keytab) that contains mappings between the
|
463
|
+
# defined Kerberos principal and the encrypted keys. You can load the
|
464
|
+
# keytab from a file by providing the file's address. If you're
|
465
|
+
# using the CLI, it performs base64 encoding for you. Otherwise,
|
466
|
+
# provide the base64-encoded text.
|
467
|
+
#
|
468
|
+
# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter
|
469
|
+
# is required.
|
470
|
+
#
|
471
|
+
# </note>
|
472
|
+
# @return [String]
|
473
|
+
#
|
474
|
+
# @!attribute [rw] kerberos_krb_5_conf
|
475
|
+
# The `krb5.conf` file that contains the Kerberos configuration
|
476
|
+
# information. You can load the `krb5.conf` file by providing the
|
477
|
+
# file's address. If you're using the CLI, it performs the base64
|
478
|
+
# encoding for you. Otherwise, provide the base64-encoded text.
|
479
|
+
#
|
480
|
+
# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter
|
481
|
+
# is required.
|
482
|
+
#
|
483
|
+
# </note>
|
484
|
+
# @return [String]
|
485
|
+
#
|
486
|
+
# @!attribute [rw] agent_arns
|
487
|
+
# The Amazon Resource Names (ARNs) of the agents that are used to
|
488
|
+
# connect to the HDFS cluster.
|
489
|
+
# @return [Array<String>]
|
490
|
+
#
|
491
|
+
# @!attribute [rw] tags
|
492
|
+
# The key-value pair that represents the tag that you want to add to
|
493
|
+
# the location. The value can be an empty string. We recommend using
|
494
|
+
# tags to name your resources.
|
495
|
+
# @return [Array<Types::TagListEntry>]
|
496
|
+
#
|
497
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateLocationHdfsRequest AWS API Documentation
|
498
|
+
#
|
499
|
+
class CreateLocationHdfsRequest < Struct.new(
|
500
|
+
:subdirectory,
|
501
|
+
:name_nodes,
|
502
|
+
:block_size,
|
503
|
+
:replication_factor,
|
504
|
+
:kms_key_provider_uri,
|
505
|
+
:qop_configuration,
|
506
|
+
:authentication_type,
|
507
|
+
:simple_user,
|
508
|
+
:kerberos_principal,
|
509
|
+
:kerberos_keytab,
|
510
|
+
:kerberos_krb_5_conf,
|
511
|
+
:agent_arns,
|
512
|
+
:tags)
|
513
|
+
SENSITIVE = []
|
514
|
+
include Aws::Structure
|
515
|
+
end
|
516
|
+
|
517
|
+
# @!attribute [rw] location_arn
|
518
|
+
# The ARN of the source HDFS cluster location that's created.
|
519
|
+
# @return [String]
|
520
|
+
#
|
521
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateLocationHdfsResponse AWS API Documentation
|
522
|
+
#
|
523
|
+
class CreateLocationHdfsResponse < Struct.new(
|
524
|
+
:location_arn)
|
525
|
+
SENSITIVE = []
|
526
|
+
include Aws::Structure
|
527
|
+
end
|
528
|
+
|
366
529
|
# CreateLocationNfsRequest
|
367
530
|
#
|
368
531
|
# @note When making an API call, you may pass CreateLocationNfsRequest
|
@@ -930,9 +1093,9 @@ module Aws::DataSync
|
|
930
1093
|
#
|
931
1094
|
# @!attribute [rw] includes
|
932
1095
|
# A list of filter rules that determines which files to include when
|
933
|
-
# running a task. The pattern
|
934
|
-
#
|
935
|
-
#
|
1096
|
+
# running a task. The pattern contains a single filter string that
|
1097
|
+
# consists of the patterns to include. The patterns are delimited by
|
1098
|
+
# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
|
936
1099
|
# @return [Array<Types::FilterRule>]
|
937
1100
|
#
|
938
1101
|
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateTaskRequest AWS API Documentation
|
@@ -1233,6 +1396,101 @@ module Aws::DataSync
|
|
1233
1396
|
include Aws::Structure
|
1234
1397
|
end
|
1235
1398
|
|
1399
|
+
# @note When making an API call, you may pass DescribeLocationHdfsRequest
|
1400
|
+
# data as a hash:
|
1401
|
+
#
|
1402
|
+
# {
|
1403
|
+
# location_arn: "LocationArn", # required
|
1404
|
+
# }
|
1405
|
+
#
|
1406
|
+
# @!attribute [rw] location_arn
|
1407
|
+
# The Amazon Resource Name (ARN) of the HDFS cluster location to
|
1408
|
+
# describe.
|
1409
|
+
# @return [String]
|
1410
|
+
#
|
1411
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeLocationHdfsRequest AWS API Documentation
|
1412
|
+
#
|
1413
|
+
class DescribeLocationHdfsRequest < Struct.new(
|
1414
|
+
:location_arn)
|
1415
|
+
SENSITIVE = []
|
1416
|
+
include Aws::Structure
|
1417
|
+
end
|
1418
|
+
|
1419
|
+
# @!attribute [rw] location_arn
|
1420
|
+
# The ARN of the HDFS cluster location.
|
1421
|
+
# @return [String]
|
1422
|
+
#
|
1423
|
+
# @!attribute [rw] location_uri
|
1424
|
+
# The URI of the HDFS cluster location.
|
1425
|
+
# @return [String]
|
1426
|
+
#
|
1427
|
+
# @!attribute [rw] name_nodes
|
1428
|
+
# The NameNode that manage the HDFS namespace.
|
1429
|
+
# @return [Array<Types::HdfsNameNode>]
|
1430
|
+
#
|
1431
|
+
# @!attribute [rw] block_size
|
1432
|
+
# The size of the data blocks to write into the HDFS cluster.
|
1433
|
+
# @return [Integer]
|
1434
|
+
#
|
1435
|
+
# @!attribute [rw] replication_factor
|
1436
|
+
# The number of DataNodes to replicate the data to when writing to the
|
1437
|
+
# HDFS cluster.
|
1438
|
+
# @return [Integer]
|
1439
|
+
#
|
1440
|
+
# @!attribute [rw] kms_key_provider_uri
|
1441
|
+
# The URI of the HDFS cluster's Key Management Server (KMS).
|
1442
|
+
# @return [String]
|
1443
|
+
#
|
1444
|
+
# @!attribute [rw] qop_configuration
|
1445
|
+
# The Quality of Protection (QOP) configuration specifies the Remote
|
1446
|
+
# Procedure Call (RPC) and data transfer protection settings
|
1447
|
+
# configured on the Hadoop Distributed File System (HDFS) cluster.
|
1448
|
+
# @return [Types::QopConfiguration]
|
1449
|
+
#
|
1450
|
+
# @!attribute [rw] authentication_type
|
1451
|
+
# The type of authentication used to determine the identity of the
|
1452
|
+
# user.
|
1453
|
+
# @return [String]
|
1454
|
+
#
|
1455
|
+
# @!attribute [rw] simple_user
|
1456
|
+
# The user name used to identify the client on the host operating
|
1457
|
+
# system. This parameter is used if the `AuthenticationType` is
|
1458
|
+
# defined as `SIMPLE`.
|
1459
|
+
# @return [String]
|
1460
|
+
#
|
1461
|
+
# @!attribute [rw] kerberos_principal
|
1462
|
+
# The Kerberos principal with access to the files and folders on the
|
1463
|
+
# HDFS cluster. This parameter is used if the `AuthenticationType` is
|
1464
|
+
# defined as `KERBEROS`.
|
1465
|
+
# @return [String]
|
1466
|
+
#
|
1467
|
+
# @!attribute [rw] agent_arns
|
1468
|
+
# The ARNs of the agents that are used to connect to the HDFS cluster.
|
1469
|
+
# @return [Array<String>]
|
1470
|
+
#
|
1471
|
+
# @!attribute [rw] creation_time
|
1472
|
+
# The time that the HDFS location was created.
|
1473
|
+
# @return [Time]
|
1474
|
+
#
|
1475
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeLocationHdfsResponse AWS API Documentation
|
1476
|
+
#
|
1477
|
+
class DescribeLocationHdfsResponse < Struct.new(
|
1478
|
+
:location_arn,
|
1479
|
+
:location_uri,
|
1480
|
+
:name_nodes,
|
1481
|
+
:block_size,
|
1482
|
+
:replication_factor,
|
1483
|
+
:kms_key_provider_uri,
|
1484
|
+
:qop_configuration,
|
1485
|
+
:authentication_type,
|
1486
|
+
:simple_user,
|
1487
|
+
:kerberos_principal,
|
1488
|
+
:agent_arns,
|
1489
|
+
:creation_time)
|
1490
|
+
SENSITIVE = []
|
1491
|
+
include Aws::Structure
|
1492
|
+
end
|
1493
|
+
|
1236
1494
|
# DescribeLocationNfsRequest
|
1237
1495
|
#
|
1238
1496
|
# @note When making an API call, you may pass DescribeLocationNfsRequest
|
@@ -1719,13 +1977,13 @@ module Aws::DataSync
|
|
1719
1977
|
# @return [String]
|
1720
1978
|
#
|
1721
1979
|
# @!attribute [rw] source_network_interface_arns
|
1722
|
-
# The Amazon Resource
|
1723
|
-
#
|
1980
|
+
# The Amazon Resource Names (ARNs) of the source elastic network
|
1981
|
+
# interfaces (ENIs) that were created for your subnet.
|
1724
1982
|
# @return [Array<String>]
|
1725
1983
|
#
|
1726
1984
|
# @!attribute [rw] destination_network_interface_arns
|
1727
|
-
# The Amazon Resource
|
1728
|
-
#
|
1985
|
+
# The Amazon Resource Names (ARNs) of the destination elastic network
|
1986
|
+
# interfaces (ENIs) that were created for your subnet.
|
1729
1987
|
# @return [Array<String>]
|
1730
1988
|
#
|
1731
1989
|
# @!attribute [rw] options
|
@@ -1748,7 +2006,7 @@ module Aws::DataSync
|
|
1748
2006
|
# A list of filter rules that determines which files to exclude from a
|
1749
2007
|
# task. The list should contain a single filter string that consists
|
1750
2008
|
# of the patterns to exclude. The patterns are delimited by "\|"
|
1751
|
-
# (that is, a pipe), for example
|
2009
|
+
# (that is, a pipe), for example, `"/folder1|/folder2"`.
|
1752
2010
|
# @return [Array<Types::FilterRule>]
|
1753
2011
|
#
|
1754
2012
|
# @!attribute [rw] schedule
|
@@ -1773,9 +2031,9 @@ module Aws::DataSync
|
|
1773
2031
|
#
|
1774
2032
|
# @!attribute [rw] includes
|
1775
2033
|
# A list of filter rules that determines which files to include when
|
1776
|
-
# running a task. The pattern
|
1777
|
-
#
|
1778
|
-
#
|
2034
|
+
# running a task. The pattern contains a single filter string that
|
2035
|
+
# consists of the patterns to include. The patterns are delimited by
|
2036
|
+
# "\|" (that is, a pipe), for example, `"/folder1|/folder2`".
|
1779
2037
|
# @return [Array<Types::FilterRule>]
|
1780
2038
|
#
|
1781
2039
|
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeTaskResponse AWS API Documentation
|
@@ -1865,6 +2123,40 @@ module Aws::DataSync
|
|
1865
2123
|
include Aws::Structure
|
1866
2124
|
end
|
1867
2125
|
|
2126
|
+
# The NameNode of the Hadoop Distributed File System (HDFS). The
|
2127
|
+
# NameNode manages the file system's namespace. The NameNode performs
|
2128
|
+
# operations such as opening, closing, and renaming files and
|
2129
|
+
# directories. The NameNode contains the information to map blocks of
|
2130
|
+
# data to the DataNodes.
|
2131
|
+
#
|
2132
|
+
# @note When making an API call, you may pass HdfsNameNode
|
2133
|
+
# data as a hash:
|
2134
|
+
#
|
2135
|
+
# {
|
2136
|
+
# hostname: "HdfsServerHostname", # required
|
2137
|
+
# port: 1, # required
|
2138
|
+
# }
|
2139
|
+
#
|
2140
|
+
# @!attribute [rw] hostname
|
2141
|
+
# The hostname of the NameNode in the HDFS cluster. This value is the
|
2142
|
+
# IP address or Domain Name Service (DNS) name of the NameNode. An
|
2143
|
+
# agent that's installed on-premises uses this hostname to
|
2144
|
+
# communicate with the NameNode in the network.
|
2145
|
+
# @return [String]
|
2146
|
+
#
|
2147
|
+
# @!attribute [rw] port
|
2148
|
+
# The port that the NameNode uses to listen to client requests.
|
2149
|
+
# @return [Integer]
|
2150
|
+
#
|
2151
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/HdfsNameNode AWS API Documentation
|
2152
|
+
#
|
2153
|
+
class HdfsNameNode < Struct.new(
|
2154
|
+
:hostname,
|
2155
|
+
:port)
|
2156
|
+
SENSITIVE = []
|
2157
|
+
include Aws::Structure
|
2158
|
+
end
|
2159
|
+
|
1868
2160
|
# This exception is thrown when an error occurs in the DataSync service.
|
1869
2161
|
#
|
1870
2162
|
# @!attribute [rw] message
|
@@ -2304,7 +2596,7 @@ module Aws::DataSync
|
|
2304
2596
|
#
|
2305
2597
|
# * <b> <a href="https://tools.ietf.org/html/rfc3530">NFSv4.0</a> </b>
|
2306
2598
|
# - stateful, firewall-friendly protocol version that supports
|
2307
|
-
# delegations and pseudo
|
2599
|
+
# delegations and pseudo file systems.
|
2308
2600
|
#
|
2309
2601
|
# * <b> <a href="https://tools.ietf.org/html/rfc5661">NFSv4.1</a> </b>
|
2310
2602
|
# - stateful protocol version that supports sessions, directory
|
@@ -2725,6 +3017,39 @@ module Aws::DataSync
|
|
2725
3017
|
include Aws::Structure
|
2726
3018
|
end
|
2727
3019
|
|
3020
|
+
# The Quality of Protection (QOP) configuration specifies the Remote
|
3021
|
+
# Procedure Call (RPC) and data transfer privacy settings configured on
|
3022
|
+
# the Hadoop Distributed File System (HDFS) cluster.
|
3023
|
+
#
|
3024
|
+
# @note When making an API call, you may pass QopConfiguration
|
3025
|
+
# data as a hash:
|
3026
|
+
#
|
3027
|
+
# {
|
3028
|
+
# rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
|
3029
|
+
# data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
|
3030
|
+
# }
|
3031
|
+
#
|
3032
|
+
# @!attribute [rw] rpc_protection
|
3033
|
+
# The RPC protection setting configured on the HDFS cluster. This
|
3034
|
+
# setting corresponds to your `hadoop.rpc.protection` setting in your
|
3035
|
+
# `core-site.xml` file on your Hadoop cluster.
|
3036
|
+
# @return [String]
|
3037
|
+
#
|
3038
|
+
# @!attribute [rw] data_transfer_protection
|
3039
|
+
# The data transfer protection setting configured on the HDFS cluster.
|
3040
|
+
# This setting corresponds to your `dfs.data.transfer.protection`
|
3041
|
+
# setting in the `hdfs-site.xml` file on your Hadoop cluster.
|
3042
|
+
# @return [String]
|
3043
|
+
#
|
3044
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/QopConfiguration AWS API Documentation
|
3045
|
+
#
|
3046
|
+
class QopConfiguration < Struct.new(
|
3047
|
+
:rpc_protection,
|
3048
|
+
:data_transfer_protection)
|
3049
|
+
SENSITIVE = []
|
3050
|
+
include Aws::Structure
|
3051
|
+
end
|
3052
|
+
|
2728
3053
|
# The Amazon Resource Name (ARN) of the Identity and Access Management
|
2729
3054
|
# (IAM) role that is used to access an Amazon S3 bucket.
|
2730
3055
|
#
|
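As a quick illustration, `QopConfiguration` mirrors the two Hadoop settings it names; a minimal sketch with placeholder protection levels:

    # Corresponds to hadoop.rpc.protection (core-site.xml) and
    # dfs.data.transfer.protection (hdfs-site.xml); both values are illustrative.
    qop_configuration = {
      rpc_protection: "PRIVACY",
      data_transfer_protection: "PRIVACY"
    }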
@@ -2843,14 +3168,14 @@ module Aws::DataSync
|
|
2843
3168
|
# A list of filter rules that determines which files to include when
|
2844
3169
|
# running a task. The pattern should contain a single filter string
|
2845
3170
|
# that consists of the patterns to include. The patterns are delimited
|
2846
|
-
# by "\|" (that is, a pipe)
|
3171
|
+
# by "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
|
2847
3172
|
# @return [Array<Types::FilterRule>]
|
2848
3173
|
#
|
2849
3174
|
# @!attribute [rw] excludes
|
2850
3175
|
# A list of filter rules that determines which files to exclude from a
|
2851
|
-
# task. The list
|
2852
|
-
#
|
2853
|
-
#
|
3176
|
+
# task. The list contains a single filter string that consists of the
|
3177
|
+
# patterns to exclude. The patterns are delimited by "\|" (that is,
|
3178
|
+
# a pipe), for example, `"/folder1|/folder2"`.
|
2854
3179
|
# @return [Array<Types::FilterRule>]
|
2855
3180
|
#
|
2856
3181
|
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/StartTaskExecutionRequest AWS API Documentation
|
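To make the pipe-delimited filter syntax above concrete, here is a hedged sketch of starting a task execution with include and exclude patterns. The task ARN and folder names are placeholders, the client is assumed to have valid credentials, and the `SIMPLE_PATTERN` filter type is the usual `FilterRule` value rather than something shown in this hunk:

    require 'aws-sdk-datasync'

    datasync = Aws::DataSync::Client.new(region: 'us-east-1')

    # Each filter list carries a single filter string; multiple patterns
    # inside that string are separated by "|".
    datasync.start_task_execution(
      task_arn: 'arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0',
      includes: [{ filter_type: 'SIMPLE_PATTERN', value: '/folder1|/folder2' }],
      excludes: [{ filter_type: 'SIMPLE_PATTERN', value: '/folder1/tmp' }]
    )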
@@ -2879,9 +3204,9 @@ module Aws::DataSync
|
|
2879
3204
|
include Aws::Structure
|
2880
3205
|
end
|
2881
3206
|
|
2882
|
-
# Represents a single entry in a list of
|
2883
|
-
# `TagListEntry` returns an array that contains a list of tasks
|
2884
|
-
# [ListTagsForResource][1] operation is called.
|
3207
|
+
# Represents a single entry in a list of Amazon Web Services resource
|
3208
|
+
# tags. `TagListEntry` returns an array that contains a list of tasks
|
3209
|
+
# when the [ListTagsForResource][1] operation is called.
|
2885
3210
|
#
|
2886
3211
|
#
|
2887
3212
|
#
|
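The corrected `TagListEntry` wording above describes what `ListTagsForResource` returns; a short sketch of reading those entries (the resource ARN is a placeholder):

    require 'aws-sdk-datasync'

    datasync = Aws::DataSync::Client.new(region: 'us-east-1')

    # resp.tags is an array of TagListEntry structs with #key and #value.
    resp = datasync.list_tags_for_resource(
      resource_arn: 'arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0'
    )
    resp.tags.each { |tag| puts "#{tag.key}=#{tag.value}" }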
@@ -3204,6 +3529,126 @@ module Aws::DataSync
|
|
3204
3529
|
#
|
3205
3530
|
class UpdateAgentResponse < Aws::EmptyStructure; end
|
3206
3531
|
|
3532
|
+
# @note When making an API call, you may pass UpdateLocationHdfsRequest
|
3533
|
+
# data as a hash:
|
3534
|
+
#
|
3535
|
+
# {
|
3536
|
+
# location_arn: "LocationArn", # required
|
3537
|
+
# subdirectory: "HdfsSubdirectory",
|
3538
|
+
# name_nodes: [
|
3539
|
+
# {
|
3540
|
+
# hostname: "HdfsServerHostname", # required
|
3541
|
+
# port: 1, # required
|
3542
|
+
# },
|
3543
|
+
# ],
|
3544
|
+
# block_size: 1,
|
3545
|
+
# replication_factor: 1,
|
3546
|
+
# kms_key_provider_uri: "KmsKeyProviderUri",
|
3547
|
+
# qop_configuration: {
|
3548
|
+
# rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
|
3549
|
+
# data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
|
3550
|
+
# },
|
3551
|
+
# authentication_type: "SIMPLE", # accepts SIMPLE, KERBEROS
|
3552
|
+
# simple_user: "HdfsUser",
|
3553
|
+
# kerberos_principal: "KerberosPrincipal",
|
3554
|
+
# kerberos_keytab: "data",
|
3555
|
+
# kerberos_krb_5_conf: "data",
|
3556
|
+
# agent_arns: ["AgentArn"],
|
3557
|
+
# }
|
3558
|
+
#
|
3559
|
+
# @!attribute [rw] location_arn
|
3560
|
+
# The Amazon Resource Name (ARN) of the source HDFS cluster location.
|
3561
|
+
# @return [String]
|
3562
|
+
#
|
3563
|
+
# @!attribute [rw] subdirectory
|
3564
|
+
# A subdirectory in the HDFS cluster. This subdirectory is used to
|
3565
|
+
# read data from or write data to the HDFS cluster.
|
3566
|
+
# @return [String]
|
3567
|
+
#
|
3568
|
+
# @!attribute [rw] name_nodes
|
3569
|
+
# The NameNode that manages the HDFS namespace. The NameNode performs
|
3570
|
+
# operations such as opening, closing, and renaming files and
|
3571
|
+
# directories. The NameNode contains the information to map blocks of
|
3572
|
+
# data to the DataNodes. You can use only one NameNode.
|
3573
|
+
# @return [Array<Types::HdfsNameNode>]
|
3574
|
+
#
|
3575
|
+
# @!attribute [rw] block_size
|
3576
|
+
# The size of the data blocks to write into the HDFS cluster.
|
3577
|
+
# @return [Integer]
|
3578
|
+
#
|
3579
|
+
# @!attribute [rw] replication_factor
|
3580
|
+
# The number of DataNodes to replicate the data to when writing to the
|
3581
|
+
# HDFS cluster.
|
3582
|
+
# @return [Integer]
|
3583
|
+
#
|
3584
|
+
# @!attribute [rw] kms_key_provider_uri
|
3585
|
+
# The URI of the HDFS cluster's Key Management Server (KMS).
|
3586
|
+
# @return [String]
|
3587
|
+
#
|
3588
|
+
# @!attribute [rw] qop_configuration
|
3589
|
+
# The Quality of Protection (QOP) configuration specifies the Remote
|
3590
|
+
# Procedure Call (RPC) and data transfer privacy settings configured
|
3591
|
+
# on the Hadoop Distributed File System (HDFS) cluster.
|
3592
|
+
# @return [Types::QopConfiguration]
|
3593
|
+
#
|
3594
|
+
# @!attribute [rw] authentication_type
|
3595
|
+
# The type of authentication used to determine the identity of the
|
3596
|
+
# user.
|
3597
|
+
# @return [String]
|
3598
|
+
#
|
3599
|
+
# @!attribute [rw] simple_user
|
3600
|
+
# The user name used to identify the client on the host operating
|
3601
|
+
# system.
|
3602
|
+
# @return [String]
|
3603
|
+
#
|
3604
|
+
# @!attribute [rw] kerberos_principal
|
3605
|
+
# The Kerberos principal with access to the files and folders on the
|
3606
|
+
# HDFS cluster.
|
3607
|
+
# @return [String]
|
3608
|
+
#
|
3609
|
+
# @!attribute [rw] kerberos_keytab
|
3610
|
+
# The Kerberos key table (keytab) that contains mappings between the
|
3611
|
+
# defined Kerberos principal and the encrypted keys. You can load the
|
3612
|
+
# keytab from a file by providing the file's address. If you use the
|
3613
|
+
# AWS CLI, it performs base64 encoding for you. Otherwise, provide the
|
3614
|
+
# base64-encoded text.
|
3615
|
+
# @return [String]
|
3616
|
+
#
|
3617
|
+
# @!attribute [rw] kerberos_krb_5_conf
|
3618
|
+
# The `krb5.conf` file that contains the Kerberos configuration
|
3619
|
+
# information. You can load the `krb5.conf` file by providing the
|
3620
|
+
# file's address. If you're using the AWS CLI, it performs the
|
3621
|
+
# base64 encoding for you. Otherwise, provide the base64-encoded text.
|
3622
|
+
# @return [String]
|
3623
|
+
#
|
3624
|
+
# @!attribute [rw] agent_arns
|
3625
|
+
# The ARNs of the agents that are used to connect to the HDFS cluster.
|
3626
|
+
# @return [Array<String>]
|
3627
|
+
#
|
3628
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateLocationHdfsRequest AWS API Documentation
|
3629
|
+
#
|
3630
|
+
class UpdateLocationHdfsRequest < Struct.new(
|
3631
|
+
:location_arn,
|
3632
|
+
:subdirectory,
|
3633
|
+
:name_nodes,
|
3634
|
+
:block_size,
|
3635
|
+
:replication_factor,
|
3636
|
+
:kms_key_provider_uri,
|
3637
|
+
:qop_configuration,
|
3638
|
+
:authentication_type,
|
3639
|
+
:simple_user,
|
3640
|
+
:kerberos_principal,
|
3641
|
+
:kerberos_keytab,
|
3642
|
+
:kerberos_krb_5_conf,
|
3643
|
+
:agent_arns)
|
3644
|
+
SENSITIVE = []
|
3645
|
+
include Aws::Structure
|
3646
|
+
end
|
3647
|
+
|
3648
|
+
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateLocationHdfsResponse AWS API Documentation
|
3649
|
+
#
|
3650
|
+
class UpdateLocationHdfsResponse < Aws::EmptyStructure; end
|
3651
|
+
|
3207
3652
|
# @note When making an API call, you may pass UpdateLocationNfsRequest
|
3208
3653
|
# data as a hash:
|
3209
3654
|
#
|
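The `UpdateLocationHdfsRequest` shape above surfaces on the client as `update_location_hdfs`. A minimal sketch that supplies only a few of the optional fields (the location ARN and NameNode hostname are placeholders; the assumption is that fields left out are simply not updated):

    require 'aws-sdk-datasync'

    datasync = Aws::DataSync::Client.new(region: 'us-east-1')

    # Only location_arn is required; the other fields shown are optional updates.
    datasync.update_location_hdfs(
      location_arn: 'arn:aws:datasync:us-east-1:111122223333:location/loc-0123456789abcdef0',
      subdirectory: '/data/exports',
      name_nodes: [{ hostname: 'namenode.example.com', port: 8020 }],
      replication_factor: 3,
      qop_configuration: {
        rpc_protection: 'PRIVACY',
        data_transfer_protection: 'PRIVACY'
      }
    )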
@@ -3571,7 +4016,7 @@ module Aws::DataSync
|
|
3571
4016
|
# A list of filter rules that determines which files to exclude from a
|
3572
4017
|
# task. The list should contain a single filter string that consists
|
3573
4018
|
# of the patterns to exclude. The patterns are delimited by "\|"
|
3574
|
-
# (that is, a pipe), for example
|
4019
|
+
# (that is, a pipe), for example, `"/folder1|/folder2"`.
|
3575
4020
|
# @return [Array<Types::FilterRule>]
|
3576
4021
|
#
|
3577
4022
|
# @!attribute [rw] schedule
|
@@ -3592,15 +4037,15 @@ module Aws::DataSync
|
|
3592
4037
|
# @return [String]
|
3593
4038
|
#
|
3594
4039
|
# @!attribute [rw] cloud_watch_log_group_arn
|
3595
|
-
# The Amazon Resource Name (ARN) of the resource name of the
|
3596
|
-
# CloudWatch
|
4040
|
+
# The Amazon Resource Name (ARN) of the resource name of the Amazon
|
4041
|
+
# CloudWatch log group.
|
3597
4042
|
# @return [String]
|
3598
4043
|
#
|
3599
4044
|
# @!attribute [rw] includes
|
3600
4045
|
# A list of filter rules that determines which files to include when
|
3601
|
-
# running a task. The pattern
|
3602
|
-
#
|
3603
|
-
#
|
4046
|
+
# running a task. The pattern contains a single filter string that
|
4047
|
+
# consists of the patterns to include. The patterns are delimited by
|
4048
|
+
# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
|
3604
4049
|
# @return [Array<Types::FilterRule>]
|
3605
4050
|
#
|
3606
4051
|
# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateTaskRequest AWS API Documentation
|
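The `UpdateTaskRequest` documentation fixes above (CloudWatch log group ARN, include/exclude filter strings) map onto `update_task`; a hedged sketch with placeholder ARNs and folder names:

    require 'aws-sdk-datasync'

    datasync = Aws::DataSync::Client.new(region: 'us-east-1')

    # ARNs, log group name, and folder patterns are illustrative only.
    datasync.update_task(
      task_arn: 'arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0',
      cloud_watch_log_group_arn: 'arn:aws:logs:us-east-1:111122223333:log-group:/datasync/example',
      includes: [{ filter_type: 'SIMPLE_PATTERN', value: '/folder1|/folder2' }],
      excludes: [{ filter_type: 'SIMPLE_PATTERN', value: '/folder1/tmp' }]
    )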
data/lib/aws-sdk-datasync.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: aws-sdk-datasync
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.
|
4
|
+
version: 1.39.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Amazon Web Services
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2021-
|
11
|
+
date: 2021-11-04 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: aws-sdk-core
|
@@ -19,7 +19,7 @@ dependencies:
|
|
19
19
|
version: '3'
|
20
20
|
- - ">="
|
21
21
|
- !ruby/object:Gem::Version
|
22
|
-
version: 3.
|
22
|
+
version: 3.122.0
|
23
23
|
type: :runtime
|
24
24
|
prerelease: false
|
25
25
|
version_requirements: !ruby/object:Gem::Requirement
|
@@ -29,7 +29,7 @@ dependencies:
|
|
29
29
|
version: '3'
|
30
30
|
- - ">="
|
31
31
|
- !ruby/object:Gem::Version
|
32
|
-
version: 3.
|
32
|
+
version: 3.122.0
|
33
33
|
- !ruby/object:Gem::Dependency
|
34
34
|
name: aws-sigv4
|
35
35
|
requirement: !ruby/object:Gem::Requirement
|
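Since the gemspec now floors aws-sdk-core at 3.122.0, a typical way to pick up this release is a Gemfile constraint along these lines (a sketch, not part of the diff; Bundler resolves the matching aws-sdk-core automatically):

    # Gemfile
    gem 'aws-sdk-datasync', '~> 1.39'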
@@ -76,7 +76,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
76
76
|
requirements:
|
77
77
|
- - ">="
|
78
78
|
- !ruby/object:Gem::Version
|
79
|
-
version: '
|
79
|
+
version: '2.3'
|
80
80
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
81
81
|
requirements:
|
82
82
|
- - ">="
|