aws-sdk-datasync 1.37.0 → 1.38.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 37157815b2abccc0c8f064fb766152e608469c1efb3c84455a09b25f411c44a6
4
- data.tar.gz: 303ea94eb62e43adaee3fa6052f6ebdb4cd0fc5ee2bd1dce536efd8fc8d3401d
3
+ metadata.gz: eb5c25d1228fa0790ae09ac652b7cbfbdcef6aba610d445d1200ac231471d5a5
4
+ data.tar.gz: 55c3d40cfab9c1ddc57fc4e9070ffdb866b47c23dbf71ac146cb095454ee7231
5
5
  SHA512:
6
- metadata.gz: 8bcafa13e1c50f2ffc300cc14c1492488e1b30b3a2c1f82bd1a7ab79d0d5776f7a80251718cc1af230116b7ee63975e86df940e6199ac0dd6de6051584af4826
7
- data.tar.gz: 5cb5d65aeee6fdf84f90075a1659a4e79139fc5f8c422e3a3e4b2e945cc2722befb43b8c9548a579df605e8aeb0876d016a2c78d306a6153a60cb28a1f5de51a
6
+ metadata.gz: 02e1cef44c1da23a59e32f7dac2f47eaa0914948f1835f90904a0c4268e80be00b44cb781fb20a4c92b1df120c0dd6f9b9e0cd96b00227177a48fc288e0c656a
7
+ data.tar.gz: 5102e87aac922d94729fef1cd6191cb9b46795d830a2c58329c4484f945f9401891f99ee6d8ebf31e306e90b2453620898cac87a7a47ca50def5ac094b1169b4
data/CHANGELOG.md CHANGED
@@ -1,6 +1,11 @@
1
1
  Unreleased Changes
2
2
  ------------------
3
3
 
4
+ 1.38.0 (2021-11-03)
5
+ ------------------
6
+
7
+ * Feature - AWS DataSync now supports Hadoop Distributed File System (HDFS) Locations
8
+
4
9
  1.37.0 (2021-10-18)
5
10
  ------------------
6
11
 
data/VERSION CHANGED
@@ -1 +1 @@
1
- 1.37.0
1
+ 1.38.0
@@ -636,6 +636,141 @@ module Aws::DataSync
636
636
  req.send_request(options)
637
637
  end
638
638
 
639
+ # Creates an endpoint for a Hadoop Distributed File System (HDFS).
640
+ #
641
+ # @option params [String] :subdirectory
642
+ # A subdirectory in the HDFS cluster. This subdirectory is used to read
643
+ # data from or write data to the HDFS cluster. If the subdirectory
644
+ # isn't specified, it will default to `/`.
645
+ #
646
+ # @option params [required, Array<Types::HdfsNameNode>] :name_nodes
647
+ # The NameNode that manages the HDFS namespace. The NameNode performs
648
+ # operations such as opening, closing, and renaming files and
649
+ # directories. The NameNode contains the information to map blocks of
650
+ # data to the DataNodes. You can use only one NameNode.
651
+ #
652
+ # @option params [Integer] :block_size
653
+ # The size of data blocks to write into the HDFS cluster. The block size
654
+ # must be a multiple of 512 bytes. The default block size is 128
655
+ # mebibytes (MiB).
656
+ #
657
+ # @option params [Integer] :replication_factor
658
+ # The number of DataNodes to replicate the data to when writing to the
659
+ # HDFS cluster. By default, data is replicated to three DataNodes.
660
+ #
661
+ # @option params [String] :kms_key_provider_uri
662
+ # The URI of the HDFS cluster's Key Management Server (KMS).
663
+ #
664
+ # @option params [Types::QopConfiguration] :qop_configuration
665
+ # The Quality of Protection (QOP) configuration specifies the Remote
666
+ # Procedure Call (RPC) and data transfer protection settings configured
667
+ # on the Hadoop Distributed File System (HDFS) cluster. If
668
+ # `QopConfiguration` isn't specified, `RpcProtection` and
669
+ # `DataTransferProtection` default to `PRIVACY`. If you set
670
+ # `RpcProtection` or `DataTransferProtection`, the other parameter
671
+ # assumes the same value.
672
+ #
673
+ # @option params [required, String] :authentication_type
674
+ # The type of authentication used to determine the identity of the user.
675
+ #
676
+ # @option params [String] :simple_user
677
+ # The user name used to identify the client on the host operating
678
+ # system.
679
+ #
680
+ # <note markdown="1"> If `SIMPLE` is specified for `AuthenticationType`, this parameter is
681
+ # required.
682
+ #
683
+ # </note>
684
+ #
685
+ # @option params [String] :kerberos_principal
686
+ # The Kerberos principal with access to the files and folders on the
687
+ # HDFS cluster.
688
+ #
689
+ # <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
690
+ # required.
691
+ #
692
+ # </note>
693
+ #
694
+ # @option params [String, StringIO, File] :kerberos_keytab
695
+ # The Kerberos key table (keytab) that contains mappings between the
696
+ # defined Kerberos principal and the encrypted keys. You can load the
697
+ # keytab from a file by providing the file's address. If you're using
698
+ # the CLI, it performs base64 encoding for you. Otherwise, provide the
699
+ # base64-encoded text.
700
+ #
701
+ # <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
702
+ # required.
703
+ #
704
+ # </note>
705
+ #
706
+ # @option params [String, StringIO, File] :kerberos_krb_5_conf
707
+ # The `krb5.conf` file that contains the Kerberos configuration
708
+ # information. You can load the `krb5.conf` file by providing the
709
+ # file's address. If you're using the CLI, it performs the base64
710
+ # encoding for you. Otherwise, provide the base64-encoded text.
711
+ #
712
+ # <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
713
+ # required.
714
+ #
715
+ # </note>
716
+ #
717
+ # @option params [required, Array<String>] :agent_arns
718
+ # The Amazon Resource Names (ARNs) of the agents that are used to
719
+ # connect to the HDFS cluster.
720
+ #
721
+ # @option params [Array<Types::TagListEntry>] :tags
722
+ # The key-value pair that represents the tag that you want to add to the
723
+ # location. The value can be an empty string. We recommend using tags to
724
+ # name your resources.
725
+ #
726
+ # @return [Types::CreateLocationHdfsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
727
+ #
728
+ # * {Types::CreateLocationHdfsResponse#location_arn #location_arn} => String
729
+ #
730
+ # @example Request syntax with placeholder values
731
+ #
732
+ # resp = client.create_location_hdfs({
733
+ # subdirectory: "HdfsSubdirectory",
734
+ # name_nodes: [ # required
735
+ # {
736
+ # hostname: "HdfsServerHostname", # required
737
+ # port: 1, # required
738
+ # },
739
+ # ],
740
+ # block_size: 1,
741
+ # replication_factor: 1,
742
+ # kms_key_provider_uri: "KmsKeyProviderUri",
743
+ # qop_configuration: {
744
+ # rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
745
+ # data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
746
+ # },
747
+ # authentication_type: "SIMPLE", # required, accepts SIMPLE, KERBEROS
748
+ # simple_user: "HdfsUser",
749
+ # kerberos_principal: "KerberosPrincipal",
750
+ # kerberos_keytab: "data",
751
+ # kerberos_krb_5_conf: "data",
752
+ # agent_arns: ["AgentArn"], # required
753
+ # tags: [
754
+ # {
755
+ # key: "TagKey", # required
756
+ # value: "TagValue",
757
+ # },
758
+ # ],
759
+ # })
760
+ #
761
+ # @example Response structure
762
+ #
763
+ # resp.location_arn #=> String
764
+ #
765
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateLocationHdfs AWS API Documentation
766
+ #
767
+ # @overload create_location_hdfs(params = {})
768
+ # @param [Hash] params ({})
769
+ def create_location_hdfs(params = {}, options = {})
770
+ req = build_request(:create_location_hdfs, params)
771
+ req.send_request(options)
772
+ end
773
+
639
774
  # Defines a file system on a Network File System (NFS) server that can
640
775
  # be read from or written to.
641
776
  #
@@ -1103,9 +1238,9 @@ module Aws::DataSync
1103
1238
  #
1104
1239
  # @option params [Array<Types::FilterRule>] :includes
1105
1240
  # A list of filter rules that determines which files to include when
1106
- # running a task. The pattern should contain a single filter string that
1241
+ # running a task. The pattern contains a single filter string that
1107
1242
  # consists of the patterns to include. The patterns are delimited by
1108
- # "\|" (that is, a pipe). For example: `"/folder1|/folder2`"
1243
+ # "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
1109
1244
  #
1110
1245
  # @return [Types::CreateTaskResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1111
1246
  #
@@ -1368,6 +1503,62 @@ module Aws::DataSync
1368
1503
  req.send_request(options)
1369
1504
  end
1370
1505
 
1506
+ # Returns metadata, such as the authentication information about the
1507
+ # Hadoop Distributed File System (HDFS) location.
1508
+ #
1509
+ # @option params [required, String] :location_arn
1510
+ # The Amazon Resource Name (ARN) of the HDFS cluster location to
1511
+ # describe.
1512
+ #
1513
+ # @return [Types::DescribeLocationHdfsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1514
+ #
1515
+ # * {Types::DescribeLocationHdfsResponse#location_arn #location_arn} => String
1516
+ # * {Types::DescribeLocationHdfsResponse#location_uri #location_uri} => String
1517
+ # * {Types::DescribeLocationHdfsResponse#name_nodes #name_nodes} => Array&lt;Types::HdfsNameNode&gt;
1518
+ # * {Types::DescribeLocationHdfsResponse#block_size #block_size} => Integer
1519
+ # * {Types::DescribeLocationHdfsResponse#replication_factor #replication_factor} => Integer
1520
+ # * {Types::DescribeLocationHdfsResponse#kms_key_provider_uri #kms_key_provider_uri} => String
1521
+ # * {Types::DescribeLocationHdfsResponse#qop_configuration #qop_configuration} => Types::QopConfiguration
1522
+ # * {Types::DescribeLocationHdfsResponse#authentication_type #authentication_type} => String
1523
+ # * {Types::DescribeLocationHdfsResponse#simple_user #simple_user} => String
1524
+ # * {Types::DescribeLocationHdfsResponse#kerberos_principal #kerberos_principal} => String
1525
+ # * {Types::DescribeLocationHdfsResponse#agent_arns #agent_arns} => Array&lt;String&gt;
1526
+ # * {Types::DescribeLocationHdfsResponse#creation_time #creation_time} => Time
1527
+ #
1528
+ # @example Request syntax with placeholder values
1529
+ #
1530
+ # resp = client.describe_location_hdfs({
1531
+ # location_arn: "LocationArn", # required
1532
+ # })
1533
+ #
1534
+ # @example Response structure
1535
+ #
1536
+ # resp.location_arn #=> String
1537
+ # resp.location_uri #=> String
1538
+ # resp.name_nodes #=> Array
1539
+ # resp.name_nodes[0].hostname #=> String
1540
+ # resp.name_nodes[0].port #=> Integer
1541
+ # resp.block_size #=> Integer
1542
+ # resp.replication_factor #=> Integer
1543
+ # resp.kms_key_provider_uri #=> String
1544
+ # resp.qop_configuration.rpc_protection #=> String, one of "DISABLED", "AUTHENTICATION", "INTEGRITY", "PRIVACY"
1545
+ # resp.qop_configuration.data_transfer_protection #=> String, one of "DISABLED", "AUTHENTICATION", "INTEGRITY", "PRIVACY"
1546
+ # resp.authentication_type #=> String, one of "SIMPLE", "KERBEROS"
1547
+ # resp.simple_user #=> String
1548
+ # resp.kerberos_principal #=> String
1549
+ # resp.agent_arns #=> Array
1550
+ # resp.agent_arns[0] #=> String
1551
+ # resp.creation_time #=> Time
1552
+ #
1553
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeLocationHdfs AWS API Documentation
1554
+ #
1555
+ # @overload describe_location_hdfs(params = {})
1556
+ # @param [Hash] params ({})
1557
+ def describe_location_hdfs(params = {}, options = {})
1558
+ req = build_request(:describe_location_hdfs, params)
1559
+ req.send_request(options)
1560
+ end
1561
+
1371
1562
  # Returns metadata, such as the path information, about an NFS location.
1372
1563
  #
1373
1564
  # @option params [required, String] :location_arn
@@ -1969,13 +2160,13 @@ module Aws::DataSync
1969
2160
  # A list of filter rules that determines which files to include when
1970
2161
  # running a task. The pattern should contain a single filter string that
1971
2162
  # consists of the patterns to include. The patterns are delimited by
1972
- # "\|" (that is, a pipe). For example: `"/folder1|/folder2"`
2163
+ # "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
1973
2164
  #
1974
2165
  # @option params [Array<Types::FilterRule>] :excludes
1975
2166
  # A list of filter rules that determines which files to exclude from a
1976
- # task. The list should contain a single filter string that consists of
1977
- # the patterns to exclude. The patterns are delimited by "\|" (that
1978
- # is, a pipe), for example, `"/folder1|/folder2"`.
2167
+ # task. The list contains a single filter string that consists of the
2168
+ # patterns to exclude. The patterns are delimited by "\|" (that is, a
2169
+ # pipe), for example, `"/folder1|/folder2"`.
1979
2170
  #
1980
2171
  # @return [Types::StartTaskExecutionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
1981
2172
  #
@@ -2111,6 +2302,101 @@ module Aws::DataSync
2111
2302
  req.send_request(options)
2112
2303
  end
2113
2304
 
2305
+ # Updates some parameters of a previously created location for a Hadoop
2306
+ # Distributed File System cluster.
2307
+ #
2308
+ # @option params [required, String] :location_arn
2309
+ # The Amazon Resource Name (ARN) of the source HDFS cluster location.
2310
+ #
2311
+ # @option params [String] :subdirectory
2312
+ # A subdirectory in the HDFS cluster. This subdirectory is used to read
2313
+ # data from or write data to the HDFS cluster.
2314
+ #
2315
+ # @option params [Array<Types::HdfsNameNode>] :name_nodes
2316
+ # The NameNode that manages the HDFS namespace. The NameNode performs
2317
+ # operations such as opening, closing, and renaming files and
2318
+ # directories. The NameNode contains the information to map blocks of
2319
+ # data to the DataNodes. You can use only one NameNode.
2320
+ #
2321
+ # @option params [Integer] :block_size
2322
+ # The size of the data blocks to write into the HDFS cluster.
2323
+ #
2324
+ # @option params [Integer] :replication_factor
2325
+ # The number of DataNodes to replicate the data to when writing to the
2326
+ # HDFS cluster.
2327
+ #
2328
+ # @option params [String] :kms_key_provider_uri
2329
+ # The URI of the HDFS cluster's Key Management Server (KMS).
2330
+ #
2331
+ # @option params [Types::QopConfiguration] :qop_configuration
2332
+ # The Quality of Protection (QOP) configuration specifies the Remote
2333
+ # Procedure Call (RPC) and data transfer privacy settings configured on
2334
+ # the Hadoop Distributed File System (HDFS) cluster.
2335
+ #
2336
+ # @option params [String] :authentication_type
2337
+ # The type of authentication used to determine the identity of the user.
2338
+ #
2339
+ # @option params [String] :simple_user
2340
+ # The user name used to identify the client on the host operating
2341
+ # system.
2342
+ #
2343
+ # @option params [String] :kerberos_principal
2344
+ # The Kerberos principal with access to the files and folders on the
2345
+ # HDFS cluster.
2346
+ #
2347
+ # @option params [String, StringIO, File] :kerberos_keytab
2348
+ # The Kerberos key table (keytab) that contains mappings between the
2349
+ # defined Kerberos principal and the encrypted keys. You can load the
2350
+ # keytab from a file by providing the file's address. If you use the
2351
+ # AWS CLI, it performs base64 encoding for you. Otherwise, provide the
2352
+ # base64-encoded text.
2353
+ #
2354
+ # @option params [String, StringIO, File] :kerberos_krb_5_conf
2355
+ # The `krb5.conf` file that contains the Kerberos configuration
2356
+ # information. You can load the `krb5.conf` file by providing the
2357
+ # file's address. If you're using the AWS CLI, it performs the base64
2358
+ # encoding for you. Otherwise, provide the base64-encoded text.
2359
+ #
2360
+ # @option params [Array<String>] :agent_arns
2361
+ # The ARNs of the agents that are used to connect to the HDFS cluster.
2362
+ #
2363
+ # @return [Struct] Returns an empty {Seahorse::Client::Response response}.
2364
+ #
2365
+ # @example Request syntax with placeholder values
2366
+ #
2367
+ # resp = client.update_location_hdfs({
2368
+ # location_arn: "LocationArn", # required
2369
+ # subdirectory: "HdfsSubdirectory",
2370
+ # name_nodes: [
2371
+ # {
2372
+ # hostname: "HdfsServerHostname", # required
2373
+ # port: 1, # required
2374
+ # },
2375
+ # ],
2376
+ # block_size: 1,
2377
+ # replication_factor: 1,
2378
+ # kms_key_provider_uri: "KmsKeyProviderUri",
2379
+ # qop_configuration: {
2380
+ # rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
2381
+ # data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
2382
+ # },
2383
+ # authentication_type: "SIMPLE", # accepts SIMPLE, KERBEROS
2384
+ # simple_user: "HdfsUser",
2385
+ # kerberos_principal: "KerberosPrincipal",
2386
+ # kerberos_keytab: "data",
2387
+ # kerberos_krb_5_conf: "data",
2388
+ # agent_arns: ["AgentArn"],
2389
+ # })
2390
+ #
2391
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateLocationHdfs AWS API Documentation
2392
+ #
2393
+ # @overload update_location_hdfs(params = {})
2394
+ # @param [Hash] params ({})
2395
+ def update_location_hdfs(params = {}, options = {})
2396
+ req = build_request(:update_location_hdfs, params)
2397
+ req.send_request(options)
2398
+ end
2399
+
2114
2400
  # Updates some of the parameters of a previously created location for
2115
2401
  # Network File System (NFS) access. For information about creating an
2116
2402
  # NFS location, see [Creating a location for NFS][1].
@@ -2360,7 +2646,7 @@ module Aws::DataSync
2360
2646
  # A list of filter rules that determines which files to exclude from a
2361
2647
  # task. The list should contain a single filter string that consists of
2362
2648
  # the patterns to exclude. The patterns are delimited by "\|" (that
2363
- # is, a pipe), for example: `"/folder1|/folder2"`
2649
+ # is, a pipe), for example, `"/folder1|/folder2"`.
2364
2650
  #
2365
2651
  # @option params [Types::TaskSchedule] :schedule
2366
2652
  # Specifies a schedule used to periodically transfer files from a source
@@ -2378,14 +2664,14 @@ module Aws::DataSync
2378
2664
  # The name of the task to update.
2379
2665
  #
2380
2666
  # @option params [String] :cloud_watch_log_group_arn
2381
- # The Amazon Resource Name (ARN) of the resource name of the CloudWatch
2382
- # LogGroup.
2667
+ # The Amazon Resource Name (ARN) of the resource name of the Amazon
2668
+ # CloudWatch log group.
2383
2669
  #
2384
2670
  # @option params [Array<Types::FilterRule>] :includes
2385
2671
  # A list of filter rules that determines which files to include when
2386
- # running a task. The pattern should contain a single filter string that
2672
+ # running a task. The pattern contains a single filter string that
2387
2673
  # consists of the patterns to include. The patterns are delimited by
2388
- # "\|" (that is, a pipe). For example: `"/folder1|/folder2`"
2674
+ # "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
2389
2675
  #
2390
2676
  # @return [Struct] Returns an empty {Seahorse::Client::Response response}.
2391
2677
  #
@@ -2518,7 +2804,7 @@ module Aws::DataSync
2518
2804
  params: params,
2519
2805
  config: config)
2520
2806
  context[:gem_name] = 'aws-sdk-datasync'
2521
- context[:gem_version] = '1.37.0'
2807
+ context[:gem_version] = '1.38.0'
2522
2808
  Seahorse::Client::Request.new(handlers, context)
2523
2809
  end
2524
2810
 
@@ -29,6 +29,8 @@ module Aws::DataSync
29
29
  CreateLocationEfsResponse = Shapes::StructureShape.new(name: 'CreateLocationEfsResponse')
30
30
  CreateLocationFsxWindowsRequest = Shapes::StructureShape.new(name: 'CreateLocationFsxWindowsRequest')
31
31
  CreateLocationFsxWindowsResponse = Shapes::StructureShape.new(name: 'CreateLocationFsxWindowsResponse')
32
+ CreateLocationHdfsRequest = Shapes::StructureShape.new(name: 'CreateLocationHdfsRequest')
33
+ CreateLocationHdfsResponse = Shapes::StructureShape.new(name: 'CreateLocationHdfsResponse')
32
34
  CreateLocationNfsRequest = Shapes::StructureShape.new(name: 'CreateLocationNfsRequest')
33
35
  CreateLocationNfsResponse = Shapes::StructureShape.new(name: 'CreateLocationNfsResponse')
34
36
  CreateLocationObjectStorageRequest = Shapes::StructureShape.new(name: 'CreateLocationObjectStorageRequest')
@@ -51,6 +53,8 @@ module Aws::DataSync
51
53
  DescribeLocationEfsResponse = Shapes::StructureShape.new(name: 'DescribeLocationEfsResponse')
52
54
  DescribeLocationFsxWindowsRequest = Shapes::StructureShape.new(name: 'DescribeLocationFsxWindowsRequest')
53
55
  DescribeLocationFsxWindowsResponse = Shapes::StructureShape.new(name: 'DescribeLocationFsxWindowsResponse')
56
+ DescribeLocationHdfsRequest = Shapes::StructureShape.new(name: 'DescribeLocationHdfsRequest')
57
+ DescribeLocationHdfsResponse = Shapes::StructureShape.new(name: 'DescribeLocationHdfsResponse')
54
58
  DescribeLocationNfsRequest = Shapes::StructureShape.new(name: 'DescribeLocationNfsRequest')
55
59
  DescribeLocationNfsResponse = Shapes::StructureShape.new(name: 'DescribeLocationNfsResponse')
56
60
  DescribeLocationObjectStorageRequest = Shapes::StructureShape.new(name: 'DescribeLocationObjectStorageRequest')
@@ -82,10 +86,25 @@ module Aws::DataSync
82
86
  FsxFilesystemArn = Shapes::StringShape.new(name: 'FsxFilesystemArn')
83
87
  FsxWindowsSubdirectory = Shapes::StringShape.new(name: 'FsxWindowsSubdirectory')
84
88
  Gid = Shapes::StringShape.new(name: 'Gid')
89
+ HdfsAuthenticationType = Shapes::StringShape.new(name: 'HdfsAuthenticationType')
90
+ HdfsBlockSize = Shapes::IntegerShape.new(name: 'HdfsBlockSize')
91
+ HdfsDataTransferProtection = Shapes::StringShape.new(name: 'HdfsDataTransferProtection')
92
+ HdfsNameNode = Shapes::StructureShape.new(name: 'HdfsNameNode')
93
+ HdfsNameNodeList = Shapes::ListShape.new(name: 'HdfsNameNodeList')
94
+ HdfsReplicationFactor = Shapes::IntegerShape.new(name: 'HdfsReplicationFactor')
95
+ HdfsRpcProtection = Shapes::StringShape.new(name: 'HdfsRpcProtection')
96
+ HdfsServerHostname = Shapes::StringShape.new(name: 'HdfsServerHostname')
97
+ HdfsServerPort = Shapes::IntegerShape.new(name: 'HdfsServerPort')
98
+ HdfsSubdirectory = Shapes::StringShape.new(name: 'HdfsSubdirectory')
99
+ HdfsUser = Shapes::StringShape.new(name: 'HdfsUser')
85
100
  IamRoleArn = Shapes::StringShape.new(name: 'IamRoleArn')
86
101
  InputTagList = Shapes::ListShape.new(name: 'InputTagList')
87
102
  InternalException = Shapes::StructureShape.new(name: 'InternalException')
88
103
  InvalidRequestException = Shapes::StructureShape.new(name: 'InvalidRequestException')
104
+ KerberosKeytabFile = Shapes::BlobShape.new(name: 'KerberosKeytabFile')
105
+ KerberosKrb5ConfFile = Shapes::BlobShape.new(name: 'KerberosKrb5ConfFile')
106
+ KerberosPrincipal = Shapes::StringShape.new(name: 'KerberosPrincipal')
107
+ KmsKeyProviderUri = Shapes::StringShape.new(name: 'KmsKeyProviderUri')
89
108
  ListAgentsRequest = Shapes::StructureShape.new(name: 'ListAgentsRequest')
90
109
  ListAgentsResponse = Shapes::StructureShape.new(name: 'ListAgentsResponse')
91
110
  ListLocationsRequest = Shapes::StructureShape.new(name: 'ListLocationsRequest')
@@ -129,6 +148,7 @@ module Aws::DataSync
129
148
  PreserveDeletedFiles = Shapes::StringShape.new(name: 'PreserveDeletedFiles')
130
149
  PreserveDevices = Shapes::StringShape.new(name: 'PreserveDevices')
131
150
  PrivateLinkConfig = Shapes::StructureShape.new(name: 'PrivateLinkConfig')
151
+ QopConfiguration = Shapes::StructureShape.new(name: 'QopConfiguration')
132
152
  S3BucketArn = Shapes::StringShape.new(name: 'S3BucketArn')
133
153
  S3Config = Shapes::StructureShape.new(name: 'S3Config')
134
154
  S3StorageClass = Shapes::StringShape.new(name: 'S3StorageClass')
@@ -173,6 +193,8 @@ module Aws::DataSync
173
193
  UntagResourceResponse = Shapes::StructureShape.new(name: 'UntagResourceResponse')
174
194
  UpdateAgentRequest = Shapes::StructureShape.new(name: 'UpdateAgentRequest')
175
195
  UpdateAgentResponse = Shapes::StructureShape.new(name: 'UpdateAgentResponse')
196
+ UpdateLocationHdfsRequest = Shapes::StructureShape.new(name: 'UpdateLocationHdfsRequest')
197
+ UpdateLocationHdfsResponse = Shapes::StructureShape.new(name: 'UpdateLocationHdfsResponse')
176
198
  UpdateLocationNfsRequest = Shapes::StructureShape.new(name: 'UpdateLocationNfsRequest')
177
199
  UpdateLocationNfsResponse = Shapes::StructureShape.new(name: 'UpdateLocationNfsResponse')
178
200
  UpdateLocationObjectStorageRequest = Shapes::StructureShape.new(name: 'UpdateLocationObjectStorageRequest')
@@ -234,6 +256,24 @@ module Aws::DataSync
234
256
  CreateLocationFsxWindowsResponse.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, location_name: "LocationArn"))
235
257
  CreateLocationFsxWindowsResponse.struct_class = Types::CreateLocationFsxWindowsResponse
236
258
 
259
+ CreateLocationHdfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: HdfsSubdirectory, location_name: "Subdirectory"))
260
+ CreateLocationHdfsRequest.add_member(:name_nodes, Shapes::ShapeRef.new(shape: HdfsNameNodeList, required: true, location_name: "NameNodes"))
261
+ CreateLocationHdfsRequest.add_member(:block_size, Shapes::ShapeRef.new(shape: HdfsBlockSize, location_name: "BlockSize"))
262
+ CreateLocationHdfsRequest.add_member(:replication_factor, Shapes::ShapeRef.new(shape: HdfsReplicationFactor, location_name: "ReplicationFactor"))
263
+ CreateLocationHdfsRequest.add_member(:kms_key_provider_uri, Shapes::ShapeRef.new(shape: KmsKeyProviderUri, location_name: "KmsKeyProviderUri"))
264
+ CreateLocationHdfsRequest.add_member(:qop_configuration, Shapes::ShapeRef.new(shape: QopConfiguration, location_name: "QopConfiguration"))
265
+ CreateLocationHdfsRequest.add_member(:authentication_type, Shapes::ShapeRef.new(shape: HdfsAuthenticationType, required: true, location_name: "AuthenticationType"))
266
+ CreateLocationHdfsRequest.add_member(:simple_user, Shapes::ShapeRef.new(shape: HdfsUser, location_name: "SimpleUser"))
267
+ CreateLocationHdfsRequest.add_member(:kerberos_principal, Shapes::ShapeRef.new(shape: KerberosPrincipal, location_name: "KerberosPrincipal"))
268
+ CreateLocationHdfsRequest.add_member(:kerberos_keytab, Shapes::ShapeRef.new(shape: KerberosKeytabFile, location_name: "KerberosKeytab"))
269
+ CreateLocationHdfsRequest.add_member(:kerberos_krb_5_conf, Shapes::ShapeRef.new(shape: KerberosKrb5ConfFile, location_name: "KerberosKrb5Conf"))
270
+ CreateLocationHdfsRequest.add_member(:agent_arns, Shapes::ShapeRef.new(shape: AgentArnList, required: true, location_name: "AgentArns"))
271
+ CreateLocationHdfsRequest.add_member(:tags, Shapes::ShapeRef.new(shape: InputTagList, location_name: "Tags"))
272
+ CreateLocationHdfsRequest.struct_class = Types::CreateLocationHdfsRequest
273
+
274
+ CreateLocationHdfsResponse.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, location_name: "LocationArn"))
275
+ CreateLocationHdfsResponse.struct_class = Types::CreateLocationHdfsResponse
276
+
237
277
  CreateLocationNfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: NfsSubdirectory, required: true, location_name: "Subdirectory"))
238
278
  CreateLocationNfsRequest.add_member(:server_hostname, Shapes::ShapeRef.new(shape: ServerHostname, required: true, location_name: "ServerHostname"))
239
279
  CreateLocationNfsRequest.add_member(:on_prem_config, Shapes::ShapeRef.new(shape: OnPremConfig, required: true, location_name: "OnPremConfig"))
@@ -343,6 +383,23 @@ module Aws::DataSync
343
383
  DescribeLocationFsxWindowsResponse.add_member(:domain, Shapes::ShapeRef.new(shape: SmbDomain, location_name: "Domain"))
344
384
  DescribeLocationFsxWindowsResponse.struct_class = Types::DescribeLocationFsxWindowsResponse
345
385
 
386
+ DescribeLocationHdfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
387
+ DescribeLocationHdfsRequest.struct_class = Types::DescribeLocationHdfsRequest
388
+
389
+ DescribeLocationHdfsResponse.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, location_name: "LocationArn"))
390
+ DescribeLocationHdfsResponse.add_member(:location_uri, Shapes::ShapeRef.new(shape: LocationUri, location_name: "LocationUri"))
391
+ DescribeLocationHdfsResponse.add_member(:name_nodes, Shapes::ShapeRef.new(shape: HdfsNameNodeList, location_name: "NameNodes"))
392
+ DescribeLocationHdfsResponse.add_member(:block_size, Shapes::ShapeRef.new(shape: HdfsBlockSize, location_name: "BlockSize"))
393
+ DescribeLocationHdfsResponse.add_member(:replication_factor, Shapes::ShapeRef.new(shape: HdfsReplicationFactor, location_name: "ReplicationFactor"))
394
+ DescribeLocationHdfsResponse.add_member(:kms_key_provider_uri, Shapes::ShapeRef.new(shape: KmsKeyProviderUri, location_name: "KmsKeyProviderUri"))
395
+ DescribeLocationHdfsResponse.add_member(:qop_configuration, Shapes::ShapeRef.new(shape: QopConfiguration, location_name: "QopConfiguration"))
396
+ DescribeLocationHdfsResponse.add_member(:authentication_type, Shapes::ShapeRef.new(shape: HdfsAuthenticationType, location_name: "AuthenticationType"))
397
+ DescribeLocationHdfsResponse.add_member(:simple_user, Shapes::ShapeRef.new(shape: HdfsUser, location_name: "SimpleUser"))
398
+ DescribeLocationHdfsResponse.add_member(:kerberos_principal, Shapes::ShapeRef.new(shape: KerberosPrincipal, location_name: "KerberosPrincipal"))
399
+ DescribeLocationHdfsResponse.add_member(:agent_arns, Shapes::ShapeRef.new(shape: AgentArnList, location_name: "AgentArns"))
400
+ DescribeLocationHdfsResponse.add_member(:creation_time, Shapes::ShapeRef.new(shape: Time, location_name: "CreationTime"))
401
+ DescribeLocationHdfsResponse.struct_class = Types::DescribeLocationHdfsResponse
402
+
346
403
  DescribeLocationNfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
347
404
  DescribeLocationNfsRequest.struct_class = Types::DescribeLocationNfsRequest
348
405
 
@@ -442,6 +499,12 @@ module Aws::DataSync
442
499
 
443
500
  FilterValues.member = Shapes::ShapeRef.new(shape: FilterAttributeValue)
444
501
 
502
+ HdfsNameNode.add_member(:hostname, Shapes::ShapeRef.new(shape: HdfsServerHostname, required: true, location_name: "Hostname"))
503
+ HdfsNameNode.add_member(:port, Shapes::ShapeRef.new(shape: HdfsServerPort, required: true, location_name: "Port"))
504
+ HdfsNameNode.struct_class = Types::HdfsNameNode
505
+
506
+ HdfsNameNodeList.member = Shapes::ShapeRef.new(shape: HdfsNameNode)
507
+
445
508
  InputTagList.member = Shapes::ShapeRef.new(shape: TagListEntry)
446
509
 
447
510
  InternalException.add_member(:message, Shapes::ShapeRef.new(shape: string, location_name: "message"))
@@ -543,6 +606,10 @@ module Aws::DataSync
543
606
  PrivateLinkConfig.add_member(:security_group_arns, Shapes::ShapeRef.new(shape: PLSecurityGroupArnList, location_name: "SecurityGroupArns"))
544
607
  PrivateLinkConfig.struct_class = Types::PrivateLinkConfig
545
608
 
609
+ QopConfiguration.add_member(:rpc_protection, Shapes::ShapeRef.new(shape: HdfsRpcProtection, location_name: "RpcProtection"))
610
+ QopConfiguration.add_member(:data_transfer_protection, Shapes::ShapeRef.new(shape: HdfsDataTransferProtection, location_name: "DataTransferProtection"))
611
+ QopConfiguration.struct_class = Types::QopConfiguration
612
+
546
613
  S3Config.add_member(:bucket_access_role_arn, Shapes::ShapeRef.new(shape: IamRoleArn, required: true, location_name: "BucketAccessRoleArn"))
547
614
  S3Config.struct_class = Types::S3Config
548
615
 
@@ -618,6 +685,23 @@ module Aws::DataSync
618
685
 
619
686
  UpdateAgentResponse.struct_class = Types::UpdateAgentResponse
620
687
 
688
+ UpdateLocationHdfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
689
+ UpdateLocationHdfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: HdfsSubdirectory, location_name: "Subdirectory"))
690
+ UpdateLocationHdfsRequest.add_member(:name_nodes, Shapes::ShapeRef.new(shape: HdfsNameNodeList, location_name: "NameNodes"))
691
+ UpdateLocationHdfsRequest.add_member(:block_size, Shapes::ShapeRef.new(shape: HdfsBlockSize, location_name: "BlockSize"))
692
+ UpdateLocationHdfsRequest.add_member(:replication_factor, Shapes::ShapeRef.new(shape: HdfsReplicationFactor, location_name: "ReplicationFactor"))
693
+ UpdateLocationHdfsRequest.add_member(:kms_key_provider_uri, Shapes::ShapeRef.new(shape: KmsKeyProviderUri, location_name: "KmsKeyProviderUri"))
694
+ UpdateLocationHdfsRequest.add_member(:qop_configuration, Shapes::ShapeRef.new(shape: QopConfiguration, location_name: "QopConfiguration"))
695
+ UpdateLocationHdfsRequest.add_member(:authentication_type, Shapes::ShapeRef.new(shape: HdfsAuthenticationType, location_name: "AuthenticationType"))
696
+ UpdateLocationHdfsRequest.add_member(:simple_user, Shapes::ShapeRef.new(shape: HdfsUser, location_name: "SimpleUser"))
697
+ UpdateLocationHdfsRequest.add_member(:kerberos_principal, Shapes::ShapeRef.new(shape: KerberosPrincipal, location_name: "KerberosPrincipal"))
698
+ UpdateLocationHdfsRequest.add_member(:kerberos_keytab, Shapes::ShapeRef.new(shape: KerberosKeytabFile, location_name: "KerberosKeytab"))
699
+ UpdateLocationHdfsRequest.add_member(:kerberos_krb_5_conf, Shapes::ShapeRef.new(shape: KerberosKrb5ConfFile, location_name: "KerberosKrb5Conf"))
700
+ UpdateLocationHdfsRequest.add_member(:agent_arns, Shapes::ShapeRef.new(shape: AgentArnList, location_name: "AgentArns"))
701
+ UpdateLocationHdfsRequest.struct_class = Types::UpdateLocationHdfsRequest
702
+
703
+ UpdateLocationHdfsResponse.struct_class = Types::UpdateLocationHdfsResponse
704
+
621
705
  UpdateLocationNfsRequest.add_member(:location_arn, Shapes::ShapeRef.new(shape: LocationArn, required: true, location_name: "LocationArn"))
622
706
  UpdateLocationNfsRequest.add_member(:subdirectory, Shapes::ShapeRef.new(shape: NfsSubdirectory, location_name: "Subdirectory"))
623
707
  UpdateLocationNfsRequest.add_member(:on_prem_config, Shapes::ShapeRef.new(shape: OnPremConfig, location_name: "OnPremConfig"))
@@ -725,6 +809,16 @@ module Aws::DataSync
725
809
  o.errors << Shapes::ShapeRef.new(shape: InternalException)
726
810
  end)
727
811
 
812
+ api.add_operation(:create_location_hdfs, Seahorse::Model::Operation.new.tap do |o|
813
+ o.name = "CreateLocationHdfs"
814
+ o.http_method = "POST"
815
+ o.http_request_uri = "/"
816
+ o.input = Shapes::ShapeRef.new(shape: CreateLocationHdfsRequest)
817
+ o.output = Shapes::ShapeRef.new(shape: CreateLocationHdfsResponse)
818
+ o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
819
+ o.errors << Shapes::ShapeRef.new(shape: InternalException)
820
+ end)
821
+
728
822
  api.add_operation(:create_location_nfs, Seahorse::Model::Operation.new.tap do |o|
729
823
  o.name = "CreateLocationNfs"
730
824
  o.http_method = "POST"
@@ -835,6 +929,16 @@ module Aws::DataSync
835
929
  o.errors << Shapes::ShapeRef.new(shape: InternalException)
836
930
  end)
837
931
 
932
+ api.add_operation(:describe_location_hdfs, Seahorse::Model::Operation.new.tap do |o|
933
+ o.name = "DescribeLocationHdfs"
934
+ o.http_method = "POST"
935
+ o.http_request_uri = "/"
936
+ o.input = Shapes::ShapeRef.new(shape: DescribeLocationHdfsRequest)
937
+ o.output = Shapes::ShapeRef.new(shape: DescribeLocationHdfsResponse)
938
+ o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
939
+ o.errors << Shapes::ShapeRef.new(shape: InternalException)
940
+ end)
941
+
838
942
  api.add_operation(:describe_location_nfs, Seahorse::Model::Operation.new.tap do |o|
839
943
  o.name = "DescribeLocationNfs"
840
944
  o.http_method = "POST"
@@ -1015,6 +1119,16 @@ module Aws::DataSync
1015
1119
  o.errors << Shapes::ShapeRef.new(shape: InternalException)
1016
1120
  end)
1017
1121
 
1122
+ api.add_operation(:update_location_hdfs, Seahorse::Model::Operation.new.tap do |o|
1123
+ o.name = "UpdateLocationHdfs"
1124
+ o.http_method = "POST"
1125
+ o.http_request_uri = "/"
1126
+ o.input = Shapes::ShapeRef.new(shape: UpdateLocationHdfsRequest)
1127
+ o.output = Shapes::ShapeRef.new(shape: UpdateLocationHdfsResponse)
1128
+ o.errors << Shapes::ShapeRef.new(shape: InvalidRequestException)
1129
+ o.errors << Shapes::ShapeRef.new(shape: InternalException)
1130
+ end)
1131
+
1018
1132
  api.add_operation(:update_location_nfs, Seahorse::Model::Operation.new.tap do |o|
1019
1133
  o.name = "UpdateLocationNfs"
1020
1134
  o.http_method = "POST"
@@ -363,6 +363,169 @@ module Aws::DataSync
363
363
  include Aws::Structure
364
364
  end
365
365
 
366
+ # @note When making an API call, you may pass CreateLocationHdfsRequest
367
+ # data as a hash:
368
+ #
369
+ # {
370
+ # subdirectory: "HdfsSubdirectory",
371
+ # name_nodes: [ # required
372
+ # {
373
+ # hostname: "HdfsServerHostname", # required
374
+ # port: 1, # required
375
+ # },
376
+ # ],
377
+ # block_size: 1,
378
+ # replication_factor: 1,
379
+ # kms_key_provider_uri: "KmsKeyProviderUri",
380
+ # qop_configuration: {
381
+ # rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
382
+ # data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
383
+ # },
384
+ # authentication_type: "SIMPLE", # required, accepts SIMPLE, KERBEROS
385
+ # simple_user: "HdfsUser",
386
+ # kerberos_principal: "KerberosPrincipal",
387
+ # kerberos_keytab: "data",
388
+ # kerberos_krb_5_conf: "data",
389
+ # agent_arns: ["AgentArn"], # required
390
+ # tags: [
391
+ # {
392
+ # key: "TagKey", # required
393
+ # value: "TagValue",
394
+ # },
395
+ # ],
396
+ # }
397
+ #
398
+ # @!attribute [rw] subdirectory
399
+ # A subdirectory in the HDFS cluster. This subdirectory is used to
400
+ # read data from or write data to the HDFS cluster. If the
401
+ # subdirectory isn't specified, it will default to `/`.
402
+ # @return [String]
403
+ #
404
+ # @!attribute [rw] name_nodes
405
+ # The NameNode that manages the HDFS namespace. The NameNode performs
406
+ # operations such as opening, closing, and renaming files and
407
+ # directories. The NameNode contains the information to map blocks of
408
+ # data to the DataNodes. You can use only one NameNode.
409
+ # @return [Array<Types::HdfsNameNode>]
410
+ #
411
+ # @!attribute [rw] block_size
412
+ # The size of data blocks to write into the HDFS cluster. The block
413
+ # size must be a multiple of 512 bytes. The default block size is 128
414
+ # mebibytes (MiB).
415
+ # @return [Integer]
416
+ #
417
+ # @!attribute [rw] replication_factor
418
+ # The number of DataNodes to replicate the data to when writing to the
419
+ # HDFS cluster. By default, data is replicated to three DataNodes.
420
+ # @return [Integer]
421
+ #
422
+ # @!attribute [rw] kms_key_provider_uri
423
+ # The URI of the HDFS cluster's Key Management Server (KMS).
424
+ # @return [String]
425
+ #
426
+ # @!attribute [rw] qop_configuration
427
+ # The Quality of Protection (QOP) configuration specifies the Remote
428
+ # Procedure Call (RPC) and data transfer protection settings
429
+ # configured on the Hadoop Distributed File System (HDFS) cluster. If
430
+ # `QopConfiguration` isn't specified, `RpcProtection` and
431
+ # `DataTransferProtection` default to `PRIVACY`. If you set
432
+ # `RpcProtection` or `DataTransferProtection`, the other parameter
433
+ # assumes the same value.
434
+ # @return [Types::QopConfiguration]
435
+ #
436
+ # @!attribute [rw] authentication_type
437
+ # The type of authentication used to determine the identity of the
438
+ # user.
439
+ # @return [String]
440
+ #
441
+ # @!attribute [rw] simple_user
442
+ # The user name used to identify the client on the host operating
443
+ # system.
444
+ #
445
+ # <note markdown="1"> If `SIMPLE` is specified for `AuthenticationType`, this parameter is
446
+ # required.
447
+ #
448
+ # </note>
449
+ # @return [String]
450
+ #
451
+ # @!attribute [rw] kerberos_principal
452
+ # The Kerberos principal with access to the files and folders on the
453
+ # HDFS cluster.
454
+ #
455
+ # <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter
456
+ # is required.
457
+ #
458
+ # </note>
459
+ # @return [String]
460
+ #
461
+ # @!attribute [rw] kerberos_keytab
462
+ # The Kerberos key table (keytab) that contains mappings between the
463
+ # defined Kerberos principal and the encrypted keys. You can load the
464
+ # keytab from a file by providing the file's address. If you're
465
+ # using the CLI, it performs base64 encoding for you. Otherwise,
466
+ # provide the base64-encoded text.
467
+ #
468
+ # <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter
469
+ # is required.
470
+ #
471
+ # </note>
472
+ # @return [String]
473
+ #
474
+ # @!attribute [rw] kerberos_krb_5_conf
475
+ # The `krb5.conf` file that contains the Kerberos configuration
476
+ # information. You can load the `krb5.conf` file by providing the
477
+ # file's address. If you're using the CLI, it performs the base64
478
+ # encoding for you. Otherwise, provide the base64-encoded text.
479
+ #
480
+ # <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter
481
+ # is required.
482
+ #
483
+ # </note>
484
+ # @return [String]
485
+ #
486
+ # @!attribute [rw] agent_arns
487
+ # The Amazon Resource Names (ARNs) of the agents that are used to
488
+ # connect to the HDFS cluster.
489
+ # @return [Array<String>]
490
+ #
491
+ # @!attribute [rw] tags
492
+ # The key-value pair that represents the tag that you want to add to
493
+ # the location. The value can be an empty string. We recommend using
494
+ # tags to name your resources.
495
+ # @return [Array<Types::TagListEntry>]
496
+ #
497
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateLocationHdfsRequest AWS API Documentation
498
+ #
499
+ class CreateLocationHdfsRequest < Struct.new(
500
+ :subdirectory,
501
+ :name_nodes,
502
+ :block_size,
503
+ :replication_factor,
504
+ :kms_key_provider_uri,
505
+ :qop_configuration,
506
+ :authentication_type,
507
+ :simple_user,
508
+ :kerberos_principal,
509
+ :kerberos_keytab,
510
+ :kerberos_krb_5_conf,
511
+ :agent_arns,
512
+ :tags)
513
+ SENSITIVE = []
514
+ include Aws::Structure
515
+ end
516
+
517
+ # @!attribute [rw] location_arn
518
+ # The ARN of the source HDFS cluster location that's created.
519
+ # @return [String]
520
+ #
521
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateLocationHdfsResponse AWS API Documentation
522
+ #
523
+ class CreateLocationHdfsResponse < Struct.new(
524
+ :location_arn)
525
+ SENSITIVE = []
526
+ include Aws::Structure
527
+ end
528
+
366
529
  # CreateLocationNfsRequest
367
530
  #
368
531
  # @note When making an API call, you may pass CreateLocationNfsRequest
@@ -930,9 +1093,9 @@ module Aws::DataSync
930
1093
  #
931
1094
  # @!attribute [rw] includes
932
1095
  # A list of filter rules that determines which files to include when
933
- # running a task. The pattern should contain a single filter string
934
- # that consists of the patterns to include. The patterns are delimited
935
- # by "\|" (that is, a pipe). For example: `"/folder1|/folder2`"
1096
+ # running a task. The pattern contains a single filter string that
1097
+ # consists of the patterns to include. The patterns are delimited by
1098
+ # "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
936
1099
  # @return [Array<Types::FilterRule>]
937
1100
  #
938
1101
  # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateTaskRequest AWS API Documentation
@@ -1233,6 +1396,101 @@ module Aws::DataSync
1233
1396
  include Aws::Structure
1234
1397
  end
1235
1398
 
1399
+ # @note When making an API call, you may pass DescribeLocationHdfsRequest
1400
+ # data as a hash:
1401
+ #
1402
+ # {
1403
+ # location_arn: "LocationArn", # required
1404
+ # }
1405
+ #
1406
+ # @!attribute [rw] location_arn
1407
+ # The Amazon Resource Name (ARN) of the HDFS cluster location to
1408
+ # describe.
1409
+ # @return [String]
1410
+ #
1411
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeLocationHdfsRequest AWS API Documentation
1412
+ #
1413
+ class DescribeLocationHdfsRequest < Struct.new(
1414
+ :location_arn)
1415
+ SENSITIVE = []
1416
+ include Aws::Structure
1417
+ end
1418
+
1419
+ # @!attribute [rw] location_arn
1420
+ # The ARN of the HDFS cluster location.
1421
+ # @return [String]
1422
+ #
1423
+ # @!attribute [rw] location_uri
1424
+ # The URI of the HDFS cluster location.
1425
+ # @return [String]
1426
+ #
1427
+ # @!attribute [rw] name_nodes
1428
+ # The NameNode that manages the HDFS namespace.
1429
+ # @return [Array<Types::HdfsNameNode>]
1430
+ #
1431
+ # @!attribute [rw] block_size
1432
+ # The size of the data blocks to write into the HDFS cluster.
1433
+ # @return [Integer]
1434
+ #
1435
+ # @!attribute [rw] replication_factor
1436
+ # The number of DataNodes to replicate the data to when writing to the
1437
+ # HDFS cluster.
1438
+ # @return [Integer]
1439
+ #
1440
+ # @!attribute [rw] kms_key_provider_uri
1441
+ # The URI of the HDFS cluster's Key Management Server (KMS).
1442
+ # @return [String]
1443
+ #
1444
+ # @!attribute [rw] qop_configuration
1445
+ # The Quality of Protection (QOP) configuration specifies the Remote
1446
+ # Procedure Call (RPC) and data transfer protection settings
1447
+ # configured on the Hadoop Distributed File System (HDFS) cluster.
1448
+ # @return [Types::QopConfiguration]
1449
+ #
1450
+ # @!attribute [rw] authentication_type
1451
+ # The type of authentication used to determine the identity of the
1452
+ # user.
1453
+ # @return [String]
1454
+ #
1455
+ # @!attribute [rw] simple_user
1456
+ # The user name used to identify the client on the host operating
1457
+ # system. This parameter is used if the `AuthenticationType` is
1458
+ # defined as `SIMPLE`.
1459
+ # @return [String]
1460
+ #
1461
+ # @!attribute [rw] kerberos_principal
1462
+ # The Kerberos principal with access to the files and folders on the
1463
+ # HDFS cluster. This parameter is used if the `AuthenticationType` is
1464
+ # defined as `KERBEROS`.
1465
+ # @return [String]
1466
+ #
1467
+ # @!attribute [rw] agent_arns
1468
+ # The ARNs of the agents that are used to connect to the HDFS cluster.
1469
+ # @return [Array<String>]
1470
+ #
1471
+ # @!attribute [rw] creation_time
1472
+ # The time that the HDFS location was created.
1473
+ # @return [Time]
1474
+ #
1475
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeLocationHdfsResponse AWS API Documentation
1476
+ #
1477
+ class DescribeLocationHdfsResponse < Struct.new(
1478
+ :location_arn,
1479
+ :location_uri,
1480
+ :name_nodes,
1481
+ :block_size,
1482
+ :replication_factor,
1483
+ :kms_key_provider_uri,
1484
+ :qop_configuration,
1485
+ :authentication_type,
1486
+ :simple_user,
1487
+ :kerberos_principal,
1488
+ :agent_arns,
1489
+ :creation_time)
1490
+ SENSITIVE = []
1491
+ include Aws::Structure
1492
+ end
1493
+
1236
1494
  # DescribeLocationNfsRequest
1237
1495
  #
1238
1496
  # @note When making an API call, you may pass DescribeLocationNfsRequest
@@ -1719,13 +1977,13 @@ module Aws::DataSync
1719
1977
  # @return [String]
1720
1978
  #
1721
1979
  # @!attribute [rw] source_network_interface_arns
1722
- # The Amazon Resource Name (ARN) of the source ENIs (Elastic Network
1723
- # Interface) that was created for your subnet.
1980
+ # The Amazon Resource Names (ARNs) of the source elastic network
1981
+ # interfaces (ENIs) that were created for your subnet.
1724
1982
  # @return [Array<String>]
1725
1983
  #
1726
1984
  # @!attribute [rw] destination_network_interface_arns
1727
- # The Amazon Resource Name (ARN) of the destination ENIs (Elastic
1728
- # Network Interface) that was created for your subnet.
1985
+ # The Amazon Resource Names (ARNs) of the destination elastic network
1986
+ # interfaces (ENIs) that were created for your subnet.
1729
1987
  # @return [Array<String>]
1730
1988
  #
1731
1989
  # @!attribute [rw] options
@@ -1748,7 +2006,7 @@ module Aws::DataSync
1748
2006
  # A list of filter rules that determines which files to exclude from a
1749
2007
  # task. The list should contain a single filter string that consists
1750
2008
  # of the patterns to exclude. The patterns are delimited by "\|"
1751
- # (that is, a pipe), for example: `"/folder1|/folder2"`
2009
+ # (that is, a pipe), for example, `"/folder1|/folder2"`.
1752
2010
  # @return [Array<Types::FilterRule>]
1753
2011
  #
1754
2012
  # @!attribute [rw] schedule
@@ -1773,9 +2031,9 @@ module Aws::DataSync
1773
2031
  #
1774
2032
  # @!attribute [rw] includes
1775
2033
  # A list of filter rules that determines which files to include when
1776
- # running a task. The pattern should contain a single filter string
1777
- # that consists of the patterns to include. The patterns are delimited
1778
- # by "\|" (that is, a pipe). For example: `"/folder1|/folder2`"
2034
+ # running a task. The pattern contains a single filter string that
2035
+ # consists of the patterns to include. The patterns are delimited by
2036
+ # "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
1779
2037
  # @return [Array<Types::FilterRule>]
1780
2038
  #
1781
2039
  # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeTaskResponse AWS API Documentation
@@ -1865,6 +2123,40 @@ module Aws::DataSync
1865
2123
  include Aws::Structure
1866
2124
  end
1867
2125
 
2126
+ # The NameNode of the Hadoop Distributed File System (HDFS). The
2127
+ # NameNode manages the file system's namespace. The NameNode performs
2128
+ # operations such as opening, closing, and renaming files and
2129
+ # directories. The NameNode contains the information to map blocks of
2130
+ # data to the DataNodes.
2131
+ #
2132
+ # @note When making an API call, you may pass HdfsNameNode
2133
+ # data as a hash:
2134
+ #
2135
+ # {
2136
+ # hostname: "HdfsServerHostname", # required
2137
+ # port: 1, # required
2138
+ # }
2139
+ #
2140
+ # @!attribute [rw] hostname
2141
+ # The hostname of the NameNode in the HDFS cluster. This value is the
2142
+ # IP address or Domain Name Service (DNS) name of the NameNode. An
2143
+ # agent that's installed on-premises uses this hostname to
2144
+ # communicate with the NameNode in the network.
2145
+ # @return [String]
2146
+ #
2147
+ # @!attribute [rw] port
2148
+ # The port that the NameNode uses to listen to client requests.
2149
+ # @return [Integer]
2150
+ #
2151
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/HdfsNameNode AWS API Documentation
2152
+ #
2153
+ class HdfsNameNode < Struct.new(
2154
+ :hostname,
2155
+ :port)
2156
+ SENSITIVE = []
2157
+ include Aws::Structure
2158
+ end
2159
+
1868
2160
  # This exception is thrown when an error occurs in the DataSync service.
1869
2161
  #
1870
2162
  # @!attribute [rw] message
@@ -2304,7 +2596,7 @@ module Aws::DataSync
2304
2596
  #
2305
2597
  # * <b> <a href="https://tools.ietf.org/html/rfc3530">NFSv4.0</a> </b>
2306
2598
  # - stateful, firewall-friendly protocol version that supports
2307
- # delegations and pseudo filesystems.
2599
+ # delegations and pseudo file systems.
2308
2600
  #
2309
2601
  # * <b> <a href="https://tools.ietf.org/html/rfc5661">NFSv4.1</a> </b>
2310
2602
  # - stateful protocol version that supports sessions, directory
@@ -2725,6 +3017,39 @@ module Aws::DataSync
2725
3017
  include Aws::Structure
2726
3018
  end
2727
3019
 
3020
+ # The Quality of Protection (QOP) configuration specifies the Remote
3021
+ # Procedure Call (RPC) and data transfer privacy settings configured on
3022
+ # the Hadoop Distributed File System (HDFS) cluster.
3023
+ #
3024
+ # @note When making an API call, you may pass QopConfiguration
3025
+ # data as a hash:
3026
+ #
3027
+ # {
3028
+ # rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
3029
+ # data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
3030
+ # }
3031
+ #
3032
+ # @!attribute [rw] rpc_protection
3033
+ # The RPC protection setting configured on the HDFS cluster. This
3034
+ # setting corresponds to your `hadoop.rpc.protection` setting in your
3035
+ # `core-site.xml` file on your Hadoop cluster.
3036
+ # @return [String]
3037
+ #
3038
+ # @!attribute [rw] data_transfer_protection
3039
+ # The data transfer protection setting configured on the HDFS cluster.
3040
+ # This setting corresponds to your `dfs.data.transfer.protection`
3041
+ # setting in the `hdfs-site.xml` file on your Hadoop cluster.
3042
+ # @return [String]
3043
+ #
3044
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/QopConfiguration AWS API Documentation
3045
+ #
3046
+ class QopConfiguration < Struct.new(
3047
+ :rpc_protection,
3048
+ :data_transfer_protection)
3049
+ SENSITIVE = []
3050
+ include Aws::Structure
3051
+ end
3052
+
2728
3053
  # The Amazon Resource Name (ARN) of the Identity and Access Management
2729
3054
  # (IAM) role that is used to access an Amazon S3 bucket.
2730
3055
  #
@@ -2843,14 +3168,14 @@ module Aws::DataSync
2843
3168
  # A list of filter rules that determines which files to include when
2844
3169
  # running a task. The pattern should contain a single filter string
2845
3170
  # that consists of the patterns to include. The patterns are delimited
2846
- # by "\|" (that is, a pipe). For example: `"/folder1|/folder2"`
3171
+ # by "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
2847
3172
  # @return [Array<Types::FilterRule>]
2848
3173
  #
2849
3174
  # @!attribute [rw] excludes
2850
3175
  # A list of filter rules that determines which files to exclude from a
2851
- # task. The list should contain a single filter string that consists
2852
- # of the patterns to exclude. The patterns are delimited by "\|"
2853
- # (that is, a pipe), for example, `"/folder1|/folder2"`.
3176
+ # task. The list contains a single filter string that consists of the
3177
+ # patterns to exclude. The patterns are delimited by "\|" (that is,
3178
+ # a pipe), for example, `"/folder1|/folder2"`.
2854
3179
  # @return [Array<Types::FilterRule>]
2855
3180
  #
2856
3181
  # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/StartTaskExecutionRequest AWS API Documentation
@@ -2879,9 +3204,9 @@ module Aws::DataSync
2879
3204
  include Aws::Structure
2880
3205
  end
2881
3206
 
2882
- # Represents a single entry in a list of AWS resource tags.
2883
- # `TagListEntry` returns an array that contains a list of tasks when the
2884
- # [ListTagsForResource][1] operation is called.
3207
+ # Represents a single entry in a list of Amazon Web Services resource
3208
+ # tags. `TagListEntry` returns an array that contains a list of tasks
3209
+ # when the [ListTagsForResource][1] operation is called.
2885
3210
  #
2886
3211
  #
2887
3212
  #
@@ -3204,6 +3529,126 @@ module Aws::DataSync
3204
3529
  #
3205
3530
  class UpdateAgentResponse < Aws::EmptyStructure; end
3206
3531
 
3532
+ # @note When making an API call, you may pass UpdateLocationHdfsRequest
3533
+ # data as a hash:
3534
+ #
3535
+ # {
3536
+ # location_arn: "LocationArn", # required
3537
+ # subdirectory: "HdfsSubdirectory",
3538
+ # name_nodes: [
3539
+ # {
3540
+ # hostname: "HdfsServerHostname", # required
3541
+ # port: 1, # required
3542
+ # },
3543
+ # ],
3544
+ # block_size: 1,
3545
+ # replication_factor: 1,
3546
+ # kms_key_provider_uri: "KmsKeyProviderUri",
3547
+ # qop_configuration: {
3548
+ # rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
3549
+ # data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
3550
+ # },
3551
+ # authentication_type: "SIMPLE", # accepts SIMPLE, KERBEROS
3552
+ # simple_user: "HdfsUser",
3553
+ # kerberos_principal: "KerberosPrincipal",
3554
+ # kerberos_keytab: "data",
3555
+ # kerberos_krb_5_conf: "data",
3556
+ # agent_arns: ["AgentArn"],
3557
+ # }
3558
+ #
3559
+ # @!attribute [rw] location_arn
3560
+ # The Amazon Resource Name (ARN) of the source HDFS cluster location.
3561
+ # @return [String]
3562
+ #
3563
+ # @!attribute [rw] subdirectory
3564
+ # A subdirectory in the HDFS cluster. This subdirectory is used to
3565
+ # read data from or write data to the HDFS cluster.
3566
+ # @return [String]
3567
+ #
3568
+ # @!attribute [rw] name_nodes
3569
+ # The NameNode that manages the HDFS namespace. The NameNode performs
3570
+ # operations such as opening, closing, and renaming files and
3571
+ # directories. The NameNode contains the information to map blocks of
3572
+ # data to the DataNodes. You can use only one NameNode.
3573
+ # @return [Array<Types::HdfsNameNode>]
3574
+ #
3575
+ # @!attribute [rw] block_size
3576
+ # The size of the data blocks to write into the HDFS cluster.
3577
+ # @return [Integer]
3578
+ #
3579
+ # @!attribute [rw] replication_factor
3580
+ # The number of DataNodes to replicate the data to when writing to the
3581
+ # HDFS cluster.
3582
+ # @return [Integer]
3583
+ #
3584
+ # @!attribute [rw] kms_key_provider_uri
3585
+ # The URI of the HDFS cluster's Key Management Server (KMS).
3586
+ # @return [String]
3587
+ #
3588
+ # @!attribute [rw] qop_configuration
3589
+ # The Quality of Protection (QOP) configuration specifies the Remote
3590
+ # Procedure Call (RPC) and data transfer privacy settings configured
3591
+ # on the Hadoop Distributed File System (HDFS) cluster.
3592
+ # @return [Types::QopConfiguration]
3593
+ #
3594
+ # @!attribute [rw] authentication_type
3595
+ # The type of authentication used to determine the identity of the
3596
+ # user.
3597
+ # @return [String]
3598
+ #
3599
+ # @!attribute [rw] simple_user
3600
+ # The user name used to identify the client on the host operating
3601
+ # system.
3602
+ # @return [String]
3603
+ #
3604
+ # @!attribute [rw] kerberos_principal
3605
+ # The Kerberos principal with access to the files and folders on the
3606
+ # HDFS cluster.
3607
+ # @return [String]
3608
+ #
3609
+ # @!attribute [rw] kerberos_keytab
3610
+ # The Kerberos key table (keytab) that contains mappings between the
3611
+ # defined Kerberos principal and the encrypted keys. You can load the
3612
+ # keytab from a file by providing the file's address. If you use the
3613
+ # AWS CLI, it performs base64 encoding for you. Otherwise, provide the
3614
+ # base64-encoded text.
3615
+ # @return [String]
3616
+ #
3617
+ # @!attribute [rw] kerberos_krb_5_conf
3618
+ # The `krb5.conf` file that contains the Kerberos configuration
3619
+ # information. You can load the `krb5.conf` file by providing the
3620
+ # file's address. If you're using the AWS CLI, it performs the
3621
+ # base64 encoding for you. Otherwise, provide the base64-encoded text.
3622
+ # @return [String]
3623
+ #
3624
+ # @!attribute [rw] agent_arns
3625
+ # The ARNs of the agents that are used to connect to the HDFS cluster.
3626
+ # @return [Array<String>]
3627
+ #
3628
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateLocationHdfsRequest AWS API Documentation
3629
+ #
3630
+ class UpdateLocationHdfsRequest < Struct.new(
3631
+ :location_arn,
3632
+ :subdirectory,
3633
+ :name_nodes,
3634
+ :block_size,
3635
+ :replication_factor,
3636
+ :kms_key_provider_uri,
3637
+ :qop_configuration,
3638
+ :authentication_type,
3639
+ :simple_user,
3640
+ :kerberos_principal,
3641
+ :kerberos_keytab,
3642
+ :kerberos_krb_5_conf,
3643
+ :agent_arns)
3644
+ SENSITIVE = []
3645
+ include Aws::Structure
3646
+ end
3647
+
3648
+ # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateLocationHdfsResponse AWS API Documentation
3649
+ #
3650
+ class UpdateLocationHdfsResponse < Aws::EmptyStructure; end
3651
+
3207
3652
  # @note When making an API call, you may pass UpdateLocationNfsRequest
3208
3653
  # data as a hash:
3209
3654
  #
@@ -3571,7 +4016,7 @@ module Aws::DataSync
3571
4016
  # A list of filter rules that determines which files to exclude from a
3572
4017
  # task. The list should contain a single filter string that consists
3573
4018
  # of the patterns to exclude. The patterns are delimited by "\|"
3574
- # (that is, a pipe), for example: `"/folder1|/folder2"`
4019
+ # (that is, a pipe), for example, `"/folder1|/folder2"`.
3575
4020
  # @return [Array<Types::FilterRule>]
3576
4021
  #
3577
4022
  # @!attribute [rw] schedule
@@ -3592,15 +4037,15 @@ module Aws::DataSync
3592
4037
  # @return [String]
3593
4038
  #
3594
4039
  # @!attribute [rw] cloud_watch_log_group_arn
3595
- # The Amazon Resource Name (ARN) of the resource name of the
3596
- # CloudWatch LogGroup.
4040
+ # The Amazon Resource Name (ARN) of the Amazon CloudWatch log
4041
+ # group.
3597
4042
  # @return [String]
3598
4043
  #
3599
4044
  # @!attribute [rw] includes
3600
4045
  # A list of filter rules that determines which files to include when
3601
- # running a task. The pattern should contain a single filter string
3602
- # that consists of the patterns to include. The patterns are delimited
3603
- # by "\|" (that is, a pipe). For example: `"/folder1|/folder2`"
4046
+ # running a task. The pattern contains a single filter string that
4047
+ # consists of the patterns to include. The patterns are delimited by
4048
+ # "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
3604
4049
  # @return [Array<Types::FilterRule>]
3605
4050
  #
3606
4051
  # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateTaskRequest AWS API Documentation
@@ -48,6 +48,6 @@ require_relative 'aws-sdk-datasync/customizations'
48
48
  # @!group service
49
49
  module Aws::DataSync
50
50
 
51
- GEM_VERSION = '1.37.0'
51
+ GEM_VERSION = '1.38.0'
52
52
 
53
53
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: aws-sdk-datasync
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.37.0
4
+ version: 1.38.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Amazon Web Services
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2021-10-18 00:00:00.000000000 Z
11
+ date: 2021-11-03 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: aws-sdk-core