aws-sdk-datasync 1.34.0 → 1.38.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +20 -0
- data/VERSION +1 -1
- data/lib/aws-sdk-datasync/client.rb +401 -64
- data/lib/aws-sdk-datasync/client_api.rb +118 -0
- data/lib/aws-sdk-datasync/types.rb +587 -91
- data/lib/aws-sdk-datasync.rb +1 -1
- metadata +5 -5
data/lib/aws-sdk-datasync/client.rb:

@@ -346,8 +346,8 @@ module Aws::DataSync
 # you allow the task execution to complete, file content on the
 # destination is complete and consistent. This applies to other
 # unexpected failures that interrupt a task execution. In all of these
-# cases,
-#
+# cases, DataSync successfully completes the transfer when you start the
+# next task execution.
 #
 # @option params [required, String] :task_execution_arn
 # The Amazon Resource Name (ARN) of the task execution to cancel.
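The hunk above clarifies CancelTaskExecution: an interrupted transfer is reconciled the next time the task runs. A minimal Ruby sketch of that cancel-and-restart flow; the ARNs are placeholders and the client is assumed to pick up region and credentials from the usual SDK configuration.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    # Cancel the in-flight execution (placeholder ARN).
    client.cancel_task_execution(
      task_execution_arn: "arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0/execution/exec-0123456789abcdef0"
    )

    # Starting the task again lets DataSync finish the interrupted transfer.
    resp = client.start_task_execution(
      task_arn: "arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0"
    )
    puts resp.task_execution_arn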
@@ -369,12 +369,13 @@ module Aws::DataSync
 req.send_request(options)
 end

-# Activates an
-#
-# activation process, you specify information such as the
-# that you want to activate the agent in. You activate
-#
-# reside. Your tasks are created
+# Activates a DataSync agent that you have deployed on your host. The
+# activation process associates your agent with your account. In the
+# activation process, you specify information such as the Amazon Web
+# Services Region that you want to activate the agent in. You activate
+# the agent in the Amazon Web Services Region where your target
+# locations (in Amazon S3 or Amazon EFS) reside. Your tasks are created
+# in this Amazon Web Services Region.
 #
 # You can activate the agent in a VPC (virtual private cloud) or provide
 # the agent access to a VPC endpoint so you can run tasks without going
@@ -385,13 +386,14 @@ module Aws::DataSync
 # task to run. If you use multiple agents for a source location, the
 # status of all the agents must be AVAILABLE for the task to run.
 #
-# Agents are automatically updated by
-# mechanism that ensures minimal interruption to your
+# Agents are automatically updated by Amazon Web Services on a regular
+# basis, using a mechanism that ensures minimal interruption to your
+# tasks.
 #
 # @option params [required, String] :activation_key
 # Your agent activation key. You can get the activation key either by
 # sending an HTTP GET request with redirects that enable you to get the
-# agent IP address (port 80). Alternatively, you can get it from the
+# agent IP address (port 80). Alternatively, you can get it from the
 # DataSync console.
 #
 # The redirect URL returned in the response provides you the activation
@@ -400,8 +402,8 @@ module Aws::DataSync
 # are merely defaults. The arguments you pass to this API call determine
 # the actual configuration of your agent.
 #
-# For more information, see Activating an Agent in the *
-#
+# For more information, see Activating an Agent in the *DataSync User
+# Guide.*
 #
 # @option params [String] :agent_name
 # The name you configured for your agent. This value is a text reference
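Taken together, the CreateAgent hunks above describe activation with an activation key obtained from the agent's port-80 redirect or from the DataSync console. A minimal Ruby sketch of the call; the activation key and agent name are placeholders.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    # Activate the agent in the Region where the target locations reside.
    resp = client.create_agent(
      activation_key: "AAAAA-1AAAA-BB1CC-33333-EEEEE", # placeholder
      agent_name: "on-prem-agent-1"
    )
    puts resp.agent_arn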
@@ -487,7 +489,7 @@ module Aws::DataSync
 # @option params [String] :subdirectory
 # A subdirectory in the location’s path. This subdirectory in the EFS
 # file system is used to read data from the EFS source location or write
-# data to the EFS destination. By default,
+# data to the EFS destination. By default, DataSync uses the root
 # directory.
 #
 # <note markdown="1"> `Subdirectory` must be specified with forward slashes. For example,
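The hunk above touches only the :subdirectory parameter of create_location_efs. For orientation, a sketch of a full call; the :efs_filesystem_arn and :ec2_config parameters are not shown in this diff and are assumed from the operation's documented request shape, and all ARNs are placeholders.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    resp = client.create_location_efs(
      efs_filesystem_arn: "arn:aws:elasticfilesystem:us-east-1:111122223333:file-system/fs-0123456789abcdef0",
      ec2_config: {
        subnet_arn: "arn:aws:ec2:us-east-1:111122223333:subnet/subnet-0123456789abcdef0",
        security_group_arns: ["arn:aws:ec2:us-east-1:111122223333:security-group/sg-0123456789abcdef0"]
      },
      # Omitting :subdirectory means DataSync uses the root directory.
      subdirectory: "/exported/data"
    )
    puts resp.location_arn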
@@ -634,6 +636,141 @@ module Aws::DataSync
 req.send_request(options)
 end

+# Creates an endpoint for a Hadoop Distributed File System (HDFS).
+#
+# @option params [String] :subdirectory
+# A subdirectory in the HDFS cluster. This subdirectory is used to read
+# data from or write data to the HDFS cluster. If the subdirectory
+# isn't specified, it will default to `/`.
+#
+# @option params [required, Array<Types::HdfsNameNode>] :name_nodes
+# The NameNode that manages the HDFS namespace. The NameNode performs
+# operations such as opening, closing, and renaming files and
+# directories. The NameNode contains the information to map blocks of
+# data to the DataNodes. You can use only one NameNode.
+#
+# @option params [Integer] :block_size
+# The size of data blocks to write into the HDFS cluster. The block size
+# must be a multiple of 512 bytes. The default block size is 128
+# mebibytes (MiB).
+#
+# @option params [Integer] :replication_factor
+# The number of DataNodes to replicate the data to when writing to the
+# HDFS cluster. By default, data is replicated to three DataNodes.
+#
+# @option params [String] :kms_key_provider_uri
+# The URI of the HDFS cluster's Key Management Server (KMS).
+#
+# @option params [Types::QopConfiguration] :qop_configuration
+# The Quality of Protection (QOP) configuration specifies the Remote
+# Procedure Call (RPC) and data transfer protection settings configured
+# on the Hadoop Distributed File System (HDFS) cluster. If
+# `QopConfiguration` isn't specified, `RpcProtection` and
+# `DataTransferProtection` default to `PRIVACY`. If you set
+# `RpcProtection` or `DataTransferProtection`, the other parameter
+# assumes the same value.
+#
+# @option params [required, String] :authentication_type
+# The type of authentication used to determine the identity of the user.
+#
+# @option params [String] :simple_user
+# The user name used to identify the client on the host operating
+# system.
+#
+# <note markdown="1"> If `SIMPLE` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [String] :kerberos_principal
+# The Kerberos principal with access to the files and folders on the
+# HDFS cluster.
+#
+# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [String, StringIO, File] :kerberos_keytab
+# The Kerberos key table (keytab) that contains mappings between the
+# defined Kerberos principal and the encrypted keys. You can load the
+# keytab from a file by providing the file's address. If you're using
+# the CLI, it performs base64 encoding for you. Otherwise, provide the
+# base64-encoded text.
+#
+# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [String, StringIO, File] :kerberos_krb_5_conf
+# The `krb5.conf` file that contains the Kerberos configuration
+# information. You can load the `krb5.conf` file by providing the
+# file's address. If you're using the CLI, it performs the base64
+# encoding for you. Otherwise, provide the base64-encoded text.
+#
+# <note markdown="1"> If `KERBEROS` is specified for `AuthenticationType`, this parameter is
+# required.
+#
+# </note>
+#
+# @option params [required, Array<String>] :agent_arns
+# The Amazon Resource Names (ARNs) of the agents that are used to
+# connect to the HDFS cluster.
+#
+# @option params [Array<Types::TagListEntry>] :tags
+# The key-value pair that represents the tag that you want to add to the
+# location. The value can be an empty string. We recommend using tags to
+# name your resources.
+#
+# @return [Types::CreateLocationHdfsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
+#
+# * {Types::CreateLocationHdfsResponse#location_arn #location_arn} => String
+#
+# @example Request syntax with placeholder values
+#
+#   resp = client.create_location_hdfs({
+#     subdirectory: "HdfsSubdirectory",
+#     name_nodes: [ # required
+#       {
+#         hostname: "HdfsServerHostname", # required
+#         port: 1, # required
+#       },
+#     ],
+#     block_size: 1,
+#     replication_factor: 1,
+#     kms_key_provider_uri: "KmsKeyProviderUri",
+#     qop_configuration: {
+#       rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+#       data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+#     },
+#     authentication_type: "SIMPLE", # required, accepts SIMPLE, KERBEROS
+#     simple_user: "HdfsUser",
+#     kerberos_principal: "KerberosPrincipal",
+#     kerberos_keytab: "data",
+#     kerberos_krb_5_conf: "data",
+#     agent_arns: ["AgentArn"], # required
+#     tags: [
+#       {
+#         key: "TagKey", # required
+#         value: "TagValue",
+#       },
+#     ],
+#   })
+#
+# @example Response structure
+#
+#   resp.location_arn #=> String
+#
+# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/CreateLocationHdfs AWS API Documentation
+#
+# @overload create_location_hdfs(params = {})
+# @param [Hash] params ({})
+def create_location_hdfs(params = {}, options = {})
+  req = build_request(:create_location_hdfs, params)
+  req.send_request(options)
+end
+
 # Defines a file system on a Network File System (NFS) server that can
 # be read from or written to.
 #
@@ -658,8 +795,8 @@ module Aws::DataSync
 # files. For the agent to access directories, you must additionally
 # enable all execute access.
 #
-# If you are copying data to or from your
-# Server on
+# If you are copying data to or from your Snowcone device, see [NFS
+# Server on Snowcone][1] for more information.
 #
 # For information about NFS export configuration, see 18.7. The
 # /etc/exports Configuration File in the Red Hat Enterprise Linux
@@ -674,8 +811,8 @@ module Aws::DataSync
 # Name Service (DNS) name of the NFS server. An agent that is installed
 # on-premises uses this host name to mount the NFS server in a network.
 #
-# If you are copying data to or from your
-# Server on
+# If you are copying data to or from your Snowcone device, see [NFS
+# Server on Snowcone][1] for more information.
 #
 # <note markdown="1"> This name must either be DNS-compliant or must be an IP version 4
 # (IPv4) address.
@@ -690,8 +827,8 @@ module Aws::DataSync
 # Contains a list of Amazon Resource Names (ARNs) of agents that are
 # used to connect to an NFS server.
 #
-# If you are copying data to or from your
-# Server on
+# If you are copying data to or from your Snowcone device, see [NFS
+# Server on Snowcone][1] for more information.
 #
 #
 #
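The three NFS hunks above all add the pointer to the Snowcone NFS documentation in the create_location_nfs parameters. For orientation, a sketch of creating an NFS location; the :on_prem_config wrapper around the agent ARNs is assumed from the operation's documented request shape, and the hostname, export path, and ARN are placeholders.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    # The agent must be able to mount the NFS export over the network.
    resp = client.create_location_nfs(
      server_hostname: "nfs.example.com",
      subdirectory: "/exports/data",
      on_prem_config: {
        agent_arns: ["arn:aws:datasync:us-east-1:111122223333:agent/agent-0123456789abcdef0"]
      }
    )
    puts resp.location_arn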
@@ -832,9 +969,12 @@ module Aws::DataSync

 # Creates an endpoint for an Amazon S3 bucket.
 #
-# For more information, see
-#
-#
+# For more information, see [Create an Amazon S3 location][1] in the
+# *DataSync User Guide*.
+#
+#
+#
+# [1]: https://docs.aws.amazon.com/datasync/latest/userguide/create-locations-cli.html#create-location-s3-cli
 #
 # @option params [String] :subdirectory
 # A subdirectory in the Amazon S3 bucket. This subdirectory in Amazon S3
@@ -842,14 +982,15 @@ module Aws::DataSync
 # S3 destination.
 #
 # @option params [required, String] :s3_bucket_arn
-# The ARN of the Amazon S3 bucket. If the bucket is on an
-# this must be an access point ARN.
+# The ARN of the Amazon S3 bucket. If the bucket is on an Amazon Web
+# Services Outpost, this must be an access point ARN.
 #
 # @option params [String] :s3_storage_class
 # The Amazon S3 storage class that you want to store your files in when
-# this location is used as a task destination. For buckets in
-# Regions, the storage class defaults to Standard. For buckets
-# Outposts, the storage class defaults to
+# this location is used as a task destination. For buckets in Amazon Web
+# Services Regions, the storage class defaults to Standard. For buckets
+# on Outposts, the storage class defaults to Amazon Web Services S3
+# Outposts.
 #
 # For more information about S3 storage classes, see [Amazon S3 Storage
 # Classes][1]. Some storage classes have behaviors that can affect your
@@ -862,17 +1003,18 @@ module Aws::DataSync
 # [2]: https://docs.aws.amazon.com/datasync/latest/userguide/create-s3-location.html#using-storage-classes
 #
 # @option params [required, Types::S3Config] :s3_config
-# The Amazon Resource Name (ARN) of the
-#
+# The Amazon Resource Name (ARN) of the Identity and Access Management
+# (IAM) role that is used to access an Amazon S3 bucket.
 #
 # For detailed information about using such a role, see Creating a
-# Location for Amazon S3 in the *
+# Location for Amazon S3 in the *DataSync User Guide*.
 #
 # @option params [Array<String>] :agent_arns
-# If you are using DataSync on an
-# Resource Names (ARNs) of the DataSync agents deployed on
-# For more information about launching a DataSync agent on
-# Outpost, see [Deploy your DataSync agent on
+# If you are using DataSync on an Amazon Web Services Outpost, specify
+# the Amazon Resource Names (ARNs) of the DataSync agents deployed on
+# your Outpost. For more information about launching a DataSync agent on
+# an Amazon Web Services Outpost, see [Deploy your DataSync agent on
+# Outposts][1].
 #
 #
 #
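A sketch of the create_location_s3 call that these hunks document. The bucket and role ARNs are placeholders, :agent_arns is omitted because it only applies to Outposts buckets, and the :bucket_access_role_arn member of :s3_config is assumed from the S3Config shape rather than shown in this diff.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    resp = client.create_location_s3(
      s3_bucket_arn: "arn:aws:s3:::example-destination-bucket",
      subdirectory: "/backups",
      s3_storage_class: "STANDARD",
      s3_config: {
        bucket_access_role_arn: "arn:aws:iam::111122223333:role/DataSyncS3BucketAccess"
      }
    )
    puts resp.location_arn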
@@ -1025,18 +1167,21 @@ module Aws::DataSync
 # The configuration specifies options such as task scheduling, bandwidth
 # limits, etc. A task is the complete definition of a data transfer.
 #
-# When you create a task that transfers data between
-# different
-# reside in the Region where DataSync is
-# must be specified in a different
+# When you create a task that transfers data between Amazon Web Services
+# services in different Amazon Web Services Regions, one of the two
+# locations that you specify must reside in the Region where DataSync is
+# being used. The other location must be specified in a different
+# Region.
 #
-# You can transfer data between commercial
-# or between
+# You can transfer data between commercial Amazon Web Services Regions
+# except for China, or between Amazon Web Services GovCloud (US)
+# Regions.
 #
-# When you use DataSync to copy files or objects between
-# you pay for data transfer between Regions. This is
-# transfer OUT from your source Region to your
-# more information, see [Data Transfer
+# When you use DataSync to copy files or objects between Amazon Web
+# Services Regions, you pay for data transfer between Regions. This is
+# billed as data transfer OUT from your source Region to your
+# destination Region. For more information, see [Data Transfer
+# pricing][1].
 #
 #
 #
@@ -1046,7 +1191,8 @@ module Aws::DataSync
 # The Amazon Resource Name (ARN) of the source location for the task.
 #
 # @option params [required, String] :destination_location_arn
-# The Amazon Resource Name (ARN) of an
+# The Amazon Resource Name (ARN) of an Amazon Web Services storage
+# resource's location.
 #
 # @option params [String] :cloud_watch_log_group_arn
 # The Amazon Resource Name (ARN) of the Amazon CloudWatch log group that
@@ -1090,6 +1236,12 @@ module Aws::DataSync
 # The key-value pair that represents the tag that you want to add to the
 # resource. The value can be an empty string.
 #
+# @option params [Array<Types::FilterRule>] :includes
+# A list of filter rules that determines which files to include when
+# running a task. The pattern contains a single filter string that
+# consists of the patterns to include. The patterns are delimited by
+# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
+#
 # @return [Types::CreateTaskResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
 #
 # * {Types::CreateTaskResponse#task_arn #task_arn} => String
@@ -1132,6 +1284,12 @@ module Aws::DataSync
 #     value: "TagValue",
 #   },
 # ],
+# includes: [
+#   {
+#     filter_type: "SIMPLE_PATTERN", # accepts SIMPLE_PATTERN
+#     value: "FilterValue",
+#   },
+# ],
 # })
 #
 # @example Response structure
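The two CreateTask hunks above introduce the :includes parameter. A minimal sketch of creating a task with an include filter, using the pipe-delimited pattern syntax from the documentation; the location ARNs are placeholders.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    resp = client.create_task(
      source_location_arn: "arn:aws:datasync:us-east-1:111122223333:location/loc-0123456789abcdef0",
      destination_location_arn: "arn:aws:datasync:us-east-1:111122223333:location/loc-0fedcba9876543210",
      name: "nightly-sync",
      # Only transfer the two listed folders.
      includes: [
        { filter_type: "SIMPLE_PATTERN", value: "/folder1|/folder2" }
      ]
    )
    puts resp.task_arn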
@@ -1149,14 +1307,14 @@ module Aws::DataSync

 # Deletes an agent. To specify which agent to delete, use the Amazon
 # Resource Name (ARN) of the agent in your request. The operation
-# disassociates the agent from your
-# delete the agent virtual machine (VM) from your
-# environment.
+# disassociates the agent from your Amazon Web Services account.
+# However, it doesn't delete the agent virtual machine (VM) from your
+# on-premises environment.
 #
 # @option params [required, String] :agent_arn
 # The Amazon Resource Name (ARN) of the agent to delete. Use the
 # `ListAgents` operation to return a list of agents for your account and
-#
+# Amazon Web Services Region.
 #
 # @return [Struct] Returns an empty {Seahorse::Client::Response response}.
 #
@@ -1175,7 +1333,7 @@ module Aws::DataSync
 req.send_request(options)
 end

-# Deletes the configuration of a location used by
+# Deletes the configuration of a location used by DataSync.
 #
 # @option params [required, String] :location_arn
 # The Amazon Resource Name (ARN) of the location to delete.
@@ -1345,6 +1503,62 @@ module Aws::DataSync
 req.send_request(options)
 end

+# Returns metadata, such as the authentication information about the
+# Hadoop Distributed File System (HDFS) location.
+#
+# @option params [required, String] :location_arn
+# The Amazon Resource Name (ARN) of the HDFS cluster location to
+# describe.
+#
+# @return [Types::DescribeLocationHdfsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
+#
+# * {Types::DescribeLocationHdfsResponse#location_arn #location_arn} => String
+# * {Types::DescribeLocationHdfsResponse#location_uri #location_uri} => String
+# * {Types::DescribeLocationHdfsResponse#name_nodes #name_nodes} => Array<Types::HdfsNameNode>
+# * {Types::DescribeLocationHdfsResponse#block_size #block_size} => Integer
+# * {Types::DescribeLocationHdfsResponse#replication_factor #replication_factor} => Integer
+# * {Types::DescribeLocationHdfsResponse#kms_key_provider_uri #kms_key_provider_uri} => String
+# * {Types::DescribeLocationHdfsResponse#qop_configuration #qop_configuration} => Types::QopConfiguration
+# * {Types::DescribeLocationHdfsResponse#authentication_type #authentication_type} => String
+# * {Types::DescribeLocationHdfsResponse#simple_user #simple_user} => String
+# * {Types::DescribeLocationHdfsResponse#kerberos_principal #kerberos_principal} => String
+# * {Types::DescribeLocationHdfsResponse#agent_arns #agent_arns} => Array<String>
+# * {Types::DescribeLocationHdfsResponse#creation_time #creation_time} => Time
+#
+# @example Request syntax with placeholder values
+#
+#   resp = client.describe_location_hdfs({
+#     location_arn: "LocationArn", # required
+#   })
+#
+# @example Response structure
+#
+#   resp.location_arn #=> String
+#   resp.location_uri #=> String
+#   resp.name_nodes #=> Array
+#   resp.name_nodes[0].hostname #=> String
+#   resp.name_nodes[0].port #=> Integer
+#   resp.block_size #=> Integer
+#   resp.replication_factor #=> Integer
+#   resp.kms_key_provider_uri #=> String
+#   resp.qop_configuration.rpc_protection #=> String, one of "DISABLED", "AUTHENTICATION", "INTEGRITY", "PRIVACY"
+#   resp.qop_configuration.data_transfer_protection #=> String, one of "DISABLED", "AUTHENTICATION", "INTEGRITY", "PRIVACY"
+#   resp.authentication_type #=> String, one of "SIMPLE", "KERBEROS"
+#   resp.simple_user #=> String
+#   resp.kerberos_principal #=> String
+#   resp.agent_arns #=> Array
+#   resp.agent_arns[0] #=> String
+#   resp.creation_time #=> Time
+#
+# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeLocationHdfs AWS API Documentation
+#
+# @overload describe_location_hdfs(params = {})
+# @param [Hash] params ({})
+def describe_location_hdfs(params = {}, options = {})
+  req = build_request(:describe_location_hdfs, params)
+  req.send_request(options)
+end
+
 # Returns metadata, such as the path information, about an NFS location.
 #
 # @option params [required, String] :location_arn
@@ -1535,6 +1749,7 @@ module Aws::DataSync
 # * {Types::DescribeTaskResponse#error_code #error_code} => String
 # * {Types::DescribeTaskResponse#error_detail #error_detail} => String
 # * {Types::DescribeTaskResponse#creation_time #creation_time} => Time
+# * {Types::DescribeTaskResponse#includes #includes} => Array<Types::FilterRule>
 #
 # @example Request syntax with placeholder values
 #
@@ -1576,6 +1791,9 @@ module Aws::DataSync
 #   resp.error_code #=> String
 #   resp.error_detail #=> String
 #   resp.creation_time #=> Time
+#   resp.includes #=> Array
+#   resp.includes[0].filter_type #=> String, one of "SIMPLE_PATTERN"
+#   resp.includes[0].value #=> String
 #
 # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/DescribeTask AWS API Documentation
 #
@@ -1661,9 +1879,9 @@ module Aws::DataSync
 req.send_request(options)
 end

-# Returns a list of agents owned by an
-# specified in the request. The returned
-# Resource Name (ARN).
+# Returns a list of agents owned by an Amazon Web Services account in
+# the Amazon Web Services Region specified in the request. The returned
+# list is ordered by agent Amazon Resource Name (ARN).
 #
 # By default, this operation returns a maximum of 100 agents. This
 # operation supports pagination that enables you to optionally reduce
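As the ListAgents hunk above notes, results are capped at 100 per call and paginated. A sketch that walks the pages with the next_token value returned by each call; the region is a placeholder.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    params = { max_results: 100 }
    loop do
      resp = client.list_agents(params)
      resp.agents.each { |agent| puts "#{agent.status}  #{agent.agent_arn}" }
      # Stop when there is no further page.
      break unless resp.next_token
      params[:next_token] = resp.next_token
    end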
@@ -1917,7 +2135,7 @@ module Aws::DataSync
 # PREPARING \| TRANSFERRING \| VERIFYING \| SUCCESS/FAILURE.
 #
 # For detailed information, see the Task Execution section in the
-# Components and Terminology topic in the *
+# Components and Terminology topic in the *DataSync User Guide*.
 #
 # @option params [required, String] :task_arn
 # The Amazon Resource Name (ARN) of the task to start.
@@ -1942,7 +2160,13 @@ module Aws::DataSync
 # A list of filter rules that determines which files to include when
 # running a task. The pattern should contain a single filter string that
 # consists of the patterns to include. The patterns are delimited by
-# "\|" (that is, a pipe)
+# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
+#
+# @option params [Array<Types::FilterRule>] :excludes
+# A list of filter rules that determines which files to exclude from a
+# task. The list contains a single filter string that consists of the
+# patterns to exclude. The patterns are delimited by "\|" (that is, a
+# pipe), for example, `"/folder1|/folder2"`.
 #
 # @return [Types::StartTaskExecutionResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
 #
@@ -1974,6 +2198,12 @@ module Aws::DataSync
 #     value: "FilterValue",
 #   },
 # ],
+# excludes: [
+#   {
+#     filter_type: "SIMPLE_PATTERN", # accepts SIMPLE_PATTERN
+#     value: "FilterValue",
+#   },
+# ],
 # })
 #
 # @example Response structure
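The two StartTaskExecution hunks above add the :excludes parameter alongside the existing :includes filter. A minimal sketch of starting an execution with both filters; the task ARN is a placeholder.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    resp = client.start_task_execution(
      task_arn: "arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0",
      includes: [{ filter_type: "SIMPLE_PATTERN", value: "/folder1|/folder2" }],
      # :excludes is the parameter added in this release.
      excludes: [{ filter_type: "SIMPLE_PATTERN", value: "/folder1/tmp" }]
    )
    puts resp.task_execution_arn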
@@ -1989,7 +2219,7 @@ module Aws::DataSync
 req.send_request(options)
 end

-# Applies a key-value pair to an
+# Applies a key-value pair to an Amazon Web Services resource.
 #
 # @option params [required, String] :resource_arn
 # The Amazon Resource Name (ARN) of the resource to apply the tag to.
@@ -2020,7 +2250,7 @@ module Aws::DataSync
 req.send_request(options)
 end

-# Removes a tag from an
+# Removes a tag from an Amazon Web Services resource.
 #
 # @option params [required, String] :resource_arn
 # The Amazon Resource Name (ARN) of the resource to remove the tag from.
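The TagResource and UntagResource hunks above only reword the operation summaries. For context, a sketch that applies a tag to a task and then removes it by key; the task ARN and tag values are placeholders.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")
    task_arn = "arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0" # placeholder

    # Apply a key-value pair to the resource.
    client.tag_resource(
      resource_arn: task_arn,
      tags: [{ key: "Project", value: "migration" }]
    )

    # Remove the tag again by its key.
    client.untag_resource(resource_arn: task_arn, keys: ["Project"])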
@@ -2072,6 +2302,101 @@ module Aws::DataSync
 req.send_request(options)
 end

+# Updates some parameters of a previously created location for a Hadoop
+# Distributed File System cluster.
+#
+# @option params [required, String] :location_arn
+# The Amazon Resource Name (ARN) of the source HDFS cluster location.
+#
+# @option params [String] :subdirectory
+# A subdirectory in the HDFS cluster. This subdirectory is used to read
+# data from or write data to the HDFS cluster.
+#
+# @option params [Array<Types::HdfsNameNode>] :name_nodes
+# The NameNode that manages the HDFS namespace. The NameNode performs
+# operations such as opening, closing, and renaming files and
+# directories. The NameNode contains the information to map blocks of
+# data to the DataNodes. You can use only one NameNode.
+#
+# @option params [Integer] :block_size
+# The size of the data blocks to write into the HDFS cluster.
+#
+# @option params [Integer] :replication_factor
+# The number of DataNodes to replicate the data to when writing to the
+# HDFS cluster.
+#
+# @option params [String] :kms_key_provider_uri
+# The URI of the HDFS cluster's Key Management Server (KMS).
+#
+# @option params [Types::QopConfiguration] :qop_configuration
+# The Quality of Protection (QOP) configuration specifies the Remote
+# Procedure Call (RPC) and data transfer privacy settings configured on
+# the Hadoop Distributed File System (HDFS) cluster.
+#
+# @option params [String] :authentication_type
+# The type of authentication used to determine the identity of the user.
+#
+# @option params [String] :simple_user
+# The user name used to identify the client on the host operating
+# system.
+#
+# @option params [String] :kerberos_principal
+# The Kerberos principal with access to the files and folders on the
+# HDFS cluster.
+#
+# @option params [String, StringIO, File] :kerberos_keytab
+# The Kerberos key table (keytab) that contains mappings between the
+# defined Kerberos principal and the encrypted keys. You can load the
+# keytab from a file by providing the file's address. If you use the
+# AWS CLI, it performs base64 encoding for you. Otherwise, provide the
+# base64-encoded text.
+#
+# @option params [String, StringIO, File] :kerberos_krb_5_conf
+# The `krb5.conf` file that contains the Kerberos configuration
+# information. You can load the `krb5.conf` file by providing the
+# file's address. If you're using the AWS CLI, it performs the base64
+# encoding for you. Otherwise, provide the base64-encoded text.
+#
+# @option params [Array<String>] :agent_arns
+# The ARNs of the agents that are used to connect to the HDFS cluster.
+#
+# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
+#
+# @example Request syntax with placeholder values
+#
+#   resp = client.update_location_hdfs({
+#     location_arn: "LocationArn", # required
+#     subdirectory: "HdfsSubdirectory",
+#     name_nodes: [
+#       {
+#         hostname: "HdfsServerHostname", # required
+#         port: 1, # required
+#       },
+#     ],
+#     block_size: 1,
+#     replication_factor: 1,
+#     kms_key_provider_uri: "KmsKeyProviderUri",
+#     qop_configuration: {
+#       rpc_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+#       data_transfer_protection: "DISABLED", # accepts DISABLED, AUTHENTICATION, INTEGRITY, PRIVACY
+#     },
+#     authentication_type: "SIMPLE", # accepts SIMPLE, KERBEROS
+#     simple_user: "HdfsUser",
+#     kerberos_principal: "KerberosPrincipal",
+#     kerberos_keytab: "data",
+#     kerberos_krb_5_conf: "data",
+#     agent_arns: ["AgentArn"],
+#   })
+#
+# @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateLocationHdfs AWS API Documentation
+#
+# @overload update_location_hdfs(params = {})
+# @param [Hash] params ({})
+def update_location_hdfs(params = {}, options = {})
+  req = build_request(:update_location_hdfs, params)
+  req.send_request(options)
+end
+
 # Updates some of the parameters of a previously created location for
 # Network File System (NFS) access. For information about creating an
 # NFS location, see [Creating a location for NFS][1].
@@ -2104,8 +2429,8 @@ module Aws::DataSync
 # the files. For the agent to access directories, you must additionally
 # enable all execute access.
 #
-# If you are copying data to or from your
-# Server on
+# If you are copying data to or from your Snowcone device, see [NFS
+# Server on Snowcone][1] for more information.
 #
 # For information about NFS export configuration, see 18.7. The
 # /etc/exports Configuration File in the Red Hat Enterprise Linux
@@ -2321,7 +2646,7 @@ module Aws::DataSync
 # A list of filter rules that determines which files to exclude from a
 # task. The list should contain a single filter string that consists of
 # the patterns to exclude. The patterns are delimited by "\|" (that
-# is, a pipe), for example
+# is, a pipe), for example, `"/folder1|/folder2"`.
 #
 # @option params [Types::TaskSchedule] :schedule
 # Specifies a schedule used to periodically transfer files from a source
@@ -2339,8 +2664,14 @@ module Aws::DataSync
 # The name of the task to update.
 #
 # @option params [String] :cloud_watch_log_group_arn
-# The Amazon Resource Name (ARN) of the resource name of the
-#
+# The Amazon Resource Name (ARN) of the resource name of the Amazon
+# CloudWatch log group.
+#
+# @option params [Array<Types::FilterRule>] :includes
+# A list of filter rules that determines which files to include when
+# running a task. The pattern contains a single filter string that
+# consists of the patterns to include. The patterns are delimited by
+# "\|" (that is, a pipe), for example, `"/folder1|/folder2"`.
 #
 # @return [Struct] Returns an empty {Seahorse::Client::Response response}.
 #
@@ -2375,6 +2706,12 @@ module Aws::DataSync
 #   },
 #   name: "TagValue",
 #   cloud_watch_log_group_arn: "LogGroupArn",
+#   includes: [
+#     {
+#       filter_type: "SIMPLE_PATTERN", # accepts SIMPLE_PATTERN
+#       value: "FilterValue",
+#     },
+#   ],
 # })
 #
 # @see http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/UpdateTask AWS API Documentation
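The UpdateTask hunks above add the :includes parameter to the update call as well. A minimal sketch of updating an existing task's include filter; the task ARN is a placeholder and the call returns an empty response.

    require "aws-sdk-datasync"

    client = Aws::DataSync::Client.new(region: "us-east-1")

    client.update_task(
      task_arn: "arn:aws:datasync:us-east-1:111122223333:task/task-0123456789abcdef0",
      name: "nightly-sync",
      # :includes on update_task is the parameter added in this release.
      includes: [{ filter_type: "SIMPLE_PATTERN", value: "/folder1|/folder2" }]
    )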
@@ -2467,7 +2804,7 @@ module Aws::DataSync
 params: params,
 config: config)
 context[:gem_name] = 'aws-sdk-datasync'
-context[:gem_version] = '1.
+context[:gem_version] = '1.38.0'
 Seahorse::Client::Request.new(handlers, context)
 end
