@aws-sdk/client-transfer 3.264.0 → 3.266.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,5 +3,5 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.ruleSet = void 0;
4
4
  const q = "fn", r = "argv", s = "ref";
5
5
  const a = true, b = false, c = "String", d = "PartitionResult", e = "tree", f = "error", g = "endpoint", h = { "required": true, "default": false, "type": "Boolean" }, i = { [s]: "Endpoint" }, j = { [q]: "booleanEquals", [r]: [{ [s]: "UseFIPS" }, true] }, k = { [q]: "booleanEquals", [r]: [{ [s]: "UseDualStack" }, true] }, l = {}, m = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: d }, "supportsFIPS"] }] }, n = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: d }, "supportsDualStack"] }] }, o = [j], p = [k];
6
- const _data = { version: "1.0", parameters: { Region: { required: a, type: c }, UseDualStack: h, UseFIPS: h, Endpoint: { required: b, type: c } }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: d }], type: e, rules: [{ conditions: [{ [q]: "isSet", [r]: [i] }], type: e, rules: [{ conditions: o, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: f }, { type: e, rules: [{ conditions: p, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: f }, { endpoint: { url: i, properties: l, headers: l }, type: g }] }] }, { conditions: [j, k], type: e, rules: [{ conditions: [m, n], type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: f }] }, { conditions: o, type: e, rules: [{ conditions: [m], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: f }] }, { conditions: p, type: e, rules: [{ conditions: [n], type: e, rules: [{ endpoint: { url: "https://transfer.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: f }] }, { endpoint: { url: "https://transfer.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] };
6
+ const _data = { version: "1.0", parameters: { Region: { required: a, type: c }, UseDualStack: h, UseFIPS: h, Endpoint: { required: b, type: c } }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: d }], type: e, rules: [{ conditions: [{ [q]: "isSet", [r]: [i] }], type: e, rules: [{ conditions: o, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: f }, { type: e, rules: [{ conditions: p, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: f }, { endpoint: { url: i, properties: l, headers: l }, type: g }] }] }, { conditions: [j, k], type: e, rules: [{ conditions: [m, n], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: f }] }, { conditions: o, type: e, rules: [{ conditions: [m], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: f }] }, { conditions: p, type: e, rules: [{ conditions: [n], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: f }] }, { type: e, rules: [{ endpoint: { url: "https://transfer.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] }] };
7
7
  exports.ruleSet = _data;
@@ -1,4 +1,4 @@
1
1
  const q = "fn", r = "argv", s = "ref";
2
2
  const a = true, b = false, c = "String", d = "PartitionResult", e = "tree", f = "error", g = "endpoint", h = { "required": true, "default": false, "type": "Boolean" }, i = { [s]: "Endpoint" }, j = { [q]: "booleanEquals", [r]: [{ [s]: "UseFIPS" }, true] }, k = { [q]: "booleanEquals", [r]: [{ [s]: "UseDualStack" }, true] }, l = {}, m = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: d }, "supportsFIPS"] }] }, n = { [q]: "booleanEquals", [r]: [true, { [q]: "getAttr", [r]: [{ [s]: d }, "supportsDualStack"] }] }, o = [j], p = [k];
3
- const _data = { version: "1.0", parameters: { Region: { required: a, type: c }, UseDualStack: h, UseFIPS: h, Endpoint: { required: b, type: c } }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: d }], type: e, rules: [{ conditions: [{ [q]: "isSet", [r]: [i] }], type: e, rules: [{ conditions: o, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: f }, { type: e, rules: [{ conditions: p, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: f }, { endpoint: { url: i, properties: l, headers: l }, type: g }] }] }, { conditions: [j, k], type: e, rules: [{ conditions: [m, n], type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: f }] }, { conditions: o, type: e, rules: [{ conditions: [m], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: f }] }, { conditions: p, type: e, rules: [{ conditions: [n], type: e, rules: [{ endpoint: { url: "https://transfer.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: f }] }, { endpoint: { url: "https://transfer.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] };
3
+ const _data = { version: "1.0", parameters: { Region: { required: a, type: c }, UseDualStack: h, UseFIPS: h, Endpoint: { required: b, type: c } }, rules: [{ conditions: [{ [q]: "aws.partition", [r]: [{ [s]: "Region" }], assign: d }], type: e, rules: [{ conditions: [{ [q]: "isSet", [r]: [i] }], type: e, rules: [{ conditions: o, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: f }, { type: e, rules: [{ conditions: p, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: f }, { endpoint: { url: i, properties: l, headers: l }, type: g }] }] }, { conditions: [j, k], type: e, rules: [{ conditions: [m, n], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: f }] }, { conditions: o, type: e, rules: [{ conditions: [m], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer-fips.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "FIPS is enabled but this partition does not support FIPS", type: f }] }, { conditions: p, type: e, rules: [{ conditions: [n], type: e, rules: [{ type: e, rules: [{ endpoint: { url: "https://transfer.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: l, headers: l }, type: g }] }] }, { error: "DualStack is enabled but this partition does not support DualStack", type: f }] }, { type: e, rules: [{ endpoint: { url: "https://transfer.{Region}.{PartitionResult#dnsSuffix}", properties: l, headers: l }, type: g }] }] }] };
4
4
  export const ruleSet = _data;
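
Both ruleset hunks above make the same change (once for the CommonJS build, once for the ES build): several endpoint rules gain an extra nested "tree" level, but the resolved URLs are unchanged. The ruleset parameters still map onto the usual client configuration. A minimal sketch, assuming the standard AWS SDK for JavaScript v3 options (`region`, `useFipsEndpoint`, `useDualstackEndpoint`, and `endpoint` for a custom override):

import { TransferClient, ListServersCommand } from "@aws-sdk/client-transfer";

// region feeds the Region parameter, useFipsEndpoint feeds UseFIPS, and
// useDualstackEndpoint feeds UseDualStack in the ruleset above. With these
// settings the FIPS branch resolves to https://transfer-fips.us-east-1.amazonaws.com.
const client = new TransferClient({
  region: "us-east-1",
  useFipsEndpoint: true,
  useDualstackEndpoint: false,
  // endpoint: "https://transfer.example.internal", // placeholder; a custom Endpoint overrides both flags
});

const { Servers } = await client.send(new ListServersCommand({}));
console.log(`servers: ${Servers?.length ?? 0}`);

Note that combining a custom endpoint with `useFipsEndpoint: true` trips the "FIPS and custom endpoint are not supported" error rule shown in the ruleset.
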
@@ -68,6 +68,10 @@ export interface As2ConnectorConfig {
68
68
  Compression?: CompressionEnum | string;
69
69
  /**
70
70
  * <p>The algorithm that is used to encrypt the file.</p>
71
+ * <note>
72
+ * <p>You can only specify <code>NONE</code> if the URL for your connector uses HTTPS. This ensures that
73
+ * no traffic is sent in clear text.</p>
74
+ * </note>
71
75
  */
72
76
  EncryptionAlgorithm?: EncryptionAlg | string;
73
77
  /**
@@ -77,7 +81,7 @@ export interface As2ConnectorConfig {
77
81
  /**
78
82
  * <p>The signing algorithm for the MDN response.</p>
79
83
  * <note>
80
- * <p>If set to DEFAULT (or not set at all), the value for <code>SigningAlogorithm</code> is used.</p>
84
+ * <p>If set to DEFAULT (or not set at all), the value for <code>SigningAlgorithm</code> is used.</p>
81
85
  * </note>
82
86
  */
83
87
  MdnSigningAlgorithm?: MdnSigningAlg | string;
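
The new note above restricts `EncryptionAlgorithm: NONE` to connectors whose URL uses HTTPS, and the `MdnSigningAlgorithm` text now refers to the corrected `SigningAlgorithm` name. A hedged sketch of an `As2ConnectorConfig` passed to `CreateConnectorCommand`; the URL, role ARN, and profile IDs are placeholders:

import { TransferClient, CreateConnectorCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

await client.send(new CreateConnectorCommand({
  Url: "https://partner.example.com/as2",                          // HTTPS, so NONE is allowed below
  AccessRole: "arn:aws:iam::111122223333:role/ExampleAccessRole",  // placeholder
  As2Config: {
    LocalProfileId: "p-1111aaaa2222bbbb3",                         // placeholder profile IDs
    PartnerProfileId: "p-3333cccc4444dddd5",
    EncryptionAlgorithm: "NONE",     // only valid because the connector URL uses HTTPS
    SigningAlgorithm: "SHA256",
    MdnSigningAlgorithm: "DEFAULT",  // DEFAULT falls back to SigningAlgorithm
    MdnResponse: "SYNC",
    Compression: "ZLIB",
  },
}));
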
@@ -128,7 +132,8 @@ export declare class ConflictException extends __BaseException {
128
132
  constructor(opts: __ExceptionOptionType<ConflictException, __BaseException>);
129
133
  }
130
134
  /**
131
- * <p>Reserved for future use.</p>
135
+ * <p>Specifies the details for the file location for the file that's being used in the workflow. Only applicable if you are using Amazon Elastic File System
136
+ * (Amazon EFS) for storage.</p>
132
137
  * <p>
133
138
  * </p>
134
139
  */
@@ -143,7 +148,7 @@ export interface EfsFileLocation {
143
148
  Path?: string;
144
149
  }
145
150
  /**
146
- * <p>Specifies the customer input S3 file location. If it is used inside <code>copyStepDetails.DestinationFileLocation</code>, it should be the S3 copy destination.</p>
151
+ * <p>Specifies the customer input Amazon S3 file location. If it is used inside <code>copyStepDetails.DestinationFileLocation</code>, it should be the S3 copy destination.</p>
147
152
  * <p>
148
153
  * You need to provide the bucket and key.
149
154
  * The key can represent either a path or a file.
@@ -166,15 +171,16 @@ export interface S3InputFileLocation {
166
171
  Key?: string;
167
172
  }
168
173
  /**
169
- * <p>Specifies the location for the file being copied. Only applicable for the Copy type of workflow steps.</p>
174
+ * <p>Specifies the location for the file that's being processed.</p>
170
175
  */
171
176
  export interface InputFileLocation {
172
177
  /**
173
- * <p>Specifies the details for the S3 file being copied.</p>
178
+ * <p>Specifies the details for the Amazon S3 file that's being copied or decrypted.</p>
174
179
  */
175
180
  S3FileLocation?: S3InputFileLocation;
176
181
  /**
177
- * <p>Reserved for future use.</p>
182
+ * <p>Specifies the details for the Amazon Elastic File System (Amazon EFS) file that's being
183
+ * decrypted.</p>
178
184
  */
179
185
  EfsFileLocation?: EfsFileLocation;
180
186
  }
@@ -191,13 +197,26 @@ export interface CopyStepDetails {
191
197
  */
192
198
  Name?: string;
193
199
  /**
194
- * <p>Specifies the location for the file being copied. Only applicable for Copy type workflow
195
- * steps. Use <code>${Transfer:username}</code> in this field to parametrize the destination
196
- * prefix by username.</p>
200
+ * <p>Specifies the location for the file being copied. Use <code>${Transfer:username}</code> or <code>${Transfer:UploadDate}</code> in this field to parametrize the destination
201
+ * prefix by username or uploaded date.</p>
202
+ * <ul>
203
+ * <li>
204
+ * <p>Set the value of <code>DestinationFileLocation</code> to <code>${Transfer:username}</code> to copy uploaded files to
205
+ * an Amazon S3 bucket that is prefixed with the name of the Transfer Family user that uploaded the file.</p>
206
+ * </li>
207
+ * <li>
208
+ * <p>Set the value of <code>DestinationFileLocation</code> to <code>${Transfer:UploadDate}</code> to copy uploaded files to
209
+ * an Amazon S3 bucket that is prefixed with the date of the upload.</p>
210
+ * <note>
211
+ * <p>The system resolves <code>UploadDate</code> to a date format of <i>YYYY-MM-DD</i>, based on the date the file
212
+ * is uploaded.</p>
213
+ * </note>
214
+ * </li>
215
+ * </ul>
197
216
  */
198
217
  DestinationFileLocation?: InputFileLocation;
199
218
  /**
200
- * <p>A flag that indicates whether or not to overwrite an existing file of the same name.
219
+ * <p>A flag that indicates whether to overwrite an existing file of the same name.
201
220
  * The default is <code>FALSE</code>.</p>
202
221
  */
203
222
  OverwriteExisting?: OverwriteExisting | string;
@@ -206,12 +225,12 @@ export interface CopyStepDetails {
206
225
  * for the workflow.</p>
207
226
  * <ul>
208
227
  * <li>
209
- * <p>Enter <code>${previous.file}</code> to use the previous file as the input.
228
+ * <p>To use the previous file as the input, enter <code>${previous.file}</code>.
210
229
  * In this case, this workflow step uses the output file from the previous workflow step as input.
211
230
  * This is the default value.</p>
212
231
  * </li>
213
232
  * <li>
214
- * <p>Enter <code>${original.file}</code> to use the originally-uploaded file location as input for this step.</p>
233
+ * <p>To use the originally uploaded file location as input for this step, enter <code>${original.file}</code>.</p>
215
234
  * </li>
216
235
  * </ul>
217
236
  */
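
The reworked `CopyStepDetails` text above documents the `${Transfer:username}` and `${Transfer:UploadDate}` destination variables and the `${previous.file}` / `${original.file}` source options. A small sketch of a copy step built from the shapes in this file; the bucket and step names are illustrative, and the variable casing follows the doc text:

import type { CopyStepDetails } from "@aws-sdk/client-transfer";

// Copies each processed file into a per-date prefix; the service resolves
// ${Transfer:UploadDate} to YYYY-MM-DD when the step runs.
const copyStep: CopyStepDetails = {
  Name: "copy-to-dated-prefix",
  DestinationFileLocation: {
    S3FileLocation: {
      Bucket: "DOC-EXAMPLE-BUCKET",
      Key: "${Transfer:UploadDate}/",   // or "${Transfer:username}/" for per-user prefixes
    },
  },
  OverwriteExisting: "FALSE",
  SourceFileLocation: "${previous.file}", // the default: output of the previous step
};
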
@@ -459,7 +478,7 @@ export interface CreateAgreementRequest {
459
478
  /**
460
479
  * <p>The landing directory (folder) for files transferred by using the AS2 protocol.</p>
461
480
  * <p>A <code>BaseDirectory</code> example is
462
- * <i>DOC-EXAMPLE-BUCKET</i>/<i>home</i>/<i>mydirectory</i>.</p>
481
+ * <code>/DOC-EXAMPLE-BUCKET/home/mydirectory</code>.</p>
463
482
  */
464
483
  BaseDirectory: string | undefined;
465
484
  /**
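
The corrected `BaseDirectory` example above is now rendered as a path (`/DOC-EXAMPLE-BUCKET/home/mydirectory`). A hedged sketch of how it is supplied to `CreateAgreementCommand`; the server ID, profile IDs, and role ARN are placeholders:

import { TransferClient, CreateAgreementCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

await client.send(new CreateAgreementCommand({
  ServerId: "s-1234567890abcdef0",                                 // placeholder
  LocalProfileId: "p-1111aaaa2222bbbb3",                           // placeholder
  PartnerProfileId: "p-3333cccc4444dddd5",                         // placeholder
  AccessRole: "arn:aws:iam::111122223333:role/ExampleAccessRole",  // placeholder
  BaseDirectory: "/DOC-EXAMPLE-BUCKET/home/mydirectory",           // landing folder for AS2 files
}));
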
@@ -787,7 +806,7 @@ export declare enum Protocol {
787
806
  }
788
807
  /**
789
808
  * <p>Specifies the workflow ID for the workflow to assign and the execution role that's used for executing the workflow.</p>
790
- * <p>In additon to a workflow to execute when a file is uploaded completely, <code>WorkflowDeatails</code> can also contain a
809
+ * <p>In addition to a workflow to execute when a file is uploaded completely, <code>WorkflowDetails</code> can also contain a
791
810
  * workflow ID (and execution role) for a workflow to execute on partial upload. A partial upload occurs when a file is open when
792
811
  * the session disconnects.</p>
793
812
  */
@@ -916,7 +935,7 @@ export interface CreateServerRequest {
916
935
  * server to a new server, don't update the host key. Accidentally changing a
917
936
  * server's host key can be disruptive.</p>
918
937
  * </important>
919
- * <p>For more information, see <a href="https://docs.aws.amazon.com/transfer/latest/userguide/edit-server-config.html#configuring-servers-change-host-key">Update host keys for your SFTP-enabled server</a> in the <i>Transfer Family User Guide</i>.</p>
938
+ * <p>For more information, see <a href="https://docs.aws.amazon.com/transfer/latest/userguide/edit-server-config.html#configuring-servers-change-host-key">Manage host keys for your SFTP-enabled server</a> in the <i>Transfer Family User Guide</i>.</p>
920
939
  */
921
940
  HostKey?: string;
922
941
  /**
@@ -999,7 +1018,7 @@ export interface CreateServerRequest {
999
1018
  * <li>
1000
1019
  * <p>If <code>Protocol</code> includes either <code>FTP</code> or <code>FTPS</code>, then the
1001
1020
  * <code>EndpointType</code> must be <code>VPC</code> and the
1002
- * <code>IdentityProviderType</code> must be <code>AWS_DIRECTORY_SERVICE</code> or <code>API_GATEWAY</code>.</p>
1021
+ * <code>IdentityProviderType</code> must be either <code>AWS_DIRECTORY_SERVICE</code>, <code>AWS_LAMBDA</code>, or <code>API_GATEWAY</code>.</p>
1003
1022
  * </li>
1004
1023
  * <li>
1005
1024
  * <p>If <code>Protocol</code> includes <code>FTP</code>, then
@@ -1007,8 +1026,8 @@ export interface CreateServerRequest {
1007
1026
  * </li>
1008
1027
  * <li>
1009
1028
  * <p>If <code>Protocol</code> is set only to <code>SFTP</code>, the <code>EndpointType</code>
1010
- * can be set to <code>PUBLIC</code> and the <code>IdentityProviderType</code> can be set to
1011
- * <code>SERVICE_MANAGED</code>.</p>
1029
+ * can be set to <code>PUBLIC</code> and the <code>IdentityProviderType</code> can be set to any of the supported identity types:
1030
+ * <code>SERVICE_MANAGED</code>, <code>AWS_DIRECTORY_SERVICE</code>, <code>AWS_LAMBDA</code>, or <code>API_GATEWAY</code>.</p>
1012
1031
  * </li>
1013
1032
  * <li>
1014
1033
  * <p>If <code>Protocol</code> includes <code>AS2</code>, then the
@@ -1056,7 +1075,7 @@ export interface CreateServerRequest {
1056
1075
  Tags?: Tag[];
1057
1076
  /**
1058
1077
  * <p>Specifies the workflow ID for the workflow to assign and the execution role that's used for executing the workflow.</p>
1059
- * <p>In additon to a workflow to execute when a file is uploaded completely, <code>WorkflowDeatails</code> can also contain a
1078
+ * <p>In addition to a workflow to execute when a file is uploaded completely, <code>WorkflowDetails</code> can also contain a
1060
1079
  * workflow ID (and execution role) for a workflow to execute on partial upload. A partial upload occurs when a file is open when
1061
1080
  * the session disconnects.</p>
1062
1081
  */
@@ -1144,7 +1163,23 @@ export interface CreateUserRequest {
1144
1163
  /**
1145
1164
  * <p>The public portion of the Secure Shell (SSH) key used to authenticate the user to the
1146
1165
  * server.</p>
1166
+ * <p>The three standard SSH public key format elements are <code><key type></code>,
1167
+ * <code><body base64></code>, and an optional <code><comment></code>, with spaces
1168
+ * between each element.</p>
1147
1169
  * <p>Transfer Family accepts RSA, ECDSA, and ED25519 keys.</p>
1170
+ * <ul>
1171
+ * <li>
1172
+ * <p>For RSA keys, the key type is <code>ssh-rsa</code>.</p>
1173
+ * </li>
1174
+ * <li>
1175
+ * <p>For ED25519 keys, the key type is <code>ssh-ed25519</code>.</p>
1176
+ * </li>
1177
+ * <li>
1178
+ * <p>For ECDSA keys, the key type is either <code>ecdsa-sha2-nistp256</code>,
1179
+ * <code>ecdsa-sha2-nistp384</code>, or <code>ecdsa-sha2-nistp521</code>, depending on the
1180
+ * size of the key you generated.</p>
1181
+ * </li>
1182
+ * </ul>
1148
1183
  */
1149
1184
  SshPublicKeyBody?: string;
1150
1185
  /**
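
The expanded `SshPublicKeyBody` text above spells out the `<key type> <body base64> <comment>` layout and the accepted key types. A sketch of supplying one to `CreateUserCommand`; the key body is elided placeholder material, and the IDs and ARN are hypothetical:

import { TransferClient, CreateUserCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

// "<key type> <base64 body> <optional comment>" -- here an ED25519 key (placeholder body).
const sshPublicKeyBody = "ssh-ed25519 AAAAC3Nza...elided... user@example.com";

await client.send(new CreateUserCommand({
  ServerId: "s-1234567890abcdef0",                                   // placeholder
  UserName: "example-user",
  Role: "arn:aws:iam::111122223333:role/ExampleTransferUserRole",    // placeholder
  HomeDirectory: "/DOC-EXAMPLE-BUCKET/home/example-user",
  SshPublicKeyBody: sshPublicKeyBody,
}));
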
@@ -1191,12 +1226,12 @@ export interface CustomStepDetails {
1191
1226
  * for the workflow.</p>
1192
1227
  * <ul>
1193
1228
  * <li>
1194
- * <p>Enter <code>${previous.file}</code> to use the previous file as the input.
1229
+ * <p>To use the previous file as the input, enter <code>${previous.file}</code>.
1195
1230
  * In this case, this workflow step uses the output file from the previous workflow step as input.
1196
1231
  * This is the default value.</p>
1197
1232
  * </li>
1198
1233
  * <li>
1199
- * <p>Enter <code>${original.file}</code> to use the originally-uploaded file location as input for this step.</p>
1234
+ * <p>To use the originally uploaded file location as input for this step, enter <code>${original.file}</code>.</p>
1200
1235
  * </li>
1201
1236
  * </ul>
1202
1237
  */
@@ -1205,13 +1240,40 @@ export interface CustomStepDetails {
1205
1240
  export declare enum EncryptionType {
1206
1241
  PGP = "PGP"
1207
1242
  }
1243
+ /**
1244
+ * <p>Each step type has its own <code>StepDetails</code> structure.</p>
1245
+ */
1208
1246
  export interface DecryptStepDetails {
1247
+ /**
1248
+ * <p>The name of the step, used as an identifier.</p>
1249
+ */
1209
1250
  Name?: string;
1251
+ /**
1252
+ * <p>The type of encryption used. Currently, this value must be <code>PGP</code>.</p>
1253
+ */
1210
1254
  Type: EncryptionType | string | undefined;
1255
+ /**
1256
+ * <p>Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file
1257
+ * for the workflow.</p>
1258
+ * <ul>
1259
+ * <li>
1260
+ * <p>To use the previous file as the input, enter <code>${previous.file}</code>.
1261
+ * In this case, this workflow step uses the output file from the previous workflow step as input.
1262
+ * This is the default value.</p>
1263
+ * </li>
1264
+ * <li>
1265
+ * <p>To use the originally uploaded file location as input for this step, enter <code>${original.file}</code>.</p>
1266
+ * </li>
1267
+ * </ul>
1268
+ */
1211
1269
  SourceFileLocation?: string;
1270
+ /**
1271
+ * <p>A flag that indicates whether to overwrite an existing file of the same name.
1272
+ * The default is <code>FALSE</code>.</p>
1273
+ */
1212
1274
  OverwriteExisting?: OverwriteExisting | string;
1213
1275
  /**
1214
- * <p>Specifies the location for the file being copied. Only applicable for the Copy type of workflow steps.</p>
1276
+ * <p>Specifies the location for the file that's being processed.</p>
1215
1277
  */
1216
1278
  DestinationFileLocation: InputFileLocation | undefined;
1217
1279
  }
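
`DecryptStepDetails` gains full field documentation above. A minimal sketch of a `DECRYPT` workflow step assembled from those fields; the bucket and step name are illustrative:

import type { WorkflowStep } from "@aws-sdk/client-transfer";

// Decrypts a PGP-encrypted upload into a separate S3 prefix.
const decryptStep: WorkflowStep = {
  Type: "DECRYPT",
  DecryptStepDetails: {
    Name: "decrypt-upload",
    Type: "PGP",                            // currently the only supported encryption type
    SourceFileLocation: "${original.file}",
    OverwriteExisting: "FALSE",
    DestinationFileLocation: {
      S3FileLocation: { Bucket: "DOC-EXAMPLE-BUCKET", Key: "decrypted/" },
    },
  },
};
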
@@ -1228,12 +1290,12 @@ export interface DeleteStepDetails {
1228
1290
  * for the workflow.</p>
1229
1291
  * <ul>
1230
1292
  * <li>
1231
- * <p>Enter <code>${previous.file}</code> to use the previous file as the input.
1293
+ * <p>To use the previous file as the input, enter <code>${previous.file}</code>.
1232
1294
  * In this case, this workflow step uses the output file from the previous workflow step as input.
1233
1295
  * This is the default value.</p>
1234
1296
  * </li>
1235
1297
  * <li>
1236
- * <p>Enter <code>${original.file}</code> to use the originally-uploaded file location as input for this step.</p>
1298
+ * <p>To use the originally uploaded file location as input for this step, enter <code>${original.file}</code>.</p>
1237
1299
  * </li>
1238
1300
  * </ul>
1239
1301
  */
@@ -1270,12 +1332,12 @@ export interface TagStepDetails {
1270
1332
  * for the workflow.</p>
1271
1333
  * <ul>
1272
1334
  * <li>
1273
- * <p>Enter <code>${previous.file}</code> to use the previous file as the input.
1335
+ * <p>To use the previous file as the input, enter <code>${previous.file}</code>.
1274
1336
  * In this case, this workflow step uses the output file from the previous workflow step as input.
1275
1337
  * This is the default value.</p>
1276
1338
  * </li>
1277
1339
  * <li>
1278
- * <p>Enter <code>${original.file}</code> to use the originally-uploaded file location as input for this step.</p>
1340
+ * <p>To use the originally uploaded file location as input for this step, enter <code>${original.file}</code>.</p>
1279
1341
  * </li>
1280
1342
  * </ul>
1281
1343
  */
@@ -1299,19 +1361,33 @@ export interface WorkflowStep {
1299
1361
  * <ul>
1300
1362
  * <li>
1301
1363
  * <p>
1302
- * <i>COPY</i>: Copy the file to another location.</p>
1364
+ * <b>
1365
+ * <code>COPY</code>
1366
+ * </b> - Copy the file to another location.</p>
1367
+ * </li>
1368
+ * <li>
1369
+ * <p>
1370
+ * <b>
1371
+ * <code>CUSTOM</code>
1372
+ * </b> - Perform a custom step with an Lambda function target.</p>
1303
1373
  * </li>
1304
1374
  * <li>
1305
1375
  * <p>
1306
- * <i>CUSTOM</i>: Perform a custom step with an Lambda function target.</p>
1376
+ * <b>
1377
+ * <code>DECRYPT</code>
1378
+ * </b> - Decrypt a file that was encrypted before it was uploaded.</p>
1307
1379
  * </li>
1308
1380
  * <li>
1309
1381
  * <p>
1310
- * <i>DELETE</i>: Delete the file.</p>
1382
+ * <b>
1383
+ * <code>DELETE</code>
1384
+ * </b> - Delete the file.</p>
1311
1385
  * </li>
1312
1386
  * <li>
1313
1387
  * <p>
1314
- * <i>TAG</i>: Add a tag to the file.</p>
1388
+ * <b>
1389
+ * <code>TAG</code>
1390
+ * </b> - Add a tag to the file.</p>
1315
1391
  * </li>
1316
1392
  * </ul>
1317
1393
  */
@@ -1326,20 +1402,18 @@ export interface WorkflowStep {
1326
1402
  * <p>A description</p>
1327
1403
  * </li>
1328
1404
  * <li>
1329
- * <p>An S3 location for the destination of the file copy.</p>
1405
+ * <p>An Amazon S3 location for the destination of the file copy.</p>
1330
1406
  * </li>
1331
1407
  * <li>
1332
- * <p>A flag that indicates whether or not to overwrite an existing file of the same name.
1333
- * The default is <code>FALSE</code>.</p>
1408
+ * <p>A flag that indicates whether to overwrite an existing file of the same name. The default is
1409
+ * <code>FALSE</code>.</p>
1334
1410
  * </li>
1335
1411
  * </ul>
1336
1412
  */
1337
1413
  CopyStepDetails?: CopyStepDetails;
1338
1414
  /**
1339
- * <p>Details for a step that invokes a lambda function.</p>
1340
- * <p>
1341
- * Consists of the lambda function name, target, and timeout (in seconds).
1342
- * </p>
1415
+ * <p>Details for a step that invokes an Lambda function.</p>
1416
+ * <p>Consists of the Lambda function's name, target, and timeout (in seconds). </p>
1343
1417
  */
1344
1418
  CustomStepDetails?: CustomStepDetails;
1345
1419
  /**
@@ -1348,9 +1422,32 @@ export interface WorkflowStep {
1348
1422
  DeleteStepDetails?: DeleteStepDetails;
1349
1423
  /**
1350
1424
  * <p>Details for a step that creates one or more tags.</p>
1351
- * <p>You specify one or more tags: each tag contains a key/value pair.</p>
1425
+ * <p>You specify one or more tags. Each tag contains a key-value pair.</p>
1352
1426
  */
1353
1427
  TagStepDetails?: TagStepDetails;
1428
+ /**
1429
+ * <p>Details for a step that decrypts an encrypted file.</p>
1430
+ * <p>Consists of the following values:</p>
1431
+ * <ul>
1432
+ * <li>
1433
+ * <p>A descriptive name</p>
1434
+ * </li>
1435
+ * <li>
1436
+ * <p>An Amazon S3 or Amazon Elastic File System (Amazon EFS) location for the source file to
1437
+ * decrypt.</p>
1438
+ * </li>
1439
+ * <li>
1440
+ * <p>An S3 or Amazon EFS location for the destination of the file decryption.</p>
1441
+ * </li>
1442
+ * <li>
1443
+ * <p>A flag that indicates whether to overwrite an existing file of the same name. The default is
1444
+ * <code>FALSE</code>.</p>
1445
+ * </li>
1446
+ * <li>
1447
+ * <p>The type of encryption that's used. Currently, only PGP encryption is supported.</p>
1448
+ * </li>
1449
+ * </ul>
1450
+ */
1354
1451
  DecryptStepDetails?: DecryptStepDetails;
1355
1452
  }
1356
1453
  export interface CreateWorkflowRequest {
@@ -1366,19 +1463,33 @@ export interface CreateWorkflowRequest {
1366
1463
  * <ul>
1367
1464
  * <li>
1368
1465
  * <p>
1369
- * <i>COPY</i>: Copy the file to another location.</p>
1466
+ * <b>
1467
+ * <code>COPY</code>
1468
+ * </b> - Copy the file to another location.</p>
1370
1469
  * </li>
1371
1470
  * <li>
1372
1471
  * <p>
1373
- * <i>CUSTOM</i>: Perform a custom step with an Lambda function target.</p>
1472
+ * <b>
1473
+ * <code>CUSTOM</code>
1474
+ * </b> - Perform a custom step with an Lambda function target.</p>
1374
1475
  * </li>
1375
1476
  * <li>
1376
1477
  * <p>
1377
- * <i>DELETE</i>: Delete the file.</p>
1478
+ * <b>
1479
+ * <code>DECRYPT</code>
1480
+ * </b> - Decrypt a file that was encrypted before it was uploaded.</p>
1378
1481
  * </li>
1379
1482
  * <li>
1380
1483
  * <p>
1381
- * <i>TAG</i>: Add a tag to the file.</p>
1484
+ * <b>
1485
+ * <code>DELETE</code>
1486
+ * </b> - Delete the file.</p>
1487
+ * </li>
1488
+ * <li>
1489
+ * <p>
1490
+ * <b>
1491
+ * <code>TAG</code>
1492
+ * </b> - Add a tag to the file.</p>
1382
1493
  * </li>
1383
1494
  * </ul>
1384
1495
  * <note>
@@ -1386,7 +1497,7 @@ export interface CreateWorkflowRequest {
1386
1497
  * Currently, copying and tagging are supported only on S3.
1387
1498
  * </p>
1388
1499
  * </note>
1389
- * <p> For file location, you specify either the S3 bucket and key, or the EFS file system ID
1500
+ * <p> For file location, you specify either the Amazon S3 bucket and key, or the Amazon EFS file system ID
1390
1501
  * and path. </p>
1391
1502
  */
1392
1503
  Steps: WorkflowStep[] | undefined;
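
`CreateWorkflowRequest.Steps` now lists `DECRYPT` alongside the existing step types, and the file-location text above refers to Amazon S3 and Amazon EFS explicitly. A short sketch of creating a workflow from individual steps (the copy and decrypt shapes sketched earlier would slot in the same way; names and the bucket are illustrative):

import { TransferClient, CreateWorkflowCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

// Archive the uploaded file to a prefix, then delete the original.
await client.send(new CreateWorkflowCommand({
  Description: "archive-then-remove",
  Steps: [
    {
      Type: "COPY",
      CopyStepDetails: {
        Name: "archive",
        DestinationFileLocation: {
          S3FileLocation: { Bucket: "DOC-EXAMPLE-BUCKET", Key: "archive/" },
        },
      },
    },
    {
      Type: "DELETE",
      DeleteStepDetails: { Name: "remove-original", SourceFileLocation: "${original.file}" },
    },
  ],
}));
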
@@ -1936,19 +2047,33 @@ export interface ExecutionStepResult {
1936
2047
  * <ul>
1937
2048
  * <li>
1938
2049
  * <p>
1939
- * <i>COPY</i>: Copy the file to another location.</p>
2050
+ * <b>
2051
+ * <code>COPY</code>
2052
+ * </b> - Copy the file to another location.</p>
2053
+ * </li>
2054
+ * <li>
2055
+ * <p>
2056
+ * <b>
2057
+ * <code>CUSTOM</code>
2058
+ * </b> - Perform a custom step with an Lambda function target.</p>
1940
2059
  * </li>
1941
2060
  * <li>
1942
2061
  * <p>
1943
- * <i>CUSTOM</i>: Perform a custom step with an Lambda function target.</p>
2062
+ * <b>
2063
+ * <code>DECRYPT</code>
2064
+ * </b> - Decrypt a file that was encrypted before it was uploaded.</p>
1944
2065
  * </li>
1945
2066
  * <li>
1946
2067
  * <p>
1947
- * <i>DELETE</i>: Delete the file.</p>
2068
+ * <b>
2069
+ * <code>DELETE</code>
2070
+ * </b> - Delete the file.</p>
1948
2071
  * </li>
1949
2072
  * <li>
1950
2073
  * <p>
1951
- * <i>TAG</i>: Add a tag to the file.</p>
2074
+ * <b>
2075
+ * <code>TAG</code>
2076
+ * </b> - Add a tag to the file.</p>
1952
2077
  * </li>
1953
2078
  * </ul>
1954
2079
  */
@@ -2334,7 +2459,7 @@ export interface DescribedServer {
2334
2459
  * <li>
2335
2460
  * <p>If <code>Protocol</code> includes either <code>FTP</code> or <code>FTPS</code>, then the
2336
2461
  * <code>EndpointType</code> must be <code>VPC</code> and the
2337
- * <code>IdentityProviderType</code> must be <code>AWS_DIRECTORY_SERVICE</code> or <code>API_GATEWAY</code>.</p>
2462
+ * <code>IdentityProviderType</code> must be either <code>AWS_DIRECTORY_SERVICE</code>, <code>AWS_LAMBDA</code>, or <code>API_GATEWAY</code>.</p>
2338
2463
  * </li>
2339
2464
  * <li>
2340
2465
  * <p>If <code>Protocol</code> includes <code>FTP</code>, then
@@ -2342,8 +2467,8 @@ export interface DescribedServer {
2342
2467
  * </li>
2343
2468
  * <li>
2344
2469
  * <p>If <code>Protocol</code> is set only to <code>SFTP</code>, the <code>EndpointType</code>
2345
- * can be set to <code>PUBLIC</code> and the <code>IdentityProviderType</code> can be set to
2346
- * <code>SERVICE_MANAGED</code>.</p>
2470
+ * can be set to <code>PUBLIC</code> and the <code>IdentityProviderType</code> can be set to any of the supported identity types:
2471
+ * <code>SERVICE_MANAGED</code>, <code>AWS_DIRECTORY_SERVICE</code>, <code>AWS_LAMBDA</code>, or <code>API_GATEWAY</code>.</p>
2347
2472
  * </li>
2348
2473
  * <li>
2349
2474
  * <p>If <code>Protocol</code> includes <code>AS2</code>, then the
@@ -2384,7 +2509,7 @@ export interface DescribedServer {
2384
2509
  UserCount?: number;
2385
2510
  /**
2386
2511
  * <p>Specifies the workflow ID for the workflow to assign and the execution role that's used for executing the workflow.</p>
2387
- * <p>In additon to a workflow to execute when a file is uploaded completely, <code>WorkflowDeatails</code> can also contain a
2512
+ * <p>In addition to a workflow to execute when a file is uploaded completely, <code>WorkflowDetails</code> can also contain a
2388
2513
  * workflow ID (and execution role) for a workflow to execute on partial upload. A partial upload occurs when a file is open when
2389
2514
  * the session disconnects.</p>
2390
2515
  */
@@ -2631,7 +2756,15 @@ export interface ImportCertificateRequest {
2631
2756
  */
2632
2757
  Usage: CertificateUsageType | string | undefined;
2633
2758
  /**
2634
- * <p>The file that contains the certificate to import.</p>
2759
+ * <ul>
2760
+ * <li>
2761
+ * <p>For the CLI, provide a file path for a certificate in URI format. For example, <code>--certificate file://encryption-cert.pem</code>.
2762
+ * Alternatively, you can provide the raw content.</p>
2763
+ * </li>
2764
+ * <li>
2765
+ * <p>For the SDK, specify the raw content of a certificate file. For example, <code>--certificate "`cat encryption-cert.pem`"</code>.</p>
2766
+ * </li>
2767
+ * </ul>
2635
2768
  */
2636
2769
  Certificate: string | undefined;
2637
2770
  /**
@@ -2640,7 +2773,16 @@ export interface ImportCertificateRequest {
2640
2773
  */
2641
2774
  CertificateChain?: string;
2642
2775
  /**
2643
- * <p>The file that contains the private key for the certificate that's being imported.</p>
2776
+ * <ul>
2777
+ * <li>
2778
+ * <p>For the CLI, provide a file path for a private key in URI format. For example, <code>--private-key file://encryption-key.pem</code>.
2779
+ * Alternatively, you can provide the raw content of the private key file.</p>
2780
+ * </li>
2781
+ * <li>
2782
+ * <p>For the SDK, specify the raw content of a private key file. For example, <code>--private-key "`cat encryption-key.pem`"</code>
2783
+ * </p>
2784
+ * </li>
2785
+ * </ul>
2644
2786
  */
2645
2787
  PrivateKey?: string;
2646
2788
  /**
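
The `Certificate` and `PrivateKey` descriptions above now distinguish the CLI form (a `file://` URI or shell substitution) from the SDK form (the raw PEM content). A sketch of the SDK form in Node.js, assuming local PEM files named as in the doc text:

import { readFileSync } from "node:fs";
import { TransferClient, ImportCertificateCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

// The SDK expects the raw PEM content, which is what the CLI's file:// URI or
// "`cat ...`" substitution would produce.
await client.send(new ImportCertificateCommand({
  Usage: "ENCRYPTION",
  Certificate: readFileSync("encryption-cert.pem", "utf8"),
  PrivateKey: readFileSync("encryption-key.pem", "utf8"),
}));
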
@@ -2672,7 +2814,7 @@ export interface ImportHostKeyRequest {
2672
2814
  */
2673
2815
  ServerId: string | undefined;
2674
2816
  /**
2675
- * <p>The public key portion of an SSH key pair.</p>
2817
+ * <p>The private key portion of an SSH key pair.</p>
2676
2818
  * <p>Transfer Family accepts RSA, ECDSA, and ED25519 keys.</p>
2677
2819
  */
2678
2820
  HostKeyBody: string | undefined;
@@ -3990,7 +4132,7 @@ export interface UpdateServerRequest {
3990
4132
  * server to a new server, don't update the host key. Accidentally changing a
3991
4133
  * server's host key can be disruptive.</p>
3992
4134
  * </important>
3993
- * <p>For more information, see <a href="https://docs.aws.amazon.com/transfer/latest/userguide/edit-server-config.html#configuring-servers-change-host-key">Update host keys for your SFTP-enabled server</a> in the <i>Transfer Family User Guide</i>.</p>
4135
+ * <p>For more information, see <a href="https://docs.aws.amazon.com/transfer/latest/userguide/edit-server-config.html#configuring-servers-change-host-key">Manage host keys for your SFTP-enabled server</a> in the <i>Transfer Family User Guide</i>.</p>
3994
4136
  */
3995
4137
  HostKey?: string;
3996
4138
  /**
@@ -4054,7 +4196,7 @@ export interface UpdateServerRequest {
4054
4196
  * <li>
4055
4197
  * <p>If <code>Protocol</code> includes either <code>FTP</code> or <code>FTPS</code>, then the
4056
4198
  * <code>EndpointType</code> must be <code>VPC</code> and the
4057
- * <code>IdentityProviderType</code> must be <code>AWS_DIRECTORY_SERVICE</code> or <code>API_GATEWAY</code>.</p>
4199
+ * <code>IdentityProviderType</code> must be either <code>AWS_DIRECTORY_SERVICE</code>, <code>AWS_LAMBDA</code>, or <code>API_GATEWAY</code>.</p>
4058
4200
  * </li>
4059
4201
  * <li>
4060
4202
  * <p>If <code>Protocol</code> includes <code>FTP</code>, then
@@ -4062,8 +4204,8 @@ export interface UpdateServerRequest {
4062
4204
  * </li>
4063
4205
  * <li>
4064
4206
  * <p>If <code>Protocol</code> is set only to <code>SFTP</code>, the <code>EndpointType</code>
4065
- * can be set to <code>PUBLIC</code> and the <code>IdentityProviderType</code> can be set to
4066
- * <code>SERVICE_MANAGED</code>.</p>
4207
+ * can be set to <code>PUBLIC</code> and the <code>IdentityProviderType</code> can be set to any of the supported identity types:
4208
+ * <code>SERVICE_MANAGED</code>, <code>AWS_DIRECTORY_SERVICE</code>, <code>AWS_LAMBDA</code>, or <code>API_GATEWAY</code>.</p>
4067
4209
  * </li>
4068
4210
  * <li>
4069
4211
  * <p>If <code>Protocol</code> includes <code>AS2</code>, then the
@@ -4084,7 +4226,7 @@ export interface UpdateServerRequest {
4084
4226
  ServerId: string | undefined;
4085
4227
  /**
4086
4228
  * <p>Specifies the workflow ID for the workflow to assign and the execution role that's used for executing the workflow.</p>
4087
- * <p>In additon to a workflow to execute when a file is uploaded completely, <code>WorkflowDeatails</code> can also contain a
4229
+ * <p>In addition to a workflow to execute when a file is uploaded completely, <code>WorkflowDetails</code> can also contain a
4088
4230
  * workflow ID (and execution role) for a workflow to execute on partial upload. A partial upload occurs when a file is open when
4089
4231
  * the session disconnects.</p>
4090
4232
  * <p>To remove an associated workflow from a server, you can provide an empty <code>OnUpload</code> object, as in the following example.</p>
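
The hunk above ends at the sentence that introduces the empty-`OnUpload` example; the example itself falls outside the diff context. For reference, a hedged sketch of detaching a workflow through `UpdateServerCommand`; the server ID is a placeholder:

import { TransferClient, UpdateServerCommand } from "@aws-sdk/client-transfer";

const client = new TransferClient({ region: "us-east-1" });

// An empty OnUpload array removes any workflow attached to the server's
// complete-upload event.
await client.send(new UpdateServerCommand({
  ServerId: "s-1234567890abcdef0",   // placeholder
  WorkflowDetails: { OnUpload: [] },
}));
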
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@aws-sdk/client-transfer",
3
3
  "description": "AWS SDK for JavaScript Transfer Client for Node.js, Browser and React Native",
4
- "version": "3.264.0",
4
+ "version": "3.266.1",
5
5
  "scripts": {
6
6
  "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'",
7
7
  "build:cjs": "tsc -p tsconfig.cjs.json",
@@ -20,39 +20,39 @@
20
20
  "dependencies": {
21
21
  "@aws-crypto/sha256-browser": "3.0.0",
22
22
  "@aws-crypto/sha256-js": "3.0.0",
23
- "@aws-sdk/client-sts": "3.264.0",
24
- "@aws-sdk/config-resolver": "3.259.0",
25
- "@aws-sdk/credential-provider-node": "3.264.0",
26
- "@aws-sdk/fetch-http-handler": "3.257.0",
27
- "@aws-sdk/hash-node": "3.257.0",
28
- "@aws-sdk/invalid-dependency": "3.257.0",
29
- "@aws-sdk/middleware-content-length": "3.257.0",
30
- "@aws-sdk/middleware-endpoint": "3.264.0",
31
- "@aws-sdk/middleware-host-header": "3.257.0",
32
- "@aws-sdk/middleware-logger": "3.257.0",
33
- "@aws-sdk/middleware-recursion-detection": "3.257.0",
34
- "@aws-sdk/middleware-retry": "3.259.0",
35
- "@aws-sdk/middleware-serde": "3.257.0",
36
- "@aws-sdk/middleware-signing": "3.257.0",
37
- "@aws-sdk/middleware-stack": "3.257.0",
38
- "@aws-sdk/middleware-user-agent": "3.257.0",
39
- "@aws-sdk/node-config-provider": "3.259.0",
40
- "@aws-sdk/node-http-handler": "3.257.0",
41
- "@aws-sdk/protocol-http": "3.257.0",
42
- "@aws-sdk/smithy-client": "3.261.0",
43
- "@aws-sdk/types": "3.257.0",
44
- "@aws-sdk/url-parser": "3.257.0",
23
+ "@aws-sdk/client-sts": "3.266.1",
24
+ "@aws-sdk/config-resolver": "3.266.1",
25
+ "@aws-sdk/credential-provider-node": "3.266.1",
26
+ "@aws-sdk/fetch-http-handler": "3.266.1",
27
+ "@aws-sdk/hash-node": "3.266.1",
28
+ "@aws-sdk/invalid-dependency": "3.266.1",
29
+ "@aws-sdk/middleware-content-length": "3.266.1",
30
+ "@aws-sdk/middleware-endpoint": "3.266.1",
31
+ "@aws-sdk/middleware-host-header": "3.266.1",
32
+ "@aws-sdk/middleware-logger": "3.266.1",
33
+ "@aws-sdk/middleware-recursion-detection": "3.266.1",
34
+ "@aws-sdk/middleware-retry": "3.266.1",
35
+ "@aws-sdk/middleware-serde": "3.266.1",
36
+ "@aws-sdk/middleware-signing": "3.266.1",
37
+ "@aws-sdk/middleware-stack": "3.266.1",
38
+ "@aws-sdk/middleware-user-agent": "3.266.1",
39
+ "@aws-sdk/node-config-provider": "3.266.1",
40
+ "@aws-sdk/node-http-handler": "3.266.1",
41
+ "@aws-sdk/protocol-http": "3.266.1",
42
+ "@aws-sdk/smithy-client": "3.266.1",
43
+ "@aws-sdk/types": "3.266.1",
44
+ "@aws-sdk/url-parser": "3.266.1",
45
45
  "@aws-sdk/util-base64": "3.208.0",
46
46
  "@aws-sdk/util-body-length-browser": "3.188.0",
47
47
  "@aws-sdk/util-body-length-node": "3.208.0",
48
- "@aws-sdk/util-defaults-mode-browser": "3.261.0",
49
- "@aws-sdk/util-defaults-mode-node": "3.261.0",
50
- "@aws-sdk/util-endpoints": "3.257.0",
51
- "@aws-sdk/util-retry": "3.257.0",
52
- "@aws-sdk/util-user-agent-browser": "3.257.0",
53
- "@aws-sdk/util-user-agent-node": "3.259.0",
48
+ "@aws-sdk/util-defaults-mode-browser": "3.266.1",
49
+ "@aws-sdk/util-defaults-mode-node": "3.266.1",
50
+ "@aws-sdk/util-endpoints": "3.266.1",
51
+ "@aws-sdk/util-retry": "3.266.1",
52
+ "@aws-sdk/util-user-agent-browser": "3.266.1",
53
+ "@aws-sdk/util-user-agent-node": "3.266.1",
54
54
  "@aws-sdk/util-utf8": "3.254.0",
55
- "@aws-sdk/util-waiter": "3.257.0",
55
+ "@aws-sdk/util-waiter": "3.266.1",
56
56
  "tslib": "^2.3.1"
57
57
  },
58
58
  "devDependencies": {