data-transfer-cli 0.3.7__tar.gz → 0.3.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/PKG-INFO +3 -2
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/README.md +1 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/dtproxy.py +5 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/parser/cli_parser.py +31 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/pyproject.toml +4 -3
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/.env +0 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/__init__.py +0 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/conf/cli.cfg +0 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/dtcli.cfg +0 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/dtcli.py +0 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/parser/__init__.py +0 -0
- {data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/dtcli +0 -0
{data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: data-transfer-cli
-Version: 0.3.7
+Version: 0.3.9
 Summary: HiDALGO Data Transfer CLI provides commands to transfer data between different data providers and consumers using NIFI pipelines
 License: APL-2.0
 Author: Jesús Gorroñogoitia
@@ -11,7 +11,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-Requires-Dist: hid_data_transfer_lib (>=0.3.
+Requires-Dist: hid_data_transfer_lib (>=0.3.9)
 Requires-Dist: paramiko (>=3.3.1)
 Requires-Dist: pyyaml (>=6.0.2,<7.0.0)
 Requires-Dist: requests (>=2.31.0)
@@ -212,6 +212,7 @@ options:
   -acct, --accounting   [Optional] Enable returning accounting information of data transfer
   -ct CONCURRENT_TASKS, --concurrent-tasks CONCURRENT_TASKS
                         [Optional] set the number of concurrent tasks for parallel data transfer
+  -R, --recursive       [Optional] if True the data-source subdirectories will be transferred as well, otherwise only the root data-source folder
 ```
 
 A common command flow (e.g. transfer data from hdfs to hpc) would be like this:
{data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/README.md

@@ -193,6 +193,7 @@ options:
   -acct, --accounting   [Optional] Enable returning accounting information of data transfer
   -ct CONCURRENT_TASKS, --concurrent-tasks CONCURRENT_TASKS
                         [Optional] set the number of concurrent tasks for parallel data transfer
+  -R, --recursive       [Optional] if True the data-source subdirectories will be transferred as well, otherwise only the root data-source folder
 ```
 
 A common command flow (e.g. transfer data from hdfs to hpc) would be like this:
{data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/dtproxy.py

@@ -114,6 +114,7 @@ class DataTransferProxy:
                 kerberos_principal=args.kerberos_principal,
                 kerberos_password=args.kerberos_password,
                 concurrent_tasks=args.concurrent_tasks,
+                recursive=args.recursive,
             )
 
         except Exception as ex:
@@ -151,6 +152,7 @@ class DataTransferProxy:
                 kerberos_principal=args.kerberos_principal,
                 kerberos_password=args.kerberos_password,
                 concurrent_tasks=args.concurrent_tasks,
+                recursive=args.recursive,
             )
 
         except Exception as ex:
@@ -168,10 +170,12 @@ class DataTransferProxy:
                 ckan_api_key=args.ckan_api_key,
                 ckan_organization=args.ckan_organization,
                 ckan_dataset=args.ckan_dataset,
+                ckan_resource=args.ckan_resource,
                 data_source=args.data_source,
                 kerberos_principal=args.kerberos_principal,
                 kerberos_password=args.kerberos_password,
                 concurrent_tasks=args.concurrent_tasks,
+                recursive=args.recursive,
             )
 
         except Exception as ex:
@@ -238,6 +242,7 @@ class DataTransferProxy:
                 hpc_secret_key_password=args.hpc_secret_key_password,
                 data_source=args.data_source,
                 concurrent_tasks=args.concurrent_tasks,
+                recursive=args.recursive,
             )
 
         except Exception as ex:
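For context, a minimal sketch of the forwarding pattern these dtproxy.py hunks extend: parsed CLI options are passed through to the transfer client as keyword arguments, with `recursive=args.recursive` (and, on the hdfs2ckan path, `ckan_resource=args.ckan_resource`) now forwarded as well. The client class, its method signature, and the argument names below are illustrative assumptions, not the library's actual API.

```python
import argparse

# Hypothetical sketch only: FakeTransferClient and its hdfs2hpc signature are
# stand-ins. The diff above confirms just the new recursive= (and ckan_resource=)
# keyword arguments being forwarded from the parsed CLI args.
class FakeTransferClient:
    def hdfs2hpc(self, data_source, concurrent_tasks, recursive=False):
        print(f"transfer {data_source}: recursive={recursive}, tasks={concurrent_tasks}")


class DataTransferProxySketch:
    def __init__(self, client):
        self.client = client

    def hdfs2hpc(self, args):
        try:
            self.client.hdfs2hpc(
                data_source=args.data_source,
                concurrent_tasks=args.concurrent_tasks,
                recursive=args.recursive,  # new in 0.3.9: forwarded from the -R flag
            )
        except Exception as ex:
            print(f"transfer failed: {ex}")


# Example invocation with a hand-built namespace standing in for parsed args.
ns = argparse.Namespace(data_source="/data/in", concurrent_tasks=4, recursive=True)
DataTransferProxySketch(FakeTransferClient()).hdfs2hpc(ns)
```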
{data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/data_transfer_cli/parser/cli_parser.py

@@ -235,6 +235,12 @@ class CLIParser(argparse.ArgumentParser):
             help="[Optional] set the number of concurrent tasks"
             " for parallel data transfer"
         )
+        hdfs2hpc_parser.add_argument(
+            "-R", "--recursive",
+            required=False, action="store_true", default=False,
+            help="[Optional] if True the data-source subdirectories"
+            " will be transferred as well, otherwise only the root data-source folder"
+        )
         hdfs2hpc_parser.set_defaults(func=target.hdfs2hpc)
 
         # hpc2hdfs
@@ -266,6 +272,12 @@ class CLIParser(argparse.ArgumentParser):
             help="[Optional] set the number of concurrent tasks"
             " for parallel data transfer"
         )
+        hpc2hdfs_parser.add_argument(
+            "-R", "--recursive",
+            required=False, action="store_true", default=False,
+            help="[Optional] if True the data-source subdirectories"
+            " will be transferred as well, otherwise only the root data-source folder"
+        )
         hpc2hdfs_parser.set_defaults(func=target.hpc2hdfs)
 
         # ckan2hdfs
@@ -304,6 +316,13 @@ class CLIParser(argparse.ArgumentParser):
             "hdfs2ckan", help="transfer data from HDFS to a target CKAN"
         )
         hdfs2ckan_parser = self.add_default_ckan_arguments(hdfs2ckan_parser)
+        hdfs2ckan_parser.add_argument(
+            "-r",
+            "--ckan-resource",
+            required=False,
+            help="[Optional] CKAN resource to create from transferred sources. \
+                If omitted, target resource name will adopt the source file or folder name",
+        )
         hdfs2ckan_parser = self.add_default_kerberos_arguments(hdfs2ckan_parser)
         hdfs2ckan_parser.add_argument(
             "-s",
@@ -322,6 +341,12 @@ class CLIParser(argparse.ArgumentParser):
             help="[Optional] set the number of concurrent tasks"
             " for parallel data transfer"
         )
+        hdfs2ckan_parser.add_argument(
+            "-R", "--recursive",
+            required=False, action="store_true", default=False,
+            help="[Optional] if True the data-source subdirectories"
+            " will be transferred as well, otherwise only the root data-source folder"
+        )
         hdfs2ckan_parser.set_defaults(func=target.hdfs2ckan)
 
         # ckan2hpc
@@ -395,6 +420,12 @@ class CLIParser(argparse.ArgumentParser):
             help="[Optional] set the number of concurrent tasks"
             " for parallel data transfer"
         )
+        hpc2ckan_parser.add_argument(
+            "-R", "--recursive",
+            required=False, action="store_true", default=False,
+            help="[Optional] if True the data-source subdirectories"
+            " will be transferred as well, otherwise only the root data-source folder"
+        )
         hpc2ckan_parser.set_defaults(func=target.hpc2ckan)
 
         # local2ckan
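A minimal, self-contained sketch of how the `-R, --recursive` flag added in these hunks behaves when parsed (the program name and subcommand wiring below are illustrative; only the argparse API is real): a `store_true` option defaults to False and flips to True when present, which is what lets dtproxy.py forward `args.recursive` unconditionally.

```python
import argparse

# Illustrative parser: "demo" and the bare hdfs2hpc subparser are stand-ins
# for the project's CLIParser setup; the -R/--recursive definition mirrors
# the lines added in the diff above.
parser = argparse.ArgumentParser(prog="demo")
subparsers = parser.add_subparsers(dest="command")

hdfs2hpc = subparsers.add_parser("hdfs2hpc")
hdfs2hpc.add_argument(
    "-R", "--recursive",
    required=False, action="store_true", default=False,
    help="[Optional] if True the data-source subdirectories"
    " will be transferred as well, otherwise only the root data-source folder",
)

# Without the flag the attribute is False; with it, True.
print(parser.parse_args(["hdfs2hpc"]).recursive)        # False
print(parser.parse_args(["hdfs2hpc", "-R"]).recursive)  # True
```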
{data_transfer_cli-0.3.7 → data_transfer_cli-0.3.9}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "data-transfer-cli"
-version = "0.3.7"
+version = "0.3.9"
 description = "HiDALGO Data Transfer CLI provides commands to transfer data between different data providers and consumers using NIFI pipelines"
 authors = [
     { name = "Jesús Gorroñogoitia", email = "jesus.gorronogoitia@eviden.com" },
@@ -11,9 +11,9 @@ requires-python = ">=3.11, <4.0"
 dependencies = [
     "requests>=2.31.0",
     "paramiko>=3.3.1",
-    "hid_data_transfer_lib>=0.3.
+    "hid_data_transfer_lib>=0.3.9",
     "pyyaml (>=6.0.2,<7.0.0)",
-    #"hid-data-transfer-lib @ file:///home/yosu/Projects/Hidalgo2/git/hid-data-management/data-transfer/nifi/hid_data_transfer_lib/dist/hid_data_transfer_lib-0.3.
+    #"hid-data-transfer-lib @ file:///home/yosu/Projects/Hidalgo2/git/hid-data-management/data-transfer/nifi/hid_data_transfer_lib/dist/hid_data_transfer_lib-0.3.9-py3-none-any.whl",
 ]
 
 [tool.poetry]
@@ -47,3 +47,4 @@ dtcli = 'data_transfer_cli.dtcli:main'
 
 [tool.pytest.ini_options]
 pythonpath = ["data_transfer_cli"]
+addopts = "-s"
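Note: the new `addopts = "-s"` line makes pytest run with `-s` (shorthand for `--capture=no`) by default, so output printed by the CLI commands during the test suite is shown instead of being captured.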