cosmotech-acceleration-library 1.0.0-py3-none-any.whl → 1.0.1-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- cosmotech/coal/__init__.py +1 -1
- cosmotech/coal/azure/adx/runner.py +1 -3
- cosmotech/coal/postgresql/runner.py +3 -1
- cosmotech/coal/postgresql/store.py +3 -0
- cosmotech/csm_data/commands/adx_send_data.py +7 -7
- cosmotech/csm_data/commands/adx_send_runnerdata.py +10 -10
- cosmotech/csm_data/commands/api/api.py +1 -1
- cosmotech/csm_data/commands/api/postgres_send_runner_metadata.py +23 -11
- cosmotech/csm_data/commands/api/rds_load_csv.py +8 -8
- cosmotech/csm_data/commands/api/rds_send_csv.py +6 -6
- cosmotech/csm_data/commands/api/rds_send_store.py +6 -6
- cosmotech/csm_data/commands/api/run_load_data.py +10 -10
- cosmotech/csm_data/commands/api/runtemplate_load_handler.py +5 -5
- cosmotech/csm_data/commands/api/tdl_load_files.py +6 -6
- cosmotech/csm_data/commands/api/tdl_send_files.py +7 -7
- cosmotech/csm_data/commands/api/wsf_load_file.py +5 -5
- cosmotech/csm_data/commands/api/wsf_send_file.py +6 -6
- cosmotech/csm_data/commands/az_storage_upload.py +6 -6
- cosmotech/csm_data/commands/s3_bucket_delete.py +8 -8
- cosmotech/csm_data/commands/s3_bucket_download.py +9 -9
- cosmotech/csm_data/commands/s3_bucket_upload.py +10 -10
- cosmotech/csm_data/commands/store/dump_to_azure.py +9 -9
- cosmotech/csm_data/commands/store/dump_to_postgresql.py +22 -10
- cosmotech/csm_data/commands/store/dump_to_s3.py +10 -10
- cosmotech/csm_data/commands/store/list_tables.py +3 -3
- cosmotech/csm_data/commands/store/load_csv_folder.py +3 -3
- cosmotech/csm_data/commands/store/load_from_singlestore.py +8 -8
- cosmotech/csm_data/commands/store/reset.py +2 -2
- cosmotech/csm_data/commands/store/store.py +1 -1
- cosmotech/csm_data/main.py +8 -6
- cosmotech/csm_data/utils/decorators.py +1 -1
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/api.yml +8 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/postgres_send_runner_metadata.yml +17 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/rds_load_csv.yml +13 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/rds_send_csv.yml +12 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/rds_send_store.yml +12 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/run_load_data.yml +15 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/runtemplate_load_handler.yml +7 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/tdl_load_files.yml +14 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/tdl_send_files.yml +18 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/wsf_load_file.yml +10 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/api/wsf_send_file.yml +12 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/main.yml +9 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/storage/adx_send_data.yml +8 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/storage/adx_send_runnerdata.yml +15 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/storage/az_storage_upload.yml +8 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/storage/s3_bucket_delete.yml +17 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/storage/s3_bucket_download.yml +18 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/storage/s3_bucket_upload.yml +21 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/storage/storage.yml +4 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/dump_to_azure.yml +23 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/dump_to_postgresql.yml +20 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/dump_to_s3.yml +26 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/list_tables.yml +5 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/load_csv_folder.yml +5 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/load_from_singlestore.yml +16 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/reset.yml +4 -0
- cosmotech/translation/csm_data/en-US/csm_data/commands/store/store.yml +4 -0
- cosmotech/translation/csm_data/en-US/csm_data/commons/decorators.yml +2 -0
- cosmotech/translation/csm_data/en-US/csm_data/commons/version.yml +4 -0
- {cosmotech_acceleration_library-1.0.0.dist-info → cosmotech_acceleration_library-1.0.1.dist-info}/METADATA +7 -10
- {cosmotech_acceleration_library-1.0.0.dist-info → cosmotech_acceleration_library-1.0.1.dist-info}/RECORD +66 -38
- {cosmotech_acceleration_library-1.0.0.dist-info → cosmotech_acceleration_library-1.0.1.dist-info}/WHEEL +1 -1
- cosmotech/translation/csm_data/en-US/csm-data.yml +0 -434
- {cosmotech_acceleration_library-1.0.0.dist-info → cosmotech_acceleration_library-1.0.1.dist-info}/entry_points.txt +0 -0
- {cosmotech_acceleration_library-1.0.0.dist-info → cosmotech_acceleration_library-1.0.1.dist-info}/licenses/LICENSE +0 -0
- {cosmotech_acceleration_library-1.0.0.dist-info → cosmotech_acceleration_library-1.0.1.dist-info}/top_level.txt +0 -0
cosmotech/csm_data/commands/store/dump_to_s3.py
CHANGED

@@ -22,7 +22,7 @@ VALID_TYPES = (
 @click.option(
     "--store-folder",
     envvar="CSM_PARAMETERS_ABSOLUTE_PATH",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.store_folder"),
     metavar="PATH",
     type=str,
     show_envvar=True,
@@ -31,13 +31,13 @@ VALID_TYPES = (
 @click.option(
     "--output-type",
     default="sqlite",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.output_type"),
     type=click.Choice(VALID_TYPES, case_sensitive=False),
 )
 @click.option(
     "--bucket-name",
     envvar="CSM_DATA_BUCKET_NAME",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.bucket_name"),
     metavar="BUCKET",
     type=str,
     show_envvar=True,
@@ -47,7 +47,7 @@ VALID_TYPES = (
     "--prefix",
     "file_prefix",
     envvar="CSM_DATA_BUCKET_PREFIX",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.prefix"),
     metavar="PREFIX",
     type=str,
     show_envvar=True,
@@ -56,14 +56,14 @@ VALID_TYPES = (
 @click.option(
     "--use-ssl/--no-ssl",
     default=True,
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.use_ssl"),
     type=bool,
     is_flag=True,
 )
 @click.option(
     "--s3-url",
     "endpoint_url",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.s3_url"),
     type=str,
     required=True,
     show_envvar=True,
@@ -73,7 +73,7 @@ VALID_TYPES = (
 @click.option(
     "--access-id",
     "access_id",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.access_id"),
     type=str,
     required=True,
     show_envvar=True,
@@ -83,7 +83,7 @@ VALID_TYPES = (
 @click.option(
     "--secret-key",
     "secret_key",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.secret_key"),
     type=str,
     required=True,
     show_envvar=True,
@@ -92,14 +92,14 @@ VALID_TYPES = (
 )
 @click.option(
     "--ssl-cert-bundle",
-    help=T("
+    help=T("csm_data.commands.store.dump_to_s3.parameters.ssl_cert_bundle"),
     type=str,
     show_envvar=True,
     metavar="PATH",
     envvar="CSM_S3_CA_BUNDLE",
 )
 @web_help("csm-data/store/dump-to-s3")
-@translate_help("
+@translate_help("csm_data.commands.store.dump_to_s3.description")
 def dump_to_s3(
     store_folder,
     bucket_name: str,
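The pattern these hunks introduce is a key-based help lookup: each inline help string is replaced by a dotted translation key resolved through T. The following is a minimal, self-contained sketch of how such a lookup typically works, assuming the keys are resolved against nested dictionaries loaded from the translation YAML files added later in this diff; the real T comes from cosmotech.orchestrator.utils.translate and its implementation is not part of this diff.

from functools import reduce

# Hypothetical in-memory catalog; in the package these values come from
# the YAML files under cosmotech/translation/csm_data/en-US.
TRANSLATIONS = {
    "csm_data": {"commands": {"store": {"dump_to_s3": {
        "parameters": {"bucket_name": "The bucket on S3 to upload to"},
    }}}},
}

def T(key: str) -> str:
    # Walk the nested catalog one dotted segment at a time;
    # fall back to the raw key if any segment is missing.
    try:
        return reduce(lambda node, part: node[part], key.split("."), TRANSLATIONS)
    except (KeyError, TypeError):
        return key

print(T("csm_data.commands.store.dump_to_s3.parameters.bucket_name"))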
cosmotech/csm_data/commands/store/list_tables.py
CHANGED

@@ -12,11 +12,11 @@ from cosmotech.orchestrator.utils.translate import T

 @click.command()
 @web_help("csm-data/store/list-tables")
-@translate_help("
+@translate_help("csm_data.commands.store.list_tables.description")
 @click.option(
     "--store-folder",
     envvar="CSM_PARAMETERS_ABSOLUTE_PATH",
-    help=T("
+    help=T("csm_data.commands.store.list_tables.parameters.store_folder"),
     metavar="PATH",
     type=str,
     show_envvar=True,
@@ -24,7 +24,7 @@ from cosmotech.orchestrator.utils.translate import T
 )
 @click.option(
     "--schema/--no-schema",
-    help=T("
+    help=T("csm_data.commands.store.list_tables.parameters.schema"),
     is_flag=True,
     type=bool,
     default=False,
cosmotech/csm_data/commands/store/load_csv_folder.py
CHANGED

@@ -12,11 +12,11 @@ from cosmotech.orchestrator.utils.translate import T

 @click.command()
 @web_help("csm-data/store/load-csv-folder")
-@translate_help("
+@translate_help("csm_data.commands.store.load_csv_folder.description")
 @click.option(
     "--store-folder",
     envvar="CSM_PARAMETERS_ABSOLUTE_PATH",
-    help=T("
+    help=T("csm_data.commands.store.load_csv_folder.parameters.store_folder"),
     metavar="PATH",
     type=str,
     show_envvar=True,
@@ -25,7 +25,7 @@ from cosmotech.orchestrator.utils.translate import T
 @click.option(
     "--csv-folder",
     envvar="CSM_DATASET_ABSOLUTE_PATH",
-    help=T("
+    help=T("csm_data.commands.store.load_csv_folder.parameters.csv_folder"),
     metavar="PATH",
     type=str,
     show_envvar=True,
cosmotech/csm_data/commands/store/load_from_singlestore.py
CHANGED

@@ -12,12 +12,12 @@ from cosmotech.orchestrator.utils.translate import T

 @click.command()
 @web_help("csm-data/store/load-from-singlestore")
-@translate_help("
+@translate_help("csm_data.commands.store.load_from_singlestore.description")
 @click.option(
     "--singlestore-host",
     "single_store_host",
     envvar="SINGLE_STORE_HOST",
-    help=T("
+    help=T("csm_data.commands.store.load_from_singlestore.parameters.singlestore_host"),
     type=str,
     show_envvar=True,
     required=True,
@@ -25,7 +25,7 @@ from cosmotech.orchestrator.utils.translate import T
 @click.option(
     "--singlestore-port",
     "single_store_port",
-    help=T("
+    help=T("csm_data.commands.store.load_from_singlestore.parameters.singlestore_port"),
     envvar="SINGLE_STORE_PORT",
     show_envvar=True,
     required=False,
@@ -34,7 +34,7 @@ from cosmotech.orchestrator.utils.translate import T
 @click.option(
     "--singlestore-db",
     "single_store_db",
-    help=T("
+    help=T("csm_data.commands.store.load_from_singlestore.parameters.singlestore_db"),
     envvar="SINGLE_STORE_DB",
     show_envvar=True,
     required=True,
@@ -42,7 +42,7 @@ from cosmotech.orchestrator.utils.translate import T
 @click.option(
     "--singlestore-user",
     "single_store_user",
-    help=T("
+    help=T("csm_data.commands.store.load_from_singlestore.parameters.singlestore_user"),
     envvar="SINGLE_STORE_USERNAME",
     show_envvar=True,
     required=True,
@@ -50,7 +50,7 @@ from cosmotech.orchestrator.utils.translate import T
 @click.option(
     "--singlestore-password",
     "single_store_password",
-    help=T("
+    help=T("csm_data.commands.store.load_from_singlestore.parameters.singlestore_password"),
     envvar="SINGLE_STORE_PASSWORD",
     show_envvar=True,
     required=True,
@@ -58,7 +58,7 @@ from cosmotech.orchestrator.utils.translate import T
 @click.option(
     "--singlestore-tables",
     "single_store_tables",
-    help=T("
+    help=T("csm_data.commands.store.load_from_singlestore.parameters.singlestore_tables"),
     envvar="SINGLE_STORE_TABLES",
     show_envvar=True,
     required=True,
@@ -67,7 +67,7 @@ from cosmotech.orchestrator.utils.translate import T
     "--store-folder",
     "store_folder",
     envvar="CSM_PARAMETERS_ABSOLUTE_PATH",
-    help=T("
+    help=T("csm_data.commands.store.load_from_singlestore.parameters.store_folder"),
     metavar="PATH",
     type=str,
     show_envvar=True,
cosmotech/csm_data/commands/store/reset.py
CHANGED

@@ -12,11 +12,11 @@ from cosmotech.orchestrator.utils.translate import T

 @click.command()
 @web_help("csm-data/store/reset")
-@translate_help("
+@translate_help("csm_data.commands.store.reset.description")
 @click.option(
     "--store-folder",
     envvar="CSM_PARAMETERS_ABSOLUTE_PATH",
-    help=T("
+    help=T("csm_data.commands.store.reset.parameters.store_folder"),
     metavar="PATH",
     type=str,
     show_envvar=True,
cosmotech/csm_data/main.py
CHANGED

@@ -24,11 +24,12 @@ from cosmotech.orchestrator.utils.translate import T
 def print_version(ctx, param, value):
     if not value or ctx.resilient_parsing:
         return
-    click.echo(
+    click.echo(T("csm_data.commons.version.message").format(version=__version__))
     ctx.exit()


-@click.group("csm-data")
+@click.group("csm-data", invoke_without_command=True)
+@click.pass_context
 @click_log.simple_verbosity_option(LOGGER, "--log-level", envvar="LOG_LEVEL", show_envvar=True)
 @click.option(
     "--version",
@@ -36,12 +37,13 @@ def print_version(ctx, param, value):
     callback=print_version,
     expose_value=False,
     is_eager=True,
-    help="
+    help=T("csm_data.commands.main.parameters.version"),
 )
 @web_help("csm-data")
-@translate_help("
-def main():
-
+@translate_help("csm_data.commands.main.description")
+def main(ctx):
+    if ctx.invoked_subcommand is None:
+        click.echo(T("csm_data.commands.main.content"))


 main.add_command(api, "api")
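The main.py change leans on standard Click behavior: a group declared with invoke_without_command=True executes its own body even when no subcommand is given, and ctx.invoked_subcommand is None in that case, which is what lets the new main print its welcome content. A self-contained sketch of the mechanism, with a placeholder string standing in for the translated content:

import click

@click.group("csm-data", invoke_without_command=True)
@click.pass_context
def main(ctx):
    # Runs even with no subcommand thanks to invoke_without_command=True.
    if ctx.invoked_subcommand is None:
        # Placeholder for T("csm_data.commands.main.content").
        click.echo("Welcome to the Cosmo Tech Data Interface CLI.")

@main.command()
def ping():
    click.echo("pong")

if __name__ == "__main__":
    main()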
cosmotech/csm_data/utils/decorators.py
CHANGED

@@ -59,7 +59,7 @@ def web_help(effective_target="", base_url=WEB_DOCUMENTATION_ROOT):
 @click.option(
     "--web-help",
     is_flag=True,
-    help="
+    help=T("csm_data.commons.decorators.parameters.web_help").format(url=documentation_url),
     is_eager=True,
     callback=open_documentation,
 )
cosmotech/translation/csm_data/en-US/csm_data/commands/api/api.yml
ADDED

@@ -0,0 +1,8 @@
+description: |
+  Cosmo Tech API helper command
+
+  This command will inform you of which connection is available to use for the Cosmo Tech API
+
+  If no connection is available, will list all possible set of parameters and return an error code,
+
+  You can use this command in a csm-orc template to make sure that API connection is available.
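Each of these new YAML files appears to back one branch of the key namespace used by T and @translate_help: the path under cosmotech/translation/csm_data/en-US mirrors the dotted key, so the description field of csm_data/commands/api/api.yml backs csm_data.commands.api.description. A hedged sketch of that resolution using PyYAML (the package's actual loader is not shown in this diff):

import yaml

# Assumption: the dotted key mirrors the directory layout under en-US,
# with the final segment naming a field inside the YAML file.
path = "cosmotech/translation/csm_data/en-US/csm_data/commands/api/api.yml"
with open(path) as fh:
    data = yaml.safe_load(fh)

# "csm_data.commands.api.description" -> api.yml, field "description"
print(data["description"].splitlines()[0])  # Cosmo Tech API helper command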
cosmotech/translation/csm_data/en-US/csm_data/commands/api/postgres_send_runner_metadata.yml
ADDED

@@ -0,0 +1,17 @@
+description: |
+  Send runner metadata to a PostgreSQL database.
+
+  Creates or updates a table in PostgreSQL with runner metadata including id, name, last run id, and run template id.
+  The table will be created if it doesn't exist, and existing records will be updated based on the runner id.
+parameters:
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  runner_id: A runner id for the Cosmo Tech API
+  table_prefix: Prefix to add to the table name
+  postgres_host: PostgreSQL host URI
+  postgres_port: PostgreSQL database port
+  postgres_db: PostgreSQL database name
+  postgres_schema: PostgreSQL schema name
+  postgres_user: PostgreSQL connection user name
+  postgres_password: PostgreSQL connection password
+  encode_password: Force encoding of password to percent encoding
cosmotech/translation/csm_data/en-US/csm_data/commands/api/rds_load_csv.yml
ADDED

@@ -0,0 +1,13 @@
+description: |
+  Load data from a runner's RDS database into a CSV file.
+
+  Executes a SQL query against the runner's RDS database and saves the results to a CSV file.
+  By default, it will list all tables in the public schema if no specific query is provided.
+parameters:
+  target_folder: The folder where the csv will be written
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  runner_id: A runner id for the Cosmo Tech API
+  run_id: A run id for the Cosmo Tech API
+  file_name: A file name to write the query results
+  query: SQL query to execute (defaults to listing all tables in public schema)
cosmotech/translation/csm_data/en-US/csm_data/commands/api/rds_send_csv.yml
ADDED

@@ -0,0 +1,12 @@
+description: |
+  Send CSV files to a runner's RDS database.
+
+  Takes all CSV files from a source folder and sends their content to the runner's RDS database.
+  Each CSV file will be sent to a table named after the file (without the .csv extension).
+  The table name will be prefixed with "CD_" in the database.
+parameters:
+  source_folder: The folder containing csvs to send
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  runner_id: A runner id for the Cosmo Tech API
+  run_id: A run id for the Cosmo Tech API
cosmotech/translation/csm_data/en-US/csm_data/commands/api/rds_send_store.yml
ADDED

@@ -0,0 +1,12 @@
+description: |
+  Send data from a store to a runner's RDS database.
+
+  Takes all tables from a store and sends their content to the runner's RDS database.
+  Each table will be sent to a table with the same name, prefixed with "CD_" in the database.
+  Null values in rows will be removed before sending.
+parameters:
+  store_folder: The folder containing the store files
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  runner_id: A runner id for the Cosmo Tech API
+  run_id: A run id for the Cosmo Tech API
cosmotech/translation/csm_data/en-US/csm_data/commands/api/run_load_data.yml
ADDED

@@ -0,0 +1,15 @@
+description: |
+  Download a runner data from the Cosmo Tech API
+  Requires a valid Azure connection either with:
+  - The AZ cli command: az login
+  - A triplet of env var AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET
+parameters:
+  organization_id: The id of an organization in the cosmotech api
+  workspace_id: The id of a workspace in the cosmotech api
+  runner_id: The id of a runner in the cosmotech api
+  parameters_absolute_path: A local folder to store the parameters content
+  dataset_absolute_path: A local folder to store the main dataset content
+  write_json: Whether to write the data in JSON format
+  write_csv: Whether to write the data in CSV format
+  fetch_dataset: Whether to fetch datasets
+  parallel: Whether to fetch datasets in parallel
cosmotech/translation/csm_data/en-US/csm_data/commands/api/runtemplate_load_handler.yml
ADDED

@@ -0,0 +1,7 @@
+description: |
+  Uses environment variables to download cloud based Template steps
+parameters:
+  organization_id: The id of an organization in the cosmotech api
+  workspace_id: The id of a solution in the cosmotech api
+  run_template_id: The name of the run template in the cosmotech api
+  handler_list: A list of handlers to download (comma separated)
cosmotech/translation/csm_data/en-US/csm_data/commands/api/tdl_load_files.yml
ADDED

@@ -0,0 +1,14 @@
+description: |
+  Query a twingraph and loads all the data from it
+
+  Will create 1 csv file per node type / relationship type
+
+  The twingraph must have been populated using the "tdl-send-files" command for this to work correctly
+
+  Requires a valid connection to the API to send the data
+parameters:
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  runner_id: A runner id for the Cosmo Tech API
+  scenario_id: A scenario id for the Cosmo Tech API
+  dir: Path to the directory to write the results to
cosmotech/translation/csm_data/en-US/csm_data/commands/api/tdl_send_files.yml
ADDED

@@ -0,0 +1,18 @@
+description: |
+  Reads a folder CSVs and send those to the Cosmo Tech API as a Dataset
+
+  CSVs must follow a given format:
+  - Nodes files must have an id column
+  - Relationship files must have id, src and dest columns
+
+  Non-existing relationship (aka dest or src does not point to existing node) won't trigger an error,
+  the relationship will not be created instead.
+
+  Requires a valid connection to the API to send the data
+parameters:
+  api_url: The URI to a Cosmo Tech API instance
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  runner_id: A runner id for the Cosmo Tech API
+  dir: Path to the directory containing csvs to send
+  clear: Flag to clear the target dataset first (if set to True will clear the dataset before sending anything, irreversibly)
cosmotech/translation/csm_data/en-US/csm_data/commands/api/wsf_load_file.yml
ADDED

@@ -0,0 +1,10 @@
+description: |
+  Download files from a workspace.
+
+  Downloads files from a specified path in a workspace to a local target folder.
+  If the workspace path ends with '/', it will be treated as a folder and all files within will be downloaded.
+parameters:
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  workspace_path: Path inside the workspace to load (end with '/' for a folder)
+  target_folder: Folder in which to send the downloaded file
cosmotech/translation/csm_data/en-US/csm_data/commands/api/wsf_send_file.yml
ADDED

@@ -0,0 +1,12 @@
+description: |
+  Upload a file to a workspace.
+
+  Uploads a local file to a specified path in a workspace.
+  If the workspace path ends with '/', the file will be uploaded to that folder with its original name.
+  Otherwise, the file will be uploaded with the name specified in the workspace path.
+parameters:
+  organization_id: An organization id for the Cosmo Tech API
+  workspace_id: A workspace id for the Cosmo Tech API
+  file_path: Path to the file to send as a workspace file
+  workspace_path: Path inside the workspace to store the file (end with '/' for a folder)
+  overwrite: Flag to overwrite the target file if it exists
cosmotech/translation/csm_data/en-US/csm_data/commands/main.yml
ADDED

@@ -0,0 +1,9 @@
+content: |
+  Welcome to the Cosmo Tech Data Interface command line interface (CLI) tool.
+  This tool provides a set of commands to help you manage your data connections and configurations.
+description: |
+  Cosmo Tech Data Interface
+
+  Command toolkit providing quick implementation of data connections to use inside the Cosmo Tech Platform
+parameters:
+  version: Print version number and return.
cosmotech/translation/csm_data/en-US/csm_data/commands/storage/adx_send_data.yml
ADDED

@@ -0,0 +1,8 @@
+description: "Send data to ADX"
+parameters:
+  adx_uri: The ADX cluster path (URI info can be found into ADX cluster page)
+  adx_ingest_uri: The ADX cluster ingest path (URI info can be found into ADX cluster page)
+  database_name: The targeted database name
+  waiting_ingestion: Toggle waiting for the ingestion results
+  adx_tag: The ADX tag to use for the ingestion
+  store_folder: The folder containing the datastore containing the data to send
cosmotech/translation/csm_data/en-US/csm_data/commands/storage/adx_send_runnerdata.yml
ADDED

@@ -0,0 +1,15 @@
+description: |
+  Uses environment variables to send content of CSV files to ADX
+  Requires a valid Azure connection either with:
+  - The AZ cli command: az login
+  - A triplet of env var AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET
+parameters:
+  dataset_absolute_path: A local folder to store the main dataset content
+  parameters_absolute_path: A local folder to store the parameters content
+  runner_id: the Runner Id to add to records
+  adx_uri: the ADX cluster path (URI info can be found into ADX cluster page)
+  adx_ingest_uri: The ADX cluster ingest path (URI info can be found into ADX cluster page)
+  database_name: The targeted database name
+  send_parameters: whether or not to send parameters (parameters path is mandatory then)
+  send_datasets: whether or not to send datasets (parameters path is mandatory then)
+  wait: Toggle waiting for the ingestion results
cosmotech/translation/csm_data/en-US/csm_data/commands/storage/az_storage_upload.yml
ADDED

@@ -0,0 +1,8 @@
+description: |
+  Upload a folder to an Azure Storage Blob
+parameters:
+  source_folder: The folder/file to upload to the target blob storage
+  recursive: Recursively send the content of every folder inside the starting folder to the blob storage
+  blob_name: The blob name in the Azure Storage service to upload to
+  prefix: A prefix by which all uploaded files should start with in the blob storage
+  az_storage_sas_url: SAS url allowing access to the AZ storage container
cosmotech/translation/csm_data/en-US/csm_data/commands/storage/s3_bucket_delete.yml
ADDED

@@ -0,0 +1,17 @@
+description: |
+  Delete S3 bucket content to a given folder
+
+  Will delete everything in the bucket unless a prefix is set, then only file following the given prefix will be deleted
+
+  Make use of the boto3 library to access the bucket
+
+  More information is available on this page:
+  [https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html]
+parameters:
+  bucket_name: The bucket on S3 to delete
+  prefix_filter: A prefix by which all deleted files should start in the bucket
+  use_ssl: Use SSL to secure connection to S3
+  s3_url: URL to connect to the S3 system
+  access_id: Identity used to connect to the S3 system
+  secret_key: Secret tied to the ID used to connect to the S3 system
+  ssl_cert_bundle: Path to an alternate CA Bundle to validate SSL connections
cosmotech/translation/csm_data/en-US/csm_data/commands/storage/s3_bucket_download.yml
ADDED

@@ -0,0 +1,18 @@
+description: |
+  Download S3 bucket content to a given folder
+
+  Will download everything in the bucket unless a prefix is set, then only file following the given prefix will be downloaded
+
+  Make use of the boto3 library to access the bucket
+
+  More information is available on this page:
+  [https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html]
+parameters:
+  target_folder: The folder in which to download the bucket content
+  bucket_name: The bucket on S3 to download
+  prefix_filter: A prefix by which all downloaded files should start in the bucket
+  use_ssl: Use SSL to secure connection to S3
+  s3_url: URL to connect to the S3 system
+  access_id: Identity used to connect to the S3 system
+  secret_key: Secret tied to the ID used to connect to the S3 system
+  ssl_cert_bundle: Path to an alternate CA Bundle to validate SSL connections
cosmotech/translation/csm_data/en-US/csm_data/commands/storage/s3_bucket_upload.yml
ADDED

@@ -0,0 +1,21 @@
+description: |
+  Upload a folder to a S3 Bucket
+
+  Will upload everything from a given folder to a S3 bucket. If a single file is passed only it will be uploaded, and recursive will be ignored
+
+  Giving a prefix will add it to every upload (finishing the prefix with a "/" will allow to upload in a folder inside the bucket)
+
+  Make use of the boto3 library to access the bucket
+
+  More information is available on this page:
+  [https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html]
+parameters:
+  source_folder: The folder/file to upload to the target bucket
+  recursive: Recursively send the content of every folder inside the starting folder to the bucket
+  bucket_name: The bucket on S3 to upload to
+  prefix: A prefix by which all uploaded files should start with in the bucket
+  use_ssl: Use SSL to secure connection to S3
+  s3_url: URL to connect to the S3 system
+  access_id: Identity used to connect to the S3 system
+  secret_key: Secret tied to the ID used to connect to the S3 system
+  ssl_cert_bundle: Path to an alternate CA Bundle to validate SSL connections
cosmotech/translation/csm_data/en-US/csm_data/commands/store/dump_to_azure.yml
ADDED

@@ -0,0 +1,23 @@
+description: |
+  Dump a datastore to a Azure storage account.
+
+  Will upload everything from a given data store to a Azure storage container.
+
+  3 modes currently exists:
+  - sqlite: will dump the data store underlying database as is
+  - csv: will convert every table of the datastore to csv and send them as separate files
+  - parquet: will convert every table of the datastore to parquet and send them as separate files
+
+  Make use of the azure.storage.blob library to access the container
+
+  More information is available on this page:
+  [https://learn.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python?tabs=managed-identity%2Croles-azure-portal%2Csign-in-azure-cli&pivots=blob-storage-quickstart-scratch]
+parameters:
+  store_folder: The folder containing the store files
+  output_type: Choose the type of file output to use (sqlite, csv, parquet)
+  account_name: The account name on Azure to upload to
+  container_name: The container name on Azure to upload to
+  prefix: A prefix by which all uploaded files should start with in the container
+  tenant_id: Tenant Identity used to connect to Azure storage system
+  client_id: Client Identity used to connect to Azure storage system
+  client_secret: Client Secret tied to the ID used to connect to Azure storage system
cosmotech/translation/csm_data/en-US/csm_data/commands/store/dump_to_postgresql.yml
ADDED

@@ -0,0 +1,20 @@
+description: |
+  Running this command will dump your store to a given postgresql database
+
+  Tables names from the store will be prepended with table-prefix in target database
+
+  The postgresql user must have USAGE granted on the schema for this script to work due to the use of the command COPY FROM STDIN
+
+  You can simply give him that grant by running the command:
+  GRANT USAGE ON SCHEMA <schema> TO <username>
+parameters:
+  store_folder: The folder containing the store files
+  table_prefix: Prefix to add to the table name
+  postgres_host: PostgreSQL host URI
+  postgres_port: PostgreSQL database port
+  postgres_db: PostgreSQL database name
+  postgres_schema: PostgreSQL schema name
+  postgres_user: PostgreSQL connection user name
+  postgres_password: PostgreSQL connection password
+  replace: Append data on existing tables
+  encode_password: Force encoding of password to percent encoding
cosmotech/translation/csm_data/en-US/csm_data/commands/store/dump_to_s3.yml
ADDED

@@ -0,0 +1,26 @@
+description: |
+  Dump a datastore to a S3
+
+  Will upload everything from a given data store to a S3 bucket.
+
+  3 modes currently exists:
+  - sqlite: will dump the data store underlying database as is
+  - csv: will convert every table of the datastore to csv and send them as separate files
+  - parquet: will convert every table of the datastore to parquet and send them as separate files
+
+  Giving a prefix will add it to every upload (finishing the prefix with a "/" will allow to upload in a folder inside the bucket)
+
+  Make use of the boto3 library to access the bucket
+
+  More information is available on this page:
+  [https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html]
+parameters:
+  store_folder: The folder containing the store files
+  output_type: Choose the type of file output to use (sqlite, csv, parquet)
+  bucket_name: The bucket on S3 to upload to
+  prefix: A prefix by which all uploaded files should start with in the bucket
+  use_ssl: Use SSL to secure connection to S3
+  s3_url: URL to connect to the S3 system
+  access_id: Identity used to connect to the S3 system
+  secret_key: Secret tied to the ID used to connect to the S3 system
+  ssl_cert_bundle: Path to an alternate CA Bundle to validate SSL connections
cosmotech/translation/csm_data/en-US/csm_data/commands/store/load_from_singlestore.yml
ADDED

@@ -0,0 +1,16 @@
+description: |
+  Load data from SingleStore tables into the store.
+  Will download everything from a given SingleStore database following some configuration into the store.
+
+  Make use of the singlestoredb to access to SingleStore
+
+  More information is available on this page:
+  [https://docs.singlestore.com/cloud/developer-resources/connect-with-application-development-tools/connect-with-python/connect-using-the-singlestore-python-client/]
+parameters:
+  singlestore_host: SingleStore instance URI
+  singlestore_port: SingleStore port
+  singlestore_db: SingleStore database name
+  singlestore_user: SingleStore connection user name
+  singlestore_password: SingleStore connection password
+  singlestore_tables: SingleStore table names to fetched (separated by comma)
+  store_folder: The folder containing the store files