cosmotech-acceleration-library 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cosmotech/coal/__init__.py +8 -0
- cosmotech/coal/aws/__init__.py +23 -0
- cosmotech/coal/aws/s3.py +235 -0
- cosmotech/coal/azure/__init__.py +23 -0
- cosmotech/coal/azure/adx/__init__.py +26 -0
- cosmotech/coal/azure/adx/auth.py +125 -0
- cosmotech/coal/azure/adx/ingestion.py +329 -0
- cosmotech/coal/azure/adx/query.py +56 -0
- cosmotech/coal/azure/adx/runner.py +217 -0
- cosmotech/coal/azure/adx/store.py +255 -0
- cosmotech/coal/azure/adx/tables.py +118 -0
- cosmotech/coal/azure/adx/utils.py +71 -0
- cosmotech/coal/azure/blob.py +109 -0
- cosmotech/coal/azure/functions.py +72 -0
- cosmotech/coal/azure/storage.py +74 -0
- cosmotech/coal/cosmotech_api/__init__.py +36 -0
- cosmotech/coal/cosmotech_api/connection.py +96 -0
- cosmotech/coal/cosmotech_api/dataset/__init__.py +26 -0
- cosmotech/coal/cosmotech_api/dataset/converters.py +164 -0
- cosmotech/coal/cosmotech_api/dataset/download/__init__.py +19 -0
- cosmotech/coal/cosmotech_api/dataset/download/adt.py +119 -0
- cosmotech/coal/cosmotech_api/dataset/download/common.py +140 -0
- cosmotech/coal/cosmotech_api/dataset/download/file.py +216 -0
- cosmotech/coal/cosmotech_api/dataset/download/twingraph.py +188 -0
- cosmotech/coal/cosmotech_api/dataset/utils.py +132 -0
- cosmotech/coal/cosmotech_api/parameters.py +48 -0
- cosmotech/coal/cosmotech_api/run.py +25 -0
- cosmotech/coal/cosmotech_api/run_data.py +173 -0
- cosmotech/coal/cosmotech_api/run_template.py +108 -0
- cosmotech/coal/cosmotech_api/runner/__init__.py +28 -0
- cosmotech/coal/cosmotech_api/runner/data.py +38 -0
- cosmotech/coal/cosmotech_api/runner/datasets.py +364 -0
- cosmotech/coal/cosmotech_api/runner/download.py +146 -0
- cosmotech/coal/cosmotech_api/runner/metadata.py +42 -0
- cosmotech/coal/cosmotech_api/runner/parameters.py +157 -0
- cosmotech/coal/cosmotech_api/twin_data_layer.py +512 -0
- cosmotech/coal/cosmotech_api/workspace.py +127 -0
- cosmotech/coal/csm/__init__.py +6 -0
- cosmotech/coal/csm/engine/__init__.py +47 -0
- cosmotech/coal/postgresql/__init__.py +22 -0
- cosmotech/coal/postgresql/runner.py +93 -0
- cosmotech/coal/postgresql/store.py +98 -0
- cosmotech/coal/singlestore/__init__.py +17 -0
- cosmotech/coal/singlestore/store.py +100 -0
- cosmotech/coal/store/__init__.py +42 -0
- cosmotech/coal/store/csv.py +44 -0
- cosmotech/coal/store/native_python.py +25 -0
- cosmotech/coal/store/pandas.py +26 -0
- cosmotech/coal/store/pyarrow.py +23 -0
- cosmotech/coal/store/store.py +79 -0
- cosmotech/coal/utils/__init__.py +18 -0
- cosmotech/coal/utils/api.py +68 -0
- cosmotech/coal/utils/logger.py +10 -0
- cosmotech/coal/utils/postgresql.py +236 -0
- cosmotech/csm_data/__init__.py +6 -0
- cosmotech/csm_data/commands/__init__.py +6 -0
- cosmotech/csm_data/commands/adx_send_data.py +92 -0
- cosmotech/csm_data/commands/adx_send_runnerdata.py +119 -0
- cosmotech/csm_data/commands/api/__init__.py +6 -0
- cosmotech/csm_data/commands/api/api.py +50 -0
- cosmotech/csm_data/commands/api/postgres_send_runner_metadata.py +119 -0
- cosmotech/csm_data/commands/api/rds_load_csv.py +90 -0
- cosmotech/csm_data/commands/api/rds_send_csv.py +74 -0
- cosmotech/csm_data/commands/api/rds_send_store.py +74 -0
- cosmotech/csm_data/commands/api/run_load_data.py +120 -0
- cosmotech/csm_data/commands/api/runtemplate_load_handler.py +66 -0
- cosmotech/csm_data/commands/api/tdl_load_files.py +76 -0
- cosmotech/csm_data/commands/api/tdl_send_files.py +82 -0
- cosmotech/csm_data/commands/api/wsf_load_file.py +66 -0
- cosmotech/csm_data/commands/api/wsf_send_file.py +68 -0
- cosmotech/csm_data/commands/az_storage_upload.py +76 -0
- cosmotech/csm_data/commands/s3_bucket_delete.py +107 -0
- cosmotech/csm_data/commands/s3_bucket_download.py +118 -0
- cosmotech/csm_data/commands/s3_bucket_upload.py +128 -0
- cosmotech/csm_data/commands/store/__init__.py +6 -0
- cosmotech/csm_data/commands/store/dump_to_azure.py +120 -0
- cosmotech/csm_data/commands/store/dump_to_postgresql.py +107 -0
- cosmotech/csm_data/commands/store/dump_to_s3.py +169 -0
- cosmotech/csm_data/commands/store/list_tables.py +48 -0
- cosmotech/csm_data/commands/store/load_csv_folder.py +43 -0
- cosmotech/csm_data/commands/store/load_from_singlestore.py +96 -0
- cosmotech/csm_data/commands/store/reset.py +31 -0
- cosmotech/csm_data/commands/store/store.py +37 -0
- cosmotech/csm_data/main.py +57 -0
- cosmotech/csm_data/utils/__init__.py +6 -0
- cosmotech/csm_data/utils/click.py +18 -0
- cosmotech/csm_data/utils/decorators.py +75 -0
- cosmotech/orchestrator_plugins/csm-data/__init__.py +11 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/postgres_send_runner_metadata.json +40 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/rds_load_csv.json +27 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/rds_send_csv.json +27 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/rds_send_store.json +27 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/run_load_data.json +30 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/runtemplate_load_handler.json +27 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/tdl_load_files.json +32 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/tdl_send_files.json +27 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/try_api_connection.json +9 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/wsf_load_file.json +36 -0
- cosmotech/orchestrator_plugins/csm-data/templates/api/wsf_send_file.json +36 -0
- cosmotech/orchestrator_plugins/csm-data/templates/main/adx_send_runnerdata.json +29 -0
- cosmotech/orchestrator_plugins/csm-data/templates/main/az_storage_upload.json +25 -0
- cosmotech/orchestrator_plugins/csm-data/templates/main/s3_bucket_delete.json +31 -0
- cosmotech/orchestrator_plugins/csm-data/templates/main/s3_bucket_download.json +34 -0
- cosmotech/orchestrator_plugins/csm-data/templates/main/s3_bucket_upload.json +35 -0
- cosmotech/orchestrator_plugins/csm-data/templates/store/store_dump_to_azure.json +35 -0
- cosmotech/orchestrator_plugins/csm-data/templates/store/store_dump_to_postgresql.json +34 -0
- cosmotech/orchestrator_plugins/csm-data/templates/store/store_dump_to_s3.json +36 -0
- cosmotech/orchestrator_plugins/csm-data/templates/store/store_list_tables.json +15 -0
- cosmotech/orchestrator_plugins/csm-data/templates/store/store_load_csv_folder.json +18 -0
- cosmotech/orchestrator_plugins/csm-data/templates/store/store_load_from_singlestore.json +34 -0
- cosmotech/orchestrator_plugins/csm-data/templates/store/store_reset.json +15 -0
- cosmotech/translation/coal/__init__.py +6 -0
- cosmotech/translation/coal/en-US/coal/common/data_transfer.yml +6 -0
- cosmotech/translation/coal/en-US/coal/common/errors.yml +9 -0
- cosmotech/translation/coal/en-US/coal/common/file_operations.yml +6 -0
- cosmotech/translation/coal/en-US/coal/common/progress.yml +6 -0
- cosmotech/translation/coal/en-US/coal/common/timing.yml +5 -0
- cosmotech/translation/coal/en-US/coal/common/validation.yml +8 -0
- cosmotech/translation/coal/en-US/coal/cosmotech_api/connection.yml +10 -0
- cosmotech/translation/coal/en-US/coal/cosmotech_api/run_data.yml +2 -0
- cosmotech/translation/coal/en-US/coal/cosmotech_api/run_template.yml +8 -0
- cosmotech/translation/coal/en-US/coal/cosmotech_api/runner.yml +16 -0
- cosmotech/translation/coal/en-US/coal/cosmotech_api/solution.yml +5 -0
- cosmotech/translation/coal/en-US/coal/cosmotech_api/workspace.yml +7 -0
- cosmotech/translation/coal/en-US/coal/services/adx.yml +59 -0
- cosmotech/translation/coal/en-US/coal/services/api.yml +8 -0
- cosmotech/translation/coal/en-US/coal/services/azure_storage.yml +14 -0
- cosmotech/translation/coal/en-US/coal/services/database.yml +19 -0
- cosmotech/translation/coal/en-US/coal/services/dataset.yml +68 -0
- cosmotech/translation/coal/en-US/coal/services/postgresql.yml +28 -0
- cosmotech/translation/coal/en-US/coal/services/s3.yml +9 -0
- cosmotech/translation/coal/en-US/coal/solution.yml +3 -0
- cosmotech/translation/coal/en-US/coal/web.yml +2 -0
- cosmotech/translation/csm_data/__init__.py +6 -0
- cosmotech/translation/csm_data/en-US/csm-data.yml +434 -0
- cosmotech_acceleration_library-1.0.0.dist-info/METADATA +255 -0
- cosmotech_acceleration_library-1.0.0.dist-info/RECORD +141 -0
- cosmotech_acceleration_library-1.0.0.dist-info/WHEEL +5 -0
- cosmotech_acceleration_library-1.0.0.dist-info/entry_points.txt +2 -0
- cosmotech_acceleration_library-1.0.0.dist-info/licenses/LICENSE +17 -0
- cosmotech_acceleration_library-1.0.0.dist-info/top_level.txt +1 -0
cosmotech/orchestrator_plugins/csm-data/templates/store/store_dump_to_azure.json
@@ -0,0 +1,35 @@
+{
+  "id": "csm-data store-dump-to-azure",
+  "command": "csm-data",
+  "arguments": [
+    "store",
+    "dump-to-azure"
+  ],
+  "description": "Use csm-data to dump a store to Azure Blob Storage",
+  "useSystemEnvironment": true,
+  "environment": {
+    "CSM_PARAMETERS_ABSOLUTE_PATH": {
+      "description": "Folder containing the store to dump to Azure Blob Storage"
+    },
+    "AZURE_ACCOUNT_NAME": {
+      "description": "Azure Storage account name"
+    },
+    "AZURE_CONTAINER_NAME": {
+      "description": "Azure Storage container name"
+    },
+    "CSM_DATA_PREFIX": {
+      "description": "Prefix to add to uploaded files",
+      "optional": true,
+      "defaultValue": ""
+    },
+    "AZURE_TENANT_ID": {
+      "description": "Azure tenant ID"
+    },
+    "AZURE_CLIENT_ID": {
+      "description": "Azure client ID"
+    },
+    "AZURE_CLIENT_SECRET": {
+      "description": "Azure client secret"
+    }
+  }
+}
cosmotech/orchestrator_plugins/csm-data/templates/store/store_dump_to_postgresql.json
@@ -0,0 +1,34 @@
+{
+  "id": "csm-data store-dump-to-postgresql",
+  "command": "csm-data",
+  "arguments": [
+    "store",
+    "dump-to-postgresql"
+  ],
+  "description": "Use csm-data to dump a store to a PostgreSQL database",
+  "useSystemEnvironment": true,
+  "environment": {
+    "CSM_PARAMETERS_ABSOLUTE_PATH": {
+      "description": "Folder containing the store to dump to PostgreSQL"
+    },
+    "POSTGRES_HOST_URI": {
+      "description": "PostgreSQL host URI"
+    },
+    "POSTGRES_HOST_PORT": {
+      "description": "PostgreSQL host port",
+      "defaultValue": "5432"
+    },
+    "POSTGRES_DB_NAME": {
+      "description": "PostgreSQL database name"
+    },
+    "POSTGRES_DB_SCHEMA": {
+      "description": "PostgreSQL database schema"
+    },
+    "POSTGRES_USER_NAME": {
+      "description": "PostgreSQL user name"
+    },
+    "POSTGRES_USER_PASSWORD": {
+      "description": "PostgreSQL user password"
+    }
+  }
+}
cosmotech/orchestrator_plugins/csm-data/templates/store/store_dump_to_s3.json
@@ -0,0 +1,36 @@
+{
+  "id": "csm-data store-dump-to-s3",
+  "command": "csm-data",
+  "arguments": [
+    "store",
+    "dump-to-s3"
+  ],
+  "description": "Use csm-data to dump a store to an S3 bucket",
+  "useSystemEnvironment": true,
+  "environment": {
+    "CSM_PARAMETERS_ABSOLUTE_PATH": {
+      "description": "Folder containing the store to dump to S3"
+    },
+    "CSM_DATA_BUCKET_NAME": {
+      "description": "Name of the bucket in the S3 instance to upload to"
+    },
+    "CSM_DATA_BUCKET_PREFIX": {
+      "description": "Prefix to add to uploaded files",
+      "optional": true,
+      "defaultValue": ""
+    },
+    "AWS_ENDPOINT_URL": {
+      "description": "URI required to connect to a given S3 instance"
+    },
+    "AWS_ACCESS_KEY_ID": {
+      "description": "User access key required to authenticate to the S3 instance"
+    },
+    "AWS_SECRET_ACCESS_KEY": {
+      "description": "User secret required to authenticate to the S3 instance"
+    },
+    "CSM_S3_CA_BUNDLE": {
+      "description": "Path to SSL certificate bundle",
+      "optional": true
+    }
+  }
+}
cosmotech/orchestrator_plugins/csm-data/templates/store/store_list_tables.json
@@ -0,0 +1,15 @@
+{
+  "id": "csm-data store-list-tables",
+  "command": "csm-data",
+  "arguments": [
+    "store",
+    "list-tables"
+  ],
+  "description": "Use csm-data to list tables in a store",
+  "useSystemEnvironment": true,
+  "environment": {
+    "CSM_PARAMETERS_ABSOLUTE_PATH": {
+      "description": "Folder containing the store to list tables from"
+    }
+  }
+}
cosmotech/orchestrator_plugins/csm-data/templates/store/store_load_csv_folder.json
@@ -0,0 +1,18 @@
+{
+  "id": "csm-data store-load-csv-folder",
+  "command": "csm-data",
+  "arguments": [
+    "store",
+    "load-csv-folder"
+  ],
+  "description": "Use csm-data to load CSV files from a folder into a store",
+  "useSystemEnvironment": true,
+  "environment": {
+    "CSM_PARAMETERS_ABSOLUTE_PATH": {
+      "description": "Folder containing the store to load CSV files into"
+    },
+    "CSM_DATASET_ABSOLUTE_PATH": {
+      "description": "Folder containing CSV files to load into the store"
+    }
+  }
+}
cosmotech/orchestrator_plugins/csm-data/templates/store/store_load_from_singlestore.json
@@ -0,0 +1,34 @@
+{
+  "id": "csm-data store-load-from-singlestore",
+  "command": "csm-data",
+  "arguments": [
+    "store",
+    "load-from-singlestore"
+  ],
+  "description": "Use csm-data to load data from SingleStore into a store",
+  "useSystemEnvironment": true,
+  "environment": {
+    "CSM_PARAMETERS_ABSOLUTE_PATH": {
+      "description": "Folder containing the store to load data into"
+    },
+    "SINGLE_STORE_HOST": {
+      "description": "SingleStore host URI"
+    },
+    "SINGLE_STORE_PORT": {
+      "description": "SingleStore host port",
+      "defaultValue": "3306"
+    },
+    "SINGLE_STORE_DB": {
+      "description": "SingleStore database name"
+    },
+    "SINGLE_STORE_USERNAME": {
+      "description": "SingleStore user name"
+    },
+    "SINGLE_STORE_PASSWORD": {
+      "description": "SingleStore user password"
+    },
+    "SINGLE_STORE_TABLES": {
+      "description": "Comma-separated list of tables to load from SingleStore"
+    }
+  }
+}
cosmotech/orchestrator_plugins/csm-data/templates/store/store_reset.json
@@ -0,0 +1,15 @@
+{
+  "id": "csm-data store-reset",
+  "command": "csm-data",
+  "arguments": [
+    "store",
+    "reset"
+  ],
+  "description": "Use csm-data to reset a store (delete all tables)",
+  "useSystemEnvironment": true,
+  "environment": {
+    "CSM_PARAMETERS_ABSOLUTE_PATH": {
+      "description": "Folder containing the store to reset"
+    }
+  }
+}
cosmotech/translation/coal/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (C) - 2023 - 2025 - Cosmo Tech
+# This document and all information contained herein is the exclusive property -
+# including all intellectual property rights pertaining thereto - of Cosmo Tech.
+# Any use, reproduction, translation, broadcasting, transmission, distribution,
+# etc., to any person is prohibited unless it has been previously and
+# specifically authorized by written means by Cosmo Tech.
cosmotech/translation/coal/en-US/coal/common/data_transfer.yml
@@ -0,0 +1,6 @@
+# Data transfer messages
+sending_table: "Sending table {table_name} as {output_type}"
+sending_data: " Sending {size} bytes of data"
+table_empty: "Table {table_name} is empty (skipping)"
+rows_inserted: "Inserted {rows} rows in table {table_name}"
+file_sent: "Sending {file_path} as {uploaded_name}"
cosmotech/translation/coal/en-US/coal/common/errors.yml
@@ -0,0 +1,9 @@
+# Common error messages
+no_env_vars: "No environment variables found for connection"
+missing_var: "Missing environment variable: {envvar}"
+no_valid_connection: "No valid connection configuration found"
+no_credentials: "No credentials available for connection"
+operation_failed: "Operation '{operation}' failed: {error}"
+unexpected_error: "Unexpected error: {error}"
+data_invalid_output_type: "Invalid output type: {output_type}"
+data_no_workspace_files: "No workspace files found with prefix {file_prefix} in workspace {workspace_id}"
cosmotech/translation/coal/en-US/coal/common/file_operations.yml
@@ -0,0 +1,6 @@
+# File operation messages
+not_found: "{source_folder} does not exists"
+already_exists: "File {csv_path} already exists"
+not_directory: "{target_dir} is a file and not a directory"
+not_exists: '"{file_path}" does not exists'
+not_single_file: '"{file_path}" is not a single file'
cosmotech/translation/coal/en-US/coal/common/progress.yml
@@ -0,0 +1,6 @@
+# Progress-related messages
+operation_started: "Operation '{operation}' started"
+operation_completed: "Operation '{operation}' completed"
+processing_items: "Processing {count} items"
+items_processed: "Processed {count} items"
+percentage_complete: "{percentage}% complete"
cosmotech/translation/coal/en-US/coal/common/timing.yml
@@ -0,0 +1,5 @@
+# Timing-related messages
+operation_completed: "Operation '{operation}' completed in {time} seconds"
+full_process: "Full process completed in {time} seconds"
+download_completed: "{type} download completed in {time} seconds"
+processing_time: "Processing took {time} seconds"
cosmotech/translation/coal/en-US/coal/common/validation.yml
@@ -0,0 +1,8 @@
+# Validation-related messages
+not_csv_file: "'{file_path}' is not a csv file"
+invalid_nodes_relations: "'{file_path}' does not contains valid nodes or relations"
+invalid_truth_value: '"{string} is not a recognized truth value'
+node_requirements: "Node files must have an '{id_column}' column"
+relationship_requirements: "Relationship files must have '{source_column}' and '{target_column}' columns, or '{id_column}'"
+invalid_output_type: "Invalid output type: {output_type}"
+missing_field: "Required field '{field}' is missing"
cosmotech/translation/coal/en-US/coal/cosmotech_api/connection.yml
@@ -0,0 +1,10 @@
+# Connection-specific messages
+existing_sets: "Existing sets are:"
+azure_connection: " Azure Entra Connection : {keys}"
+api_key_connection: " Cosmo Tech API Key : {keys}"
+keycloak_connection: " Keycloak connection : {keys}"
+found_keycloak: "Found Keycloack connection info"
+found_cert_authority: "Found Certificate Authority override for IDP connection, using it."
+found_api_key: "Found Api Key connection info"
+found_azure: "Found Azure Entra connection info"
+found_valid: "Found valid connection of type: {type}"
cosmotech/translation/coal/en-US/coal/cosmotech_api/run_template.yml
@@ -0,0 +1,8 @@
+# Run Template-specific messages
+loading_solution: "Loading solution information"
+error_details: "Error details: {details}"
+querying_handler: "Querying handler {handler} for template {template}"
+handler_not_found: "Handler {handler} not found in template {template} for solution {solution}"
+extracting_handler: "Extracting handler to {path}"
+handler_not_zip: "Handler {handler} is not a valid zip file"
+run_issues: "Run template has issues"
cosmotech/translation/coal/en-US/coal/cosmotech_api/runner.yml
@@ -0,0 +1,16 @@
+# Runner-specific messages
+starting_download: "Starting the Run data download"
+no_parameters: "no parameters found in the runner"
+loaded_data: "Loaded run data"
+loading_data: "Loading data from {source}"
+parameter_debug: " - {param_id:<{max_name_size}} {var_type:<{max_type_size}} \"{value}\"{inherited}"
+not_single_dataset: "{runner_id} is not tied to a single dataset but {count}"
+dataset_state: "Dataset {dataset_id} is in state {status}"
+downloading_datasets: "Downloading {count} datasets"
+runner_info: "Runner info: {info}"
+dataset_info: "Dataset info: {info}"
+writing_parameters: "Writing parameters to files"
+generating_file: "Generating {file}"
+dataset_debug: " - {folder} ({id})"
+no_dataset_write: "No dataset write asked, skipping"
+no_parameters_write: "No parameters write asked, skipping"
cosmotech/translation/coal/en-US/coal/cosmotech_api/workspace.yml
@@ -0,0 +1,7 @@
+# Workspace-specific messages
+target_is_folder: "Target {target_dir} is a folder"
+loading_file: "Loading file {file_name}"
+file_loaded: "File {file} loaded"
+sending_to_api: "Sending file to API"
+file_sent: "File sent to API"
+not_found: "Workspace {workspace_id} not found in organization {organization_id}"
cosmotech/translation/coal/en-US/coal/services/adx.yml
@@ -0,0 +1,59 @@
+# ADX-specific messages
+creating_kusto_client: "Creating Kusto client for cluster: {cluster_url}"
+creating_ingest_client: "Creating ingest client for URL: {ingest_url}"
+using_app_auth: "Using Azure AD application authentication"
+using_cli_auth: "Using Azure CLI authentication"
+generating_urls: "Generating URLs for cluster {cluster_name} in region {cluster_region}"
+running_query: "Running query on database {database}: {query}"
+running_command: "Running command on database {database}: {query}"
+query_complete: "Query complete, returned {rows} rows"
+command_complete: "Command execution complete"
+ingesting_dataframe: "Ingesting dataframe with {rows} rows to table {table_name}"
+ingestion_queued: "Ingestion queued with source ID: {source_id}"
+sending_to_adx: "Sending {items} items to ADX table {table_name}"
+empty_dict_list: "Empty dictionary list provided, nothing to send"
+table_creation_failed: "Error creating table {table_name}"
+ingesting: "Ingesting data into table {table}"
+max_retry: "Maximum retry count reached"
+waiting_results: "Waiting for ingestion results (try {count}/{limit}, waiting {duration}s)"
+status: "Checking ingestion statuses"
+status_report: "Table {table} ingestion status: {status}"
+no_wait: "Not waiting for ingestion to complete"
+exceptions: "Exceptions: {exceptions}"
+checking_status: "Checking ingestion status for {count} operations"
+status_messages: "Found {success} success messages and {failure} failure messages"
+status_found: "Found status for {source_id}: {status}"
+ingestion_timeout: "Ingestion operation {source_id} timed out"
+clear_queues_no_confirmation: "Clear queues operation requires confirmation=True"
+clearing_queues: "DANGER: Clearing all ingestion status queues"
+queues_cleared: "All ingestion status queues have been cleared"
+waiting_ingestion: "Waiting for ingestion of data to finish"
+ingestion_failed: "Ingestion {ingestion_id} failed for table {table}"
+ingestion_completed: "All data ingestion attempts completed"
+failures_detected: "Failures detected during ingestion - dropping data with tag: {operation_tag}"
+checking_table_exists: "Checking if table exists"
+creating_nonexistent_table: "Table does not exist, creating it"
+dropping_data_by_tag: "Dropping data with tag: {tag}"
+drop_completed: "Drop by tag operation completed"
+drop_error: "Error during drop by tag operation: {error}"
+drop_details: "Drop by tag details"
+checking_table: "Checking if table {table_name} exists in database {database}"
+table_exists: "Table {table_name} exists"
+table_not_exists: "Table {table_name} does not exist"
+creating_table: "Creating table {table_name} in database {database}"
+create_query: "Create table query: {query}"
+table_created: "Table {table_name} created successfully"
+table_creation_error: "Error creating table {table_name}: {error}"
+mapping_type: "Mapping type for key {key} with value type {value_type}"
+content_debug: "CSV content: {content}"
+sending_data: "Sending data to the table {table_name}"
+listing_tables: "Listing tables"
+working_on_table: "Working on table: {table_name}"
+table_empty: "Table {table_name} has no rows - skipping it"
+starting_ingestion: "Starting ingestion operation with tag: {operation_tag}"
+loading_datastore: "Loading datastore"
+data_sent: "Store data was sent for ADX ingestion"
+ingestion_error: "Error during ingestion process"
+dropping_data: "Dropping data with tag: {operation_tag}"
+initializing_clients: "Initializing clients"
+empty_column: "Column {column_name} has no content, defaulting it to string"
cosmotech/translation/coal/en-US/coal/services/api.yml
@@ -0,0 +1,8 @@
+# API-specific messages
+solution_debug: "Solution: {solution}"
+api_request: "API request: {method} {url}"
+api_response: "API response: {status_code}"
+api_error: "API error: {error}"
+api_success: "API request successful"
+api_timeout: "API request timed out after {timeout} seconds"
+api_retry: "Retrying API request ({attempt}/{max_attempts})"
cosmotech/translation/coal/en-US/coal/services/azure_storage.yml
@@ -0,0 +1,14 @@
+# Azure Storage-specific messages
+deleting_objects: "Deleting {objects}"
+no_objects: "No objects to delete"
+downloading: "Downloading {path} to {output}"
+sending_file: "Sending {file} as {name}"
+found_file: "Found {file}, storing it"
+clearing_content: "Clearing all dataset content"
+sending_content: "Sending content of '{file}'"
+row_batch: "Found row count of {count}, sending now"
+import_errors: "Found {count} errors while importing: "
+error_detail: "Error: {error}"
+all_data_sent: "Sent all data found"
+writing_lines: "Writing {count} lines in {file}"
+all_csv_written: "All CSV are written"
cosmotech/translation/coal/en-US/coal/services/database.yml
@@ -0,0 +1,19 @@
+# Database-specific messages
+creating_table: "creating table {table}"
+updating_metadata: "adding/updating runner metadata"
+metadata_updated: "Runner metadata table has been updated"
+sending_data: "Sending data to table {table}"
+no_rows: " - No rows : skipping"
+column_list: " - Column list: {columns}"
+row_count: " - Sending {count} rows"
+query_results: "Query returned {count} rows"
+saved_results: "Results saved as {file}"
+no_results: "No results returned by the query"
+store_empty: "Data store is empty"
+store_tables: "Data store contains the following tables"
+table_entry: " - {table}"
+table_schema: "Schema: {schema}"
+store_reset: "Data store in {folder} got reset"
+rows_fetched: "Rows fetched in {table} table: {count} in {time} seconds"
+tables_to_fetch: "Tables to fetched: {tables}"
+full_dataset: "Full dataset fetched and wrote in {time} seconds"
cosmotech/translation/coal/en-US/coal/services/dataset.yml
@@ -0,0 +1,68 @@
+# Dataset-specific messages
+# General
+download_started: "Starting download of {dataset_type} dataset"
+download_completed: "Successfully downloaded {dataset_type} dataset"
+operation_timing: "{operation} took {time} seconds"
+dataset_downloading: "Downloading dataset (organization: {organization_id}, dataset: {dataset_id})"
+dataset_info_retrieved: "Retrieved dataset info: {dataset_name} ({dataset_id})"
+dataset_type_detected: "Detected dataset type: {type}"
+parallel_download: "Downloading {count} datasets in parallel"
+sequential_download: "Downloading {count} datasets sequentially"
+
+# Processing
+processing_graph_data: "Processing graph data with {nodes_count} nodes and {relationships_count} relationships (restore_names={restore_names})"
+entity_count: "Found {count} entities of type {entity_type}"
+extracting_headers: "Extracting headers from {rows} rows"
+headers_extracted: "Extracted {count} fields: {fields}"
+
+# File operations
+converting_to_files: "Converting {dataset_type} dataset '{dataset_name}' to files"
+created_temp_folder: "Created temporary folder: {folder}"
+using_folder: "Using folder: {folder}"
+converting_graph_data: "Converting graph data with {entity_types} entity types to folder: {folder}"
+converting_file_data: "Converting {file_count} files of type {file_type} to folder: {folder}"
+skipping_empty_entity: "Skipping empty entity type: {entity_type}"
+writing_csv: "Writing CSV file with {count} records: {file_name}"
+writing_file: "Writing file: {file_name} (type: {file_type})"
+file_written: "File written: {file_path}"
+files_created: "Created {count} files in folder: {folder}"
+
+# ADT specific
+adt_connecting: "Connecting to ADT instance at {url}"
+adt_no_credentials: "No credentials available for ADT connection"
+adt_querying_twins: "Querying digital twins"
+adt_twins_found: "Found {count} digital twins"
+adt_querying_relations: "Querying relationships"
+adt_relations_found: "Found {count} relationships"
+
+# TwinGraph specific
+twingraph_downloading: "Downloading TwinGraph dataset (organization: {organization_id}, dataset: {dataset_id})"
+twingraph_querying_nodes: "Querying TwinGraph nodes for dataset {dataset_id}"
+twingraph_nodes_found: "Found {count} nodes in TwinGraph"
+twingraph_querying_edges: "Querying TwinGraph edges for dataset {dataset_id}"
+twingraph_edges_found: "Found {count} edges in TwinGraph"
+
+# Legacy TwinGraph specific
+legacy_twingraph_downloading: "Downloading legacy TwinGraph dataset (organization: {organization_id}, cache: {cache_name})"
+legacy_twingraph_querying_nodes: "Querying legacy TwinGraph nodes for cache {cache_name}"
+legacy_twingraph_nodes_found: "Found {count} nodes in legacy TwinGraph"
+legacy_twingraph_querying_relations: "Querying legacy TwinGraph relationships for cache {cache_name}"
+legacy_twingraph_relations_found: "Found {count} relationships in legacy TwinGraph"
+
+# File specific
+file_downloading: "Downloading file dataset (organization: {organization_id}, workspace: {workspace_id}, file: {file_name})"
+listing_workspace_files: "Listing workspace files"
+workspace_files_found: "Found {count} workspace files"
+no_files_found: "No files found matching: {file_name}"
+downloading_file: "Downloading file: {file_name}"
+file_downloaded: "Downloaded file: {file_name} to {path}"
+
+# File processing
+processing_excel: "Processing Excel file: {file_name}"
+sheet_processed: "Processed sheet {sheet_name} with {rows} rows"
+processing_csv: "Processing CSV file: {file_name}"
+csv_processed: "Processed CSV file {file_name} with {rows} rows"
+processing_json: "Processing JSON file: {file_name}"
+json_processed: "Processed JSON file {file_name} with {items} items"
+processing_text: "Processing text file: {file_name}"
+text_processed: "Processed text file {file_name} with {lines} lines"
cosmotech/translation/coal/en-US/coal/services/postgresql.yml
@@ -0,0 +1,28 @@
+# PostgreSQL-specific messages
+getting_schema: "Getting schema for table {postgres_schema}.{target_table_name}"
+table_not_found: "Table {postgres_schema}.{target_table_name} not found"
+schema_adaptation_start: "Starting schema adaptation for table with {rows} rows"
+original_schema: "Original schema: {schema}"
+target_schema: "Target schema: {schema}"
+casting_column: "Attempting to cast column '{field_name}' from {original_type} to {target_type}"
+cast_failed: "Failed to cast column '{field_name}' from {original_type} to {target_type}. Filling with nulls. Error: {error}"
+adding_missing_column: "Adding missing column '{field_name}' with null values"
+dropping_columns: "Dropping extra columns not in target schema: {columns}"
+adaptation_summary: "Schema adaptation summary:"
+added_columns: "- Added columns (filled with nulls): {columns}"
+dropped_columns: "- Dropped columns: {columns}"
+successful_conversions: "- Successful type conversions: {conversions}"
+failed_conversions: "- Failed conversions (filled with nulls): {conversions}"
+final_schema: "Final adapted table schema: {schema}"
+preparing_send: "Preparing to send data to PostgreSQL table '{postgres_schema}.{target_table_name}'"
+input_rows: "Input table has {rows} rows"
+found_existing_table: "Found existing table with schema: {schema}"
+adapting_data: "Adapting incoming data to match existing schema"
+replace_mode: "Replace mode enabled - skipping schema adaptation"
+no_existing_table: "No existing table found - will create new table"
+connecting: "Connecting to PostgreSQL database"
+ingesting_data: "Ingesting data with mode: {mode}"
+ingestion_success: "Successfully ingested {rows} rows"
+creating_table: "Creating table {schema_table}"
+metadata: "Metadata: {metadata}"
+metadata_updated: "Metadata updated"
cosmotech/translation/coal/en-US/coal/services/s3.yml
@@ -0,0 +1,9 @@
+# S3-specific messages
+bucket_upload: "Uploading to S3 bucket: {bucket_name}"
+bucket_download: "Downloading from S3 bucket: {bucket_name}"
+bucket_delete: "Deleting from S3 bucket: {bucket_name}"
+file_uploaded: "File uploaded to S3: {file_path} as {uploaded_name}"
+file_downloaded: "File downloaded from S3: {file_path}"
+file_deleted: "File deleted from S3: {file_path}"
+bucket_listing: "Listing contents of bucket: {bucket_name}"
+bucket_items_found: "Found {count} items in bucket"
cosmotech/translation/csm_data/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (C) - 2023 - 2025 - Cosmo Tech
+# This document and all information contained herein is the exclusive property -
+# including all intellectual property rights pertaining thereto - of Cosmo Tech.
+# Any use, reproduction, translation, broadcasting, transmission, distribution,
+# etc., to any person is prohibited unless it has been previously and
+# specifically authorized by written means by Cosmo Tech.