cognite-toolkit 0.6.97__py3-none-any.whl → 0.7.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognite_toolkit/_cdf.py +21 -23
- cognite_toolkit/_cdf_tk/apps/__init__.py +4 -0
- cognite_toolkit/_cdf_tk/apps/_core_app.py +19 -5
- cognite_toolkit/_cdf_tk/apps/_data_app.py +1 -1
- cognite_toolkit/_cdf_tk/apps/_dev_app.py +86 -0
- cognite_toolkit/_cdf_tk/apps/_download_app.py +693 -25
- cognite_toolkit/_cdf_tk/apps/_dump_app.py +44 -102
- cognite_toolkit/_cdf_tk/apps/_import_app.py +41 -0
- cognite_toolkit/_cdf_tk/apps/_landing_app.py +18 -4
- cognite_toolkit/_cdf_tk/apps/_migrate_app.py +424 -9
- cognite_toolkit/_cdf_tk/apps/_modules_app.py +0 -3
- cognite_toolkit/_cdf_tk/apps/_purge.py +15 -43
- cognite_toolkit/_cdf_tk/apps/_run.py +11 -0
- cognite_toolkit/_cdf_tk/apps/_upload_app.py +45 -6
- cognite_toolkit/_cdf_tk/builders/__init__.py +2 -2
- cognite_toolkit/_cdf_tk/builders/_base.py +28 -42
- cognite_toolkit/_cdf_tk/builders/_raw.py +1 -1
- cognite_toolkit/_cdf_tk/cdf_toml.py +20 -1
- cognite_toolkit/_cdf_tk/client/_toolkit_client.py +32 -12
- cognite_toolkit/_cdf_tk/client/api/infield.py +114 -17
- cognite_toolkit/_cdf_tk/client/api/{canvas.py → legacy/canvas.py} +15 -7
- cognite_toolkit/_cdf_tk/client/api/{charts.py → legacy/charts.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/{extended_data_modeling.py → legacy/extended_data_modeling.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/{extended_files.py → legacy/extended_files.py} +2 -2
- cognite_toolkit/_cdf_tk/client/api/{extended_functions.py → legacy/extended_functions.py} +15 -18
- cognite_toolkit/_cdf_tk/client/api/{extended_raw.py → legacy/extended_raw.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/{extended_timeseries.py → legacy/extended_timeseries.py} +5 -2
- cognite_toolkit/_cdf_tk/client/api/{location_filters.py → legacy/location_filters.py} +1 -1
- cognite_toolkit/_cdf_tk/client/api/legacy/robotics/__init__.py +8 -0
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/capabilities.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/data_postprocessing.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/frames.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/locations.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/maps.py +1 -1
- cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/robots.py +2 -2
- cognite_toolkit/_cdf_tk/client/api/{search_config.py → legacy/search_config.py} +5 -1
- cognite_toolkit/_cdf_tk/client/api/migration.py +177 -4
- cognite_toolkit/_cdf_tk/client/api/project.py +9 -8
- cognite_toolkit/_cdf_tk/client/api/search.py +2 -2
- cognite_toolkit/_cdf_tk/client/api/streams.py +88 -0
- cognite_toolkit/_cdf_tk/client/api/three_d.py +384 -0
- cognite_toolkit/_cdf_tk/client/data_classes/api_classes.py +13 -0
- cognite_toolkit/_cdf_tk/client/data_classes/base.py +37 -33
- cognite_toolkit/_cdf_tk/client/data_classes/charts_data.py +95 -213
- cognite_toolkit/_cdf_tk/client/data_classes/infield.py +32 -18
- cognite_toolkit/_cdf_tk/client/data_classes/instance_api.py +18 -13
- cognite_toolkit/_cdf_tk/client/data_classes/legacy/__init__.py +0 -0
- cognite_toolkit/_cdf_tk/client/data_classes/{canvas.py → legacy/canvas.py} +47 -4
- cognite_toolkit/_cdf_tk/client/data_classes/{charts.py → legacy/charts.py} +3 -3
- cognite_toolkit/_cdf_tk/client/data_classes/{migration.py → legacy/migration.py} +10 -2
- cognite_toolkit/_cdf_tk/client/data_classes/streams.py +90 -0
- cognite_toolkit/_cdf_tk/client/data_classes/three_d.py +112 -0
- cognite_toolkit/_cdf_tk/client/testing.py +42 -18
- cognite_toolkit/_cdf_tk/commands/__init__.py +7 -6
- cognite_toolkit/_cdf_tk/commands/_changes.py +3 -42
- cognite_toolkit/_cdf_tk/commands/_download.py +21 -11
- cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py +0 -2
- cognite_toolkit/_cdf_tk/commands/_migrate/command.py +22 -20
- cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py +140 -92
- cognite_toolkit/_cdf_tk/commands/_migrate/creators.py +1 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +108 -26
- cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +448 -45
- cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py +1 -0
- cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py +6 -6
- cognite_toolkit/_cdf_tk/commands/_migrate/issues.py +52 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +377 -11
- cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py +9 -4
- cognite_toolkit/_cdf_tk/commands/_profile.py +1 -1
- cognite_toolkit/_cdf_tk/commands/_purge.py +36 -39
- cognite_toolkit/_cdf_tk/commands/_questionary_style.py +16 -0
- cognite_toolkit/_cdf_tk/commands/_upload.py +109 -86
- cognite_toolkit/_cdf_tk/commands/about.py +221 -0
- cognite_toolkit/_cdf_tk/commands/auth.py +19 -12
- cognite_toolkit/_cdf_tk/commands/build_cmd.py +16 -62
- cognite_toolkit/_cdf_tk/commands/build_v2/__init__.py +0 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_cmd.py +241 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_input.py +85 -0
- cognite_toolkit/_cdf_tk/commands/build_v2/build_issues.py +27 -0
- cognite_toolkit/_cdf_tk/commands/clean.py +63 -16
- cognite_toolkit/_cdf_tk/commands/deploy.py +20 -17
- cognite_toolkit/_cdf_tk/commands/dump_resource.py +10 -8
- cognite_toolkit/_cdf_tk/commands/init.py +225 -3
- cognite_toolkit/_cdf_tk/commands/modules.py +20 -44
- cognite_toolkit/_cdf_tk/commands/pull.py +6 -19
- cognite_toolkit/_cdf_tk/commands/resources.py +179 -0
- cognite_toolkit/_cdf_tk/commands/run.py +1 -1
- cognite_toolkit/_cdf_tk/constants.py +20 -1
- cognite_toolkit/_cdf_tk/cruds/__init__.py +19 -5
- cognite_toolkit/_cdf_tk/cruds/_base_cruds.py +14 -70
- cognite_toolkit/_cdf_tk/cruds/_data_cruds.py +10 -19
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/__init__.py +4 -1
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/agent.py +11 -9
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/auth.py +5 -15
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/classic.py +45 -44
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/configuration.py +5 -12
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/data_organization.py +4 -13
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py +206 -67
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/extraction_pipeline.py +6 -18
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py +126 -35
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/file.py +7 -28
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/function.py +23 -30
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/hosted_extractors.py +12 -30
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/industrial_tool.py +4 -8
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/location.py +4 -16
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/migration.py +5 -13
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/raw.py +5 -11
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/relationship.py +3 -8
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/robotics.py +16 -45
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/streams.py +94 -0
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/three_d_model.py +3 -7
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/timeseries.py +5 -15
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/transformation.py +75 -32
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/workflow.py +20 -40
- cognite_toolkit/_cdf_tk/cruds/_worker.py +24 -36
- cognite_toolkit/_cdf_tk/data_classes/_module_toml.py +1 -0
- cognite_toolkit/_cdf_tk/feature_flags.py +16 -36
- cognite_toolkit/_cdf_tk/plugins.py +2 -1
- cognite_toolkit/_cdf_tk/resource_classes/__init__.py +4 -0
- cognite_toolkit/_cdf_tk/resource_classes/capabilities.py +12 -0
- cognite_toolkit/_cdf_tk/resource_classes/functions.py +3 -1
- cognite_toolkit/_cdf_tk/resource_classes/infield_cdm_location_config.py +109 -0
- cognite_toolkit/_cdf_tk/resource_classes/migration.py +8 -17
- cognite_toolkit/_cdf_tk/resource_classes/search_config.py +1 -1
- cognite_toolkit/_cdf_tk/resource_classes/streams.py +29 -0
- cognite_toolkit/_cdf_tk/resource_classes/workflow_version.py +164 -5
- cognite_toolkit/_cdf_tk/storageio/__init__.py +9 -21
- cognite_toolkit/_cdf_tk/storageio/_annotations.py +19 -16
- cognite_toolkit/_cdf_tk/storageio/_applications.py +340 -28
- cognite_toolkit/_cdf_tk/storageio/_asset_centric.py +67 -104
- cognite_toolkit/_cdf_tk/storageio/_base.py +61 -29
- cognite_toolkit/_cdf_tk/storageio/_datapoints.py +276 -20
- cognite_toolkit/_cdf_tk/storageio/_file_content.py +435 -0
- cognite_toolkit/_cdf_tk/storageio/_instances.py +35 -3
- cognite_toolkit/_cdf_tk/storageio/_raw.py +26 -0
- cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py +71 -4
- cognite_toolkit/_cdf_tk/storageio/selectors/_base.py +14 -2
- cognite_toolkit/_cdf_tk/storageio/selectors/_canvas.py +14 -0
- cognite_toolkit/_cdf_tk/storageio/selectors/_charts.py +14 -0
- cognite_toolkit/_cdf_tk/storageio/selectors/_datapoints.py +23 -3
- cognite_toolkit/_cdf_tk/storageio/selectors/_file_content.py +164 -0
- cognite_toolkit/_cdf_tk/storageio/selectors/_three_d.py +34 -0
- cognite_toolkit/_cdf_tk/tk_warnings/other.py +4 -0
- cognite_toolkit/_cdf_tk/tracker.py +2 -2
- cognite_toolkit/_cdf_tk/utils/cdf.py +1 -1
- cognite_toolkit/_cdf_tk/utils/dtype_conversion.py +9 -3
- cognite_toolkit/_cdf_tk/utils/fileio/__init__.py +2 -0
- cognite_toolkit/_cdf_tk/utils/fileio/_base.py +5 -1
- cognite_toolkit/_cdf_tk/utils/fileio/_readers.py +112 -20
- cognite_toolkit/_cdf_tk/utils/fileio/_writers.py +15 -15
- cognite_toolkit/_cdf_tk/utils/http_client/__init__.py +28 -0
- cognite_toolkit/_cdf_tk/utils/http_client/_client.py +285 -18
- cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py +56 -4
- cognite_toolkit/_cdf_tk/utils/http_client/_data_classes2.py +247 -0
- cognite_toolkit/_cdf_tk/utils/http_client/_tracker.py +5 -2
- cognite_toolkit/_cdf_tk/utils/interactive_select.py +60 -18
- cognite_toolkit/_cdf_tk/utils/sql_parser.py +2 -3
- cognite_toolkit/_cdf_tk/utils/useful_types.py +6 -2
- cognite_toolkit/_cdf_tk/validation.py +83 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +5 -4
- cognite_toolkit/_version.py +1 -1
- cognite_toolkit/config.dev.yaml +13 -0
- {cognite_toolkit-0.6.97.dist-info → cognite_toolkit-0.7.39.dist-info}/METADATA +24 -24
- cognite_toolkit-0.7.39.dist-info/RECORD +322 -0
- cognite_toolkit-0.7.39.dist-info/WHEEL +4 -0
- {cognite_toolkit-0.6.97.dist-info → cognite_toolkit-0.7.39.dist-info}/entry_points.txt +1 -0
- cognite_toolkit/_cdf_tk/client/api/robotics/__init__.py +0 -3
- cognite_toolkit/_cdf_tk/commands/_migrate/canvas.py +0 -201
- cognite_toolkit/_cdf_tk/commands/dump_data.py +0 -489
- cognite_toolkit/_cdf_tk/commands/featureflag.py +0 -27
- cognite_toolkit/_cdf_tk/prototypes/import_app.py +0 -41
- cognite_toolkit/_cdf_tk/utils/table_writers.py +0 -434
- cognite_toolkit-0.6.97.dist-info/RECORD +0 -306
- cognite_toolkit-0.6.97.dist-info/WHEEL +0 -4
- cognite_toolkit-0.6.97.dist-info/licenses/LICENSE +0 -18
- /cognite_toolkit/_cdf_tk/{prototypes/commands → client/api/legacy}/__init__.py +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{dml.py → legacy/dml.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{fixed_transformations.py → legacy/fixed_transformations.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/api.py +0 -0
- /cognite_toolkit/_cdf_tk/client/api/{robotics → legacy/robotics}/utlis.py +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{apm_config_v1.py → legacy/apm_config_v1.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extendable_cognite_file.py → legacy/extendable_cognite_file.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extended_filemetadata.py → legacy/extended_filemetadata.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extended_filemetdata.py → legacy/extended_filemetdata.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{extended_timeseries.py → legacy/extended_timeseries.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{functions.py → legacy/functions.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{graphql_data_models.py → legacy/graphql_data_models.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{instances.py → legacy/instances.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{location_filters.py → legacy/location_filters.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{pending_instances_ids.py → legacy/pending_instances_ids.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{project.py → legacy/project.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{raw.py → legacy/raw.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{robotics.py → legacy/robotics.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{search_config.py → legacy/search_config.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{sequences.py → legacy/sequences.py} +0 -0
- /cognite_toolkit/_cdf_tk/client/data_classes/{streamlit_.py → legacy/streamlit_.py} +0 -0
- /cognite_toolkit/_cdf_tk/{prototypes/commands/import_.py → commands/_import_cmd.py} +0 -0

cognite_toolkit/_cdf_tk/commands/_upload.py

@@ -1,32 +1,33 @@
+from collections import Counter
 from collections.abc import Sequence
 from functools import partial
 from pathlib import Path
 
-from cognite.client.data_classes.
-
+from cognite.client.data_classes.data_modeling import (
+    ViewId,
+)
 from rich.console import Console
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.constants import
+from cognite_toolkit._cdf_tk.constants import DATA_MANIFEST_SUFFIX, DATA_RESOURCE_DIR
+from cognite_toolkit._cdf_tk.cruds import ViewCRUD
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
+from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
 from cognite_toolkit._cdf_tk.storageio import (
     T_Selector,
     UploadableStorageIO,
-    are_same_kind,
     get_upload_io,
 )
-from cognite_toolkit._cdf_tk.storageio._base import
-from cognite_toolkit._cdf_tk.storageio.selectors import Selector,
-from cognite_toolkit._cdf_tk.
-from cognite_toolkit._cdf_tk.tk_warnings
+from cognite_toolkit._cdf_tk.storageio._base import TableUploadableStorageIO, UploadItem
+from cognite_toolkit._cdf_tk.storageio.selectors import Selector, load_selector
+from cognite_toolkit._cdf_tk.storageio.selectors._instances import InstanceSpaceSelector
+from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning, MediumSeverityWarning, ToolkitWarning
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
-from cognite_toolkit._cdf_tk.utils.
-from cognite_toolkit._cdf_tk.utils.fileio import TABLE_READ_CLS_BY_FORMAT, FileReader
+from cognite_toolkit._cdf_tk.utils.fileio import MultiFileReader
 from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, ItemMessage, SuccessResponseItems
 from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
 from cognite_toolkit._cdf_tk.utils.progress_tracker import ProgressTracker
 from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
-from cognite_toolkit._cdf_tk.validation import humanize_validation_error
 
 from ._base import ToolkitCommand
 from .deploy import DeployCommand

@@ -76,53 +77,76 @@ class UploadCommand(ToolkitCommand):
        │ │ └── table2.Table.yaml
        │ └── ...
        ├── datafile1.kind.ndjson # Data file of a specific kind
-        ├── datafile1.
+        ├── datafile1.Manifest.yaml # Manifest for datafile1
        ├── datafile2.kind2.ndjson # Another data file of the same or different kind
-        ├── datafile2.
+        ├── datafile2.Manifest.yaml # Manifest file for datafile2
        └── ...
        """
-        console =
-        data_files_by_selector = self._find_data_files(input_dir
+        console = client.console
+        data_files_by_selector = self._find_data_files(input_dir)
 
        self._deploy_resource_folder(input_dir / DATA_RESOURCE_DIR, deploy_resources, client, console, dry_run, verbose)
 
+        data_files_by_selector = self._topological_sort_if_instance_selector(data_files_by_selector, client)
+
        self._upload_data(data_files_by_selector, client, dry_run, input_dir, console, verbose)
 
+    def _topological_sort_if_instance_selector(
+        self, data_files_by_selector: dict[Selector, list[Path]], client: ToolkitClient
+    ) -> dict[Selector, list[Path]]:
+        """Topologically sorts InstanceSpaceSelectors (if they are present) to determine the order of upload based on container dependencies from the views.
+
+        Args:
+            data_files_by_selector: A dictionary mapping selectors to their data files.
+            client: The cognite client to use for the upload.
+
+        Returns:
+            A dictionary mapping selectors to their data files with necessary preprocessing.
+        """
+        counts = Counter(type(selector) for selector in data_files_by_selector.keys())
+        if counts[InstanceSpaceSelector] <= 1:
+            return data_files_by_selector
+
+        selector_by_view_id: dict[ViewId, Selector] = {}
+        for selector in data_files_by_selector:
+            if isinstance(selector, InstanceSpaceSelector) and selector.view is not None:
+                selector_by_view_id[selector.view.as_id()] = selector
+
+        view_dependencies = ViewCRUD.create_loader(client).topological_sort_container_constraints(
+            list(selector_by_view_id.keys())
+        )
+        prepared_selectors: dict[Selector, list[Path]] = {}
+
+        # Reorder selectors according to the dependency-sorted view list
+        for view_id in view_dependencies:
+            selector = selector_by_view_id[view_id]
+            prepared_selectors[selector] = data_files_by_selector[selector]
+
+        # Preserve selectors that aren't affected by view dependencies
+        # (e.g., raw tables, time series, non-view instance data)
+        for selector in data_files_by_selector.keys():
+            if selector not in prepared_selectors:
+                prepared_selectors[selector] = data_files_by_selector[selector]
+
+        return prepared_selectors
+
     def _find_data_files(
         self,
         input_dir: Path,
-        kind: str | None = None,
     ) -> dict[Selector, list[Path]]:
        """Finds data files and their corresponding metadata files in the input directory."""
-        manifest_file_endswith = f".{DATA_MANIFEST_STEM}.yaml"
        data_files_by_metadata: dict[Selector, list[Path]] = {}
-        for
-
-
-
-
-
-
-            if kind is not None and data_files:
-                data_files = [data_file for data_file in data_files if are_same_kind(kind, data_file)]
-            if not data_files:
-                continue
+        for manifest_file in input_dir.glob(f"*{DATA_MANIFEST_SUFFIX}"):
+            selector_or_warning = load_selector(manifest_file)
+            if isinstance(selector_or_warning, ToolkitWarning):
+                self.warn(selector_or_warning)
+                continue
+            selector: Selector = selector_or_warning
+            data_files = selector.find_data_files(input_dir, manifest_file)
            if not data_files:
                self.warn(
                    MediumSeverityWarning(
-                        f"Metadata file {
-                    )
-                )
-                continue
-
-            selector_dict = read_yaml_file(metadata_file, expected_output="dict")
-            try:
-                selector = SelectorAdapter.validate_python(selector_dict)
-            except ValidationError as e:
-                errors = humanize_validation_error(e)
-                self.warn(
-                    ResourceFormatWarning(
-                        metadata_file, tuple(errors), text="Invalid selector in metadata file, skipping."
+                        f"Metadata file {manifest_file.as_posix()!r} has no corresponding data files, skipping.",
                    )
                )
                continue

@@ -178,49 +202,48 @@ class UploadCommand(ToolkitCommand):
            io = self._create_selected_io(selector, datafiles[0], client)
            if io is None:
                continue
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                tracker=tracker,
-                console=console,
-            ),
-            iteration_count=None,
-            max_queue_size=self._MAX_QUEUE_SIZE,
-            download_description=f"Reading {file_count:,}/{total_file_count + 1:,}: {file_display.as_posix()!s}",
-            process_description="Processing",
-            write_description=f"{action} {selector.display_name!r}",
+            reader = MultiFileReader(datafiles)
+            if reader.is_table and not isinstance(io, TableUploadableStorageIO):
+                raise ToolkitValueError(f"{selector.display_name} does not support {reader.format!r} files.")
+
+            chunk_count = io.count_chunks(reader)
+            iteration_count = chunk_count // io.CHUNK_SIZE + (1 if chunk_count % io.CHUNK_SIZE > 0 else 0)
+
+            tracker = ProgressTracker[str]([self._UPLOAD])
+            executor = ProducerWorkerExecutor[list[tuple[str, dict[str, JsonVal]]], Sequence[UploadItem]](
+                download_iterable=io.read_chunks(reader, selector),
+                process=partial(io.rows_to_data, selector=selector)
+                if reader.is_table and isinstance(io, TableUploadableStorageIO)
+                else io.json_chunk_to_data,
+                write=partial(
+                    self._upload_items,
+                    upload_client=upload_client,
+                    io=io,
+                    dry_run=dry_run,
+                    selector=selector,
+                    tracker=tracker,
                    console=console,
-            )
-
-
-
-
-
-
-
-
-
-
-
-
+                ),
+                iteration_count=iteration_count,
+                max_queue_size=self._MAX_QUEUE_SIZE,
+                download_description=f"Reading {selector.display_name!r} files",
+                process_description="Processing",
+                write_description=f"{action} {selector.display_name!r}",
+                console=console,
+            )
+            executor.run()
+            file_count += len(datafiles)
+            executor.raise_on_error()
+            final_action = "Uploaded" if not dry_run else "Would upload"
+            suffix = " successfully" if not dry_run else ""
+            results = tracker.aggregate()
+            success = results.get((self._UPLOAD, "success"), 0)
+            failed = results.get((self._UPLOAD, "failed"), 0)
+            if failed > 0:
+                suffix += f", {failed:,} failed"
+            console.print(
+                f"{final_action} {success:,} {selector.display_name} from {len(datafiles)} files{suffix}."
+            )
 
     @staticmethod
     def _path_as_display_name(input_path: Path, cwd: Path = Path.cwd()) -> Path:

@@ -233,7 +256,7 @@ class UploadCommand(ToolkitCommand):
        self, selector: Selector, data_file: Path, client: ToolkitClient
    ) -> UploadableStorageIO | None:
        try:
-            io_cls = get_upload_io(
+            io_cls = get_upload_io(selector)
        except ValueError as e:
            self.warn(HighSeverityWarning(f"Could not find StorageIO for selector {selector}: {e}"))
            return None

@@ -242,9 +265,9 @@ class UploadCommand(ToolkitCommand):
    @classmethod
    def _upload_items(
        cls,
-        data_chunk: Sequence[UploadItem],
+        data_chunk: Sequence[UploadItem[T_ResourceRequest]],
        upload_client: HTTPClient,
-        io: UploadableStorageIO[T_Selector,
+        io: UploadableStorageIO[T_Selector, T_ResourceResponse, T_ResourceRequest],
        selector: T_Selector,
        dry_run: bool,
        tracker: ProgressTracker[str],
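
Note: the new _topological_sort_if_instance_selector step only reorders the selector-to-files mapping; it does not drop or modify any entries. A minimal standalone sketch of that reordering, with hypothetical selector names and no Toolkit imports:

# Standalone sketch (not Toolkit code) of the reordering performed by
# _topological_sort_if_instance_selector above: selectors whose views appear in
# the dependency-sorted list come first; everything else keeps its original
# position after them. Selector names and files here are hypothetical.
files_by_selector = {
    "instances:Pump": ["pumps.ndjson"],
    "instances:Site": ["sites.ndjson"],
    "raw:my_db.my_table": ["rows.ndjson"],
}
# Pretend the container behind "Site" must be populated before "Pump".
dependency_order = ["instances:Site", "instances:Pump"]

ordered = {key: files_by_selector[key] for key in dependency_order}
for key, files in files_by_selector.items():
    ordered.setdefault(key, files)  # unaffected selectors (e.g. RAW tables) are appended unchanged

print(list(ordered))  # ['instances:Site', 'instances:Pump', 'raw:my_db.my_table']

Selectors not backed by a view, such as RAW tables or time series, keep their original relative order after the dependency-sorted ones.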
cognite_toolkit/_cdf_tk/commands/about.py (new file)

@@ -0,0 +1,221 @@
+import platform
+import sys
+from pathlib import Path
+
+from rich import print
+from rich.table import Table
+
+from cognite_toolkit._cdf_tk.cdf_toml import CDFToml, _read_toml
+from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
+from cognite_toolkit._cdf_tk.constants import clean_name
+from cognite_toolkit._cdf_tk.feature_flags import Flags
+from cognite_toolkit._cdf_tk.plugins import Plugins
+from cognite_toolkit._cdf_tk.tk_warnings import LowSeverityWarning, MediumSeverityWarning
+from cognite_toolkit._version import __version__
+
+
+class AboutCommand(ToolkitCommand):
+    def execute(self, cwd: Path) -> None:
+        # Version information
+        print(f"\n[bold cyan]Cognite Toolkit[/bold cyan] version: [yellow]{__version__}[/yellow]")
+        print(f"Python version: {sys.version.split()[0]}")
+        print(f"Platform: {platform.system()} {platform.release()}")
+
+        # Check for cdf.toml in the current directory
+        cdf_toml_path = cwd / CDFToml.file_name
+
+        if cdf_toml_path.exists():
+            print(f"\n[bold green]Configuration file found:[/bold green] {cdf_toml_path}")
+
+            cdf_toml = CDFToml.load(cwd)
+
+            # We need to read the raw TOML to get original key names for plugins and alpha flags
+            raw_toml = _read_toml(cdf_toml_path)
+
+            self._check_unrecognized_sections(raw_toml)
+            self._display_plugins(cdf_toml, raw_toml)
+            self._display_alpha_flags(cdf_toml, raw_toml)
+            self._display_additional_config(cdf_toml)
+
+        else:
+            # Search for cdf.toml in subdirectories
+            found_files = self._search_cdf_toml(cwd)
+
+            if found_files:
+                print(f"\n[bold yellow]No cdf.toml found in current directory:[/bold yellow] {cwd}")
+                print("\n[bold]Found cdf.toml files in subdirectories:[/bold]")
+                for file in found_files:
+                    rel_path = file.relative_to(cwd)
+                    print(f" • {rel_path}")
+                print(f"\n[bold cyan]Hint:[/bold cyan] Move one of these files to {cwd} or navigate to its directory.")
+            else:
+                print("\n[bold yellow]No cdf.toml found[/bold yellow] in current directory or subdirectories.")
+                print(f"Current directory: {cwd}")
+                print("\n[bold cyan]Hint:[/bold cyan] Run [yellow]cdf init[/yellow] to create a new project.")
+
+    def _check_unrecognized_sections(self, raw_toml: dict) -> None:
+        """Check for unrecognized tables in cdf.toml and warn about them."""
+        # Valid top-level tables in cdf.toml
+        valid_tables = {"cdf", "modules", "alpha_flags", "feature_flags", "plugins", "library"}
+
+        # Filter out empty keys, whitespace-only keys, and check for unrecognized tables
+        unrecognized_tables = [key for key in raw_toml.keys() if key and key.strip() and key not in valid_tables]
+
+        if unrecognized_tables:
+            print()
+
+            for table in unrecognized_tables:
+                # Try to find a matching valid table by stripping non-alphabetical characters
+                suggestion = self._find_similar_table(table, valid_tables)
+
+                message = f"Table '{table}' in cdf.toml is not recognized and will have no effect."
+                if suggestion:
+                    message += f" Did you mean '{suggestion}'?"
+
+                self.warn(MediumSeverityWarning(message))
+
+    @staticmethod
+    def _find_similar_table(unrecognized: str, valid_tables: set[str]) -> str | None:
+        """Find a similar valid table by comparing alphabetical characters only.
+
+        Returns None if the unrecognized table is already valid or if no similar match is found.
+        """
+        # If it's already a valid table, return None (no suggestion needed)
+        if unrecognized in valid_tables:
+            return None
+
+        # Keep only alphabetical characters and lowercase
+        normalized_unrecognized = "".join(c for c in unrecognized if c.isalpha()).lower()
+
+        # First, try exact match (after normalization)
+        for valid in valid_tables:
+            normalized_valid = "".join(c for c in valid if c.isalpha()).lower()
+            if normalized_unrecognized == normalized_valid:
+                return valid
+
+        # If no match, check for singular/plural variations (missing 's')
+        for valid in valid_tables:
+            normalized_valid = "".join(c for c in valid if c.isalpha()).lower()
+
+            # Check if adding 's' to unrecognized matches valid (e.g., "plugin" -> "plugins")
+            if normalized_unrecognized + "s" == normalized_valid:
+                return valid
+
+        return None
+
+    def _display_plugins(self, cdf_toml: CDFToml, raw_toml: dict) -> None:
+        """Display all available plugins and their status."""
+        table = Table(title="Plugins", show_header=True)
+        table.add_column("Plugin", justify="left", style="cyan")
+        table.add_column("Status", justify="center")
+        table.add_column("Description", justify="left")
+
+        # Track which plugins we've seen
+        seen_plugins = set()
+
+        # Show all plugins from the enum
+        for plugin in Plugins:
+            plugin_name = plugin.value.name
+            cleaned_key = clean_name(plugin_name)
+            seen_plugins.add(cleaned_key)
+
+            is_enabled = cdf_toml.plugins.get(cleaned_key, False)
+            if is_enabled:
+                status = "[green]✓ enabled[/green]"
+            else:
+                status = "[dim]○ disabled[/dim]"
+
+            table.add_row(plugin_name, status, plugin.value.description)
+
+        print()
+        print(table)
+
+        # Show any unrecognized plugins from cdf.toml using original key names
+        raw_plugins = raw_toml.get("plugins", {})
+        unrecognized = []
+        for original_key, value in raw_plugins.items():
+            cleaned_key = clean_name(original_key)
+            if cleaned_key not in seen_plugins:
+                unrecognized.append((original_key, value))
+
+        for original_key, is_enabled in unrecognized:
+            status = "enabled" if is_enabled else "disabled"
+            self.warn(
+                LowSeverityWarning(f"Plugin '{original_key}' in cdf.toml is not recognized and will have no effect.")
+            )
+
+    def _display_alpha_flags(self, cdf_toml: CDFToml, raw_toml: dict) -> None:
+        """Display available alpha flags and their status."""
+        table = Table(title="Alpha Flags", show_header=True)
+        table.add_column("Flag", justify="left", style="yellow")
+        table.add_column("Status", justify="center")
+        table.add_column("Description", justify="left")
+
+        # Track which flags we've seen
+        seen_flags = set()
+
+        # Show flags from the enum that are either enabled or visible
+        for flag in Flags:
+            cleaned_key = clean_name(flag.name)
+            seen_flags.add(cleaned_key)
+
+            is_enabled = cdf_toml.alpha_flags.get(cleaned_key, False)
+
+            # Only show if enabled or visible
+            if is_enabled or flag.value.visible:
+                # Convert enum name to kebab-case for display
+                display_name = flag.name.lower().replace("_", "-")
+
+                if is_enabled:
+                    status = "[green]✓ enabled[/green]"
+                else:
+                    status = "[dim]○ disabled[/dim]"
+
+                table.add_row(display_name, status, flag.value.description)
+
+        print()
+        print(table)
+
+        # Show any unrecognized flags from cdf.toml using original key names
+        raw_flags = raw_toml.get("alpha_flags", {})
+        unrecognized = []
+        for original_key, value in raw_flags.items():
+            cleaned_key = clean_name(original_key)
+            if cleaned_key not in seen_flags:
+                unrecognized.append((original_key, value))
+
+        for original_key, is_enabled in unrecognized:
+            status = "enabled" if is_enabled else "disabled"
+            self.warn(
+                LowSeverityWarning(
+                    f"Alpha flag '{original_key}' in cdf.toml is not recognized and will have no effect."
+                )
+            )
+
+    def _display_additional_config(self, cdf_toml: CDFToml) -> None:
+        """Display additional configuration information."""
+        print("\n[bold]Additional Configuration:[/bold]")
+
+        print(f" Default environment: [cyan]{cdf_toml.cdf.default_env}[/cyan]")
+
+        if cdf_toml.cdf.has_user_set_default_org:
+            print(f" Default organization dir: [cyan]{cdf_toml.cdf.default_organization_dir}[/cyan]")
+
+        if cdf_toml.cdf.file_encoding:
+            print(f" File encoding: [cyan]{cdf_toml.cdf.file_encoding}[/cyan]")
+
+        print(f" Modules version: [cyan]{cdf_toml.modules.version}[/cyan]")
+
+        if cdf_toml.libraries:
+            print(f" Configured libraries: [cyan]{len(cdf_toml.libraries)}[/cyan]")
+            for lib_name, lib_config in cdf_toml.libraries.items():
+                print(f" • {lib_name}: [dim]{lib_config.url}[/dim]")
+
+    def _search_cdf_toml(self, cwd: Path) -> list[Path]:
+        """Search for cdf.toml files in immediate subdirectories (one level down)."""
+        try:
+            return sorted(
+                [potential_file for potential_file in cwd.glob(f"*/{CDFToml.file_name}") if potential_file.is_file()]
+            )
+        except PermissionError:
+            return []
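
Note: the _find_similar_table helper above compares only alphabetic characters and tolerates a missing plural 's'. A standalone re-implementation of just that matching (illustrative, not the Toolkit's code) behaves as follows; the real method additionally returns None when the name is already a valid table:

# Illustrative sketch of the normalization-based matching used by
# _find_similar_table: strip non-alphabetic characters, lowercase, then try an
# exact match followed by a singular-to-plural match against known table names.
VALID_TABLES = {"cdf", "modules", "alpha_flags", "feature_flags", "plugins", "library"}

def suggest(name: str, valid: set[str] = VALID_TABLES) -> str | None:
    norm = "".join(c for c in name if c.isalpha()).lower()
    by_normalized = {"".join(c for c in v if c.isalpha()).lower(): v for v in valid}
    return by_normalized.get(norm) or by_normalized.get(norm + "s")

print(suggest("alpha-flags"))  # -> 'alpha_flags'
print(suggest("plugin"))       # -> 'plugins'
print(suggest("unknown"))      # -> None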
cognite_toolkit/_cdf_tk/commands/auth.py

@@ -26,6 +26,7 @@ import questionary
 from cognite.client.data_classes.capabilities import (
     AssetsAcl,
     Capability,
+    ExtractionConfigsAcl,
     FunctionsAcl,
     GroupsAcl,
     ProjectsAcl,

@@ -46,6 +47,7 @@ from cognite_toolkit._cdf_tk.constants import (
     TOOLKIT_DEMO_GROUP_NAME,
     TOOLKIT_SERVICE_PRINCIPAL_GROUP_NAME,
 )
+from cognite_toolkit._cdf_tk.cruds import ExtractionPipelineConfigCRUD
 from cognite_toolkit._cdf_tk.exceptions import (
     AuthenticationError,
     AuthorizationError,

@@ -95,16 +97,6 @@ class AuthCommand(ToolkitCommand):
            raise AuthenticationError(f"Unable to verify the credentials.\n{e}")
 
        print("[green]The credentials are valid.[/green]")
-        if no_verify:
-            return
-        print(
-            Panel(
-                "Running verification, 'cdf auth verify'...",
-                title="",
-                expand=False,
-            )
-        )
-        self.verify(client, dry_run)
 
     def _store_dotenv(self, env_vars: EnvironmentVariables) -> None:
        new_env_file = env_vars.create_dotenv_file()

@@ -444,8 +436,23 @@ class AuthCommand(ToolkitCommand):
            crud = crud_cls.create_loader(client)
            if crud.prerequisite_warning() is not None:
                continue
-
-
+            if isinstance(crud, ExtractionPipelineConfigCRUD):
+                # The Extraction Pipeline Config CRUD requires special handling.
+                # The .get_required_capability is used in the DeployCommand as well. Since, there is no way to no
+                # the extraction pipeline ID or dataSetId from an ExtractionPipelineConfigWrite object, we do not
+                # check those there. If we returned the full capability, it would always have to be all scoped.
+                # That is too restrictive in the deploy command, so we return an empty list, essentially not checking
+                # anything there. Here, we want to add the all scoped capability, so that the Toolkit group gets the
+                # correct capability.
+                capabilities: list[Capability] = [
+                    ExtractionConfigsAcl(
+                        [ExtractionConfigsAcl.Action.Read, ExtractionConfigsAcl.Action.Write],
+                        ExtractionConfigsAcl.Scope.All(),
+                    )
+                ]
+            else:
+                capability = crud_cls.get_required_capability(None, read_only=False)
+                capabilities = capability if isinstance(capability, list) else [capability]
            for cap in capabilities:
                if project_type == "DATA_MODELING_ONLY" and isinstance(cap, AssetsAcl | RelationshipsAcl):
                    continue
cognite_toolkit/_cdf_tk/commands/build_cmd.py

@@ -14,13 +14,12 @@ from rich.progress import track
 from cognite_toolkit._cdf_tk.builders import Builder, create_builder
 from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.data_classes.raw import RawDatabase
+from cognite_toolkit._cdf_tk.client.data_classes.legacy.raw import RawDatabase
 from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
 from cognite_toolkit._cdf_tk.constants import (
     _RUNNING_IN_BROWSER,
     BUILD_FOLDER_ENCODING,
     DEFAULT_ENV,
-    DEV_ONLY_MODULES,
     HINT_LEAD_TEXT,
     ROOT_MODULES,
     TEMPLATE_VARS_FILE_SUFFIXES,

@@ -59,10 +58,7 @@ from cognite_toolkit._cdf_tk.data_classes import (
     SourceLocationLazy,
 )
 from cognite_toolkit._cdf_tk.exceptions import (
-    ToolkitDuplicatedModuleError,
-    ToolkitEnvError,
     ToolkitError,
-    ToolkitMissingModuleError,
     ToolkitYAMLFormatError,
 )
 from cognite_toolkit._cdf_tk.hints import Hint, ModuleDefinition, verify_module_directory

@@ -89,6 +85,7 @@ from cognite_toolkit._cdf_tk.utils.file import safe_rmtree
 from cognite_toolkit._cdf_tk.utils.modules import parse_user_selected_modules
 from cognite_toolkit._cdf_tk.validation import (
     validate_data_set_is_set,
+    validate_module_selection,
     validate_modules_variables,
     validate_resource_yaml_pydantic,
 )

@@ -207,7 +204,11 @@ class BuildCommand(ToolkitCommand):
 
        user_selected_modules = config.environment.get_selected_modules(packages)
        modules = ModuleDirectories.load(organization_dir, user_selected_modules)
-
+        module_warnings = validate_module_selection(modules, config, packages, user_selected_modules, organization_dir)
+        if module_warnings:
+            self.warning_list.extend(module_warnings)
+            if self.print_warning:
+                print(str(module_warnings))
 
        if verbose:
            self.console("Selected packages:")

@@ -358,6 +359,15 @@ class BuildCommand(ToolkitCommand):
        for resource_name, resource_files in module.source_paths_by_resource_folder.items():
            source_files = self._replace_variables(resource_files, module_variables, resource_name, module.dir, verbose)
 
+            if resource_name == "data_models":
+                resource_name = "data_modeling"
+                self.warn(
+                    MediumSeverityWarning(
+                        "The resource folder 'data_models' is deprecated and will be removed in v1.0. "
+                        "Please rename the folder to 'data_modeling'."
+                    )
+                )
+
            builder = self._get_builder(build_dir, resource_name)
 
            built_resources = BuiltResourceList[Hashable]()

@@ -413,62 +423,6 @@ class BuildCommand(ToolkitCommand):
        builder = self._builder_by_resource_folder[resource_name]
        return builder
 
-    def _validate_modules(
-        self,
-        modules: ModuleDirectories,
-        config: BuildConfigYAML,
-        packages: dict[str, list[str]],
-        selected_modules: set[str | Path],
-        organization_dir: Path,
-    ) -> None:
-        # Validations: Ambiguous selection.
-        selected_names = {s for s in config.environment.selected if isinstance(s, str)}
-        if duplicate_modules := {
-            module_name: paths
-            for module_name, paths in modules.as_path_by_name().items()
-            if len(paths) > 1 and module_name in selected_names
-        }:
-            # If the user has selected a module by name, and there are multiple modules with that name, raise an error.
-            # Note, if the user uses a path to select a module, this error will not be raised.
-            raise ToolkitDuplicatedModuleError(
-                f"Ambiguous module selected in config.{config.environment.name}.yaml:", duplicate_modules
-            )
-        # Package Referenced Modules Exists
-        for package, package_modules in packages.items():
-            if package not in selected_names:
-                # We do not check packages that are not selected.
-                # Typically, the user will delete the modules that are irrelevant for them;
-                # thus we only check the selected packages.
-                continue
-            if missing_packages := set(package_modules) - modules.available_names:
-                ToolkitMissingModuleError(
-                    f"Package {package} defined in {CDFToml.file_name!s} is referring "
-                    f"the following missing modules {missing_packages}."
-                )
-
-        # Selected modules does not exists
-        if missing_modules := set(selected_modules) - modules.available:
-            hint = ModuleDefinition.long(missing_modules, organization_dir)
-            raise ToolkitMissingModuleError(
-                f"The following selected modules are missing, please check path: {missing_modules}.\n{hint}"
-            )
-
-        # Nothing is Selected
-        if not modules.selected:
-            raise ToolkitEnvError(
-                f"No selected modules specified in {config.filepath!s}, have you configured "
-                f"the environment ({config.environment.name})?"
-            )
-
-        dev_modules = modules.available_names & DEV_ONLY_MODULES
-        if dev_modules and config.environment.validation_type != "dev":
-            self.warn(
-                MediumSeverityWarning(
-                    "The following modules should [bold]only[/bold] be used a in CDF Projects designated as dev (development): "
-                    f"{humanize_collection(dev_modules)!r}",
-                )
-            )
-
     def _replace_variables(
         self,
         resource_files: Sequence[Path],
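
Note: the removed _validate_modules body appears to have moved into cognite_toolkit/_cdf_tk/validation.py as validate_module_selection (called in the @@ -207,7 +204,11 @@ hunk above); the new implementation itself is not shown in this diff. The core of the old check was duplicate-name detection, illustrated standalone below with made-up module paths:

# Standalone illustration (not Toolkit code) of the ambiguous-selection check
# from the removed _validate_modules: a module selected by name is ambiguous
# when that name resolves to more than one path. The paths are made up.
from pathlib import Path

path_by_name = {
    "cdf_ingestion": [Path("modules/team_a/cdf_ingestion"), Path("modules/team_b/cdf_ingestion")],
    "cdf_common": [Path("modules/team_a/cdf_common")],
}
selected_names = {"cdf_ingestion"}

duplicate_modules = {
    name: paths
    for name, paths in path_by_name.items()
    if len(paths) > 1 and name in selected_names
}
print(duplicate_modules)  # only 'cdf_ingestion' is flagged; selecting by path would not trigger this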