cognite-toolkit 0.7.10__py3-none-any.whl → 0.7.12__py3-none-any.whl
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- cognite_toolkit/_cdf.py +8 -0
- cognite_toolkit/_cdf_tk/commands/__init__.py +2 -2
- cognite_toolkit/_cdf_tk/commands/_purge.py +27 -28
- cognite_toolkit/_cdf_tk/commands/_upload.py +5 -1
- cognite_toolkit/_cdf_tk/commands/about.py +221 -0
- cognite_toolkit/_cdf_tk/commands/clean.py +11 -13
- cognite_toolkit/_cdf_tk/commands/deploy.py +9 -17
- cognite_toolkit/_cdf_tk/commands/dump_resource.py +6 -4
- cognite_toolkit/_cdf_tk/commands/pull.py +6 -19
- cognite_toolkit/_cdf_tk/cruds/_base_cruds.py +7 -25
- cognite_toolkit/_cdf_tk/cruds/_data_cruds.py +3 -6
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/agent.py +4 -6
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/auth.py +4 -12
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/classic.py +19 -36
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/configuration.py +4 -10
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/data_organization.py +4 -12
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py +16 -41
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/extraction_pipeline.py +5 -15
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py +8 -21
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/file.py +6 -22
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/function.py +5 -15
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/hosted_extractors.py +12 -26
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/industrial_tool.py +3 -6
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/location.py +3 -14
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/migration.py +4 -8
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/raw.py +4 -8
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/relationship.py +3 -6
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/robotics.py +15 -34
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/streams.py +2 -5
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/three_d_model.py +3 -6
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/timeseries.py +5 -13
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/transformation.py +4 -19
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/workflow.py +20 -37
- cognite_toolkit/_cdf_tk/cruds/_worker.py +13 -30
- cognite_toolkit/_cdf_tk/feature_flags.py +1 -1
- cognite_toolkit/_cdf_tk/storageio/_base.py +23 -0
- cognite_toolkit/_cdf_tk/storageio/_file_content.py +4 -0
- cognite_toolkit/_cdf_tk/utils/fileio/_readers.py +42 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.7.10.dist-info → cognite_toolkit-0.7.12.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.7.10.dist-info → cognite_toolkit-0.7.12.dist-info}/RECORD +47 -47
- cognite_toolkit/_cdf_tk/commands/featureflag.py +0 -27
- {cognite_toolkit-0.7.10.dist-info → cognite_toolkit-0.7.12.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.7.10.dist-info → cognite_toolkit-0.7.12.dist-info}/entry_points.txt +0 -0
- {cognite_toolkit-0.7.10.dist-info → cognite_toolkit-0.7.12.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_cdf.py
CHANGED

@@ -36,6 +36,7 @@ from cognite_toolkit._cdf_tk.apps import (
 )
 from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
 from cognite_toolkit._cdf_tk.commands import (
+    AboutCommand,
     CollectCommand,
 )
 from cognite_toolkit._cdf_tk.constants import HINT_LEAD_TEXT, URL, USE_SENTRY
@@ -115,6 +116,13 @@ _app.add_typer(ModulesApp(**default_typer_kws), name="modules")
 _app.command("init")(landing_app.main_init)
 
 
+@_app.command("about")
+def about() -> None:
+    """Display information about the Toolkit installation and configuration."""
+    cmd = AboutCommand()
+    cmd.run(lambda: cmd.execute(Path.cwd()))
+
+
 def app() -> NoReturn:
     # --- Main entry point ---
     # Users run 'app()' directly, but that doesn't allow us to control excepton handling:
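
Aside: the new "about" entry point follows the usual Typer pattern of a thin CLI function delegating to a command object. Below is a minimal, self-contained sketch of that registration pattern; DemoAboutCommand is a hypothetical stand-in, not the Toolkit's actual ToolkitCommand machinery.

from collections.abc import Callable
from pathlib import Path

import typer

app = typer.Typer()


class DemoAboutCommand:
    """Hypothetical stand-in for AboutCommand."""

    def run(self, execute: Callable[[], None]) -> None:
        # The real ToolkitCommand.run wraps execution with error handling; here we just call it.
        execute()

    def execute(self, cwd: Path) -> None:
        print(f"About info for project at {cwd}")


@app.command("about")
def about() -> None:
    """Display information about the installation and configuration."""
    cmd = DemoAboutCommand()
    cmd.run(lambda: cmd.execute(Path.cwd()))


if __name__ == "__main__":
    app()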
cognite_toolkit/_cdf_tk/commands/__init__.py
CHANGED

@@ -6,13 +6,13 @@ from ._migrate import (
 from ._profile import ProfileAssetCentricCommand, ProfileAssetCommand, ProfileRawCommand, ProfileTransformationCommand
 from ._purge import PurgeCommand
 from ._upload import UploadCommand
+from .about import AboutCommand
 from .auth import AuthCommand
 from .build_cmd import BuildCommand
 from .clean import CleanCommand
 from .collect import CollectCommand
 from .deploy import DeployCommand
 from .dump_resource import DumpResourceCommand
-from .featureflag import FeatureFlagCommand
 from .init import InitCommand
 from .modules import ModulesCommand
 from .pull import PullCommand
@@ -21,6 +21,7 @@ from .resources import ResourcesCommand
 from .run import RunFunctionCommand, RunTransformationCommand, RunWorkflowCommand
 
 __all__ = [
+    "AboutCommand",
     "AuthCommand",
     "BuildCommand",
     "CleanCommand",
@@ -28,7 +29,6 @@ __all__ = [
     "DeployCommand",
     "DownloadCommand",
     "DumpResourceCommand",
-    "FeatureFlagCommand",
     "InitCommand",
     "MigrationCanvasCommand",
     "MigrationPrepareCommand",
cognite_toolkit/_cdf_tk/commands/_purge.py
CHANGED

@@ -7,12 +7,7 @@ from typing import Literal, cast
 
 import questionary
 from cognite.client.data_classes import DataSetUpdate
-from cognite.client.data_classes.
-from cognite.client.data_classes.data_modeling import (
-    EdgeList,
-    NodeId,
-    NodeList,
-)
+from cognite.client.data_classes.data_modeling import Edge, NodeId
 from cognite.client.data_classes.data_modeling.statistics import SpaceStatistics
 from cognite.client.exceptions import CogniteAPIError
 from cognite.client.utils._identifier import InstanceId
@@ -46,6 +41,7 @@ from cognite_toolkit._cdf_tk.exceptions import (
     AuthorizationError,
     ToolkitMissingResourceError,
 )
+from cognite_toolkit._cdf_tk.protocols import ResourceResponseProtocol
 from cognite_toolkit._cdf_tk.storageio import InstanceIO
 from cognite_toolkit._cdf_tk.storageio.selectors import InstanceSelector
 from cognite_toolkit._cdf_tk.tk_warnings import (
@@ -107,7 +103,7 @@ class ToDelete(ABC):
     @abstractmethod
     def get_process_function(
         self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
-    ) -> Callable[[
+    ) -> Callable[[list[ResourceResponseProtocol]], list[JsonVal]]:
         raise NotImplementedError()
 
     def get_extra_fields(self) -> dict[str, JsonVal]:
@@ -118,9 +114,10 @@ class ToDelete(ABC):
 class DataModelingToDelete(ToDelete):
     def get_process_function(
         self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
-    ) -> Callable[[
-        def as_id(chunk:
-
+    ) -> Callable[[list[ResourceResponseProtocol]], list[JsonVal]]:
+        def as_id(chunk: list[ResourceResponseProtocol]) -> list[JsonVal]:
+            # We know that all data modeling resources implement as_id
+            return [item.as_id().dump(include_type=False) for item in chunk]  # type: ignore[attr-defined]
 
         return as_id
 
@@ -129,11 +126,11 @@ class DataModelingToDelete(ToDelete):
 class EdgeToDelete(ToDelete):
     def get_process_function(
         self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
-    ) -> Callable[[
-        def as_id(chunk:
+    ) -> Callable[[list[ResourceResponseProtocol]], list[JsonVal]]:
+        def as_id(chunk: list[ResourceResponseProtocol]) -> list[JsonVal]:
             return [
                 {"space": item.space, "externalId": item.external_id, "instanceType": "edge"}
-                for item in cast(
+                for item in cast(list[Edge], chunk)
             ]
 
         return as_id
@@ -146,9 +143,10 @@ class NodesToDelete(ToDelete):
 
     def get_process_function(
         self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
-    ) -> Callable[[
-        def check_for_data(chunk:
-
+    ) -> Callable[[list[ResourceResponseProtocol]], list[JsonVal]]:
+        def check_for_data(chunk: list[ResourceResponseProtocol]) -> list[JsonVal]:
+            # We know that all node resources implement as_id
+            node_ids = [item.as_id() for item in chunk]  # type: ignore[attr-defined]
             found_ids: set[InstanceId] = set()
             if not self.delete_datapoints:
                 timeseries = client.time_series.retrieve_multiple(instance_ids=node_ids, ignore_unknown_ids=True)
@@ -164,8 +162,7 @@ class NodesToDelete(ToDelete):
                 dumped = node_id.dump(include_instance_type=True)
                 # The delete endpoint expects "instanceType" instead of "type"
                 dumped["instanceType"] = dumped.pop("type")
-
-                result.append(dumped)  # type: ignore[arg-type]
+                result.append(dumped)
             return result
 
         return check_for_data
@@ -175,9 +172,10 @@ class NodesToDelete(ToDelete):
 class IdResourceToDelete(ToDelete):
     def get_process_function(
         self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
-    ) -> Callable[[
-        def as_id(chunk:
-
+    ) -> Callable[[list[ResourceResponseProtocol]], list[JsonVal]]:
+        def as_id(chunk: list[ResourceResponseProtocol]) -> list[JsonVal]:
+            # We know that all id resources have an id attribute
+            return [{"id": item.id} for item in chunk]  # type: ignore[attr-defined]
 
         return as_id
 
@@ -186,9 +184,10 @@ class IdResourceToDelete(ToDelete):
 class ExternalIdToDelete(ToDelete):
     def get_process_function(
         self, client: ToolkitClient, console: Console, verbose: bool, process_results: ResourceDeployResult
-    ) -> Callable[[
-        def as_external_id(chunk:
-
+    ) -> Callable[[list[ResourceResponseProtocol]], list[JsonVal]]:
+        def as_external_id(chunk: list[ResourceResponseProtocol]) -> list[JsonVal]:
+            # We know that all external id resources have an external_id attribute
+            return [{"externalId": item.external_id} for item in chunk]  # type: ignore[attr-defined]
 
         return as_external_id
 
@@ -321,7 +320,7 @@ class PurgeCommand(ToolkitCommand):
             iteration_count = item.total // self.BATCH_SIZE_DM + (
                 1 if item.total % self.BATCH_SIZE_DM > 0 else 0
             )
-            executor = ProducerWorkerExecutor[
+            executor = ProducerWorkerExecutor[list[ResourceResponseProtocol], list[JsonVal]](
                 download_iterable=self._iterate_batch(
                     item.crud, space, data_set_external_id, batch_size=self.BATCH_SIZE_DM
                 ),
@@ -348,13 +347,13 @@ class PurgeCommand(ToolkitCommand):
     @staticmethod
     def _iterate_batch(
         crud: ResourceCRUD, selected_space: str | None, data_set_external_id: str | None, batch_size: int
-    ) -> Iterable[
-        batch =
+    ) -> Iterable[list[ResourceResponseProtocol]]:
+        batch: list[ResourceResponseProtocol] = []
         for resource in crud.iterate(space=selected_space, data_set_external_id=data_set_external_id):
            batch.append(resource)
            if len(batch) >= batch_size:
                yield batch
-                batch =
+                batch = []
        if batch:
            yield batch
 
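
Aside: _iterate_batch above is a plain chunking generator: it buffers items from a lazy iterator and yields fixed-size lists, plus a final partial batch. A self-contained sketch of the same pattern, with standalone names and ints standing in for resources:

from collections.abc import Iterable, Iterator


def iterate_batch(resources: Iterable[int], batch_size: int) -> Iterator[list[int]]:
    batch: list[int] = []
    for resource in resources:
        batch.append(resource)
        if len(batch) >= batch_size:
            yield batch
            batch = []
    if batch:
        # Flush the final partial batch.
        yield batch


print(list(iterate_batch(range(7), 3)))  # [[0, 1, 2], [3, 4, 5], [6]]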
cognite_toolkit/_cdf_tk/commands/_upload.py
CHANGED

@@ -215,6 +215,10 @@ class UploadCommand(ToolkitCommand):
         reader = MultiFileReader(datafiles)
         if reader.is_table and not isinstance(io, TableUploadableStorageIO):
             raise ToolkitValueError(f"{selector.display_name} does not support {reader.format!r} files.")
+
+        chunk_count = io.count_chunks(reader)
+        iteration_count = chunk_count // io.CHUNK_SIZE + (1 if chunk_count % io.CHUNK_SIZE > 0 else 0)
+
         tracker = ProgressTracker[str]([self._UPLOAD])
         executor = ProducerWorkerExecutor[list[tuple[str, dict[str, JsonVal]]], Sequence[UploadItem]](
             download_iterable=io.read_chunks(reader, selector),
@@ -230,7 +234,7 @@ class UploadCommand(ToolkitCommand):
                 tracker=tracker,
                 console=console,
             ),
-            iteration_count=
+            iteration_count=iteration_count,
             max_queue_size=self._MAX_QUEUE_SIZE,
             download_description=f"Reading {selector.display_name!r} files",
             process_description="Processing",
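
Aside: the iteration_count added above is an integer ceiling division, ceil(chunk_count / CHUNK_SIZE), written without floats so it stays exact for large counts. A small runnable check of the arithmetic:

def iteration_count(chunk_count: int, chunk_size: int) -> int:
    # Same as math.ceil(chunk_count / chunk_size), but in pure integer arithmetic;
    # also equivalent to -(-chunk_count // chunk_size).
    return chunk_count // chunk_size + (1 if chunk_count % chunk_size > 0 else 0)


assert iteration_count(10, 4) == 3  # 3 chunks: 4 + 4 + 2
assert iteration_count(8, 4) == 2   # exact multiple, no extra chunk
assert iteration_count(0, 4) == 0   # nothing to read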
cognite_toolkit/_cdf_tk/commands/about.py
ADDED

@@ -0,0 +1,221 @@
+import platform
+import sys
+from pathlib import Path
+
+from rich import print
+from rich.table import Table
+
+from cognite_toolkit._cdf_tk.cdf_toml import CDFToml, _read_toml
+from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
+from cognite_toolkit._cdf_tk.constants import clean_name
+from cognite_toolkit._cdf_tk.feature_flags import Flags
+from cognite_toolkit._cdf_tk.plugins import Plugins
+from cognite_toolkit._cdf_tk.tk_warnings import LowSeverityWarning, MediumSeverityWarning
+from cognite_toolkit._version import __version__
+
+
+class AboutCommand(ToolkitCommand):
+    def execute(self, cwd: Path) -> None:
+        # Version information
+        print(f"\n[bold cyan]Cognite Toolkit[/bold cyan] version: [yellow]{__version__}[/yellow]")
+        print(f"Python version: {sys.version.split()[0]}")
+        print(f"Platform: {platform.system()} {platform.release()}")
+
+        # Check for cdf.toml in the current directory
+        cdf_toml_path = cwd / CDFToml.file_name
+
+        if cdf_toml_path.exists():
+            print(f"\n[bold green]Configuration file found:[/bold green] {cdf_toml_path}")
+
+            cdf_toml = CDFToml.load(cwd)
+
+            # We need to read the raw TOML to get original key names for plugins and alpha flags
+            raw_toml = _read_toml(cdf_toml_path)
+
+            self._check_unrecognized_sections(raw_toml)
+            self._display_plugins(cdf_toml, raw_toml)
+            self._display_alpha_flags(cdf_toml, raw_toml)
+            self._display_additional_config(cdf_toml)
+
+        else:
+            # Search for cdf.toml in subdirectories
+            found_files = self._search_cdf_toml(cwd)
+
+            if found_files:
+                print(f"\n[bold yellow]No cdf.toml found in current directory:[/bold yellow] {cwd}")
+                print("\n[bold]Found cdf.toml files in subdirectories:[/bold]")
+                for file in found_files:
+                    rel_path = file.relative_to(cwd)
+                    print(f" • {rel_path}")
+                print(f"\n[bold cyan]Hint:[/bold cyan] Move one of these files to {cwd} or navigate to its directory.")
+            else:
+                print("\n[bold yellow]No cdf.toml found[/bold yellow] in current directory or subdirectories.")
+                print(f"Current directory: {cwd}")
+                print("\n[bold cyan]Hint:[/bold cyan] Run [yellow]cdf init[/yellow] to create a new project.")
+
+    def _check_unrecognized_sections(self, raw_toml: dict) -> None:
+        """Check for unrecognized tables in cdf.toml and warn about them."""
+        # Valid top-level tables in cdf.toml
+        valid_tables = {"cdf", "modules", "alpha_flags", "feature_flags", "plugins", "library"}
+
+        # Filter out empty keys, whitespace-only keys, and check for unrecognized tables
+        unrecognized_tables = [key for key in raw_toml.keys() if key and key.strip() and key not in valid_tables]
+
+        if unrecognized_tables:
+            print()
+
+            for table in unrecognized_tables:
+                # Try to find a matching valid table by stripping non-alphabetical characters
+                suggestion = self._find_similar_table(table, valid_tables)
+
+                message = f"Table '{table}' in cdf.toml is not recognized and will have no effect."
+                if suggestion:
+                    message += f" Did you mean '{suggestion}'?"
+
+                self.warn(MediumSeverityWarning(message))
+
+    @staticmethod
+    def _find_similar_table(unrecognized: str, valid_tables: set[str]) -> str | None:
+        """Find a similar valid table by comparing alphabetical characters only.
+
+        Returns None if the unrecognized table is already valid or if no similar match is found.
+        """
+        # If it's already a valid table, return None (no suggestion needed)
+        if unrecognized in valid_tables:
+            return None
+
+        # Keep only alphabetical characters and lowercase
+        normalized_unrecognized = "".join(c for c in unrecognized if c.isalpha()).lower()
+
+        # First, try exact match (after normalization)
+        for valid in valid_tables:
+            normalized_valid = "".join(c for c in valid if c.isalpha()).lower()
+            if normalized_unrecognized == normalized_valid:
+                return valid
+
+        # If no match, check for singular/plural variations (missing 's')
+        for valid in valid_tables:
+            normalized_valid = "".join(c for c in valid if c.isalpha()).lower()
+
+            # Check if adding 's' to unrecognized matches valid (e.g., "plugin" -> "plugins")
+            if normalized_unrecognized + "s" == normalized_valid:
+                return valid
+
+        return None
+
+    def _display_plugins(self, cdf_toml: CDFToml, raw_toml: dict) -> None:
+        """Display all available plugins and their status."""
+        table = Table(title="Plugins", show_header=True)
+        table.add_column("Plugin", justify="left", style="cyan")
+        table.add_column("Status", justify="center")
+        table.add_column("Description", justify="left")
+
+        # Track which plugins we've seen
+        seen_plugins = set()
+
+        # Show all plugins from the enum
+        for plugin in Plugins:
+            plugin_name = plugin.value.name
+            cleaned_key = clean_name(plugin_name)
+            seen_plugins.add(cleaned_key)
+
+            is_enabled = cdf_toml.plugins.get(cleaned_key, False)
+            if is_enabled:
+                status = "[green]✓ enabled[/green]"
+            else:
+                status = "[dim]○ disabled[/dim]"
+
+            table.add_row(plugin_name, status, plugin.value.description)
+
+        print()
+        print(table)
+
+        # Show any unrecognized plugins from cdf.toml using original key names
+        raw_plugins = raw_toml.get("plugins", {})
+        unrecognized = []
+        for original_key, value in raw_plugins.items():
+            cleaned_key = clean_name(original_key)
+            if cleaned_key not in seen_plugins:
+                unrecognized.append((original_key, value))
+
+        for original_key, is_enabled in unrecognized:
+            status = "enabled" if is_enabled else "disabled"
+            self.warn(
+                LowSeverityWarning(f"Plugin '{original_key}' in cdf.toml is not recognized and will have no effect.")
+            )
+
+    def _display_alpha_flags(self, cdf_toml: CDFToml, raw_toml: dict) -> None:
+        """Display available alpha flags and their status."""
+        table = Table(title="Alpha Flags", show_header=True)
+        table.add_column("Flag", justify="left", style="yellow")
+        table.add_column("Status", justify="center")
+        table.add_column("Description", justify="left")
+
+        # Track which flags we've seen
+        seen_flags = set()
+
+        # Show flags from the enum that are either enabled or visible
+        for flag in Flags:
+            cleaned_key = clean_name(flag.name)
+            seen_flags.add(cleaned_key)
+
+            is_enabled = cdf_toml.alpha_flags.get(cleaned_key, False)
+
+            # Only show if enabled or visible
+            if is_enabled or flag.value.visible:
+                # Convert enum name to kebab-case for display
+                display_name = flag.name.lower().replace("_", "-")
+
+                if is_enabled:
+                    status = "[green]✓ enabled[/green]"
+                else:
+                    status = "[dim]○ disabled[/dim]"
+
+                table.add_row(display_name, status, flag.value.description)
+
+        print()
+        print(table)
+
+        # Show any unrecognized flags from cdf.toml using original key names
+        raw_flags = raw_toml.get("alpha_flags", {})
+        unrecognized = []
+        for original_key, value in raw_flags.items():
+            cleaned_key = clean_name(original_key)
+            if cleaned_key not in seen_flags:
+                unrecognized.append((original_key, value))
+
+        for original_key, is_enabled in unrecognized:
+            status = "enabled" if is_enabled else "disabled"
+            self.warn(
+                LowSeverityWarning(
+                    f"Alpha flag '{original_key}' in cdf.toml is not recognized and will have no effect."
+                )
+            )
+
+    def _display_additional_config(self, cdf_toml: CDFToml) -> None:
+        """Display additional configuration information."""
+        print("\n[bold]Additional Configuration:[/bold]")
+
+        print(f" Default environment: [cyan]{cdf_toml.cdf.default_env}[/cyan]")
+
+        if cdf_toml.cdf.has_user_set_default_org:
+            print(f" Default organization dir: [cyan]{cdf_toml.cdf.default_organization_dir}[/cyan]")
+
+        if cdf_toml.cdf.file_encoding:
+            print(f" File encoding: [cyan]{cdf_toml.cdf.file_encoding}[/cyan]")
+
+        print(f" Modules version: [cyan]{cdf_toml.modules.version}[/cyan]")
+
+        if cdf_toml.libraries:
+            print(f" Configured libraries: [cyan]{len(cdf_toml.libraries)}[/cyan]")
+            for lib_name, lib_config in cdf_toml.libraries.items():
+                print(f" • {lib_name}: [dim]{lib_config.url}[/dim]")
+
+    def _search_cdf_toml(self, cwd: Path) -> list[Path]:
+        """Search for cdf.toml files in immediate subdirectories (one level down)."""
+        try:
+            return sorted(
+                [potential_file for potential_file in cwd.glob(f"*/{CDFToml.file_name}") if potential_file.is_file()]
+            )
+        except PermissionError:
+            return []
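
Aside: _find_similar_table suggests a correction by comparing only lowercase alphabetical characters (so "alpha-flags" matches "alpha_flags"), then retrying with a trailing "s" for singular/plural slips. A self-contained demo of that logic, mirroring the method above with standalone names:

def find_similar_table(unrecognized: str, valid_tables: set[str]) -> str | None:
    if unrecognized in valid_tables:
        return None  # already valid, no suggestion needed
    normalized = "".join(c for c in unrecognized if c.isalpha()).lower()
    # Exact match after normalization (handles separators and casing).
    for valid in valid_tables:
        if normalized == "".join(c for c in valid if c.isalpha()).lower():
            return valid
    # Singular written for plural, e.g. "plugin" -> "plugins".
    for valid in valid_tables:
        if normalized + "s" == "".join(c for c in valid if c.isalpha()).lower():
            return valid
    return None


VALID = {"cdf", "modules", "alpha_flags", "feature_flags", "plugins", "library"}
print(find_similar_table("plugin", VALID))       # plugins
print(find_similar_table("alpha-flags", VALID))  # alpha_flags
print(find_similar_table("unknown", VALID))      # None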
cognite_toolkit/_cdf_tk/commands/clean.py
CHANGED

@@ -1,4 +1,5 @@
 import traceback
+from collections.abc import Sequence
 from graphlib import TopologicalSorter
 from pathlib import Path
 
@@ -24,14 +25,7 @@ from cognite_toolkit._cdf_tk.cruds import (
     ResourceCRUD,
     ResourceWorker,
 )
-from cognite_toolkit._cdf_tk.cruds._base_cruds import (
-    T_ID,
-    Loader,
-    T_ResourceRequest,
-    T_ResourceRequestList,
-    T_ResourceResponse,
-    T_ResourceResponseList,
-)
+from cognite_toolkit._cdf_tk.cruds._base_cruds import Loader
 from cognite_toolkit._cdf_tk.data_classes import (
     BuildEnvironment,
     DeployResults,
@@ -46,6 +40,7 @@ from cognite_toolkit._cdf_tk.exceptions import (
     ToolkitValidationError,
     ToolkitValueError,
 )
+from cognite_toolkit._cdf_tk.protocols import T_ResourceRequest, T_ResourceResponse
 from cognite_toolkit._cdf_tk.tk_warnings import (
     LowSeverityWarning,
     MediumSeverityWarning,
@@ -57,6 +52,7 @@ from cognite_toolkit._cdf_tk.utils import (
     read_yaml_file,
 )
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
+from cognite_toolkit._cdf_tk.utils.useful_types import T_ID
 
 from ._utils import _print_ids_or_length
 
@@ -66,9 +62,7 @@ AVAILABLE_DATA_TYPES: tuple[str, ...] = tuple(CRUDS_BY_FOLDER_NAME)
 class CleanCommand(ToolkitCommand):
     def clean_resources(
         self,
-        loader: ResourceCRUD[
-            T_ID, T_ResourceRequest, T_ResourceResponse, T_ResourceRequestList, T_ResourceResponseList
-        ],
+        loader: ResourceCRUD[T_ID, T_ResourceRequest, T_ResourceResponse],
         env_vars: EnvironmentVariables,
         read_modules: list[ReadModule],
         dry_run: bool = False,
@@ -139,7 +133,7 @@ class CleanCommand(ToolkitCommand):
         return ResourceDeployResult(name=loader.display_name)
 
     def _delete_resources(
-        self, loaded_resources:
+        self, loaded_resources: Sequence[T_ResourceResponse], loader: ResourceCRUD, dry_run: bool, verbose: bool
     ) -> int:
         nr_of_deleted = 0
         resource_ids = loader.get_ids(loaded_resources)
@@ -164,7 +158,11 @@ class CleanCommand(ToolkitCommand):
         return nr_of_deleted
 
     def _drop_data(
-        self,
+        self,
+        loaded_resources: Sequence[T_ResourceResponse],
+        loader: ResourceContainerCRUD,
+        dry_run: bool,
+        verbose: bool,
     ) -> int:
         nr_of_dropped = 0
         resource_ids = loader.get_ids(loaded_resources)
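
Aside: the recurring signature change in clean.py (and in deploy.py below) is that ResourceCRUD no longer carries dedicated TypeVars for its list types; plain Sequence[...] over the element TypeVars covers those parameters. A schematic sketch of the simplified generic, with illustrative method names rather than the Toolkit's real class:

from collections.abc import Sequence
from typing import Generic, TypeVar

T_ID = TypeVar("T_ID")
T_ResourceRequest = TypeVar("T_ResourceRequest")
T_ResourceResponse = TypeVar("T_ResourceResponse")


# Was (schematically): Generic[T_ID, T_Request, T_Response, T_RequestList, T_ResponseList].
# Now three parameters suffice, with Sequence[...] in the signatures.
class ResourceCRUD(Generic[T_ID, T_ResourceRequest, T_ResourceResponse]):
    def get_ids(self, resources: Sequence[T_ResourceResponse]) -> list[T_ID]:
        raise NotImplementedError

    def delete(self, ids: Sequence[T_ID]) -> int:
        raise NotImplementedError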
cognite_toolkit/_cdf_tk/commands/deploy.py
CHANGED

@@ -1,4 +1,4 @@
-from collections.abc import Hashable
+from collections.abc import Hashable, Sequence
 from graphlib import TopologicalSorter
 from pathlib import Path
 from typing import overload
@@ -46,9 +46,7 @@ from cognite_toolkit._cdf_tk.exceptions import (
 )
 from cognite_toolkit._cdf_tk.protocols import (
     T_ResourceRequest,
-    T_ResourceRequestList,
     T_ResourceResponse,
-    T_ResourceResponseList,
 )
 from cognite_toolkit._cdf_tk.tk_warnings import EnvironmentVariableMissingWarning
 from cognite_toolkit._cdf_tk.tk_warnings.base import WarningList, catch_warnings
@@ -338,9 +336,7 @@ class DeployCommand(ToolkitCommand):
 
     def deploy_resource_type(
         self,
-        loader: ResourceCRUD[
-            T_ID, T_ResourceRequest, T_ResourceResponse, T_ResourceRequestList, T_ResourceResponseList
-        ],
+        loader: ResourceCRUD[T_ID, T_ResourceRequest, T_ResourceResponse],
         env_vars: EnvironmentVariables,
         read_modules: list[ReadModule] | None = None,
         dry_run: bool = False,
@@ -402,10 +398,8 @@ class DeployCommand(ToolkitCommand):
 
     def actual_deploy(
         self,
-        resources: CategorizedResources[T_ID,
-        loader: ResourceCRUD[
-            T_ID, T_ResourceRequest, T_ResourceResponse, T_ResourceRequestList, T_ResourceResponseList
-        ],
+        resources: CategorizedResources[T_ID, T_ResourceRequest],
+        loader: ResourceCRUD[T_ID, T_ResourceRequest, T_ResourceResponse],
         env_var_warnings: WarningList | None = None,
     ) -> ResourceDeployResult:
         environment_variable_warning_by_id = {
@@ -436,10 +430,8 @@ class DeployCommand(ToolkitCommand):
 
     @staticmethod
     def dry_run_deploy(
-        resources: CategorizedResources[T_ID,
-        loader: ResourceCRUD[
-            T_ID, T_ResourceRequest, T_ResourceResponse, T_ResourceRequestList, T_ResourceResponseList
-        ],
+        resources: CategorizedResources[T_ID, T_ResourceRequest],
+        loader: ResourceCRUD[T_ID, T_ResourceRequest, T_ResourceResponse],
         has_done_drop: bool,
         has_dropped_data: bool,
     ) -> ResourceDeployResult:
@@ -470,7 +462,7 @@ class DeployCommand(ToolkitCommand):
 
     @staticmethod
     def _verbose_print(
-        resources: CategorizedResources[T_ID,
+        resources: CategorizedResources[T_ID, T_ResourceRequest],
         loader: ResourceCRUD,
         dry_run: bool,
     ) -> None:
@@ -494,7 +486,7 @@ class DeployCommand(ToolkitCommand):
 
     def _create_resources(
         self,
-        resources:
+        resources: Sequence[T_ResourceRequest],
         loader: ResourceCRUD,
         environment_variable_warning_by_id: dict[Hashable, EnvironmentVariableMissingWarning],
     ) -> int:
@@ -517,7 +509,7 @@ class DeployCommand(ToolkitCommand):
 
     def _update_resources(
         self,
-        resources:
+        resources: Sequence[T_ResourceRequest],
         loader: ResourceCRUD,
         environment_variable_warning_by_id: dict[Hashable, EnvironmentVariableMissingWarning],
     ) -> int:
cognite_toolkit/_cdf_tk/commands/dump_resource.py
CHANGED

@@ -3,7 +3,7 @@ import json
 import zipfile
 from abc import ABC, abstractmethod
 from collections import defaultdict
-from collections.abc import Hashable, Iterable, Iterator
+from collections.abc import Hashable, Iterable, Iterator, Sequence
 from functools import cached_property
 from pathlib import Path
 from typing import Generic, cast
@@ -22,6 +22,7 @@ from cognite.client.data_classes import (
     filters,
 )
 from cognite.client.data_classes._base import (
+    CogniteResource,
     CogniteResourceList,
 )
 from cognite.client.data_classes.agents import (
@@ -113,7 +114,7 @@ class ResourceFinder(Iterable, ABC, Generic[T_ID]):
         raise NotImplementedError
 
     # Can be implemented in subclasses
-    def update(self, resources:
+    def update(self, resources: Sequence[CogniteResource]) -> None: ...
 
 
 class DataModelFinder(ResourceFinder[DataModelId]):
@@ -178,7 +179,7 @@ class DataModelFinder(ResourceFinder[DataModelId]):
         self.data_model = models_by_version[selected_model]
         return self.data_model.as_id()
 
-    def update(self, resources:
+    def update(self, resources: Sequence[CogniteResource]) -> None:
         if isinstance(resources, dm.DataModelList):
             self.view_ids |= {
                 view.as_id() if isinstance(view, dm.View) else view for item in resources for view in item.views
@@ -187,7 +188,7 @@ class DataModelFinder(ResourceFinder[DataModelId]):
             self.container_ids |= resources.referenced_containers()
         elif isinstance(resources, dm.SpaceList):
             return
-        self.space_ids |= {item.space for item in resources}
+        self.space_ids |= {item.space for item in resources if hasattr(item, "space")}
 
     def __iter__(self) -> Iterator[tuple[list[Hashable], CogniteResourceList | None, ResourceCRUD, None | str]]:
         self.identifier = self._selected()
@@ -804,6 +805,7 @@ class DumpResourceCommand(ToolkitCommand):
         output_dir.mkdir(exist_ok=True)
 
         dumped_ids: list[Hashable] = []
+        resources: Sequence[CogniteResource] | None = None
         for identifiers, resources, loader, subfolder in finder:
             if not identifiers and not resources:
                 # No resources to dump
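
Aside: widening update() to Sequence[CogniteResource] is also why the hasattr(item, "space") guard appears above; under the broader signature, not every item is guaranteed to carry a space attribute. A toy illustration with hypothetical classes (not the SDK's):

from collections.abc import Sequence


class DemoSpace:
    def __init__(self, space: str) -> None:
        self.space = space


class DemoFinder:
    def __init__(self) -> None:
        self.space_ids: set[str] = set()

    def update(self, resources: Sequence[object]) -> None:
        # Skip items without a space attribute instead of raising AttributeError.
        self.space_ids |= {item.space for item in resources if hasattr(item, "space")}


finder = DemoFinder()
finder.update([DemoSpace("my_space"), object()])
print(finder.space_ids)  # {'my_space'}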
|