cognite-toolkit 0.5.61__py3-none-any.whl → 0.5.62__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognite_toolkit/_builtin_modules/cdf.toml +1 -1
- cognite_toolkit/_cdf_tk/apps/_modules_app.py +9 -9
- cognite_toolkit/_cdf_tk/apps/_profile_app.py +2 -2
- cognite_toolkit/_cdf_tk/cdf_toml.py +46 -3
- cognite_toolkit/_cdf_tk/client/data_classes/canvas.py +395 -0
- cognite_toolkit/_cdf_tk/commands/__init__.py +2 -2
- cognite_toolkit/_cdf_tk/commands/_profile.py +198 -89
- cognite_toolkit/_cdf_tk/commands/modules.py +128 -5
- cognite_toolkit/_cdf_tk/data_classes/_packages.py +2 -2
- cognite_toolkit/_cdf_tk/feature_flags.py +4 -0
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_version.py +1 -1
- cognite_toolkit/config.dev.yaml +7 -0
- {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/METADATA +2 -2
- {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/RECORD +20 -18
- /cognite_toolkit/_builtin_modules/{package.toml → packages.toml} +0 -0
- {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/entry_points.txt +0 -0
- {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/licenses/LICENSE +0 -0

cognite_toolkit/_cdf_tk/apps/_modules_app.py
CHANGED
@@ -62,14 +62,14 @@ class ModulesApp(typer.Typer):
     ) -> None:
         """Initialize or upgrade a new CDF project with templates interactively."""
 
-
-
-
-
-
-
+        with ModulesCommand() as cmd:
+            cmd.run(
+                lambda: cmd.init(
+                    organization_dir=organization_dir,
+                    select_all=all,
+                    clean=clean,
+                )
             )
-            )
 
     def upgrade(
         self,
@@ -116,8 +116,8 @@ class ModulesApp(typer.Typer):
         ] = False,
     ) -> None:
         """Add one or more new module(s) to the project."""
-
-
+        with ModulesCommand() as cmd:
+            cmd.run(lambda: cmd.add(organization_dir=organization_dir))
 
     def pull(
         self,

cognite_toolkit/_cdf_tk/apps/_profile_app.py
CHANGED
@@ -3,7 +3,7 @@ from typing import Any
 import typer
 from rich import print
 
-from cognite_toolkit._cdf_tk.commands import
+from cognite_toolkit._cdf_tk.commands import ProfileAssetCentricCommand
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 
 
@@ -27,7 +27,7 @@ class ProfileApp(typer.Typer):
         This shows an approximation of unstructured data count. This can, for example, be used to estimate the
         effort to model this data in data modeling."""
         client = EnvironmentVariables.create_from_environment().get_client()
-        cmd =
+        cmd = ProfileAssetCentricCommand()
         cmd.run(
             lambda: cmd.asset_centric(
                 client,

cognite_toolkit/_cdf_tk/cdf_toml.py
CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import re
 import sys
+import urllib
 from contextlib import suppress
 from dataclasses import dataclass, field
 from pathlib import Path
@@ -64,12 +65,39 @@ class ModulesConfig:
         ):
             raise ToolkitVersionError(
                 f"The version of the modules ({version}) does not match the version of the installed CLI "
-                f"({_version.__version__}). Please
-                f"run `pip install cognite-toolkit=={version}` to downgrade cdf
+                f"({_version.__version__}). Please run `cdf modules upgrade` to upgrade the modules OR "
+                f"run `pip install cognite-toolkit=={version}` to downgrade cdf CLI."
             )
         return cls(version=version, packages=packages)
 
 
+@dataclass
+class Library:
+    url: str
+    checksum: str
+
+    @classmethod
+    def load(cls, raw: dict[str, Any]) -> Library:
+        if "url" not in raw:
+            raise ValueError("Library configuration must contain 'url' field.")
+
+        if "checksum" not in raw:
+            raise ValueError("Library configuration must contain 'checksum' field.")
+
+        parsed_url = urllib.parse.urlparse(raw["url"])
+
+        if not all([parsed_url.scheme, parsed_url.netloc]):
+            raise ValueError("URL is missing scheme or network location (e.g., 'https://domain.com')")
+
+        if parsed_url.scheme != "https":
+            raise ValueError("URL must start with 'https'")
+
+        if not parsed_url.path.casefold().endswith(".zip"):
+            raise ValueError("URL must point to a .zip file.")
+
+        return cls(**raw)
+
+
 @dataclass
 class CDFToml:
     """This is the configuration for the CLI and Modules"""
@@ -80,6 +108,7 @@ class CDFToml:
     modules: ModulesConfig
     alpha_flags: dict[str, bool] = field(default_factory=dict)
     plugins: dict[str, bool] = field(default_factory=dict)
+    libraries: dict[str, Library] = field(default_factory=dict)
 
     is_loaded_from_file: bool = False
 
@@ -114,7 +143,21 @@ class CDFToml:
             if "plugins" in raw:
                 plugins = {clean_name(k): v for k, v in raw["plugins"].items()}
 
-
+            libraries = {}
+            for k, v in raw.get("library", {}).items():
+                try:
+                    libraries[k] = Library.load(v)
+                except Exception as e:
+                    raise ToolkitTOMLFormatError(f"Invalid library configuration for '{k}': {e.args[0]}") from e
+
+            instance = cls(
+                cdf=cdf,
+                modules=modules,
+                alpha_flags=alpha_flags,
+                plugins=plugins,
+                libraries=libraries,
+                is_loaded_from_file=True,
+            )
            if use_singleton:
                 _CDF_TOML = instance
             return instance
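
The loader above reads library definitions from a "library" table (`raw.get("library", {})`) and requires `url` and `checksum` on each entry. A minimal sketch of what such a cdf.toml fragment and its validation could look like follows; the library name, URL, and checksum value are illustrative only, and the table layout is inferred from the loader rather than documented in this diff.

import tomllib  # Python 3.11+

from cognite_toolkit._cdf_tk.cdf_toml import Library

# Hypothetical cdf.toml fragment; the "library.<name>" table matches raw.get("library", {}) above.
CDF_TOML_SNIPPET = """
[library.weather_models]
url = "https://example.com/toolkit-libraries/weather_models.zip"
checksum = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
"""

raw = tomllib.loads(CDF_TOML_SNIPPET)
libraries = {name: Library.load(cfg) for name, cfg in raw["library"].items()}
print(libraries["weather_models"].url)

Library.load enforces HTTPS and a .zip path, so a plain-HTTP or non-zip URL in the snippet would raise a ValueError, which CDFToml.load then wraps in a ToolkitTOMLFormatError.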

cognite_toolkit/_cdf_tk/client/data_classes/canvas.py
ADDED
@@ -0,0 +1,395 @@
+from datetime import datetime
+
+from cognite.client.data_classes.data_modeling import DirectRelationReference
+from cognite.client.data_classes.data_modeling.ids import ViewId
+from cognite.client.data_classes.data_modeling.instances import (
+    PropertyOptions,
+    TypedNode,
+    TypedNodeApply,
+)
+
+
+class _CanvasProperties:
+    created_by = PropertyOptions("createdBy")
+    updated_at = PropertyOptions("updatedAt")
+    updated_by = PropertyOptions("updatedBy")
+    is_locked = PropertyOptions("isLocked")
+    source_canvas_id = PropertyOptions("sourceCanvasId")
+    is_archived = PropertyOptions("isArchived")
+    solution_tags = PropertyOptions("solutionTags")
+
+    @classmethod
+    def get_source(cls) -> ViewId:
+        return ViewId("cdf_industrial_canvas", "Canvas", "v7")
+
+
+class CanvasApply(_CanvasProperties, TypedNodeApply):
+    """This represents the writing format of canvas.
+
+    It is used to when data is written to CDF.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the canva.
+        name: The name or title of the canvas.
+        created_by: The user identifier of the user that created the canvas.
+        updated_at: The timestamp when the canvas was last updated.
+        updated_by: The user identifier of the user that last updated the canvas.
+        is_locked: The boolean state for handling canvas locking which is one-way operation from the user perspective
+        visibility: The application-level visibility of the canvas. Must be either 'public' or 'private' and, if not
+            set, the canvas is expected to be public.
+        source_canvas_id: The property for handling versioning. Example sourceCanvasId === selectedCanvas -> query all
+            versions of such canvas
+        is_archived: Boolean that indicates whether the canvas is archived.
+        context: Stores contextual data attached to the canvas, such as rules and pinned values.
+        solution_tags: The list of solution tags associated with the canvas.
+        existing_version: Fail the ingestion request if the node's version is greater than or equal to this value.
+            If no existingVersion is specified, the ingestion will always overwrite any existing data for the node
+            (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert,
+            so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion
+            request, then the item will be skipped instead of failing the ingestion request.
+        type: Direct relation pointing to the type node.
+    """
+
+    def __init__(
+        self,
+        space: str,
+        external_id: str,
+        *,
+        name: str,
+        created_by: str,
+        updated_at: datetime,
+        updated_by: str,
+        is_locked: bool | None = None,
+        visibility: str | None = None,
+        source_canvas_id: str | None = None,
+        is_archived: bool | None = None,
+        context: list[dict] | None = None,
+        solution_tags: list[DirectRelationReference | tuple[str, str]] | None = None,
+        existing_version: int | None = None,
+        type: DirectRelationReference | tuple[str, str] | None = None,
+    ) -> None:
+        TypedNodeApply.__init__(self, space, external_id, existing_version, type)
+        self.name = name
+        self.created_by = created_by
+        self.updated_at = updated_at
+        self.updated_by = updated_by
+        self.is_locked = is_locked
+        self.visibility = visibility
+        self.source_canvas_id = source_canvas_id
+        self.is_archived = is_archived
+        self.context = context
+        self.solution_tags = (
+            [DirectRelationReference.load(solution_tag) for solution_tag in solution_tags] if solution_tags else None
+        )
+
+
+class Canvas(_CanvasProperties, TypedNode):
+    """This represents the reading format of canva.
+
+    It is used to when data is read from CDF.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the canva.
+        version (int): DMS version.
+        last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970,
+            Coordinated Universal Time (UTC), minus leap seconds.
+        created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970,
+            Coordinated Universal Time (UTC), minus leap seconds.
+        name: The name or title of the canvas.
+        created_by: The user identifier of the user that created the canvas.
+        updated_at: The timestamp when the canvas was last updated.
+        updated_by: The user identifier of the user that last updated the canvas.
+        is_locked: The boolean state for handling canvas locking which is one-way operation from the user perspective
+        visibility: The application-level visibility of the canvas. Must be either 'public' or 'private' and, if not
+            set, the canvas is expected to be public.
+        source_canvas_id: The property for handling versioning. Example sourceCanvasId === selectedCanvas -> query all
+            versions of such canvas
+        is_archived: Boolean that indicates whether the canvas is archived.
+        context: Stores contextual data attached to the canvas, such as rules and pinned values.
+        solution_tags: The list of solution tags associated with the canvas.
+        type: Direct relation pointing to the type node.
+        deleted_time: The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time
+            (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances
+            are filtered out of query results, but present in sync results
+    """
+
+    def __init__(
+        self,
+        space: str,
+        external_id: str,
+        version: int,
+        last_updated_time: int,
+        created_time: int,
+        *,
+        name: str,
+        created_by: str,
+        updated_at: datetime,
+        updated_by: str,
+        is_locked: bool | None = None,
+        visibility: str | None = None,
+        source_canvas_id: str | None = None,
+        is_archived: bool | None = None,
+        context: list[dict] | None = None,
+        solution_tags: list[DirectRelationReference] | None = None,
+        type: DirectRelationReference | None = None,
+        deleted_time: int | None = None,
+    ) -> None:
+        TypedNode.__init__(self, space, external_id, version, last_updated_time, created_time, deleted_time, type)
+        self.name = name
+        self.created_by = created_by
+        self.updated_at = updated_at
+        self.updated_by = updated_by
+        self.is_locked = is_locked
+        self.visibility = visibility
+        self.source_canvas_id = source_canvas_id
+        self.is_archived = is_archived
+        self.context = context
+        self.solution_tags = (
+            [DirectRelationReference.load(solution_tag) for solution_tag in solution_tags] if solution_tags else None
+        )
+
+    def as_write(self) -> CanvasApply:
+        return CanvasApply(
+            self.space,
+            self.external_id,
+            name=self.name,
+            created_by=self.created_by,
+            updated_at=self.updated_at,
+            updated_by=self.updated_by,
+            is_locked=self.is_locked,
+            visibility=self.visibility,
+            source_canvas_id=self.source_canvas_id,
+            is_archived=self.is_archived,
+            context=self.context,
+            solution_tags=self.solution_tags,  # type: ignore[arg-type]
+            existing_version=self.version,
+            type=self.type,
+        )
+
+
+class _CanvasAnnotationProperties:
+    id_ = PropertyOptions("id")
+    annotation_type = PropertyOptions("annotationType")
+    container_id = PropertyOptions("containerId")
+    is_selectable = PropertyOptions("isSelectable")
+    is_draggable = PropertyOptions("isDraggable")
+    is_resizable = PropertyOptions("isResizable")
+    properties_ = PropertyOptions("properties")
+
+    @classmethod
+    def get_source(cls) -> ViewId:
+        return ViewId("cdf_industrial_canvas", "CanvasAnnotation", "v1")
+
+
+class CanvasAnnotationApply(_CanvasAnnotationProperties, TypedNodeApply):
+    """This represents the writing format of canvas annotation.
+
+    It is used to when data is written to CDF.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the canvas annotation.
+        id_: The unique identifier of the canvas annotation.
+        annotation_type: The type of the annotation. Must be one of rectangle, ellipse, polyline, text or sticky.
+        container_id: The optional ID of the container that the annotation is contained in.
+        is_selectable: Boolean that indicates whether the annotation is selectable.
+        is_draggable: Boolean that indicates whether the annotation is draggable.
+        is_resizable: Boolean that indicates whether the annotation is resizable.
+        properties_: Additional properties or configuration for the annotation.
+        existing_version: Fail the ingestion request if the node's version is greater than or equal to this value.
+            If no existingVersion is specified, the ingestion will always overwrite any existing data for the node
+            (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert,
+            so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion
+            request, then the item will be skipped instead of failing the ingestion request.
+        type: Direct relation pointing to the type node.
+    """
+
+    def __init__(
+        self,
+        space: str,
+        external_id: str,
+        *,
+        id_: str,
+        annotation_type: str,
+        container_id: str | None = None,
+        is_selectable: bool | None = None,
+        is_draggable: bool | None = None,
+        is_resizable: bool | None = None,
+        properties_: dict | None = None,
+        existing_version: int | None = None,
+        type: DirectRelationReference | tuple[str, str] | None = None,
+    ) -> None:
+        TypedNodeApply.__init__(self, space, external_id, existing_version, type)
+        self.id_ = id_
+        self.annotation_type = annotation_type
+        self.container_id = container_id
+        self.is_selectable = is_selectable
+        self.is_draggable = is_draggable
+        self.is_resizable = is_resizable
+        self.properties_ = properties_
+
+
+class CanvasAnnotation(_CanvasAnnotationProperties, TypedNode):
+    """This represents the reading format of canvas annotation.
+
+    It is used to when data is read from CDF.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the canvas annotation.
+        version (int): DMS version.
+        last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970,
+            Coordinated Universal Time (UTC), minus leap seconds.
+        created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970,
+            Coordinated Universal Time (UTC), minus leap seconds.
+        id_: The unique identifier of the canvas annotation.
+        annotation_type: The type of the annotation. Must be one of rectangle, ellipse, polyline, text or sticky.
+        container_id: The optional ID of the container that the annotation is contained in.
+        is_selectable: Boolean that indicates whether the annotation is selectable.
+        is_draggable: Boolean that indicates whether the annotation is draggable.
+        is_resizable: Boolean that indicates whether the annotation is resizable.
+        properties_: Additional properties or configuration for the annotation.
+        type: Direct relation pointing to the type node.
+        deleted_time: The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time
+            (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances
+            are filtered out of query results, but present in sync results
+    """
+
+    def __init__(
+        self,
+        space: str,
+        external_id: str,
+        version: int,
+        last_updated_time: int,
+        created_time: int,
+        *,
+        id_: str,
+        annotation_type: str,
+        container_id: str | None = None,
+        is_selectable: bool | None = None,
+        is_draggable: bool | None = None,
+        is_resizable: bool | None = None,
+        properties_: dict | None = None,
+        type: DirectRelationReference | None = None,
+        deleted_time: int | None = None,
+    ) -> None:
+        TypedNode.__init__(self, space, external_id, version, last_updated_time, created_time, deleted_time, type)
+        self.id_ = id_
+        self.annotation_type = annotation_type
+        self.container_id = container_id
+        self.is_selectable = is_selectable
+        self.is_draggable = is_draggable
+        self.is_resizable = is_resizable
+        self.properties_ = properties_
+
+    def as_write(self) -> CanvasAnnotationApply:
+        return CanvasAnnotationApply(
+            self.space,
+            self.external_id,
+            id_=self.id_,
+            annotation_type=self.annotation_type,
+            container_id=self.container_id,
+            is_selectable=self.is_selectable,
+            is_draggable=self.is_draggable,
+            is_resizable=self.is_resizable,
+            properties_=self.properties_,
+            existing_version=self.version,
+            type=self.type,
+        )
+
+
+class _CogniteSolutionTagProperties:
+    @classmethod
+    def get_source(cls) -> ViewId:
+        return ViewId("cdf_apps_shared", "CogniteSolutionTag", "v1")
+
+
+class CogniteSolutionTagApply(_CogniteSolutionTagProperties, TypedNodeApply):
+    """This represents the writing format of Cognite solution tag.
+
+    It is used to when data is written to CDF.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the Cognite solution tag.
+        name: Name of the solution tag/label
+        description: Description of the solution tag/label
+        color: Color of the solution tag/label
+        existing_version: Fail the ingestion request if the node's version is greater than or equal to this value.
+            If no existingVersion is specified, the ingestion will always overwrite any existing data for the node
+            (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert,
+            so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion
+            request, then the item will be skipped instead of failing the ingestion request.
+        type: Direct relation pointing to the type node.
+    """
+
+    def __init__(
+        self,
+        space: str,
+        external_id: str,
+        *,
+        name: str,
+        description: str | None = None,
+        color: str | None = None,
+        existing_version: int | None = None,
+        type: DirectRelationReference | tuple[str, str] | None = None,
+    ) -> None:
+        TypedNodeApply.__init__(self, space, external_id, existing_version, type)
+        self.name = name
+        self.description = description
+        self.color = color
+
+
+class CogniteSolutionTag(_CogniteSolutionTagProperties, TypedNode):
+    """This represents the reading format of Cognite solution tag.
+
+    It is used to when data is read from CDF.
+
+    Args:
+        space: The space where the node is located.
+        external_id: The external id of the Cognite solution tag.
+        version (int): DMS version.
+        last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970,
+            Coordinated Universal Time (UTC), minus leap seconds.
+        created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970,
+            Coordinated Universal Time (UTC), minus leap seconds.
+        name: Name of the solution tag/label
+        description: Description of the solution tag/label
+        color: Color of the solution tag/label
+        type: Direct relation pointing to the type node.
+        deleted_time: The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time
+            (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances
+            are filtered out of query results, but present in sync results
+    """
+
+    def __init__(
+        self,
+        space: str,
+        external_id: str,
+        version: int,
+        last_updated_time: int,
+        created_time: int,
+        *,
+        name: str,
+        description: str | None = None,
+        color: str | None = None,
+        type: DirectRelationReference | None = None,
+        deleted_time: int | None = None,
+    ) -> None:
+        TypedNode.__init__(self, space, external_id, version, last_updated_time, created_time, deleted_time, type)
+        self.name = name
+        self.description = description
+        self.color = color
+
+    def as_write(self) -> CogniteSolutionTagApply:
+        return CogniteSolutionTagApply(
+            self.space,
+            self.external_id,
+            name=self.name,
+            description=self.description,
+            color=self.color,
+            existing_version=self.version,
+            type=self.type,
+        )

cognite_toolkit/_cdf_tk/commands/__init__.py
CHANGED
@@ -1,6 +1,6 @@
 from ._migrate import MigrateTimeseriesCommand, MigrationPrepareCommand
 from ._populate import PopulateCommand
-from ._profile import
+from ._profile import ProfileAssetCentricCommand
 from ._purge import PurgeCommand
 from .auth import AuthCommand
 from .build_cmd import BuildCommand
@@ -30,7 +30,7 @@ __all__ = [
     "MigrationPrepareCommand",
     "ModulesCommand",
     "PopulateCommand",
-    "
+    "ProfileAssetCentricCommand",
     "PullCommand",
     "PurgeCommand",
     "RepoCommand",

cognite_toolkit/_cdf_tk/commands/_profile.py
CHANGED
@@ -1,7 +1,12 @@
-from
+from abc import ABC, abstractmethod
+from collections.abc import Callable, Mapping
 from concurrent.futures import ThreadPoolExecutor, as_completed
+from functools import cached_property
+from typing import ClassVar, Literal, TypeAlias, overload
 
 from cognite.client.exceptions import CogniteException
+from rich import box
+from rich.console import Console
 from rich.live import Live
 from rich.spinner import Spinner
 from rich.table import Table
@@ -23,108 +28,212 @@ from cognite_toolkit._cdf_tk.utils.aggregators import (
 from ._base import ToolkitCommand
 
 
-class
-
-
-        Count = "Count"
-        MetadataKeyCount = "Metadata Key Count"
-        LabelCount = "Label Count"
-        Transformation = "Transformations"
+class WaitingAPICallClass:
+    def __bool__(self) -> bool:
+        return False
 
-    columns = (
-        Columns.Resource,
-        Columns.Count,
-        Columns.MetadataKeyCount,
-        Columns.LabelCount,
-        Columns.Transformation,
-    )
-    spinner_speed = 1.0
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+WaitingAPICall = WaitingAPICallClass()
+
+PendingCellValue: TypeAlias = int | float | str | bool | None | WaitingAPICallClass
+CellValue: TypeAlias = int | float | str | bool | None
+PendingTable: TypeAlias = dict[tuple[str, str], PendingCellValue]
+
+
+class ProfileCommand(ToolkitCommand, ABC):
+    def __init__(self, print_warning: bool = True, skip_tracking: bool = False, silent: bool = False) -> None:
+        super().__init__(print_warning, skip_tracking, silent)
+        self.table_title = self.__class__.__name__.removesuffix("Command")
+
+    class Columns: # Placeholder for columns, subclasses should define their own Columns class
+        ...
+
+    spinner_args: ClassVar[Mapping] = dict(name="arc", text="loading...", style="bold green", speed=1.0)
+
+    max_workers = 8
+    is_dynamic_table = False
+
+    @cached_property
+    def columns(self) -> tuple[str, ...]:
+        return (
+            tuple([attr for attr in self.Columns.__dict__.keys() if not attr.startswith("_")])
+            if hasattr(self, "Columns")
+            else tuple()
+        )
+
+    def create_profile_table(self, client: ToolkitClient) -> list[dict[str, CellValue]]:
+        console = Console()
+        with console.status("Setting up", spinner="aesthetic", speed=0.4) as _:
+            table = self.create_initial_table(client)
+        with (
+            Live(self.draw_table(table), refresh_per_second=4, console=console) as live,
+            ThreadPoolExecutor(max_workers=self.max_workers) as executor,
+        ):
+            while True:
+                current_calls = {
+                    executor.submit(self.call_api(row, col, client)): (row, col)
+                    for (row, col), cell in table.items()
+                    if cell is WaitingAPICall
                 }
-
-
-
-
-
+                if not current_calls:
+                    break
+                for future in as_completed(current_calls):
+                    row, col = current_calls[future]
+                    try:
+                        result = future.result()
+                    except CogniteException as e:
+                        result = type(e).__name__
+                    table[(row, col)] = self.format_result(result, row, col)
+                    if self.is_dynamic_table:
+                        table = self.update_table(table, result, row, col)
+                    live.update(self.draw_table(table))
+        return self.as_record_format(table, allow_waiting_api_call=False)
 
-    @
-    def
-
-
-
-
-
-        row: dict[str, str | Spinner] = {
-            cls.Columns.Resource: aggregator.display_name,
-            cls.Columns.Count: Spinner("arc", text="loading...", style="bold green", speed=cls.spinner_speed),
-        }
-        api_calls[(index, cls.Columns.Count)] = cls._call_api(aggregator.count)
-        count: str | Spinner = "-"
-        if isinstance(aggregator, MetadataAggregator):
-            count = Spinner("arc", text="loading...", style="bold green", speed=cls.spinner_speed)
-            api_calls[(index, cls.Columns.MetadataKeyCount)] = cls._call_api(aggregator.metadata_key_count)
-        row[cls.Columns.MetadataKeyCount] = count
+    @abstractmethod
+    def create_initial_table(self, client: ToolkitClient) -> PendingTable:
+        """
+        Create the initial table with placeholders for API calls.
+        Each cell that requires an API call should be initialized with WaitingAPICall.
+        """
+        raise NotImplementedError("Subclasses must implement create_initial_table.")
 
-
-
-
-            api_calls[(index, cls.Columns.LabelCount)] = cls._call_api(aggregator.label_count)
-        row[cls.Columns.LabelCount] = count
+    @abstractmethod
+    def call_api(self, row: str, col: str, client: ToolkitClient) -> Callable:
+        raise NotImplementedError("Subclasses must implement call_api.")
 
-
-
-
-
+    def format_result(self, result: object, row: str, col: str) -> CellValue:
+        """
+        Format the result of an API call for display in the table.
+        This can be overridden by subclasses to customize formatting.
+        """
+        if isinstance(result, int | float | bool | str):
+            return result
+        raise NotImplementedError("Subclasses must implement format_result.")
 
-
-
+    def update_table(
+        self,
+        current_table: PendingTable,
+        result: object,
+        row: str,
+        col: str,
+    ) -> PendingTable:
+        raise NotImplementedError("Subclasses must implement update_table.")
 
-
-
-
-            title="Asset Centric Profile",
+    def draw_table(self, table: PendingTable) -> Table:
+        rich_table = Table(
+            title=self.table_title,
             title_justify="left",
             show_header=True,
             header_style="bold magenta",
+            box=box.MINIMAL,
         )
-        for col in
-
+        for col in self.columns:
+            rich_table.add_column(col)
+
+        rows = self.as_record_format(table)
 
         for row in rows:
-
-            return
+            rich_table.add_row(*[self._as_cell(value) for value in row.values()])
+        return rich_table
+
+    @classmethod
+    @overload
+    def as_record_format(
+        cls, table: PendingTable, allow_waiting_api_call: Literal[True] = True
+    ) -> list[dict[str, PendingCellValue]]: ...
+
+    @classmethod
+    @overload
+    def as_record_format(
+        cls,
+        table: PendingTable,
+        allow_waiting_api_call: Literal[False],
+    ) -> list[dict[str, CellValue]]: ...
 
-    @
-    def
-
-
-
-
-
+    @classmethod
+    def as_record_format(
+        cls,
+        table: PendingTable,
+        allow_waiting_api_call: bool = True,
+    ) -> list[dict[str, PendingCellValue]] | list[dict[str, CellValue]]:
+        rows: list[dict[str, PendingCellValue]] = []
+        row_indices: dict[str, int] = {}
+        for (row, col), value in table.items():
+            if value is WaitingAPICall and not allow_waiting_api_call:
+                value = None
+            if row not in row_indices:
+                row_indices[row] = len(rows)
+                rows.append({col: value})
             else:
-
+                rows[row_indices[row]][col] = value
+        return rows
+
+    def _as_cell(self, value: PendingCellValue) -> str | Spinner:
+        if isinstance(value, WaitingAPICallClass):
+            return Spinner(**self.spinner_args)
+        elif isinstance(value, int):
+            return f"{value:,}"
+        elif isinstance(value, float):
+            return f"{value:.2f}"
+        elif value is None:
+            return "-"
+        return str(value)
+
+
+class ProfileAssetCentricCommand(ProfileCommand):
+    def __init__(self, print_warning: bool = True, skip_tracking: bool = False, silent: bool = False) -> None:
+        super().__init__(print_warning, skip_tracking, silent)
+        self.table_title = "Asset Centric Profile"
+        self.aggregators: dict[str, AssetCentricAggregator] = {}
+
+    class Columns:
+        Resource = "Resource"
+        Count = "Count"
+        MetadataKeyCount = "Metadata Key Count"
+        LabelCount = "Label Count"
+        Transformation = "Transformations"
+
+    def asset_centric(self, client: ToolkitClient, verbose: bool = False) -> list[dict[str, CellValue]]:
+        self.aggregators.update(
+            {
+                agg.display_name: agg
+                for agg in [
+                    AssetAggregator(client),
+                    EventAggregator(client),
+                    FileAggregator(client),
+                    TimeSeriesAggregator(client),
+                    SequenceAggregator(client),
+                    RelationshipAggregator(client),
+                    LabelCountAggregator(client),
+                ]
+            }
+        )
+        return self.create_profile_table(client)
+
+    def create_initial_table(self, client: ToolkitClient) -> PendingTable:
+        table: dict[tuple[str, str], str | int | float | bool | None | WaitingAPICallClass] = {}
+        for index, aggregator in self.aggregators.items():
+            table[(index, self.Columns.Resource)] = aggregator.display_name
+            table[(index, self.Columns.Count)] = WaitingAPICall
+            if isinstance(aggregator, MetadataAggregator):
+                table[(index, self.Columns.MetadataKeyCount)] = WaitingAPICall
+            else:
+                table[(index, self.Columns.MetadataKeyCount)] = None
+            if isinstance(aggregator, LabelAggregator):
+                table[(index, self.Columns.LabelCount)] = WaitingAPICall
+            else:
+                table[(index, self.Columns.LabelCount)] = None
+            table[(index, self.Columns.Transformation)] = WaitingAPICall
+        return table
 
-
+    def call_api(self, row: str, col: str, client: ToolkitClient) -> Callable:
+        aggregator = self.aggregators[row]
+        if col == self.Columns.Count:
+            return aggregator.count
+        elif col == self.Columns.MetadataKeyCount and isinstance(aggregator, MetadataAggregator):
+            return aggregator.metadata_key_count
+        elif col == self.Columns.LabelCount and isinstance(aggregator, LabelAggregator):
+            return aggregator.label_count
+        elif col == self.Columns.Transformation:
+            return aggregator.transformation_count
+        raise ValueError(f"Unknown column: {col} for row: {row}")

cognite_toolkit/_cdf_tk/commands/modules.py
CHANGED
@@ -1,12 +1,17 @@
 from __future__ import annotations
 
 import shutil
+import tempfile
+import zipfile
 from collections import Counter
+from hashlib import sha256
 from importlib import resources
 from pathlib import Path
+from types import TracebackType
 from typing import Any, Literal, Optional
 
 import questionary
+import requests
 import typer
 from packaging.version import Version
 from packaging.version import parse as parse_version
@@ -14,7 +19,7 @@ from rich import print
 from rich.markdown import Markdown
 from rich.padding import Padding
 from rich.panel import Panel
-from rich.progress import track
+from rich.progress import Progress, track
 from rich.rule import Rule
 from rich.table import Table
 from rich.tree import Tree
@@ -47,7 +52,8 @@ from cognite_toolkit._cdf_tk.data_classes import (
     Package,
     Packages,
 )
-from cognite_toolkit._cdf_tk.exceptions import ToolkitRequiredValueError, ToolkitValueError
+from cognite_toolkit._cdf_tk.exceptions import ToolkitError, ToolkitRequiredValueError, ToolkitValueError
+from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.hints import verify_module_directory
 from cognite_toolkit._cdf_tk.tk_warnings import MediumSeverityWarning
 from cognite_toolkit._cdf_tk.utils import humanize_collection, read_yaml_file
@@ -85,6 +91,28 @@ class ModulesCommand(ToolkitCommand):
     def __init__(self, print_warning: bool = True, skip_tracking: bool = False, silent: bool = False):
         super().__init__(print_warning, skip_tracking, silent)
         self._builtin_modules_path = Path(resources.files(cognite_toolkit.__name__)) / BUILTIN_MODULES  # type: ignore [arg-type]
+        self._temp_download_dir = Path(tempfile.gettempdir()) / "library_downloads"
+        if not self._temp_download_dir.exists():
+            self._temp_download_dir.mkdir(parents=True, exist_ok=True)
+
+    def __enter__(self) -> ModulesCommand:
+        """
+        Context manager to ensure the temporary download directory is cleaned up after use. It requires the command to be used in a `with` block.
+        """
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,  # Type of the exception
+        exc_value: BaseException | None,  # Exception instance
+        traceback: TracebackType | None,  # Traceback object
+    ) -> None:
+        """
+        Clean up the temporary download directory.
+        """
+
+        if self._temp_download_dir.exists():
+            safe_rmtree(self._temp_download_dir)
 
     @classmethod
     def _create_tree(cls, item: Packages) -> Tree:
@@ -128,6 +156,7 @@ class ModulesCommand(ToolkitCommand):
         downloader_by_repo: dict[str, FileDownloader] = {}
 
         extra_resources: set[Path] = set()
+
        for package_name, package in selected_packages.items():
             print(f"{INDENT}[{'yellow' if mode == 'clean' else 'green'}]Creating {package_name}[/]")
 
@@ -280,7 +309,8 @@ default_organization_dir = "{organization_dir.name}"''',
         organization_dir = Path(organization_dir_raw.strip())
 
         modules_root_dir = organization_dir / MODULES
-
+
+        packages = self._get_available_packages()
 
         if select_all:
             print(Panel("Instantiating all available modules"))
@@ -680,9 +710,102 @@ default_organization_dir = "{organization_dir.name}"''',
         build_env = default.environment.validation_type
 
         existing_module_names = [module.name for module in ModuleResources(organization_dir, build_env).list()]
-        available_packages =
-
+        available_packages = self._get_available_packages()
         added_packages = self._select_packages(available_packages, existing_module_names)
 
         download_data = self._get_download_data(added_packages)
         self._create(organization_dir, added_packages, environments, "update", download_data)
+
+    def _get_available_packages(self) -> Packages:
+        """
+        Returns a list of available packages, either from the CDF TOML file or from external libraries if the feature flag is enabled.
+        If the feature flag is not enabled and no libraries are specified, it returns the built-in modules.
+        """
+
+        cdf_toml = CDFToml.load()
+        if Flags.EXTERNAL_LIBRARIES.is_enabled() and cdf_toml.libraries:
+            for library_name, library in cdf_toml.libraries.items():
+                try:
+                    print(f"[green]Adding library {library_name}[/]")
+                    file_path = self._temp_download_dir / f"{library_name}.zip"
+                    self._download(library.url, file_path)
+                    self._validate_checksum(library.checksum, file_path)
+                    self._unpack(file_path)
+                    return Packages().load(file_path.parent)
+                except Exception as e:
+                    if isinstance(e, ToolkitError):
+                        raise e
+                    else:
+                        raise ToolkitError(
+                            f"An unexpected error occurred during downloading {library.url} to {file_path}: {e}"
+                        ) from e
+
+                    raise ToolkitError(f"Failed to add library {library_name}, {e}")
+            # If no libraries are specified or the flag is not enabled, load the built-in modules
+            raise ValueError("No valid libraries found.")
+        else:
+            return Packages.load(self._builtin_modules_path)
+
+    def _download(self, url: str, file_path: Path) -> None:
+        """
+        Downloads a file from a URL to the specified output path.
+        If the file already exists, it skips the download.
+        """
+        try:
+            response = requests.get(url, stream=True)
+            response.raise_for_status()  # Raise an exception for HTTP errors
+
+            total_size = int(response.headers.get("content-length", 0))
+
+            with Progress() as progress:
+                task = progress.add_task("Download", total=total_size)
+                with open(file_path, "wb") as f:
+                    for chunk in response.iter_content(chunk_size=8192):
+                        f.write(chunk)
+                        progress.update(task, advance=len(chunk))
+
+        except requests.exceptions.RequestException as e:
+            raise ToolkitError(f"Error downloading file from {url}: {e}") from e
+
+    def _validate_checksum(self, checksum: str, file_path: Path) -> None:
+        """
+        Compares the checksum of the downloaded file with the expected checksum.
+        """
+
+        if checksum.lower().startswith("sha256:"):
+            checksum = checksum[7:]
+        else:
+            raise ToolkitValueError(f"Unsupported checksum format: {checksum}. Expected 'sha256:' prefix")
+
+        chunk_size: int = 8192
+        sha256_hash = sha256()
+        try:
+            with open(file_path, "rb") as f:
+                # Read the file in chunks to handle large files efficiently
+                for chunk in iter(lambda: f.read(chunk_size), b""):
+                    sha256_hash.update(chunk)
+            calculated = sha256_hash.hexdigest()
+            if calculated != checksum:
+                raise ToolkitError(f"Checksum mismatch. Expected {checksum}, got {calculated}.")
+            else:
+                print("Checksum verified")
+        except Exception as e:
+            raise ToolkitError(f"Failed to calculate checksum for {file_path}: {e}") from e
+
+    def _unpack(self, file_path: Path) -> None:
+        """
+        Unzips the downloaded file to the specified output path.
+        If the file is not a zip file, it raises an error.
+        """
+        total_size = file_path.stat().st_size if file_path.exists() else 0
+
+        try:
+            with Progress() as progress:
+                unzip = progress.add_task("Unzipping", total=total_size)
+                with zipfile.ZipFile(file_path, "r") as zip_ref:
+                    zip_ref.extractall(file_path.parent)
+                    progress.update(unzip, advance=total_size)
+        except zipfile.BadZipFile as e:
+            raise ToolkitError(f"Error unpacking zip file {file_path}: {e}") from e
+        except Exception as e:
+            raise ToolkitError(f"An unexpected error occurred while unpacking {file_path}: {e}") from e
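
The checksum helper above strips a mandatory "sha256:" prefix and compares against a hex digest computed over 8 KiB chunks. If you host your own library zip, the matching value for the cdf.toml entry can be produced with the standard library alone, as in this sketch (the helper name and the example path are ours, not part of the toolkit):

import hashlib
from pathlib import Path


def library_checksum(zip_path: Path, chunk_size: int = 8192) -> str:
    """Return a checksum string in the 'sha256:<hex>' form that _validate_checksum expects."""
    digest = hashlib.sha256()
    with zip_path.open("rb") as f:
        # Read in chunks so large archives do not need to fit in memory.
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return f"sha256:{digest.hexdigest()}"


print(library_checksum(Path("weather_models.zip")))  # illustrative file name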

cognite_toolkit/_cdf_tk/data_classes/_packages.py
CHANGED
@@ -66,8 +66,8 @@ class Packages(dict, MutableMapping[str, Package]):
             root_module_dir: The module directories to load the packages from.
         """
 
-        package_definition_path = root_module_dir
-        if not package_definition_path.exists():
+        package_definition_path = next(root_module_dir.rglob("packages.toml"), None)
+        if not package_definition_path or not package_definition_path.exists():
             raise ToolkitFileNotFoundError(f"Package manifest toml not found at {package_definition_path}")
         package_definitions = toml.loads(package_definition_path.read_text(encoding="utf-8"))["packages"]
 

cognite_toolkit/_cdf_tk/feature_flags.py
CHANGED
@@ -52,6 +52,10 @@ class Flags(Enum):
         "visible": True,
         "description": "Enables the migrate command",
     }
+    EXTERNAL_LIBRARIES: ClassVar[dict[str, Any]] = {  # type: ignore[misc]
+        "visible": True,
+        "description": "Enables the support for external libraries in the config file",
+    }
 
     def is_enabled(self) -> bool:
         return FeatureFlag.is_enabled(self)
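
Together with the modules.py change above, this flag is what switches _get_available_packages from the built-in modules to a downloaded library. A rough sketch of that decision, assuming the flag is toggled through the usual alpha-flag mechanism in cdf.toml:

from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
from cognite_toolkit._cdf_tk.feature_flags import Flags

cdf_toml = CDFToml.load()

if Flags.EXTERNAL_LIBRARIES.is_enabled() and cdf_toml.libraries:
    # External path: each [library.<name>] entry is downloaded, checksum-verified, and unpacked.
    source = "external libraries"
else:
    # Default path: the packages.toml shipped inside cognite_toolkit/_builtin_modules.
    source = "built-in modules"

print(f"cdf modules init/add would resolve packages from: {source}")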

cognite_toolkit/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.5.61"
+__version__ = "0.5.62"

{cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.5.61
+Version: 0.5.62
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
 Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
@@ -52,7 +52,7 @@ It supports three different modes of operation:
 1. As an **interactive command-line tool** used alongside the Cognite Data Fusion web application to retrieve and
    push configuration of the different Cognite Data Fusion services like data sets, data models, transformations,
    and more. This mode also supports configuration of new Cognite Data Fusion projects to quickly get started.
-2. As tool to support the **project life-
+2. As tool to support the **project life-cycle by scripting and automating** configuration and management of Cognite Data
    Fusion projects where CDF configurations are kept as yaml-files that can be checked into version
    control. This mode also supports DevOps workflows with development, staging, and production projects.
 3. As a **tool to deploy official Cognite project templates** to your Cognite Data Fusion project. The tool comes

{cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/RECORD
CHANGED
@@ -1,10 +1,11 @@
 cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf.py,sha256=WWMslI-y2VbIYDMH19wnINebGwlOvAeYr-qkPRC1f68,5834
-cognite_toolkit/_version.py,sha256=
+cognite_toolkit/_version.py,sha256=fTpVYKhDGFXtOWGEUIjMCKrmXus7JYNBnA_m_Tqs5cw,23
+cognite_toolkit/config.dev.yaml,sha256=CIDmi1OGNOJ-70h2BNCozZRmhvU5BfpZoh6Q04b8iMs,109
 cognite_toolkit/_builtin_modules/README.md,sha256=roU3G05E6ogP5yhw4hdIvVDKV831zCh2pzt9BVddtBg,307
 cognite_toolkit/_builtin_modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cognite_toolkit/_builtin_modules/cdf.toml,sha256=
-cognite_toolkit/_builtin_modules/
+cognite_toolkit/_builtin_modules/cdf.toml,sha256=xO_fCIiJ0cPWc1BOJhKuQcAh1HOKcsYdSUVM-ktp4oE,273
+cognite_toolkit/_builtin_modules/packages.toml,sha256=RdY44Sxvh6sUtAkgp1dHID1mtqkOTzP_rbZL2Q27fYw,1147
 cognite_toolkit/_builtin_modules/bootcamp/README.md,sha256=iTVqoy3PLpC-xPi5pbuMIAEHILBSfWTGLexwa1AltpY,211
 cognite_toolkit/_builtin_modules/bootcamp/default.config.yaml,sha256=MqYTcRiz03bow4LT8E3jumnd_BsqC5SvjgYOVVkHGE0,93
 cognite_toolkit/_builtin_modules/bootcamp/module.toml,sha256=kdB-p9fQopXdkfnRJBsu9DCaKIfiIM4Y7-G8rtBqHWM,97
@@ -479,10 +480,10 @@ cognite_toolkit/_builtin_modules/sourcesystem/cdf_sharepoint/workflows/populatio
 cognite_toolkit/_builtin_modules/sourcesystem/cdf_sharepoint/workflows/trigger.WorkflowTrigger.yaml,sha256=9cfJenjYYm1O2IEY0P1jHohUgif6QHjbjp7UDoM2pCQ,253
 cognite_toolkit/_builtin_modules/sourcesystem/cdf_sharepoint/workflows/v1.WorkflowVersion.yaml,sha256=YgGGZKlEro-ViFtOCU9RYVqnjflJjkCU_nBgmCE0SQk,322
 cognite_toolkit/_cdf_tk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cognite_toolkit/_cdf_tk/cdf_toml.py,sha256=
+cognite_toolkit/_cdf_tk/cdf_toml.py,sha256=e76CvC-KM0tOuDz181-vYciAeAm0h2I0-efkyj8yrtg,7955
 cognite_toolkit/_cdf_tk/constants.py,sha256=c722pX6JYm0LD69T-lF8eT6MarExBKgE2eiiij1H0_U,5750
 cognite_toolkit/_cdf_tk/exceptions.py,sha256=HJZ6kpQ0mJcXsueOyRLCesONiEx_G4QA-EkRuEwXvqo,5118
-cognite_toolkit/_cdf_tk/feature_flags.py,sha256=
+cognite_toolkit/_cdf_tk/feature_flags.py,sha256=dUtVp9OLpTXY9pk09_HMARUau81uio4_UDVfLC0JbEY,2743
 cognite_toolkit/_cdf_tk/hints.py,sha256=M832_PM_GcbkmOHtMGtujLW0qHfdmAr0fE66Zqp5YLI,6510
 cognite_toolkit/_cdf_tk/plugins.py,sha256=XpSVJnePU8Yd6r7_xAU7nj3p6dxVo83YE8kfsEbLUZ4,819
 cognite_toolkit/_cdf_tk/tracker.py,sha256=ZafqhkCe-CcWuj9fOa3i558ydFRDBpjnKVb1IvtHuAY,6306
@@ -499,9 +500,9 @@ cognite_toolkit/_cdf_tk/apps/_core_app.py,sha256=-4ABeNtC0cxw7XvCRouPzTvlmqsS0NR
 cognite_toolkit/_cdf_tk/apps/_dump_app.py,sha256=UXmB8oFwVLOmxJBlxxLIBMLPCLwdgyaFfuG6Ex-GZh4,25608
 cognite_toolkit/_cdf_tk/apps/_landing_app.py,sha256=v4t2ryxzFre7y9IkEPIDwmyJDO8VDIIv6hIcft5TjpQ,422
 cognite_toolkit/_cdf_tk/apps/_migrate_app.py,sha256=GRsOlqYAWB0rsZsdTJTGfjPm1OkbUq7xBrM4pzQRKoY,3708
-cognite_toolkit/_cdf_tk/apps/_modules_app.py,sha256=
+cognite_toolkit/_cdf_tk/apps/_modules_app.py,sha256=tjCP-QbuPYd7iw6dkxnhrrWf514Lr25_oVgSJyJcaL8,6642
 cognite_toolkit/_cdf_tk/apps/_populate_app.py,sha256=PGUqK_USOqdPCDvUJI-4ne9TN6EssC33pUbEeCmiLPg,2805
-cognite_toolkit/_cdf_tk/apps/_profile_app.py,sha256=
+cognite_toolkit/_cdf_tk/apps/_profile_app.py,sha256=kceXE60pZz57cRr73DBbOUxShJ3R1tccUQ2L18zPJJw,1317
 cognite_toolkit/_cdf_tk/apps/_purge.py,sha256=RxlUx2vzOuxETBszARUazK8azDpZsf-Y_HHuG9PBVd4,4089
 cognite_toolkit/_cdf_tk/apps/_repo_app.py,sha256=jOf_s7oUWJqnRyz89JFiSzT2l8GlyQ7wqidHUQavGo0,1455
 cognite_toolkit/_cdf_tk/apps/_run.py,sha256=vAuPzYBYfAAFJ_0myn5AxFXG3BJWq8A0HKrhMZ7PaHI,8539
@@ -544,6 +545,7 @@ cognite_toolkit/_cdf_tk/client/data_classes/__init__.py,sha256=47DEQpj8HBSa-_TIm
 cognite_toolkit/_cdf_tk/client/data_classes/agent_tools.py,sha256=n26oUmKvRz4HwCJrVDXKsb_mepBr8VFah39TLWLXXAE,2609
 cognite_toolkit/_cdf_tk/client/data_classes/agents.py,sha256=7kNrUV2d95iyIpDg0ty6SegHLNrecZogLw9ew6rYU30,4866
 cognite_toolkit/_cdf_tk/client/data_classes/apm_config_v1.py,sha256=0bPq7R0qvdf8SMFS06kX7TXHIClDcJNHwdTBweQB-GU,20150
+cognite_toolkit/_cdf_tk/client/data_classes/canvas.py,sha256=yfckaS0JJQNF2PEL1VFLAiqdlLmy-hw1GTfH7UFsb9U,17463
 cognite_toolkit/_cdf_tk/client/data_classes/extendable_cognite_file.py,sha256=jFusjXtg769RMEMqQkqbkwn6nJN6tfjQqidEn3bj_yA,9722
 cognite_toolkit/_cdf_tk/client/data_classes/extended_timeseries.py,sha256=yAvJCHePO_JPhvx5UTQ_qUdCXC5t_aSQY6YxuMnkGIQ,5378
 cognite_toolkit/_cdf_tk/client/data_classes/functions.py,sha256=wF1IUDoDyhASfeeglJ-u52--SlthCp4hXK69TNCh_Nc,414
@@ -559,12 +561,12 @@ cognite_toolkit/_cdf_tk/client/data_classes/streamlit_.py,sha256=OGoMQ_K88F9vSZu
 cognite_toolkit/_cdf_tk/client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf_tk/client/utils/_concurrency.py,sha256=z6gqFv-kw80DsEpbaR7sI0-_WvZdOdAsR4VoFvTqvyU,1309
 cognite_toolkit/_cdf_tk/client/utils/_http_client.py,sha256=oXNKrIaizG4WiSAhL_kSCHAuL4aaaEhCU4pOJGxh6Xs,483
-cognite_toolkit/_cdf_tk/commands/__init__.py,sha256
+cognite_toolkit/_cdf_tk/commands/__init__.py,sha256=-9OGZp44naD2ACg4UjBQjc5wqlppll-BgeMd6scu6CQ,1195
 cognite_toolkit/_cdf_tk/commands/_base.py,sha256=3Zc3ffR8mjZ1eV7WrC-Y1sYmyMzdbbJDDmsiKEMEJwo,2480
 cognite_toolkit/_cdf_tk/commands/_changes.py,sha256=3bR_C8p02IW6apexwAAoXuneBM4RcUGdX6Hw_Rtx7Kg,24775
 cognite_toolkit/_cdf_tk/commands/_cli_commands.py,sha256=6nezoDrw3AkF8hANHjUILgTj_nbdzgT0siweaKI35Fk,1047
 cognite_toolkit/_cdf_tk/commands/_populate.py,sha256=59VXEFRc4521xhTmCuQnjgWNYE3z4TUkUq8YbFREDGc,12280
-cognite_toolkit/_cdf_tk/commands/_profile.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_profile.py,sha256=gZ1Ic0LBudnxH7QgQbK_rB9aL991po8VIAdbvUIiNHI,9182
 cognite_toolkit/_cdf_tk/commands/_purge.py,sha256=bE2ytMMlMuZc5xGyktKayvZ25x0kdzoKspjwgfab1Qs,26483
 cognite_toolkit/_cdf_tk/commands/_utils.py,sha256=_IfPBLyfOUc7ubbABiHPpg1MzNGNCxElQ-hmV-vfFDc,1271
 cognite_toolkit/_cdf_tk/commands/_virtual_env.py,sha256=45_aEPZJeyfGmS2Ph_lucaO7ujY7AF5L5N1K3UH3F0o,2216
@@ -577,7 +579,7 @@ cognite_toolkit/_cdf_tk/commands/dump_data.py,sha256=U_e-fEAEphpkJMlDTHQvQ1F0k3q
 cognite_toolkit/_cdf_tk/commands/dump_resource.py,sha256=Dt8jlkmtpRtzPDMEjKdpOJPFr92k7Mw-BWkRsE9CJ8s,20515
 cognite_toolkit/_cdf_tk/commands/featureflag.py,sha256=VPz7FrjVQFqjkz8BYTP2Np3k7BTLFMq_eooNSqmb2ms,1034
 cognite_toolkit/_cdf_tk/commands/init.py,sha256=M9Qs6OVw5mKS4HkWX9DbWwFES1l65J2G90AXqUpyO4c,1744
-cognite_toolkit/_cdf_tk/commands/modules.py,sha256=
+cognite_toolkit/_cdf_tk/commands/modules.py,sha256=lYImbi7eX07j2lbE_8xJ5uix9xa2lL6vBk7IzjGPlhw,35946
 cognite_toolkit/_cdf_tk/commands/pull.py,sha256=t7KQCxpoFDNBWTYPohK7chrRzPyAOGVmfaY7iBLnTqM,39286
 cognite_toolkit/_cdf_tk/commands/repo.py,sha256=vQfLMTzSnI4w6eYCQuMnZ_xXVAVjyLnST4Tmu2zgNfE,3874
 cognite_toolkit/_cdf_tk/commands/run.py,sha256=88AkfCdS4gXHA4I5ZhdU3HWWA5reOTGbfaauM-Yvp8o,37407
@@ -597,7 +599,7 @@ cognite_toolkit/_cdf_tk/data_classes/_deploy_results.py,sha256=BhGvCiuR2LI2DuJae
 cognite_toolkit/_cdf_tk/data_classes/_module_directories.py,sha256=wQ-hM-0onMrFHHFb3c-V9hFuN-FToEFVzCLG7UrzvbQ,11882
 cognite_toolkit/_cdf_tk/data_classes/_module_resources.py,sha256=SsZM3vwCqDbc1ejyFtuX-SOY9K_kLGBfIC7JTlQ7QnM,9160
 cognite_toolkit/_cdf_tk/data_classes/_module_toml.py,sha256=35VFoP_rLMUKhztrePt-uey0SgpVCYgSFuButHkrUq4,2731
-cognite_toolkit/_cdf_tk/data_classes/_packages.py,sha256=
+cognite_toolkit/_cdf_tk/data_classes/_packages.py,sha256=LX17FD_PKEl-QqALQaF3rbVBlnlB_y_lQaFOy8AoWe8,3738
 cognite_toolkit/_cdf_tk/data_classes/_yaml_comments.py,sha256=zfuDu9aAsb1ExeZBAJIqVaoqIZ050tO_oh3dApzlDwY,4937
 cognite_toolkit/_cdf_tk/loaders/__init__.py,sha256=9giALvw48KIry7WWdCUxA1AvlVFCAR0bOJ5tKAhy-Lk,6241
 cognite_toolkit/_cdf_tk/loaders/_base_loaders.py,sha256=sF9D7ImyHmjbLBGVM66D2xSmOj8XnG3LmDqlQQZRarQ,20502
@@ -695,12 +697,12 @@ cognite_toolkit/_repo_files/.gitignore,sha256=3exydcQPCJTldGFJoZy1RPHc1horbAprAo
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=KVBxW8urCRDtVlJ6HN-kYmw0NCpW6c4lD-nlxz9tZsQ,692
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=Cp4KYraeWPjP8SnnEIbJoJnjmrRUwc982DPjOOzy2iM,722
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=aqocHZ1qaXChsxpsKtaDraQxQStn6YQCVV0UOIyCaP4,667
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=PPtsCAloE4RBUuQTCjl79zVJGYJ_kWdNzm39LGTEHFA,2430
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=63nWYI_MHU5EuPwEX_inEAQxxiD5P6k8IAmlgl4CxpE,8082
-cognite_toolkit-0.5.
-cognite_toolkit-0.5.
-cognite_toolkit-0.5.
-cognite_toolkit-0.5.
-cognite_toolkit-0.5.
+cognite_toolkit-0.5.62.dist-info/METADATA,sha256=fcozRCkXl52ZawHzq8cg2TzsyZllmyoAuH7eA3u5tNM,4410
+cognite_toolkit-0.5.62.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+cognite_toolkit-0.5.62.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
+cognite_toolkit-0.5.62.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
+cognite_toolkit-0.5.62.dist-info/RECORD,,

File without changes: /cognite_toolkit/_builtin_modules/{package.toml → packages.toml}
File without changes: {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/WHEEL
File without changes: {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/entry_points.txt
File without changes: {cognite_toolkit-0.5.61.dist-info → cognite_toolkit-0.5.62.dist-info}/licenses/LICENSE