cognite-neat 0.97.3__py3-none-any.whl → 0.99.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat has been flagged as potentially problematic; review the changes below for details.

Files changed (109):
  1. cognite/neat/_client/__init__.py +4 -0
  2. cognite/neat/_client/_api/data_modeling_loaders.py +512 -0
  3. cognite/neat/_client/_api/schema.py +50 -0
  4. cognite/neat/_client/_api_client.py +17 -0
  5. cognite/neat/_client/data_classes/__init__.py +0 -0
  6. cognite/neat/{_utils/cdf/data_classes.py → _client/data_classes/data_modeling.py} +8 -135
  7. cognite/neat/{_rules/models/dms/_schema.py → _client/data_classes/schema.py} +32 -281
  8. cognite/neat/_graph/_shared.py +14 -15
  9. cognite/neat/_graph/extractors/_classic_cdf/_assets.py +14 -154
  10. cognite/neat/_graph/extractors/_classic_cdf/_base.py +154 -7
  11. cognite/neat/_graph/extractors/_classic_cdf/_classic.py +23 -12
  12. cognite/neat/_graph/extractors/_classic_cdf/_data_sets.py +17 -92
  13. cognite/neat/_graph/extractors/_classic_cdf/_events.py +13 -162
  14. cognite/neat/_graph/extractors/_classic_cdf/_files.py +15 -179
  15. cognite/neat/_graph/extractors/_classic_cdf/_labels.py +32 -100
  16. cognite/neat/_graph/extractors/_classic_cdf/_relationships.py +27 -178
  17. cognite/neat/_graph/extractors/_classic_cdf/_sequences.py +14 -139
  18. cognite/neat/_graph/extractors/_classic_cdf/_timeseries.py +15 -173
  19. cognite/neat/_graph/extractors/_rdf_file.py +6 -7
  20. cognite/neat/_graph/loaders/__init__.py +1 -2
  21. cognite/neat/_graph/queries/_base.py +17 -1
  22. cognite/neat/_graph/transformers/_classic_cdf.py +50 -134
  23. cognite/neat/_graph/transformers/_prune_graph.py +1 -1
  24. cognite/neat/_graph/transformers/_rdfpath.py +1 -1
  25. cognite/neat/_issues/warnings/__init__.py +6 -0
  26. cognite/neat/_issues/warnings/_external.py +8 -0
  27. cognite/neat/_issues/warnings/_models.py +9 -0
  28. cognite/neat/_issues/warnings/_properties.py +16 -0
  29. cognite/neat/_rules/_constants.py +7 -6
  30. cognite/neat/_rules/_shared.py +3 -8
  31. cognite/neat/_rules/analysis/__init__.py +1 -2
  32. cognite/neat/_rules/analysis/_base.py +10 -27
  33. cognite/neat/_rules/analysis/_dms.py +4 -10
  34. cognite/neat/_rules/analysis/_information.py +2 -10
  35. cognite/neat/_rules/catalog/info-rules-imf.xlsx +0 -0
  36. cognite/neat/_rules/exporters/_base.py +3 -4
  37. cognite/neat/_rules/exporters/_rules2dms.py +29 -40
  38. cognite/neat/_rules/exporters/_rules2excel.py +15 -72
  39. cognite/neat/_rules/exporters/_rules2ontology.py +4 -4
  40. cognite/neat/_rules/importers/_base.py +3 -4
  41. cognite/neat/_rules/importers/_dms2rules.py +21 -45
  42. cognite/neat/_rules/importers/_dtdl2rules/dtdl_converter.py +1 -7
  43. cognite/neat/_rules/importers/_dtdl2rules/dtdl_importer.py +7 -10
  44. cognite/neat/_rules/importers/_rdf/_base.py +17 -29
  45. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2classes.py +2 -2
  46. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2metadata.py +5 -10
  47. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2properties.py +1 -2
  48. cognite/neat/_rules/importers/_rdf/_inference2rules.py +55 -51
  49. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2classes.py +2 -2
  50. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2metadata.py +5 -8
  51. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2properties.py +1 -2
  52. cognite/neat/_rules/importers/_rdf/_shared.py +25 -140
  53. cognite/neat/_rules/importers/_spreadsheet2rules.py +10 -41
  54. cognite/neat/_rules/models/__init__.py +3 -17
  55. cognite/neat/_rules/models/_base_rules.py +118 -62
  56. cognite/neat/_rules/models/dms/__init__.py +2 -2
  57. cognite/neat/_rules/models/dms/_exporter.py +20 -178
  58. cognite/neat/_rules/models/dms/_rules.py +65 -128
  59. cognite/neat/_rules/models/dms/_rules_input.py +72 -56
  60. cognite/neat/_rules/models/dms/_validation.py +16 -109
  61. cognite/neat/_rules/models/entities/_single_value.py +32 -4
  62. cognite/neat/_rules/models/information/_rules.py +19 -122
  63. cognite/neat/_rules/models/information/_rules_input.py +32 -41
  64. cognite/neat/_rules/models/information/_validation.py +34 -102
  65. cognite/neat/_rules/models/mapping/__init__.py +2 -3
  66. cognite/neat/_rules/models/mapping/_classic2core.py +36 -146
  67. cognite/neat/_rules/models/mapping/_classic2core.yaml +339 -0
  68. cognite/neat/_rules/transformers/__init__.py +3 -6
  69. cognite/neat/_rules/transformers/_converters.py +128 -206
  70. cognite/neat/_rules/transformers/_mapping.py +105 -34
  71. cognite/neat/_rules/transformers/_verification.py +5 -16
  72. cognite/neat/_session/_base.py +83 -21
  73. cognite/neat/_session/_collector.py +126 -0
  74. cognite/neat/_session/_drop.py +35 -0
  75. cognite/neat/_session/_inspect.py +22 -10
  76. cognite/neat/_session/_mapping.py +39 -0
  77. cognite/neat/_session/_prepare.py +222 -27
  78. cognite/neat/_session/_read.py +109 -19
  79. cognite/neat/_session/_set.py +2 -2
  80. cognite/neat/_session/_show.py +11 -11
  81. cognite/neat/_session/_to.py +27 -14
  82. cognite/neat/_session/exceptions.py +20 -3
  83. cognite/neat/_store/_base.py +27 -24
  84. cognite/neat/_store/_provenance.py +2 -2
  85. cognite/neat/_utils/auxiliary.py +19 -0
  86. cognite/neat/_utils/rdf_.py +28 -1
  87. cognite/neat/_version.py +1 -1
  88. cognite/neat/_workflows/steps/data_contracts.py +2 -10
  89. cognite/neat/_workflows/steps/lib/current/rules_exporter.py +14 -49
  90. cognite/neat/_workflows/steps/lib/current/rules_importer.py +4 -1
  91. cognite/neat/_workflows/steps/lib/current/rules_validator.py +5 -9
  92. {cognite_neat-0.97.3.dist-info → cognite_neat-0.99.0.dist-info}/METADATA +4 -3
  93. {cognite_neat-0.97.3.dist-info → cognite_neat-0.99.0.dist-info}/RECORD +97 -100
  94. cognite/neat/_graph/loaders/_rdf2asset.py +0 -416
  95. cognite/neat/_rules/analysis/_asset.py +0 -173
  96. cognite/neat/_rules/models/asset/__init__.py +0 -13
  97. cognite/neat/_rules/models/asset/_rules.py +0 -109
  98. cognite/neat/_rules/models/asset/_rules_input.py +0 -101
  99. cognite/neat/_rules/models/asset/_validation.py +0 -45
  100. cognite/neat/_rules/models/domain.py +0 -136
  101. cognite/neat/_rules/models/mapping/_base.py +0 -131
  102. cognite/neat/_utils/cdf/loaders/__init__.py +0 -25
  103. cognite/neat/_utils/cdf/loaders/_base.py +0 -54
  104. cognite/neat/_utils/cdf/loaders/_data_modeling.py +0 -339
  105. cognite/neat/_utils/cdf/loaders/_ingestion.py +0 -167
  106. /cognite/neat/{_utils/cdf → _client/_api}/__init__.py +0 -0
  107. {cognite_neat-0.97.3.dist-info → cognite_neat-0.99.0.dist-info}/LICENSE +0 -0
  108. {cognite_neat-0.97.3.dist-info → cognite_neat-0.99.0.dist-info}/WHEEL +0 -0
  109. {cognite_neat-0.97.3.dist-info → cognite_neat-0.99.0.dist-info}/entry_points.txt +0 -0
@@ -1,339 +0,0 @@
1
- from collections.abc import Callable, Sequence
2
- from graphlib import TopologicalSorter
3
- from typing import Any, Literal, cast
4
-
5
- from cognite.client import CogniteClient
6
- from cognite.client.data_classes import filters
7
- from cognite.client.data_classes._base import (
8
- T_CogniteResourceList,
9
- T_WritableCogniteResource,
10
- T_WriteClass,
11
- )
12
- from cognite.client.data_classes.data_modeling import (
13
- Container,
14
- ContainerApply,
15
- ContainerApplyList,
16
- ContainerList,
17
- DataModel,
18
- DataModelApply,
19
- DataModelApplyList,
20
- DataModelList,
21
- RequiresConstraint,
22
- Space,
23
- SpaceApply,
24
- SpaceApplyList,
25
- SpaceList,
26
- View,
27
- ViewApply,
28
- ViewApplyList,
29
- ViewList,
30
- )
31
- from cognite.client.data_classes.data_modeling.ids import (
32
- ContainerId,
33
- DataModelId,
34
- NodeId,
35
- ViewId,
36
- )
37
- from cognite.client.exceptions import CogniteAPIError
38
- from cognite.client.utils.useful_types import SequenceNotStr
39
-
40
- from ._base import T_ID, ResourceLoader, T_WritableCogniteResourceList
41
-
42
-
43
- class DataModelingLoader(
44
- ResourceLoader[T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList]
45
- ):
46
- @classmethod
47
- def in_space(cls, item: T_WriteClass | T_WritableCogniteResource | T_ID, space: set[str]) -> bool:
48
- if hasattr(item, "space"):
49
- return item.space in space
50
- raise ValueError(f"Item {item} does not have a space attribute")
51
-
52
- def sort_by_dependencies(self, items: list[T_WriteClass]) -> list[T_WriteClass]:
53
- return items
54
-
55
- def _create_force(
56
- self,
57
- items: Sequence[T_WriteClass],
58
- tried_force_deploy: set[T_ID],
59
- create_method: Callable[[Sequence[T_WriteClass]], T_WritableCogniteResourceList],
60
- ) -> T_WritableCogniteResourceList:
61
- try:
62
- return create_method(items)
63
- except CogniteAPIError as e:
64
- failed_items = {failed.as_id() for failed in e.failed if hasattr(failed, "as_id")}
65
- to_redeploy = [
66
- item
67
- for item in items
68
- if item.as_id() in failed_items and item.as_id() not in tried_force_deploy # type: ignore[attr-defined]
69
- ]
70
- if not to_redeploy:
71
- # Avoid infinite loop
72
- raise e
73
- ids = [item.as_id() for item in to_redeploy] # type: ignore[attr-defined]
74
- tried_force_deploy.update(ids)
75
- self.delete(ids)
76
- return self._create_force(to_redeploy, tried_force_deploy, create_method)
77
-
78
-
79
- class SpaceLoader(DataModelingLoader[str, SpaceApply, Space, SpaceApplyList, SpaceList]):
80
- resource_name = "spaces"
81
-
82
- @classmethod
83
- def get_id(cls, item: Space | SpaceApply | str | dict) -> str:
84
- if isinstance(item, Space | SpaceApply):
85
- return item.space
86
- if isinstance(item, dict):
87
- return item["space"]
88
- return item
89
-
90
- def create(self, items: Sequence[SpaceApply]) -> SpaceList:
91
- return self.client.data_modeling.spaces.apply(items)
92
-
93
- def retrieve(self, ids: SequenceNotStr[str]) -> SpaceList:
94
- return self.client.data_modeling.spaces.retrieve(ids)
95
-
96
- def update(self, items: Sequence[SpaceApply]) -> SpaceList:
97
- return self.create(items)
98
-
99
- def delete(self, ids: SequenceNotStr[str] | Sequence[Space | SpaceApply]) -> list[str]:
100
- if all(isinstance(item, Space) for item in ids) or all(isinstance(item, SpaceApply) for item in ids):
101
- ids = [cast(Space | SpaceApply, item).space for item in ids]
102
- return self.client.data_modeling.spaces.delete(cast(SequenceNotStr[str], ids))
103
-
104
- def clean(self, space: str) -> None:
105
- """Deletes all data in a space.
106
-
107
- This means all nodes, edges, views, containers, and data models located in the given space.
108
-
109
- Args:
110
- client: Connected CogniteClient
111
- space: The space to delete.
112
-
113
- """
114
- edges = self.client.data_modeling.instances.list(
115
- "edge", limit=-1, filter=filters.Equals(["edge", "space"], space)
116
- )
117
- if edges:
118
- instances = self.client.data_modeling.instances.delete(edges=edges.as_ids())
119
- print(f"Deleted {len(instances.edges)} edges")
120
- nodes = self.client.data_modeling.instances.list(
121
- "node", limit=-1, filter=filters.Equals(["node", "space"], space)
122
- )
123
- node_types = {NodeId(node.type.space, node.type.external_id) for node in nodes if node.type}
124
- node_data = set(nodes.as_ids()) - node_types
125
- if node_data:
126
- instances = self.client.data_modeling.instances.delete(nodes=list(node_data))
127
- print(f"Deleted {len(instances.nodes)} nodes")
128
- if node_types:
129
- instances = self.client.data_modeling.instances.delete(nodes=list(node_types))
130
- print(f"Deleted {len(instances.nodes)} node types")
131
- views = self.client.data_modeling.views.list(limit=-1, space=space)
132
- if views:
133
- deleted_views = self.client.data_modeling.views.delete(views.as_ids())
134
- print(f"Deleted {len(deleted_views)} views")
135
- containers = self.client.data_modeling.containers.list(limit=-1, space=space)
136
- if containers:
137
- deleted_containers = self.client.data_modeling.containers.delete(containers.as_ids())
138
- print(f"Deleted {len(deleted_containers)} containers")
139
- if data_models := self.client.data_modeling.data_models.list(limit=-1, space=space):
140
- deleted_data_models = self.client.data_modeling.data_models.delete(data_models.as_ids())
141
- print(f"Deleted {len(deleted_data_models)} data models")
142
- deleted_space = self.client.data_modeling.spaces.delete(space)
143
- print(f"Deleted space {deleted_space}")
144
-
145
-
146
- class ViewLoader(DataModelingLoader[ViewId, ViewApply, View, ViewApplyList, ViewList]):
147
- resource_name = "views"
148
-
149
- def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "skip", "update", "force"] = "fail"):
150
- super().__init__(client)
151
- self.existing_handling = existing_handling
152
- self._cache_view_by_id: dict[ViewId, View] = {}
153
- self._tried_force_deploy: set[ViewId] = set()
154
-
155
- @classmethod
156
- def get_id(cls, item: View | ViewApply | ViewId | dict) -> ViewId:
157
- if isinstance(item, View | ViewApply):
158
- return item.as_id()
159
- if isinstance(item, dict):
160
- return ViewId.load(item)
161
- return item
162
-
163
- def create(self, items: Sequence[ViewApply]) -> ViewList:
164
- if self.existing_handling == "force":
165
- return self._create_force(items, self._tried_force_deploy, self.client.data_modeling.views.apply)
166
- else:
167
- return self.client.data_modeling.views.apply(items)
168
-
169
- def retrieve(self, ids: SequenceNotStr[ViewId]) -> ViewList:
170
- return self.client.data_modeling.views.retrieve(cast(Sequence, ids))
171
-
172
- def update(self, items: Sequence[ViewApply]) -> ViewList:
173
- return self.create(items)
174
-
175
- def delete(self, ids: SequenceNotStr[ViewId]) -> list[ViewId]:
176
- return self.client.data_modeling.views.delete(cast(Sequence, ids))
177
-
178
- def _as_write_raw(self, view: View) -> dict[str, Any]:
179
- dumped = view.as_write().dump()
180
- if view.properties:
181
- # All read version of views have all the properties of their parent views.
182
- # We need to remove these properties to compare with the local view.
183
- parents = self._retrieve_view_ancestors(view.implements or [], self._cache_view_by_id)
184
- for parent in parents:
185
- for prop_name in parent.properties.keys():
186
- dumped["properties"].pop(prop_name, None)
187
-
188
- if "properties" in dumped and not dumped["properties"]:
189
- # All properties were removed, so we remove the properties key.
190
- dumped.pop("properties", None)
191
- return dumped
192
-
193
- def are_equal(self, local: ViewApply, remote: View) -> bool:
194
- local_dumped = local.dump()
195
- if not remote.implements:
196
- return local_dumped == remote.as_write().dump()
197
-
198
- cdf_resource_dumped = self._as_write_raw(remote)
199
-
200
- if "properties" in local_dumped and not local_dumped["properties"]:
201
- # In case the local properties are set to an empty dict.
202
- local_dumped.pop("properties", None)
203
-
204
- return local_dumped == cdf_resource_dumped
205
-
206
- def as_write(self, view: View) -> ViewApply:
207
- return ViewApply.load(self._as_write_raw(view))
208
-
209
- def retrieve_all_parents(self, views: list[ViewId]) -> list[View]:
210
- return self._retrieve_view_ancestors(views, self._cache_view_by_id)
211
-
212
- def _retrieve_view_ancestors(self, parents: list[ViewId], cache: dict[ViewId, View]) -> list[View]:
213
- """Retrieves all ancestors of a view.
214
-
215
- This will mutate the cache passed in, and return a list of views that are the ancestors
216
- of the views in the parents list.
217
-
218
- Args:
219
- parents: The parents of the view to retrieve all ancestors for
220
- cache: The cache to store the views in
221
- """
222
- parent_ids = parents.copy()
223
- found: list[View] = []
224
- found_ids: set[ViewId] = set()
225
- while parent_ids:
226
- to_lookup: set[ViewId] = set()
227
- grand_parent_ids = []
228
- for parent in parent_ids:
229
- if parent in found_ids:
230
- continue
231
- elif parent in cache:
232
- found.append(cache[parent])
233
- grand_parent_ids.extend(cache[parent].implements or [])
234
- else:
235
- to_lookup.add(parent)
236
-
237
- if to_lookup:
238
- looked_up = self.client.data_modeling.views.retrieve(list(to_lookup))
239
- cache.update({view.as_id(): view for view in looked_up})
240
- found.extend(looked_up)
241
- found_ids.update({view.as_id() for view in looked_up})
242
- for view in looked_up:
243
- grand_parent_ids.extend(view.implements or [])
244
-
245
- parent_ids = grand_parent_ids
246
- return found
247
-
248
-
249
- class ContainerLoader(DataModelingLoader[ContainerId, ContainerApply, Container, ContainerApplyList, ContainerList]):
250
- resource_name = "containers"
251
-
252
- def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "skip", "update", "force"] = "fail"):
253
- super().__init__(client)
254
- self.existing_handling = existing_handling
255
- self._tried_force_deploy: set[ContainerId] = set()
256
-
257
- @classmethod
258
- def get_id(cls, item: Container | ContainerApply | ContainerId | dict) -> ContainerId:
259
- if isinstance(item, Container | ContainerApply):
260
- return item.as_id()
261
- if isinstance(item, dict):
262
- return ContainerId.load(item)
263
- return item
264
-
265
- def sort_by_dependencies(self, items: Sequence[ContainerApply]) -> list[ContainerApply]:
266
- container_by_id = {container.as_id(): container for container in items}
267
- container_dependencies = {
268
- container.as_id(): {
269
- const.require
270
- for const in container.constraints.values()
271
- if isinstance(const, RequiresConstraint) and const.require in container_by_id
272
- }
273
- for container in items
274
- }
275
- return [
276
- container_by_id[container_id] for container_id in TopologicalSorter(container_dependencies).static_order()
277
- ]
278
-
279
- def create(self, items: Sequence[ContainerApply]) -> ContainerList:
280
- if self.existing_handling == "force":
281
- return self._create_force(items, self._tried_force_deploy, self.client.data_modeling.containers.apply)
282
- else:
283
- return self.client.data_modeling.containers.apply(items)
284
-
285
- def retrieve(self, ids: SequenceNotStr[ContainerId]) -> ContainerList:
286
- return self.client.data_modeling.containers.retrieve(cast(Sequence, ids))
287
-
288
- def update(self, items: Sequence[ContainerApply]) -> ContainerList:
289
- return self.create(items)
290
-
291
- def delete(self, ids: SequenceNotStr[ContainerId]) -> list[ContainerId]:
292
- return self.client.data_modeling.containers.delete(cast(Sequence, ids))
293
-
294
- def are_equal(self, local: ContainerApply, remote: Container) -> bool:
295
- local_dumped = local.dump(camel_case=True)
296
- if "usedFor" not in local_dumped:
297
- # Setting used_for to "node" as it is the default value in the CDF.
298
- local_dumped["usedFor"] = "node"
299
-
300
- return local_dumped == remote.as_write().dump(camel_case=True)
301
-
302
-
303
- class DataModelLoader(DataModelingLoader[DataModelId, DataModelApply, DataModel, DataModelApplyList, DataModelList]):
304
- resource_name = "data_models"
305
-
306
- @classmethod
307
- def get_id(cls, item: DataModel | DataModelApply | DataModelId | dict) -> DataModelId:
308
- if isinstance(item, DataModel | DataModelApply):
309
- return item.as_id()
310
- if isinstance(item, dict):
311
- return DataModelId.load(item)
312
- return item
313
-
314
- def create(self, items: Sequence[DataModelApply]) -> DataModelList:
315
- return self.client.data_modeling.data_models.apply(items)
316
-
317
- def retrieve(self, ids: SequenceNotStr[DataModelId]) -> DataModelList:
318
- return self.client.data_modeling.data_models.retrieve(cast(Sequence, ids))
319
-
320
- def update(self, items: Sequence[DataModelApply]) -> DataModelList:
321
- return self.create(items)
322
-
323
- def delete(self, ids: SequenceNotStr[DataModelId]) -> list[DataModelId]:
324
- return self.client.data_modeling.data_models.delete(cast(Sequence, ids))
325
-
326
- def are_equal(self, local: DataModelApply, remote: DataModel) -> bool:
327
- local_dumped = local.dump()
328
- cdf_resource_dumped = remote.as_write().dump()
329
-
330
- # Data models that have the same views, but in different order, are considered equal.
331
- # We also account for whether views are given as IDs or View objects.
332
- local_dumped["views"] = sorted(
333
- (v if isinstance(v, ViewId) else v.as_id()).as_tuple() for v in local.views or []
334
- )
335
- cdf_resource_dumped["views"] = sorted(
336
- (v if isinstance(v, ViewId) else v.as_id()).as_tuple() for v in remote.views or []
337
- )
338
-
339
- return local_dumped == cdf_resource_dumped
@@ -1,167 +0,0 @@
1
- from collections.abc import Iterable, Sequence
2
- from itertools import groupby
3
- from typing import cast, overload
4
-
5
- from cognite.client.data_classes import (
6
- Database,
7
- DatabaseList,
8
- DatabaseWrite,
9
- DatabaseWriteList,
10
- Transformation,
11
- TransformationList,
12
- TransformationWrite,
13
- TransformationWriteList,
14
- )
15
- from cognite.client.exceptions import CogniteAPIError
16
- from cognite.client.utils.useful_types import SequenceNotStr
17
-
18
- from cognite.neat._utils.cdf.data_classes import RawTable, RawTableID, RawTableList, RawTableWrite, RawTableWriteList
19
-
20
- from ._base import ResourceLoader
21
-
22
-
23
- class TransformationLoader(
24
- ResourceLoader[str, TransformationWrite, Transformation, TransformationWriteList, TransformationList]
25
- ):
26
- resource_name = "transformations"
27
-
28
- @classmethod
29
- def get_id(cls, item: Transformation | TransformationWrite | str | dict) -> str:
30
- if isinstance(item, Transformation | TransformationWrite):
31
- if item.external_id is None:
32
- raise ValueError(f"Transformation {item} does not have an external_id")
33
- return item.external_id
34
- if isinstance(item, dict):
35
- if item.get("externalId") is None:
36
- raise ValueError(f"Transformation {item} does not have an external_id")
37
- return item["externalId"]
38
- return item
39
-
40
- def create(self, items: Sequence[TransformationWrite]) -> TransformationList:
41
- return self.client.transformations.create(items)
42
-
43
- def retrieve(self, ids: SequenceNotStr[str]) -> TransformationList:
44
- return self.client.transformations.retrieve_multiple(external_ids=ids, ignore_unknown_ids=True)
45
-
46
- def update(self, items: Sequence[TransformationWrite]) -> TransformationList:
47
- return self.client.transformations.update(items)
48
-
49
- def delete(self, ids: SequenceNotStr[str]) -> list[str]:
50
- existing = self.retrieve(ids)
51
- self.client.transformations.delete(external_id=ids, ignore_unknown_ids=True)
52
- return existing.as_external_ids()
53
-
54
-
55
- class RawDatabaseLoader(ResourceLoader[str, DatabaseWrite, Database, DatabaseWriteList, DatabaseList]):
56
- resource_name = "databases"
57
-
58
- @classmethod
59
- def get_id(cls, item: Database | DatabaseWrite | str | dict) -> str:
60
- if isinstance(item, Database | DatabaseWrite):
61
- if item.name is None:
62
- raise ValueError(f"Database {item} does not have a name")
63
- return item.name
64
- if isinstance(item, dict):
65
- if item.get("name") is None:
66
- raise ValueError(f"Database {item} does not have a name")
67
- return item["name"]
68
- return item
69
-
70
- def create(self, items: Sequence[DatabaseWrite]) -> DatabaseList:
71
- return self.client.raw.databases.create([item.name for item in items if item.name is not None])
72
-
73
- def retrieve(self, ids: SequenceNotStr[str]) -> DatabaseList:
74
- all_databases = self.client.raw.databases.list(limit=-1)
75
- return DatabaseList([db for db in all_databases if db.name in ids])
76
-
77
- def update(self, items: Sequence[DatabaseWrite]) -> DatabaseList:
78
- if not items:
79
- return DatabaseList([])
80
- raise NotImplementedError("The CDF API does not support updating a RAW database.")
81
-
82
- def delete(self, ids: SequenceNotStr[str]) -> list[str]:
83
- existing_databases = self.retrieve(ids)
84
- existing_names = {item.name for item in existing_databases}
85
- self.client.raw.databases.delete([name for name in ids if name in existing_names])
86
- return existing_databases.as_names()
87
-
88
-
89
- class RawTableLoader(ResourceLoader[RawTableID, RawTableWrite, RawTable, RawTableWriteList, RawTableList]):
90
- resource_name = "tables"
91
-
92
- @classmethod
93
- def get_id(cls, item: RawTable | RawTableWrite | RawTableID | dict) -> RawTableID:
94
- if isinstance(item, RawTable | RawTableWrite):
95
- return item.as_id()
96
- if isinstance(item, dict):
97
- return RawTableID(database=item["database"], table=item["name"])
98
- return item
99
-
100
- @overload
101
- def _groupby_database(self, items: Sequence[RawTableWrite]) -> Iterable[tuple[str, Iterable[RawTableWrite]]]: ...
102
-
103
- @overload
104
- def _groupby_database(self, items: SequenceNotStr[RawTableID]) -> Iterable[tuple[str, Iterable[RawTableID]]]: ...
105
-
106
- def _groupby_database(
107
- self, items: Sequence[RawTableWrite] | SequenceNotStr[RawTableID]
108
- ) -> Iterable[tuple[str, Iterable[RawTableWrite] | Iterable[RawTableID]]]:
109
- return cast(
110
- Iterable[tuple[str, Iterable[RawTableID] | Iterable[RawTableWrite]]],
111
- groupby(sorted(items, key=lambda x: x.database or ""), lambda x: x.database or ""),
112
- )
113
-
114
- def create(self, items: Sequence[RawTableWrite]) -> RawTableList:
115
- existing = set(self.retrieve([table.as_id() for table in items]).as_ids())
116
- output = RawTableList([])
117
- for db_name, tables in self._groupby_database(items):
118
- to_create = [table.name for table in tables if table.name if table.as_id() not in existing]
119
- if not to_create:
120
- continue
121
- created = self.client.raw.tables.create(db_name=db_name, name=to_create)
122
- for table in created:
123
- output.append(
124
- RawTable(
125
- name=table.name, database=db_name, created_time=table.created_time, cognite_client=self.client
126
- )
127
- )
128
- return output
129
-
130
- def retrieve(self, ids: SequenceNotStr[RawTableID]) -> RawTableList:
131
- output = RawTableList([])
132
- for db_name, id_group in self._groupby_database(ids):
133
- try:
134
- all_tables = self.client.raw.tables.list(db_name, limit=-1)
135
- except CogniteAPIError as e:
136
- if e.code == 404 and e.message.startswith("Following databases not found"):
137
- continue
138
- looking_for = {table_id.table for table_id in id_group if table_id.table is not None}
139
- output.extend(
140
- [
141
- RawTable(
142
- name=table.name, database=db_name, created_time=table.created_time, cognite_client=self.client
143
- )
144
- for table in all_tables
145
- if table.name in looking_for
146
- ]
147
- )
148
- return output
149
-
150
- def update(self, items: Sequence[RawTableWrite]) -> RawTableList:
151
- if not items:
152
- return RawTableList([])
153
- raise NotImplementedError("The CDF API does not support updating a RAW table.")
154
-
155
- def delete(self, ids: SequenceNotStr[RawTableID]) -> list[RawTableID]:
156
- existing_tables = self.retrieve(ids)
157
- existing_names = {item.name for item in existing_tables}
158
- for db_name, id_group in self._groupby_database(ids):
159
- self.client.raw.tables.delete(
160
- db_name=db_name,
161
- name=[
162
- table_id.table
163
- for table_id in id_group
164
- if table_id.table is not None and table_id.table in existing_names
165
- ],
166
- )
167
- return existing_tables.as_ids()
File without changes