cognite-neat 0.123.30__py3-none-any.whl → 0.123.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. Click here for more details.

File without changes
@@ -0,0 +1,10 @@
1
+ from ._base import Resource, WriteableResource
2
+ from ._space import Space, SpaceRequest, SpaceResponse
3
+
4
+ __all__ = [
5
+ "Resource",
6
+ "Space",
7
+ "SpaceRequest",
8
+ "SpaceResponse",
9
+ "WriteableResource",
10
+ ]
@@ -0,0 +1,27 @@
1
+ from abc import ABC, abstractmethod
2
+ from typing import Generic, TypeVar
3
+
4
+ from pydantic import BaseModel
5
+ from pydantic.alias_generators import to_camel
6
+
7
+
8
class BaseModelObject(BaseModel, alias_generator=to_camel, extra="ignore"):
    """Base class for all objects, i.e. resources and nested objects.

    Serialization aliases are generated in camelCase, and unknown fields
    are silently ignored on validation.
    """
12
+
13
+
14
class Resource(BaseModelObject):
    """Common base for every data modeling resource."""
18
+
19
+
20
# Type variable for a resource subtype; used to parameterize WriteableResource.
T_Resource = TypeVar("T_Resource", bound=Resource)
21
+
22
+
23
class WriteableResource(Generic[T_Resource], Resource, ABC):
    """A readable resource that can be turned into its writeable counterpart."""

    @abstractmethod
    def as_request(self) -> T_Resource:
        """Return the request-model form of this resource, with read-only fields removed."""
        raise NotImplementedError()
@@ -0,0 +1,2 @@
# Regex for valid space identifiers: starts with a letter, then up to 42 more
# letters, digits, hyphens or underscores. Anchored at BOTH ends: pydantic v2's
# `pattern` constraint uses unanchored search semantics, so without the leading
# '^' an invalid identifier such as "1abc" would still be accepted (the search
# would match the "abc" suffix).
SPACE_FORMAT_PATTERN = r"^[a-zA-Z][a-zA-Z0-9_-]{0,41}[a-zA-Z0-9]?$"
# Space identifiers reserved by CDF; user code must not create these.
FORBIDDEN_SPACES = frozenset(["space", "cdf", "dms", "pg3", "shared", "system", "node", "edge"])
@@ -0,0 +1,44 @@
1
+ from abc import ABC
2
+
3
+ from pydantic import Field, field_validator
4
+
5
+ from cognite.neat.core._utils.text import humanize_collection
6
+
7
+ from ._base import WriteableResource
8
+ from ._constants import FORBIDDEN_SPACES, SPACE_FORMAT_PATTERN
9
+
10
+
11
class Space(WriteableResource["SpaceRequest"], ABC):
    """Shared definition of a CDF space: the fields common to request and response."""

    space: str = Field(
        description="The Space identifier (id).",
        min_length=1,
        max_length=43,
        pattern=SPACE_FORMAT_PATTERN,
    )
    name: str | None = Field(None, description="Name of the space.", max_length=1024)
    description: str | None = Field(None, description="The description of the space.", max_length=255)

    @field_validator("space")
    @classmethod
    def check_forbidden_space_value(cls, value: str) -> str:
        """Reject identifiers that collide with spaces reserved by CDF."""
        if value not in FORBIDDEN_SPACES:
            return value
        raise ValueError(f"{value!r} is a reserved space. Reserved Spaces: {humanize_collection(FORBIDDEN_SPACES)}")

    def as_request(self) -> "SpaceRequest":
        """Build the request model from this space's aliased field values."""
        dumped = self.model_dump(by_alias=True)
        return SpaceRequest.model_validate(dumped)
30
+
31
+
32
class SpaceResponse(Space):
    """Read (response) model for a space, including server-generated fields."""

    created_time: int = Field(
        description="When the space was created. The number of milliseconds since 00:00:00 Thursday, 1 January 1970, "
        "Coordinated Universal Time (UTC), minus leap seconds."
    )
    last_updated_time: int = Field(
        description="When the space was last updated. The number of milliseconds since 00:00:00 Thursday, "
        "1 January 1970, Coordinated Universal Time (UTC), minus leap seconds."
    )
    is_global: bool = Field(description="Whether the space is a global space.")
42
+
43
+
44
class SpaceRequest(Space):
    """Write (request) model for a space; adds nothing beyond the shared fields."""
File without changes
File without changes
@@ -0,0 +1,32 @@
1
+ import inspect
2
+ from abc import ABC
3
+ from typing import TypeVar
4
+
5
T_Cls = TypeVar("T_Cls")


def get_concrete_subclasses(base_cls: type[T_Cls], exclude_direct_abc_inheritance: bool = True) -> list[type[T_Cls]]:
    """
    Returns a list of all concrete subclasses of the given base class.

    Every subclass in the hierarchy is visited exactly once. Classes filtered
    out of the result — abstract classes, and direct ``abc.ABC`` subclasses when
    ``exclude_direct_abc_inheritance`` is True — are still traversed, so their
    concrete descendants are found.

    Args:
        base_cls (type[T_Cls]): The base class to find subclasses for.
        exclude_direct_abc_inheritance (bool): If True, excludes classes that directly inherit from `abc.ABC`.
            This is used as a marker to filter out intermediate base classes. Defaults to True.
    Returns:
        list[type[T_Cls]]: A list of concrete subclasses of the base class.
    """
    subclasses: list[type[T_Cls]] = []
    seen: set[type[T_Cls]] = {base_cls}
    stack: list[type[T_Cls]] = [base_cls]
    while stack:
        current = stack.pop()
        for subclass in current.__subclasses__():
            if subclass in seen:
                continue
            # Mark and enqueue unconditionally — even when the subclass itself is
            # filtered out of the result — so that concrete classes further down
            # the hierarchy are still discovered.
            seen.add(subclass)
            stack.append(subclass)
            is_concrete = not inspect.isabstract(subclass)
            is_intermediate_marker = exclude_direct_abc_inheritance and ABC in subclass.__bases__
            if is_concrete and not is_intermediate_marker:
                subclasses.append(subclass)
    return subclasses
cognite/neat/_version.py CHANGED
@@ -1,2 +1,2 @@
1
- __version__ = "0.123.30"
1
+ __version__ = "0.123.31"
2
2
  __engine__ = "^2.0.4"
@@ -33,7 +33,6 @@ from ._mapping import MappingAPI
33
33
  from ._plugin import PluginAPI
34
34
  from ._prepare import PrepareAPI
35
35
  from ._read import ReadAPI
36
- from ._session._data_model import DataModelAPI
37
36
  from ._set import SetAPI
38
37
  from ._show import ShowAPI
39
38
  from ._state import SessionState
@@ -116,9 +115,6 @@ class NeatSession:
116
115
  if load_engine != "skip" and (engine_version := load_neat_engine(client, load_engine)):
117
116
  print(f"Neat Engine {engine_version} loaded.")
118
117
 
119
- # new API for data model operations
120
- self.data_model = DataModelAPI(self._state)
121
-
122
118
  def _select_most_performant_store(self) -> Literal["memory", "oxigraph"]:
123
119
  """Select the most performant store based on the current environment."""
124
120
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cognite-neat
3
- Version: 0.123.30
3
+ Version: 0.123.31
4
4
  Summary: Knowledge graph transformation
5
5
  Project-URL: Documentation, https://cognite-neat.readthedocs-hosted.com/
6
6
  Project-URL: Homepage, https://cognite-neat.readthedocs-hosted.com/
@@ -1,6 +1,15 @@
1
1
  cognite/neat/__init__.py,sha256=12StS1dzH9_MElqxGvLWrNsxCJl9Hv8A2a9D0E5OD_U,193
2
- cognite/neat/_version.py,sha256=Umq3xx0HZoXDYrJ16jkCraj5gi3lRvqf-wxtIkntgvg,47
2
+ cognite/neat/_version.py,sha256=c-BbCQozGLPXIz0KlrtSttiQDSYohkj9MbUXKI693Pc,47
3
3
  cognite/neat/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ cognite/neat/_data_model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
+ cognite/neat/_data_model/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ cognite/neat/_data_model/models/dms/__init__.py,sha256=LwuPR4zOPinhT89mN0XEB38d5deWmLbGFs_RvENVQlc,212
7
+ cognite/neat/_data_model/models/dms/_base.py,sha256=R8SP3Zi9daTBqewYKGjuNEkrWc-j91f-6t34CN-9YJ0,719
8
+ cognite/neat/_data_model/models/dms/_constants.py,sha256=TpnOZ5Q1O_r2H5Ez3sAvaCH2s5DbnUm0qeRC-K1iPAQ,164
9
+ cognite/neat/_data_model/models/dms/_space.py,sha256=W5tRG3GIcxRK9RBkpWWtZWdaqUZPiKq-NMOwPuHjj9o,1677
10
+ cognite/neat/_data_model/models/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
+ cognite/neat/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
+ cognite/neat/_utils/auxiliary.py,sha256=szvIrFRfBSZ_CpF24z5I1ustJohdvGZjdi_7TFB6Ltc,1236
4
13
  cognite/neat/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
14
  cognite/neat/core/_config.py,sha256=WT1BS8uADcFvGoUYOOfwFOVq_VBl472TisdoA3wLick,280
6
15
  cognite/neat/core/_constants.py,sha256=wIpGOzZAKS2vhshXR1K51cbcsgq2TlvZaZ3zw91CU9I,9054
@@ -170,7 +179,7 @@ cognite/neat/plugins/data_model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
170
179
  cognite/neat/plugins/data_model/importers/__init__.py,sha256=d4UJNCFR1DXPY7lv5LdCW2hiStEhvXiu2g_bRSIp1y0,89
171
180
  cognite/neat/plugins/data_model/importers/_base.py,sha256=M9zXp7tEu1SfJZRAJAtLmqpssdFcoi2X-5e25q_n_h8,1034
172
181
  cognite/neat/session/__init__.py,sha256=fxQ5URVlUnmEGYyB8Baw7IDq-uYacqkigbc4b-Pr9Fw,58
173
- cognite/neat/session/_base.py,sha256=yxAItY6663wcmlrwxmzWkJorrrrm9oahpYOP-5RGwvQ,13081
182
+ cognite/neat/session/_base.py,sha256=6P63Kq4JJSi3S1CpcVLiG-dfKsN9Ml3o5GqZXjcLVvo,12937
174
183
  cognite/neat/session/_collector.py,sha256=-icWXOT9YBjAOVZfpPtBx-D39kpRP2RaQKdPtcr7Xm8,4233
175
184
  cognite/neat/session/_drop.py,sha256=ipD8RS_ZebPNpeIkhC7yqSSeo7e57TXMRxrh5_6IRik,4239
176
185
  cognite/neat/session/_experimental.py,sha256=0peZPZ9JpmzQE05wHbng2tWmPPLLTAVfWZEEUhdnI6o,1274
@@ -189,18 +198,12 @@ cognite/neat/session/_template.py,sha256=NCgrrwLT98DpLYoo3Wybr_OUXrEXpsJZjrJ83Kq
189
198
  cognite/neat/session/_to.py,sha256=_R-UB3iEIQoa12kTD7tuSrRDdbySQXQg_mzbn5t-7bg,19399
190
199
  cognite/neat/session/_wizard.py,sha256=hARNNzD5Zfkk_V147rIjOLVvrFaqzXGXWhZuH1NJG3M,1486
191
200
  cognite/neat/session/exceptions.py,sha256=z5jxwfVTXDCCFZKTTYVIaksNKqb9CMa2tyIZgyNL3Us,3475
192
- cognite/neat/session/_session/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
193
- cognite/neat/session/_session/_data_model/__init__.py,sha256=417QF6wm3r-bLTmhYGXL_5XnEFShCklLCoGg3YBOIqY,62
194
- cognite/neat/session/_session/_data_model/_read.py,sha256=B_mVOt_9uKzG5vbrZG2Uf5D0FQGjcCDEdfAhIp2veUI,7902
195
- cognite/neat/session/_session/_data_model/_routes.py,sha256=FtCMbqvji2KqAPDkZ4yXXp_Fuf6mSFTpNRyVfWOMzFE,1773
196
- cognite/neat/session/_session/_data_model/_show.py,sha256=yX4BTIeBzcCcllfJvGm8g4qy13heFmcJtpXPixI8T2o,5835
197
- cognite/neat/session/_session/_data_model/_write.py,sha256=qpb15cnGLknkDkxKndpdvLxhsmJ4XkXfuFD4-CDw0qE,14612
198
201
  cognite/neat/session/_state/README.md,sha256=o6N7EL98lgyWffw8IoEUf2KG5uSKveD5__TW45YzVjA,902
199
202
  cognite/neat/session/engine/__init__.py,sha256=D3MxUorEs6-NtgoICqtZ8PISQrjrr4dvca6n48bu_bI,120
200
203
  cognite/neat/session/engine/_import.py,sha256=1QxA2_EK613lXYAHKQbZyw2yjo5P9XuiX4Z6_6-WMNQ,169
201
204
  cognite/neat/session/engine/_interface.py,sha256=3W-cYr493c_mW3P5O6MKN1xEQg3cA7NHR_ev3zdF9Vk,533
202
205
  cognite/neat/session/engine/_load.py,sha256=g52uYakQM03VqHt_RDHtpHso1-mFFifH5M4T2ScuH8A,5198
203
- cognite_neat-0.123.30.dist-info/METADATA,sha256=girPGuZI_KaMxUsiBAAyFdVBLXuFrHyPMJP8dDX0u6M,9166
204
- cognite_neat-0.123.30.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
205
- cognite_neat-0.123.30.dist-info/licenses/LICENSE,sha256=W8VmvFia4WHa3Gqxq1Ygrq85McUNqIGDVgtdvzT-XqA,11351
206
- cognite_neat-0.123.30.dist-info/RECORD,,
206
+ cognite_neat-0.123.31.dist-info/METADATA,sha256=fDMu0gS-R0DVtoE1eFJcyXsniWkamkN_jvc5kSTFx_8,9166
207
+ cognite_neat-0.123.31.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
208
+ cognite_neat-0.123.31.dist-info/licenses/LICENSE,sha256=W8VmvFia4WHa3Gqxq1Ygrq85McUNqIGDVgtdvzT-XqA,11351
209
+ cognite_neat-0.123.31.dist-info/RECORD,,
@@ -1,3 +0,0 @@
1
- from ._routes import DataModelAPI
2
-
3
- __all__ = ["DataModelAPI"]
@@ -1,193 +0,0 @@
1
- from typing import Any, Literal, cast
2
- from zipfile import Path
3
-
4
- from cognite.client.data_classes.data_modeling import DataModelId, DataModelIdentifier
5
-
6
- from cognite.neat.core._client._api_client import NeatClient
7
- from cognite.neat.core._data_model import importers
8
- from cognite.neat.core._data_model.importers._base import BaseImporter
9
- from cognite.neat.core._issues._base import IssueList
10
- from cognite.neat.core._issues.errors._general import NeatValueError
11
- from cognite.neat.core._issues.warnings._general import MissingCogniteClientWarning
12
- from cognite.neat.core._utils.reader._base import NeatReader
13
- from cognite.neat.plugins._manager import get_plugin_manager
14
- from cognite.neat.plugins.data_model.importers._base import DataModelImporterPlugin
15
- from cognite.neat.session._state import SessionState
16
- from cognite.neat.session.exceptions import NeatSessionError, session_class_wrapper
17
-
18
- InternalReaderName = Literal["excel", "cdf", "ontology", "yaml"]
19
-
20
-
21
- @session_class_wrapper
22
- class ReadAPI:
23
- def __init__(self, state: SessionState) -> None:
24
- self._state = state
25
-
26
- def __call__(self, name: str, io: str | Path | DataModelIdentifier, **kwargs: Any) -> IssueList:
27
- """Provides access to internal data model readers and external data model
28
- reader plugins.
29
-
30
- Args:
31
- name (str): The name of format (e.g. Excel) reader is handling.
32
- io (str | Path | | DataModelIdentifier | None): The input/output interface for the reader.
33
- **kwargs (Any): Additional keyword arguments for the reader.
34
-
35
- !!! note "io"
36
- The `io` parameter can be a file path, sting, or a DataModelIdentifier
37
- depending on the reader's requirements.
38
-
39
- !!! note "kwargs"
40
- Users must consult the documentation of the reader to understand
41
- what keyword arguments are supported.
42
- """
43
-
44
- # Clean the input name once before matching.
45
- clean_name: InternalReaderName | str = name.strip().lower()
46
-
47
- # The match statement cleanly handles each case.
48
- match clean_name:
49
- case "excel":
50
- return self.excel(cast(str | Path, io), **kwargs)
51
-
52
- case "cdf":
53
- return self.cdf(cast(DataModelIdentifier, io))
54
-
55
- case "ontology":
56
- return self.ontology(cast(str | Path, io))
57
-
58
- case "yaml":
59
- return self.yaml(cast(str | Path, io), **kwargs)
60
-
61
- case _: # The wildcard '_' acts as the default 'else' case.
62
- return self._plugin(name, cast(str | Path, io), **kwargs)
63
-
64
- def _plugin(self, name: str, io: str | Path, **kwargs: Any) -> IssueList:
65
- """Provides access to the external plugins for data model importing.
66
-
67
- Args:
68
- name (str): The name of format (e.g. Excel) plugin is handling.
69
- io (str | Path | None): The input/output interface for the plugin.
70
- **kwargs (Any): Additional keyword arguments for the plugin.
71
-
72
- !!! note "kwargs"
73
- Users must consult the documentation of the plugin to understand
74
- what keyword arguments are supported.
75
- """
76
-
77
- # Some plugins may not support the io argument
78
- reader = NeatReader.create(io)
79
- path = reader.materialize_path()
80
-
81
- self._state._raise_exception_if_condition_not_met(
82
- "Data Model Read",
83
- empty_data_model_store_required=True,
84
- )
85
-
86
- plugin_manager = get_plugin_manager()
87
- plugin = plugin_manager.get(name, DataModelImporterPlugin)
88
-
89
- print(
90
- f"You are using an external plugin {plugin.__name__}, which is not developed by the NEAT team."
91
- "\nUse it at your own risk."
92
- )
93
-
94
- return self._state.data_model_import(plugin().configure(io=path, **kwargs))
95
-
96
- def cdf(self, io: DataModelIdentifier) -> IssueList:
97
- """Reads a Data Model from CDF to the knowledge graph.
98
-
99
- Args:
100
- io: Tuple of strings with the id of a CDF Data Model.
101
- Notation as follows (<name_of_space>, <name_of_data_model>, <data_model_version>)
102
-
103
- Example:
104
- ```python
105
- neat.read.cdf.data_model(("example_data_model_space", "EXAMPLE_DATA_MODEL", "v1"))
106
- ```
107
- """
108
-
109
- data_model_id = DataModelId.load(io)
110
-
111
- if not data_model_id.version:
112
- raise NeatSessionError("Data model version is required to read a data model.")
113
-
114
- self._state._raise_exception_if_condition_not_met(
115
- "Read data model from CDF",
116
- empty_data_model_store_required=True,
117
- client_required=True,
118
- )
119
-
120
- return self._state.data_model_import(
121
- importers.DMSImporter.from_data_model_id(cast(NeatClient, self._state.client), data_model_id)
122
- )
123
-
124
- def excel(self, io: str | Path, *, enable_manual_edit: bool = False) -> IssueList:
125
- """Reads a Neat Excel Data Model to the data model store.
126
- The data model spreadsheets may contain conceptual or physical data model definitions.
127
-
128
- Args:
129
- io: file path to the Excel sheet
130
- enable_manual_edit: If True, the user will be able to re-import data model
131
- which where edit outside of NeatSession
132
- """
133
- reader = NeatReader.create(io)
134
- path = reader.materialize_path()
135
-
136
- self._state._raise_exception_if_condition_not_met(
137
- "Read Excel Data Model",
138
- empty_data_model_store_required=not enable_manual_edit,
139
- )
140
-
141
- return self._state.data_model_import(importers.ExcelImporter(path), enable_manual_edit)
142
-
143
- def ontology(self, io: str | Path) -> IssueList:
144
- """Reads an OWL ontology source into NeatSession.
145
-
146
- Args:
147
- io: file path or url to the OWL file
148
- """
149
-
150
- self._state._raise_exception_if_condition_not_met(
151
- "Read Ontology",
152
- empty_data_model_store_required=True,
153
- )
154
-
155
- reader = NeatReader.create(io)
156
- importer = importers.OWLImporter.from_file(reader.materialize_path(), source_name=f"file {reader!s}")
157
- return self._state.data_model_import(importer)
158
-
159
- def yaml(self, io: str | Path, *, format: Literal["neat", "toolkit"] = "neat") -> IssueList:
160
- """Reads a yaml with either neat data mode, or several toolkit yaml files to
161
- import Data Model(s) into NeatSession.
162
-
163
- Args:
164
- io: File path to the Yaml file in the case of "neat" yaml, or path to a zip folder or directory with several
165
- Yaml files in the case of "toolkit".
166
- format: The format of the yaml file(s). Can be either "neat" or "toolkit".
167
-
168
- Example:
169
- ```python
170
- neat.read.yaml("path_to_toolkit_yamls")
171
- ```
172
- """
173
- self._state._raise_exception_if_condition_not_met(
174
- "Read YAML data model",
175
- empty_data_model_store_required=True,
176
- )
177
- reader = NeatReader.create(io)
178
- path = reader.materialize_path()
179
- importer: BaseImporter
180
- if format == "neat":
181
- importer = importers.DictImporter.from_yaml_file(path, source_name=f"{reader!s}")
182
- elif format == "toolkit":
183
- dms_importer = importers.DMSImporter.from_path(path, self._state.client)
184
- if dms_importer.issue_list.has_warning_type(MissingCogniteClientWarning):
185
- raise NeatSessionError(
186
- "No client provided. You are referencing Cognite containers in your data model, "
187
- "NEAT needs a client to lookup the container definitions. "
188
- "Please set the client in the session, NeatSession(client=client)."
189
- )
190
- importer = dms_importer
191
- else:
192
- raise NeatValueError(f"Unsupported YAML format: {format}")
193
- return self._state.data_model_import(importer)
@@ -1,45 +0,0 @@
1
- from cognite.neat.core._data_model.models.conceptual._verified import ConceptualDataModel
2
- from cognite.neat.core._data_model.models.physical._verified import PhysicalDataModel
3
- from cognite.neat.session._session._data_model._read import ReadAPI
4
- from cognite.neat.session._session._data_model._show import ShowAPI
5
- from cognite.neat.session._session._data_model._write import WriteAPI
6
- from cognite.neat.session._state import SessionState
7
- from cognite.neat.session.exceptions import session_class_wrapper
8
-
9
-
10
- @session_class_wrapper
11
- class DataModelAPI:
12
- """API for managing data models in NEAT session."""
13
-
14
- def __init__(self, state: SessionState) -> None:
15
- self._state = state
16
- self.read = ReadAPI(state)
17
- self.write = WriteAPI(state)
18
- self.show = ShowAPI(state)
19
-
20
- @property
21
- def physical(self) -> PhysicalDataModel | None:
22
- """Access to the physical data model level."""
23
- return self._state.data_model_store.try_get_last_physical_data_model
24
-
25
- @property
26
- def conceptual(self) -> ConceptualDataModel | None:
27
- """Access to the conceptual data model level."""
28
- return self._state.data_model_store.try_get_last_conceptual_data_model
29
-
30
- def _repr_html_(self) -> str:
31
- if self._state.data_model_store.empty:
32
- return (
33
- "<strong>No data model</strong>. Get started by reading data model with the <em>.read</em> attribute."
34
- )
35
-
36
- output = []
37
-
38
- if self._state.data_model_store.provenance:
39
- if self.physical:
40
- html = self.physical._repr_html_()
41
- if self.conceptual:
42
- html = self.conceptual._repr_html_()
43
- output.append(f"<H2>Data Model</H2><br />{html}")
44
-
45
- return "<br />".join(output)
@@ -1,147 +0,0 @@
1
- from typing import Any
2
-
3
- import networkx as nx
4
- from IPython.display import HTML, display
5
- from pyvis.network import Network as PyVisNetwork # type: ignore
6
-
7
- from cognite.neat.core._constants import IN_NOTEBOOK, IN_PYODIDE
8
- from cognite.neat.core._data_model.analysis._base import DataModelAnalysis
9
- from cognite.neat.core._utils.io_ import to_directory_compatible
10
- from cognite.neat.core._utils.rdf_ import uri_display_name
11
- from cognite.neat.session._show import _generate_hex_color_per_type
12
- from cognite.neat.session._state import SessionState
13
- from cognite.neat.session.exceptions import NeatSessionError, session_class_wrapper
14
-
15
-
16
- @session_class_wrapper
17
- class ShowAPI:
18
- def __init__(self, state: SessionState) -> None:
19
- self._state = state
20
-
21
- def __call__(self) -> Any:
22
- """Generates a visualization of the data model without implements."""
23
- if self._state.data_model_store.empty:
24
- raise NeatSessionError("No data model available. Try using [bold].read[/bold] to read a data model.")
25
-
26
- last_target = self._state.data_model_store.provenance[-1].target_entity
27
- data_model = last_target.physical or last_target.conceptual
28
- analysis = DataModelAnalysis(physical=last_target.physical, conceptual=last_target.conceptual)
29
-
30
- if last_target.physical is not None:
31
- di_graph = analysis._physical_di_graph(format="data-model")
32
- else:
33
- di_graph = analysis._conceptual_di_graph(format="data-model")
34
-
35
- identifier = to_directory_compatible(str(data_model.metadata.identifier))
36
- name = f"{identifier}.html"
37
- return self._generate_visualization(di_graph, name)
38
-
39
- def implements(self) -> Any:
40
- """Generates a visualization of implements of the data model concepts, showing
41
- the inheritance between the concepts in the data model."""
42
- if self._state.data_model_store.empty:
43
- raise NeatSessionError("No data model available. Try using [bold].read[/bold] to read a data model.")
44
-
45
- last_target = self._state.data_model_store.provenance[-1].target_entity
46
- data_model = last_target.physical or last_target.conceptual
47
- analysis = DataModelAnalysis(physical=last_target.physical, conceptual=last_target.conceptual)
48
-
49
- if last_target.physical is not None:
50
- di_graph = analysis._physical_di_graph(format="implements")
51
- else:
52
- di_graph = analysis._conceptual_di_graph(format="implements")
53
- identifier = to_directory_compatible(str(data_model.metadata.identifier))
54
- name = f"{identifier}_implements.html"
55
- return self._generate_visualization(di_graph, name)
56
-
57
- def provenance(self) -> Any:
58
- if not self._state.data_model_store.provenance:
59
- raise NeatSessionError("No data model available. Try using [bold].read[/bold] to load data model.")
60
-
61
- di_graph = self._generate_provenance_di_graph()
62
- unique_hash = self._state.data_model_store.calculate_provenance_hash(shorten=True)
63
- return self._generate_visualization(di_graph, name=f"data_model_provenance_{unique_hash}.html")
64
-
65
- def _generate_visualization(self, di_graph: nx.DiGraph, name: str) -> Any:
66
- if not IN_NOTEBOOK:
67
- raise NeatSessionError("Visualization is only available in Jupyter notebooks!")
68
-
69
- net = PyVisNetwork(
70
- notebook=IN_NOTEBOOK,
71
- cdn_resources="remote",
72
- directed=True,
73
- height="750px",
74
- width="100%",
75
- select_menu=IN_NOTEBOOK,
76
- )
77
-
78
- # Change the plotting layout
79
- net.repulsion(
80
- node_distance=100,
81
- central_gravity=0.3,
82
- spring_length=200,
83
- spring_strength=0.05,
84
- damping=0.09,
85
- )
86
-
87
- net.from_nx(di_graph)
88
- if IN_PYODIDE:
89
- net.write_html(name)
90
- return display(HTML(name))
91
-
92
- else:
93
- return net.show(name)
94
-
95
- def _generate_provenance_di_graph(self) -> nx.DiGraph:
96
- di_graph = nx.DiGraph()
97
- hex_colored_types = _generate_hex_color_per_type(["Agent", "Entity", "Activity", "Export", "Pruned"])
98
-
99
- for change in self._state.data_model_store.provenance:
100
- source = uri_display_name(change.source_entity.id_)
101
- target = uri_display_name(change.target_entity.id_)
102
- agent = uri_display_name(change.agent.id_)
103
-
104
- di_graph.add_node(
105
- source,
106
- label=source,
107
- type="Entity",
108
- title="Entity",
109
- color=hex_colored_types["Entity"],
110
- )
111
-
112
- di_graph.add_node(
113
- target,
114
- label=target,
115
- type="Entity",
116
- title="Entity",
117
- color=hex_colored_types["Entity"],
118
- )
119
-
120
- di_graph.add_node(
121
- agent,
122
- label=agent,
123
- type="Agent",
124
- title="Agent",
125
- color=hex_colored_types["Agent"],
126
- )
127
-
128
- di_graph.add_edge(source, agent, label="used", color="grey")
129
- di_graph.add_edge(agent, target, label="generated", color="grey")
130
-
131
- for (
132
- source_id,
133
- exports,
134
- ) in self._state.data_model_store.exports_by_source_entity_id.items():
135
- source_shorten = uri_display_name(source_id)
136
- for export in exports:
137
- export_id = uri_display_name(export.target_entity.id_)
138
- di_graph.add_node(
139
- export_id,
140
- label=export_id,
141
- type="Export",
142
- title="Export",
143
- color=hex_colored_types["Export"],
144
- )
145
- di_graph.add_edge(source_shorten, export_id, label="exported", color="grey")
146
-
147
- return di_graph
@@ -1,335 +0,0 @@
1
- import warnings
2
- from pathlib import Path
3
- from typing import Any, Literal, cast, overload
4
-
5
- from cognite.client.data_classes.data_modeling import DataModelIdentifier
6
-
7
- from cognite.neat.core._client._api_client import NeatClient
8
- from cognite.neat.core._constants import COGNITE_MODELS
9
- from cognite.neat.core._data_model import exporters
10
- from cognite.neat.core._data_model._shared import VerifiedDataModel
11
- from cognite.neat.core._data_model.importers._dms2data_model import DMSImporter
12
- from cognite.neat.core._data_model.models.conceptual._verified import ConceptualDataModel
13
- from cognite.neat.core._data_model.models.physical._verified import PhysicalDataModel, PhysicalMetadata
14
- from cognite.neat.core._issues._base import IssueList
15
- from cognite.neat.core._issues._contextmanagers import catch_issues
16
- from cognite.neat.core._utils.auxiliary import filter_kwargs_by_method
17
- from cognite.neat.core._utils.reader._base import NeatReader
18
- from cognite.neat.core._utils.upload import UploadResultList
19
- from cognite.neat.session._state import SessionState
20
- from cognite.neat.session.exceptions import NeatSessionError, session_class_wrapper
21
-
22
- InternalWriterName = Literal["excel", "ontology", "shacl", "cdf", "yaml"]
23
-
24
-
25
- @session_class_wrapper
26
- class WriteAPI:
27
- def __init__(self, state: SessionState) -> None:
28
- self._state = state
29
-
30
- def __call__(
31
- self, name: str, io: str | Path | None = None, **kwargs: Any
32
- ) -> str | UploadResultList | IssueList | None:
33
- """Provides access to the writers for exporting data models to different formats.
34
-
35
- Args:
36
- name (str): The name of format (e.g. Excel) writer is handling.
37
- io (str | Path | None): The input/output interface for the writer.
38
- **kwargs (Any): Additional keyword arguments for the writer.
39
-
40
- !!! note "kwargs"
41
- Users must consult the documentation of the writer
42
- to understand what keyword arguments are supported.
43
- """
44
-
45
- # Clean the input name once before matching.
46
- clean_name: InternalWriterName | str = name.strip().lower()
47
-
48
- match clean_name:
49
- case "excel":
50
- if io is None:
51
- raise NeatSessionError("'io' parameter is required for Excel format.")
52
- return self.excel(cast(str | Path, io), **filter_kwargs_by_method(kwargs, self.excel))
53
- case "cdf":
54
- return self.cdf(**filter_kwargs_by_method(kwargs, self.cdf))
55
- case "yaml":
56
- return self.yaml(io, **filter_kwargs_by_method(kwargs, self.yaml))
57
- case "ontology":
58
- if io is None:
59
- raise NeatSessionError("'io' parameter is required for ontology format.")
60
- self.ontology(cast(str | Path, io))
61
- return None
62
- case "shacl":
63
- if io is None:
64
- raise NeatSessionError("'io' parameter is required for SHACL format.")
65
- self.shacl(cast(str | Path, io))
66
- return None
67
- case _:
68
- raise NeatSessionError(
69
- f"Unsupported data model writer: {name}. "
70
- "Please use one of the following: 'excel', 'cdf', 'yaml', 'ontology', 'shacl'."
71
- )
72
-
73
- def excel(
74
- self,
75
- io: str | Path,
76
- *,
77
- include_reference: bool | DataModelIdentifier = True,
78
- include_properties: Literal["same-space", "all"] = "all",
79
- add_empty_rows: bool = False,
80
- ) -> IssueList | None:
81
- """Export the verified data model to Excel.
82
-
83
- Args:
84
- io: The file path or file-like object to write the Excel file to.
85
- include_reference: If True, the reference data model will be included. Defaults to True.
86
- Note that this only applies if you have created the data model using the
87
- create.enterprise_model(...), create.solution_model(), or create.data_product_model() methods.
88
- You can also provide a DataModelIdentifier directly, which will be read from CDF
89
- include_properties: The properties to include in the Excel file. Defaults to "all".
90
- - "same-space": Only properties that are in the same space as the data model will be included.
91
- add_empty_rows: If True, empty rows will be added between each component. Defaults to False.
92
-
93
- Example:
94
- Export conceptual data model to an Excel file
95
- ```python
96
- conceptual_dm_file_name = "conceptual_data_model.xlsx"
97
- neat.data_model.write.excel(conceptual_dm_file_name)
98
- ```
99
-
100
- Example:
101
- Read CogniteCore model, convert it to an enterprise model, and export it to an excel file
102
- ```python
103
- client = CogniteClient()
104
- neat = NeatSession(client)
105
-
106
- neat.data_model.read.cdf(("cdf_cdm", "CogniteCore", "v1"))
107
- neat.data_model.create.enterprise_model(
108
- data_model_id=("sp_doctrino_space", "ExtensionCore", "v1"),
109
- org_name="MyOrg",
110
- )
111
- physical_dm_file_name = "physical_dm.xlsx"
112
- neat.data_model.write.excel(physical_dm_file_name, include_reference=True)
113
- ```
114
-
115
- Example:
116
- Read the data model ("my_space", "ISA95Model", "v5") and export it to an excel file with the
117
- CogniteCore model in the reference sheets.
118
- ```python
119
- client = CogniteClient()
120
- neat = NeatSession(client)
121
-
122
- neat.data_model.read.cdf(("my_space", "ISA95Model", "v5"))
123
- physical_dm_file_name = "physical_dm.xlsx"
124
- neat.data_model.write.excel(physical_dm_file_name, include_reference=("cdf_cdm", "CogniteCore", "v1"))
125
- """
126
- reference_data_model_with_prefix: tuple[VerifiedDataModel, str] | None = None
127
- include_properties = include_properties.strip().lower()
128
- if include_properties not in ["same-space", "all"]:
129
- raise NeatSessionError(
130
- f"Invalid include_properties value: '{include_properties}'. Must be 'same-space' or 'all'."
131
- )
132
-
133
- if include_reference is not False:
134
- if include_reference is True and self._state.last_reference is not None:
135
- ref_data_model: ConceptualDataModel | PhysicalDataModel | None = self._state.last_reference
136
- elif include_reference is True:
137
- ref_data_model = None
138
- else:
139
- if not self._state.client:
140
- raise NeatSessionError("No client provided!")
141
- ref_data_model = None
142
- with catch_issues() as issues:
143
- ref_read = DMSImporter.from_data_model_id(self._state.client, include_reference).to_data_model()
144
- if ref_read.unverified_data_model is not None:
145
- ref_data_model = ref_read.unverified_data_model.as_verified_data_model()
146
- if ref_data_model is None or issues.has_errors:
147
- issues.action = f"Read {include_reference}"
148
- return issues
149
- if ref_data_model is not None:
150
- prefix = "Ref"
151
- if (
152
- isinstance(ref_data_model.metadata, PhysicalMetadata)
153
- and ref_data_model.metadata.as_data_model_id() in COGNITE_MODELS
154
- ):
155
- prefix = "CDM"
156
- reference_data_model_with_prefix = ref_data_model, prefix
157
-
158
- exporter = exporters.ExcelExporter(
159
- styling="maximal",
160
- reference_data_model_with_prefix=reference_data_model_with_prefix,
161
- add_empty_rows=add_empty_rows,
162
- include_properties=cast(Literal["same-space", "all"], include_properties),
163
- )
164
- self._state.data_model_store.export_to_file(exporter, NeatReader.create(io).materialize_path())
165
- return None
166
-
167
- def cdf(
168
- self,
169
- *,
170
- existing: Literal["fail", "skip", "update", "force", "recreate"] = "update",
171
- dry_run: bool = False,
172
- drop_data: bool = False,
173
- ) -> UploadResultList:
174
- """Export the verified DMS data model to CDF.
175
-
176
- Args:
177
- existing: What to do if the component already exists. Defaults to "update".
178
- See the note below for more information about the options.
179
- dry_run: If True, no changes will be made to CDF. Defaults to False.
180
- drop_data: If existing is 'force' or 'recreate' and the operation will lead to data loss,
181
- the component will be skipped unless drop_data is True. Defaults to False.
182
- Note this only applies to spaces and containers if they contain data.
183
-
184
- !!! note "Data Model creation modes"
185
- - "fail": If any component already exists, the export will fail.
186
- - "skip": If any component already exists, it will be skipped.
187
- - "update": If any component already exists, it will be updated. For data models, views, and containers
188
- this means combining the existing and new component. Fo example, for data models the new
189
- views will be added to the existing views.
190
- - "force": If any component already exists, and the update fails, it will be deleted and recreated.
191
- - "recreate": All components will be deleted and recreated. The exception is spaces, which will be updated.
192
-
193
- """
194
-
195
- self._state._raise_exception_if_condition_not_met(
196
- "Export DMS data model to CDF",
197
- client_required=True,
198
- )
199
-
200
- exporter = exporters.DMSExporter(existing=existing, drop_data=drop_data)
201
-
202
- result = self._state.data_model_store.export_to_cdf(exporter, cast(NeatClient, self._state.client), dry_run)
203
- print("You can inspect the details with the .inspect.outcome.data_model(...) method.")
204
- return result
205
-
206
- @overload
207
- def yaml(
208
- self,
209
- io: None,
210
- *,
211
- format: Literal["neat", "toolkit"] = "neat",
212
- skip_system_spaces: bool = True,
213
- ) -> str: ...
214
-
215
- @overload
216
- def yaml(
217
- self,
218
- io: str | Path,
219
- *,
220
- format: Literal["neat", "toolkit"] = "neat",
221
- skip_system_spaces: bool = True,
222
- ) -> None: ...
223
-
224
- def yaml(
225
- self,
226
- io: str | Path | None = None,
227
- *,
228
- format: Literal["neat", "toolkit"] = "neat",
229
- skip_system_spaces: bool = True,
230
- ) -> str | None:
231
- """Export the verified data model to YAML.
232
-
233
- Args:
234
- io: The file path or file-like object to write the YAML file to. Defaults to None.
235
- format: The format of the YAML file. Defaults to "neat".
236
- skip_system_spaces: If True, system spaces will be skipped. Defaults to True.
237
-
238
- !!! note "YAML formats"
239
- - "neat": This is the format Neat uses to store the data model.
240
- - "toolkit": This is the format used by Cognite Toolkit, that matches the CDF API.
241
-
242
- Returns:
243
- str | None: If io is None, the YAML string will be returned. Otherwise, None will be returned.
244
-
245
- Example:
246
- Export to yaml file in the case of "neat" format
247
- ```python
248
- your_yaml_file_name = "neat_dm.yaml"
249
- neat.data_model.write.yaml(your_yaml_file_name, format="neat")
250
- ```
251
-
252
- Example:
253
- Export yaml files as a zip folder in the case of "toolkit" format
254
- ```python
255
- your_zip_folder_name = "toolkit_data_model_files.zip"
256
- neat.data_model.write.yaml(your_zip_folder_name, format="toolkit")
257
- ```
258
-
259
- Example:
260
- Export yaml files to a folder in the case of "toolkit" format
261
- ```python
262
- your_folder_name = "my_project/data_model_files"
263
- neat.data_model.write.yaml(your_folder_name, format="toolkit")
264
- ```
265
- """
266
-
267
- if format == "neat":
268
- exporter = exporters.YAMLExporter()
269
- if io is None:
270
- return self._state.data_model_store.export(exporter)
271
-
272
- self._state.data_model_store.export_to_file(exporter, NeatReader.create(io).materialize_path())
273
- elif format == "toolkit":
274
- if io is None:
275
- raise NeatSessionError(
276
- "Please provide a zip file or directory path to write the YAML files to."
277
- "This is required for the 'toolkit' format."
278
- )
279
- user_path = NeatReader.create(io).materialize_path()
280
- if user_path.suffix == "" and not user_path.exists():
281
- user_path.mkdir(parents=True)
282
- self._state.data_model_store.export_to_file(
283
- exporters.DMSExporter(remove_cdf_spaces=skip_system_spaces), user_path
284
- )
285
- else:
286
- raise NeatSessionError("Please provide a valid format. 'neat' or 'toolkit'")
287
-
288
- return None
289
-
290
- def ontology(self, io: str | Path) -> None:
291
- """Write out data model as OWL ontology.
292
-
293
- Args:
294
- io: The file path to file-like object to write the session to.
295
-
296
- Example:
297
- Export the session to a file
298
- ```python
299
- ontology_file_name = "neat_session.ttl"
300
- neat.data_model.write.ontology(ontology_file_name)
301
- ```
302
- """
303
-
304
- filepath = self._prepare_ttl_filepath(io)
305
- exporter = exporters.OWLExporter()
306
- self._state.data_model_store.export_to_file(exporter, filepath)
307
- return None
308
-
309
- def shacl(self, io: str | Path) -> None:
310
- """Write out data model as SHACL shapes.
311
-
312
- Args:
313
- io: The file path to file-like object to write the session to.
314
-
315
- Example:
316
- Export the session to a file
317
- ```python
318
- shacl_file_name = "neat_session.shacl.ttl"
319
- neat.data_model.write.shacl(shacl_file_name)
320
- ```
321
- """
322
-
323
- filepath = self._prepare_ttl_filepath(io)
324
- exporter = exporters.SHACLExporter()
325
- self._state.data_model_store.export_to_file(exporter, filepath)
326
- return None
327
-
328
- def _prepare_ttl_filepath(self, io: str | Path) -> Path:
329
- """Ensures the filepath has a .ttl extension, adding it if missing."""
330
- filepath = NeatReader.create(io).materialize_path()
331
- if filepath.suffix != ".ttl":
332
- warnings.filterwarnings("default")
333
- warnings.warn("File extension is not .ttl, adding it to the file name", stacklevel=2)
334
- filepath = filepath.with_suffix(".ttl")
335
- return filepath