cognite-neat 0.102.0__py3-none-any.whl → 0.103.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cognite-neat has been flagged as potentially problematic.
Files changed (64)
  1. cognite/neat/__init__.py +1 -1
  2. cognite/neat/_app/api/routers/crud.py +1 -1
  3. cognite/neat/_client/__init__.py +1 -1
  4. cognite/neat/_client/_api/data_modeling_loaders.py +1 -1
  5. cognite/neat/_client/_api/schema.py +1 -1
  6. cognite/neat/_graph/_tracking/__init__.py +1 -1
  7. cognite/neat/_graph/extractors/__init__.py +8 -8
  8. cognite/neat/_graph/extractors/_mock_graph_generator.py +2 -3
  9. cognite/neat/_graph/loaders/_base.py +1 -1
  10. cognite/neat/_graph/loaders/_rdf2dms.py +165 -47
  11. cognite/neat/_graph/transformers/__init__.py +13 -9
  12. cognite/neat/_graph/transformers/_value_type.py +196 -2
  13. cognite/neat/_issues/__init__.py +6 -6
  14. cognite/neat/_issues/_base.py +4 -4
  15. cognite/neat/_issues/errors/__init__.py +22 -22
  16. cognite/neat/_issues/formatters.py +1 -1
  17. cognite/neat/_issues/warnings/__init__.py +20 -18
  18. cognite/neat/_issues/warnings/_properties.py +7 -0
  19. cognite/neat/_issues/warnings/user_modeling.py +2 -2
  20. cognite/neat/_rules/analysis/__init__.py +1 -1
  21. cognite/neat/_rules/catalog/__init__.py +1 -0
  22. cognite/neat/_rules/catalog/hello_world_pump.xlsx +0 -0
  23. cognite/neat/_rules/exporters/__init__.py +5 -5
  24. cognite/neat/_rules/exporters/_rules2excel.py +5 -4
  25. cognite/neat/_rules/importers/__init__.py +4 -4
  26. cognite/neat/_rules/importers/_base.py +7 -3
  27. cognite/neat/_rules/importers/_rdf/__init__.py +1 -1
  28. cognite/neat/_rules/models/__init__.py +5 -5
  29. cognite/neat/_rules/models/_base_rules.py +1 -1
  30. cognite/neat/_rules/models/dms/__init__.py +11 -11
  31. cognite/neat/_rules/models/dms/_validation.py +16 -10
  32. cognite/neat/_rules/models/entities/__init__.py +26 -26
  33. cognite/neat/_rules/models/information/__init__.py +5 -5
  34. cognite/neat/_rules/models/mapping/_classic2core.yaml +54 -8
  35. cognite/neat/_rules/transformers/__init__.py +12 -12
  36. cognite/neat/_rules/transformers/_pipelines.py +10 -5
  37. cognite/neat/_session/_base.py +71 -0
  38. cognite/neat/_session/_collector.py +3 -1
  39. cognite/neat/_session/_drop.py +10 -0
  40. cognite/neat/_session/_inspect.py +35 -1
  41. cognite/neat/_session/_mapping.py +5 -0
  42. cognite/neat/_session/_prepare.py +121 -15
  43. cognite/neat/_session/_read.py +180 -20
  44. cognite/neat/_session/_set.py +11 -1
  45. cognite/neat/_session/_show.py +41 -2
  46. cognite/neat/_session/_to.py +58 -10
  47. cognite/neat/_session/engine/__init__.py +1 -1
  48. cognite/neat/_store/__init__.py +3 -2
  49. cognite/neat/_store/{_base.py → _graph_store.py} +33 -0
  50. cognite/neat/_store/_provenance.py +11 -1
  51. cognite/neat/_store/_rules_store.py +20 -0
  52. cognite/neat/_utils/auth.py +1 -1
  53. cognite/neat/_utils/reader/__init__.py +1 -1
  54. cognite/neat/_version.py +2 -2
  55. cognite/neat/_workflows/__init__.py +3 -3
  56. cognite/neat/_workflows/steps/lib/current/graph_extractor.py +1 -1
  57. cognite/neat/_workflows/steps/lib/current/rules_exporter.py +1 -1
  58. cognite/neat/_workflows/steps/lib/current/rules_importer.py +2 -2
  59. cognite/neat/_workflows/steps/lib/io/io_steps.py +3 -3
  60. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.0.dist-info}/METADATA +1 -1
  61. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.0.dist-info}/RECORD +64 -62
  62. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.0.dist-info}/LICENSE +0 -0
  63. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.0.dist-info}/WHEEL +0 -0
  64. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.0.dist-info}/entry_points.txt +0 -0
cognite/neat/_session/_collector.py

@@ -60,7 +60,7 @@ class Collector:
         if len(args) > 1:
             # The first argument is self.
             for i, arg in enumerate(args[1:]):
-                event_information[f"arg{i}"] = arg
+                event_information[f"arg{i}"] = self._serialize_value(arg)[:500]
 
         if kwargs:
             for key, value in kwargs.items():
@@ -73,6 +73,8 @@ class Collector:
             return str(value)
         if isinstance(value, list | tuple | dict):
             return str(value)
+        if callable(value):
+            return value.__name__
         return str(type(value))
 
     def _track(self, event_name: str, event_information: dict[str, Any]) -> bool:
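Reviewer note: the change above routes every tracked positional argument through `_serialize_value` and truncates the result to 500 characters, and callables are now reported by name only. A minimal standalone sketch of that behaviour (the first branch is assumed from surrounding context, not shown in the hunk):

```python
from typing import Any


def serialize_value(value: Any) -> str:
    """Illustrative re-implementation of the collector's value serialization."""
    if isinstance(value, str | int | float | bool):  # assumed branch, not visible in the hunk
        return str(value)
    if isinstance(value, list | tuple | dict):
        return str(value)
    if callable(value):
        # New in 0.103.0: callables are recorded by name, not by repr().
        return value.__name__
    return str(type(value))


# Mirror the truncation applied to tracked positional arguments.
event_information = {"arg0": serialize_value(list(range(1000)))[:500]}
```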
cognite/neat/_session/_drop.py

@@ -11,6 +11,10 @@ except ImportError:
 
 @session_class_wrapper
 class DropAPI:
+    """
+    Drop instances from the session. Check out `.instances()` for performing the operation.
+    """
+
     def __init__(self, state: SessionState):
         self._state = state
 
@@ -19,6 +23,12 @@ class DropAPI:
 
         Args:
             type: The type of instances to drop.
+
+        Example:
+            ```python
+            node_type_to_drop = "Pump"
+            neat.drop.instances(node_type_to_drop)
+            ```
         """
         type_list = type if isinstance(type, list) else [type]
         uri_type_type = dict((v, k) for k, v in self._state.instances.store.queries.types.items())
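Reviewer note: `instances` normalizes its argument with `type_list = type if isinstance(type, list) else [type]`, so a list of types appears to be accepted as well. A hedged usage sketch from an active `NeatSession` named `neat`:

```python
# Drop a single type, as in the docstring example.
neat.drop.instances("Pump")

# Inferred from the list handling above: drop several types in one call.
neat.drop.instances(["Pump", "Valve"])
```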
cognite/neat/_session/_inspect.py

@@ -21,6 +21,28 @@ except ImportError:
 
 @session_class_wrapper
 class InspectAPI:
+    """Inspect issues or outcomes after performing operations with NeatSession.
+    To inspect properties of the current data model, try out `.properties()`.
+
+    Example:
+        Inspect issues
+        ```python
+        neat.inspect.issues()
+        ```
+
+    Example:
+        Inspect outcome after writing a data model
+        ```python
+        neat.inspect.outcome.data_model()
+        ```
+
+    Example:
+        Inspect outcome after writing instances
+        ```python
+        neat.inspect.outcome.instances()
+        ```
+    """
+
     def __init__(self, state: SessionState) -> None:
         self._state = state
         self.issues = InspectIssues(state)
@@ -28,7 +50,15 @@ class InspectAPI:
 
     @property
     def properties(self) -> pd.DataFrame:
-        """Returns the properties of the current data model."""
+        """Returns the properties of the current data model.
+
+        Example:
+            Inspect properties of the current data model
+            ```python
+            # From an active NeatSession
+            neat.inspect.properties
+            ```
+        """
         return self._state.data_model.last_verified_rule[1].properties.to_pandas()
 
 
@@ -106,6 +136,10 @@ class InspectIssues:
 
 @session_class_wrapper
 class InspectOutcome:
+    """
+    Inspect the outcome after writing a Data Model and Instances to CDF.
+    """
+
     def __init__(self, state: SessionState) -> None:
         self.data_model = InspectUploadOutcome(lambda: state.data_model.last_outcome)
         self.instances = InspectUploadOutcome(lambda: state.instances.last_outcome)
cognite/neat/_session/_mapping.py

@@ -38,6 +38,11 @@ class DataModelMappingAPI:
         If you extend CogniteAsset, with for example, ClassicAsset. You will map the property `parentId` to `parent`.
         If you set `user_parent_property_name` to True, the `parentId` will be renamed to `parent` after the
         mapping is done. If you set it to False, the property will remain `parentId`.
+
+        Example:
+            ```python
+            neat.mapping.classic_to_core(company_prefix="WindFarmX", use_parent_property_name=True)
+            ```
         """
         source_id, rules = self._state.data_model.last_verified_dms_rules
 
cognite/neat/_session/_prepare.py

@@ -1,7 +1,7 @@
 import copy
-from collections.abc import Collection
+from collections.abc import Callable, Collection
 from datetime import datetime, timezone
-from typing import Literal, cast
+from typing import Any, Literal, cast
 
 from cognite.client.data_classes.data_modeling import DataModelIdentifier
 from rdflib import URIRef
@@ -13,6 +13,8 @@ from cognite.neat._constants import (
 )
 from cognite.neat._graph.transformers import (
     AttachPropertyFromTargetToSource,
+    ConvertLiteral,
+    LiteralToEntity,
     PruneDeadEndEdges,
     PruneInstancesOfUnknownType,
     PruneTypes,
@@ -47,6 +49,10 @@ except ImportError:
 
 @session_class_wrapper
 class PrepareAPI:
+    """Apply various operations on the knowledge graph as a necessary preprocessing step before for instance
+    inferring a data model or exporting the knowledge graph to a desired destination.
+    """
+
     def __init__(self, client: NeatClient | None, state: SessionState, verbose: bool) -> None:
         self._state = state
         self._verbose = verbose
@@ -56,6 +62,8 @@ class PrepareAPI:
 
 @session_class_wrapper
 class InstancePrepareAPI:
+    """Operations to perform on instances of data in the knowledge graph."""
+
     def __init__(self, state: SessionState, verbose: bool) -> None:
         self._state = state
         self._verbose = verbose
@@ -63,14 +71,20 @@ class InstancePrepareAPI:
     def dexpi(self) -> None:
         """Prepares extracted DEXPI graph for further usage in CDF
 
-        This method bundles several graph transformers which:
-        - attach values of generic attributes to nodes
-        - create associations between nodes
-        - remove unused generic attributes
-        - remove associations between nodes that do not exist in the extracted graph
-        - remove edges to nodes that do not exist in the extracted graph
+        !!! note "This method bundles several graph transformers which"
+            - attach values of generic attributes to nodes
+            - create associations between nodes
+            - remove unused generic attributes
+            - remove associations between nodes that do not exist in the extracted graph
+            - remove edges to nodes that do not exist in the extracted graph
 
         and therefore safeguard CDF from a bad graph
+
+        Example:
+            Apply Dexpi specific transformations:
+            ```python
+            neat.prepare.instances.dexpi()
+            ```
         """
 
         DEXPI = get_default_prefixes_and_namespaces()["dexpi"]
@@ -104,12 +118,18 @@ class InstancePrepareAPI:
     def aml(self) -> None:
         """Prepares extracted AutomationML graph for further usage in CDF
 
-        This method bundles several graph transformers which:
-        - attach values of attributes to nodes
-        - remove unused attributes
-        - remove edges to nodes that do not exist in the extracted graph
+        !!! note "This method bundles several graph transformers which"
+            - attach values of attributes to nodes
+            - remove unused attributes
+            - remove edges to nodes that do not exist in the extracted graph
 
         and therefore safeguard CDF from a bad graph
+
+        Example:
+            Apply AML specific transformations:
+            ```python
+            neat.prepare.instances.aml()
+            ```
        """

        AML = get_default_prefixes_and_namespaces()["aml"]
@@ -162,7 +182,32 @@ class InstancePrepareAPI:
         target value, as follows:
         (SourceType)-[connection]->(TargetType)
 
+        Example:
+            Make connection on exact match:
+            ```python
+            # From an active NeatSession
+            neat.read.csv("workitem.Table.csv",
+                          type = "Activity",
+                          primary_key="sourceId")
+
+            neat.read.csv("assets.Table.csv",
+                          type="Asset",
+                          primary_key="WMT_TAG_GLOBALID")
+
+            # Here we specify what column from the source table we should use when we link it with a column in the
+            # target table. In this case, it is the "workorderItemname" column in the source table
+            source = ("Activity", "workorderItemname")
+
+            # Here we give a name to the new property that is created when a match between the source and target is
+            # found
+            connection = "asset"
+
+            # Here we specify what column from the target table we should use when searching for a match.
+            # In this case, it is the "wmtTagName" column in the target table
+            target = ("Asset", "wmtTagName")
 
+            neat.prepare.instances.make_connection_on_exact_match(source, target, connection)
+            ```
         """
 
         subject_type, subject_predicate = self._get_type_and_property_uris(*source)
@@ -216,9 +261,67 @@ class InstancePrepareAPI:
         transformer = RelationshipAsEdgeTransformer(min_relationship_types, limit_per_type)
         self._state.instances.store.transform(transformer)
 
+    def convert_data_type(self, source: tuple[str, str], *, convert: Callable[[Any], Any] | None = None) -> None:
+        """Convert the data type of the given property.
+
+        This is, for example, useful when you have a boolean property that you want to convert to an enum.
+
+        Args:
+            source: The source of the conversion. A tuple of (type, property)
+                where property is the property that should be converted.
+            convert: The function to use for the conversion. The function should take the value of the property
+                as input and return the converted value. Default to assume you have a string that should be
+                converted to int, float, bool, or datetime.
+
+        Example:
+            Convert a boolean property to a string:
+            ```python
+            neat.prepare.instances.convert_data_type(
+                ("TimeSeries", "isString"),
+                convert=lambda is_string: "string" if is_string else "numeric"
+            )
+            ```
+
+        """
+        subject_type, subject_predicate = self._get_type_and_property_uris(*source)
+
+        transformer = ConvertLiteral(subject_type, subject_predicate, convert)
+        self._state.instances.store.transform(transformer)
+
+    def property_to_type(self, source: tuple[str | None, str], type: str, new_property: str | None = None) -> None:
+        """Convert a property to a new type.
+
+        Args:
+            source: The source of the conversion. A tuple of (type, property)
+                where property is the property that should be converted.
+                You can pass (None, property) to covert all properties with the given name.
+            type: The new type of the property.
+            new_property: Add the identifier as a new property. If None, the new entity will not have a property.
+
+        Example:
+            Convert the property 'source' to SourceSystem
+            ```python
+            neat.prepare.instances.property_to_type(
+                (None, "source"), "SourceSystem"
+            )
+            ```
+        """
+        subject_type: URIRef | None = None
+        if source[0] is not None:
+            subject_type, subject_predicate = self._get_type_and_property_uris(*source)  # type: ignore[arg-type, assignment]
+        else:
+            subject_predicate = self._state.instances.store.queries.property_uri(source[1])[0]
+
+        transformer = LiteralToEntity(subject_type, subject_predicate, type, new_property)
+        self._state.instances.store.transform(transformer)
+
 
 @session_class_wrapper
 class DataModelPrepareAPI:
+    """Operations to perform on a data model as part of a workflow before writing the data model
+    to a desired destination.
+    """
+
     def __init__(self, client: NeatClient | None, state: SessionState, verbose: bool) -> None:
         self._client = client
         self._state = state
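Reviewer note: the two new instance transformers, `ConvertLiteral` and `LiteralToEntity`, are exposed as `convert_data_type` and `property_to_type` above. A hedged sketch combining the docstring examples (the type and property names are illustrative):

```python
# Parse a string-valued literal with the default conversion, which the docstring
# says attempts int, float, bool, or datetime.
neat.prepare.instances.convert_data_type(("Asset", "installedDate"))  # illustrative names

# Promote every literal property named "source" to a SourceSystem entity and keep
# the original value on a new "name" property (new_property is optional).
neat.prepare.instances.property_to_type((None, "source"), "SourceSystem", new_property="name")
```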
@@ -288,13 +391,15 @@ class DataModelPrepareAPI:
             move_connections: If True, the connections will be moved to the new data model.
 
         !!! note "Enterprise Data Model Creation"
+
             Always create an enterprise data model from a Cognite Data Model as this will
             assure all the Cognite Data Fusion applications to run smoothly, such as
-            - Search
-            - Atlas AI
-            - ...
+                - Search
+                - Atlas AI
+                - ...
 
         !!! note "Move Connections"
+
             If you want to move the connections to the new data model, set the move_connections
             to True. This will move the connections to the new data model and use new model
             views as the source and target views.
@@ -345,6 +450,7 @@ class DataModelPrepareAPI:
             dummy_property: The dummy property to use as placeholder for the views in the new data model.
 
         !!! note "Solution Data Model Mode"
+
             The read-only solution model will only be able to read from the existing containers
             from the enterprise data model, therefore the solution data model will not have
             containers in the solution data model space. Meaning the solution data model views
cognite/neat/_session/_read.py

@@ -1,7 +1,7 @@
 import tempfile
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Any, Literal
+from typing import Any, Literal, cast
 
 from cognite.client.data_classes.data_modeling import DataModelId, DataModelIdentifier
 
@@ -11,7 +11,7 @@ from cognite.neat._graph import examples as instances_examples
 from cognite.neat._graph import extractors
 from cognite.neat._issues import IssueList
 from cognite.neat._issues.errors import NeatValueError
-from cognite.neat._rules import importers
+from cognite.neat._rules import catalog, importers
 from cognite.neat._rules._shared import ReadRules
 from cognite.neat._rules.importers import BaseImporter
 from cognite.neat._store._provenance import Activity as ProvenanceActivity
@@ -27,6 +27,8 @@ from .exceptions import NeatSessionError, session_class_wrapper
 
 @session_class_wrapper
 class ReadAPI:
+    """Read from a data source into NeatSession graph store."""
+
     def __init__(self, state: SessionState, client: NeatClient | None, verbose: bool) -> None:
         self._state = state
         self._verbose = verbose
@@ -68,6 +70,11 @@ class BaseReadAPI:
 
 @session_class_wrapper
 class CDFReadAPI(BaseReadAPI):
+    """Reads from CDF Data Models.
+    Use the `.data_model()` method to load a CDF Data Model to the knowledge graph.
+
+    """
+
     def __init__(self, state: SessionState, client: NeatClient | None, verbose: bool) -> None:
         super().__init__(state, client, verbose)
         self.classic = CDFClassicAPI(state, client, verbose)
@@ -79,6 +86,18 @@ class CDFReadAPI(BaseReadAPI):
         return self._client
 
     def data_model(self, data_model_id: DataModelIdentifier) -> IssueList:
+        """Reads a Data Model from CDF to the knowledge graph.
+
+        Args:
+            data_model_id: Tuple of strings with the id of a CDF Data Model.
+                Notation as follows (<name_of_space>, <name_of_data_model>, <data_model_version>)
+
+        Example:
+            ```python
+            neat.read.cdf.data_model(("example_data_model_space", "EXAMPLE_DATA_MODEL", "v1"))
+            ```
+        """
+
         data_model_id = DataModelId.load(data_model_id)
 
         if not data_model_id.version:
@@ -113,6 +132,11 @@ class CDFReadAPI(BaseReadAPI):
 
 @session_class_wrapper
 class CDFClassicAPI(BaseReadAPI):
+    """Reads from the Classic Data Model from CDF.
+    Use the `.graph()` method to load CDF core resources to the knowledge graph.
+
+    """
+
     @property
     def _get_client(self) -> NeatClient:
         if self._client is None:
@@ -124,12 +148,12 @@ class CDFClassicAPI(BaseReadAPI):
 
         The Classic Graph consists of the following core resource type.
 
-        Classic Node CDF Resources:
-        - Assets
-        - TimeSeries
-        - Sequences
-        - Events
-        - Files
+        !!! note "Classic Node CDF Resources"
+            - Assets
+            - TimeSeries
+            - Sequences
+            - Events
+            - Files
 
         All the classic node CDF resources can have one or more connections to one or more assets. This
         will match a direct relationship in the data modeling of CDF.
@@ -144,13 +168,12 @@ class CDFClassicAPI(BaseReadAPI):
         This extractor will extract the classic CDF graph into Neat starting from either a data set or a root asset.
 
         It works as follows:
-
-        1. Extract all core nodes (assets, time series, sequences, events, files) filtered by the given data set or
-           root asset.
-        2. Extract all relationships starting from any of the extracted core nodes.
-        3. Extract all core nodes that are targets of the relationships that are not already extracted.
-        4. Extract all labels that are connected to the extracted core nodes/relationships.
-        5. Extract all data sets that are connected to the extracted core nodes/relationships.
+            1. Extract all core nodes (assets, time series, sequences, events, files) filtered by the given data set or
+               root asset.
+            2. Extract all relationships starting from any of the extracted core nodes.
+            3. Extract all core nodes that are targets of the relationships that are not already extracted.
+            4. Extract all labels that are connected to the extracted core nodes/relationships.
+            5. Extract all data sets that are connected to the extracted core nodes/relationships.
 
         Args:
             root_asset_external_id: The external id of the root asset
@@ -168,7 +191,29 @@ class CDFClassicAPI(BaseReadAPI):
 
 @session_class_wrapper
 class ExcelReadAPI(BaseReadAPI):
+    """Reads a Neat Excel Rules sheet to the graph store. The rules sheet may stem from an Information architect,
+    or a DMS Architect.
+
+    Args:
+        io: file path to the Excel sheet
+
+    Example:
+        ```python
+        neat.read.excel("information_or_dms_rules_sheet.xlsx")
+        ```
+    """
+
+    def __init__(self, state: SessionState, client: NeatClient | None, verbose: bool) -> None:
+        super().__init__(state, client, verbose)
+        self.examples = ExcelExampleAPI(state, client, verbose)
+
     def __call__(self, io: Any) -> IssueList:
+        """Reads a Neat Excel Rules sheet to the graph store. The rules sheet may stem from an Information architect,
+        or a DMS Architect.
+
+        Args:
+            io: file path to the Excel sheet
+        """
         reader = NeatReader.create(io)
         start = datetime.now(timezone.utc)
         if not isinstance(reader, PathReader):
@@ -190,8 +235,45 @@ class ExcelReadAPI(BaseReadAPI):
         return input_rules.issues
 
 
+@session_class_wrapper
+class ExcelExampleAPI(BaseReadAPI):
+    """Used as example for reading some data model into the NeatSession."""
+
+    @property
+    def pump_example(self) -> IssueList:
+        """Reads the Nordic 44 knowledge graph into the NeatSession graph store."""
+        start = datetime.now(timezone.utc)
+        importer: importers.ExcelImporter = importers.ExcelImporter(catalog.hello_world_pump)
+        input_rules: ReadRules = importer.to_rules()
+        end = datetime.now(timezone.utc)
+
+        if input_rules.rules:
+            change = Change.from_rules_activity(
+                input_rules,
+                importer.agent,
+                start,
+                end,
+                description="Pump Example read as unverified data model",
+            )
+            self._store_rules(input_rules, change)
+        self._state.data_model.issue_lists.append(input_rules.issues)
+        return input_rules.issues
+
+
 @session_class_wrapper
 class YamlReadAPI(BaseReadAPI):
+    """Reads a yaml with either neat rules, or several toolkit yaml files to import Data Model(s) into NeatSession.
+
+    Args:
+        io: file path to the Yaml file in the case of "neat" yaml, or path to a zip folder or directory with several
+            Yaml files in the case of "toolkit".
+
+    Example:
+        ```python
+        neat.read.yaml("path_to_toolkit_yamls")
+        ```
+    """
+
     def __call__(self, io: Any, format: Literal["neat", "toolkit"] = "neat") -> IssueList:
         reader = NeatReader.create(io)
         if not isinstance(reader, PathReader):
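Reviewer note: the new `ExcelExampleAPI` is wired up as the `examples` attribute of `ExcelReadAPI`, so assuming the session exposes that reader as `neat.read.excel` (as its docstring example suggests), the bundled pump rules sheet can be loaded like this:

```python
# pump_example is a property, so no parentheses; it reads the hello_world_pump
# catalog sheet as an unverified data model and returns an IssueList.
issues = neat.read.excel.examples.pump_example
```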
@@ -242,6 +324,23 @@ class YamlReadAPI(BaseReadAPI):
 
 @session_class_wrapper
 class CSVReadAPI(BaseReadAPI):
+    """Reads a csv that contains a column to use as primary key which will be the unique identifier for the type of
+    data you want to read in. Ex. a csv can hold information about assets, and their identifiers are specified in
+    a "ASSET_TAG" column.
+
+    Args:
+        io: file path or url to the csv
+        type: string that specifies what type of data the csv contains. For instance "Asset" or "Equipment"
+        primary_key: string name of the column that should be used as the unique identifier for each row of data
+
+    Example:
+        ```python
+        type_described_in_table = "Turbine"
+        column_with_identifier = "UNIQUE_TAG_NAME"
+        neat.read.csv("url_or_path_to_csv_file", type=type_described_in_table, primary_key=column_with_identifier)
+        ```
+    """
+
     def __call__(self, io: Any, type: str, primary_key: str) -> None:
         reader = NeatReader.create(io)
         if isinstance(reader, HttpFileReader):
@@ -263,6 +362,13 @@ class CSVReadAPI(BaseReadAPI):
 
 @session_class_wrapper
 class XMLReadAPI(BaseReadAPI):
+    """Reads an XML file that is either of DEXPI or AML format.
+
+    Args:
+        io: file path or url to the XML
+        format: can be either "dexpi" or "aml" are the currenly supported XML source types.
+    """
+
     def __call__(
         self,
         io: Any,
@@ -289,6 +395,16 @@ class XMLReadAPI(BaseReadAPI):
             raise NeatValueError("Only support XML files of DEXPI format at the moment.")
 
     def dexpi(self, path):
+        """Reads a DEXPI file into the NeatSession.
+
+        Args:
+            io: file path or url to the DEXPI file
+
+        Example:
+            ```python
+            neat.read.xml.dexpi("url_or_path_to_dexpi_file")
+            ```
+        """
         engine = import_engine()
         engine.set.format = "dexpi"
         engine.set.file = path
@@ -296,6 +412,16 @@ class XMLReadAPI(BaseReadAPI):
         self._state.instances.store.write(extractor)
 
     def aml(self, path):
+        """Reads an AML file into NeatSession.
+
+        Args:
+            io: file path or url to the AML file
+
+        Example:
+            ```python
+            neat.read.xml.aml("url_or_path_to_aml_file")
+            ```
+        """
         engine = import_engine()
         engine.set.format = "aml"
         engine.set.file = path
@@ -305,11 +431,27 @@
 
 @session_class_wrapper
 class RDFReadAPI(BaseReadAPI):
+    """Reads an RDF source into NeatSession. Supported sources are "ontology" or "imf".
+
+    Args:
+        io: file path or url to the RDF source
+    """
+
     def __init__(self, state: SessionState, client: NeatClient | None, verbose: bool) -> None:
         super().__init__(state, client, verbose)
         self.examples = RDFExamples(state)
 
     def ontology(self, io: Any) -> IssueList:
+        """Reads an OWL ontology source into NeatSession.
+
+        Args:
+            io: file path or url to the OWL file
+
+        Example:
+            ```python
+            neat.read.rdf.ontology("url_or_path_to_owl_source")
+            ```
+        """
         start = datetime.now(timezone.utc)
         reader = NeatReader.create(io)
         if not isinstance(reader, PathReader):
@@ -331,6 +473,16 @@ class RDFReadAPI(BaseReadAPI):
         return input_rules.issues
 
     def imf(self, io: Any) -> IssueList:
+        """Reads IMF Types provided as SHACL shapes into NeatSession.
+
+        Args:
+            io: file path or url to the IMF file
+
+        Example:
+            ```python
+            neat.read.rdf.imf("url_or_path_to_imf_source")
+            ```
+        """
         start = datetime.now(timezone.utc)
         reader = NeatReader.create(io)
         if not isinstance(reader, PathReader):
@@ -360,16 +512,20 @@ class RDFReadAPI(BaseReadAPI):
         if type is None:
             type = object_wizard()
 
-        if type.lower() == "Data Model".lower():
+        type = type.lower()
+
+        if type == "data model":
             source = source or rdf_dm_wizard("What type of data model is the RDF?")
-            if source == "Ontology":
+            source = cast(str, source).lower()  # type: ignore
+
+            if source == "ontology":
                 return self.ontology(io)
-            elif source == "IMF":
+            elif source == "imf types":
                 return self.imf(io)
             else:
-                raise ValueError(f"Expected ontology, imf or instances, got {source}")
+                raise ValueError(f"Expected ontology, imf types or instances, got {source}")
 
-        elif type.lower() == "Instances".lower():
+        elif type == "instances":
             reader = NeatReader.create(io)
             if not isinstance(reader, PathReader):
                 raise NeatValueError("Only file paths are supported for RDF files")
@@ -380,11 +536,15 @@ class RDFReadAPI(BaseReadAPI):
             raise NeatSessionError(f"Expected data model or instances, got {type}")
 
 
+@session_class_wrapper
 class RDFExamples:
+    """Used as example for reading some triples into the NeatSession knowledge grapgh."""
+
     def __init__(self, state: SessionState) -> None:
         self._state = state
 
     @property
     def nordic44(self) -> IssueList:
+        """Reads the Nordic 44 knowledge graph into the NeatSession graph store."""
        self._state.instances.store.write(extractors.RdfFileExtractor(instances_examples.nordic44_knowledge_graph))
        return IssueList()
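Reviewer note: with `RDFExamples` now wrapped in `session_class_wrapper`, the bundled example graph is reachable through the `examples` attribute set in `RDFReadAPI.__init__`. A hedged usage sketch:

```python
# nordic44 is a property; accessing it writes the bundled Nordic 44 knowledge graph
# into the session's graph store and returns an (empty) IssueList.
issues = neat.read.rdf.examples.nordic44
```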
cognite/neat/_session/_set.py

@@ -12,12 +12,22 @@ from .exceptions import NeatSessionError, session_class_wrapper
 
 @session_class_wrapper
 class SetAPI:
+    """Used to change the name of the data model from a data model id defined by neat to a user specified name."""
+
     def __init__(self, state: SessionState, verbose: bool) -> None:
         self._state = state
         self._verbose = verbose
 
     def data_model_id(self, new_model_id: dm.DataModelId | tuple[str, str, str]) -> None:
-        """Sets the data model ID of the latest verified data model."""
+        """Sets the data model ID of the latest verified data model. Set the data model id as a tuple of strings
+        following the template (<data_model_space>, <data_model_name>, <data_model_version>).
+
+        Example:
+            Set a new data model id:
+            ```python
+            neat.set.data_model_id(("my_data_model_space", "My_Data_Model", "v1"))
+            ```
+        """
         if res := self._state.data_model.last_verified_dms_rules:
             source_id, rules = res