cognite-neat 0.100.1__py3-none-any.whl → 0.101.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. Click here for more details.

@@ -156,7 +156,6 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
156
156
 
157
157
  for identifier, properties in reader:
158
158
  try:
159
- print(view_id)
160
159
  yield self._create_node(identifier, properties, pydantic_cls, view_id)
161
160
  except ValueError as e:
162
161
  error_node = ResourceCreationError(identifier, "node", error=str(e))
@@ -7,6 +7,7 @@ from ._classic_cdf import (
7
7
  AssetTimeSeriesConnector,
8
8
  RelationshipAsEdgeTransformer,
9
9
  )
10
+ from ._prune_graph import AttachPropertyFromTargetToSource, PruneDanglingNodes
10
11
  from ._rdfpath import AddSelfReferenceProperty, MakeConnectionOnExactMatch
11
12
  from ._value_type import SplitMultiValueProperty
12
13
 
@@ -21,6 +22,8 @@ __all__ = [
21
22
  "SplitMultiValueProperty",
22
23
  "RelationshipAsEdgeTransformer",
23
24
  "MakeConnectionOnExactMatch",
25
+ "AttachPropertyFromTargetToSource",
26
+ "PruneDanglingNodes",
24
27
  ]
25
28
 
26
29
  Transformers = (
@@ -34,4 +37,6 @@ Transformers = (
34
37
  | SplitMultiValueProperty
35
38
  | RelationshipAsEdgeTransformer
36
39
  | MakeConnectionOnExactMatch
40
+ | AttachPropertyFromTargetToSource
41
+ | PruneDanglingNodes
37
42
  )
@@ -2,12 +2,12 @@ from rdflib import Namespace
2
2
 
3
3
  from cognite.neat._graph.extractors import IODDExtractor
4
4
 
5
- from ._prune_graph import PruneDanglingNodes, TwoHopFlattener
5
+ from ._prune_graph import AttachPropertyFromTargetToSource, PruneDanglingNodes
6
6
 
7
7
  IODD = Namespace("http://www.io-link.com/IODD/2010/10/")
8
8
 
9
9
 
10
- class IODDTwoHopFlattener(TwoHopFlattener):
10
+ class IODDAttachPropertyFromTargetToSource(AttachPropertyFromTargetToSource):
11
11
  _need_changes = frozenset(
12
12
  {
13
13
  str(IODDExtractor.__name__),
@@ -15,11 +15,16 @@ class IODDTwoHopFlattener(TwoHopFlattener):
15
15
  )
16
16
 
17
17
  def __init__(self):
18
- super().__init__(destination_node_type=IODD.TextObject, property_predicate=IODD.value, property_name="value")
18
+ super().__init__(
19
+ target_node_type=IODD.TextObject,
20
+ namespace=IODD,
21
+ target_property="value",
22
+ delete_target_node=True,
23
+ )
19
24
 
20
25
 
21
26
  class IODDPruneDanglingNodes(PruneDanglingNodes):
22
- _need_changes = frozenset({str(IODDExtractor.__name__), str(IODDTwoHopFlattener.__name__)})
27
+ _need_changes = frozenset({str(IODDExtractor.__name__), str(IODDAttachPropertyFromTargetToSource.__name__)})
23
28
 
24
29
  def __init__(self):
25
30
  super().__init__(node_prune_types=[IODD.TextObject])
@@ -1,84 +1,141 @@
1
1
  from rdflib import Graph, Namespace, URIRef
2
2
  from rdflib.query import ResultRow
3
- from rdflib.term import Identifier
3
+
4
+ from cognite.neat._utils.rdf_ import as_neat_compliant_uri
5
+ from cognite.neat._utils.text import sentence_or_string_to_camel
4
6
 
5
7
  from ._base import BaseTransformer
6
8
 
7
9
 
8
- # TODO: Handle the cse when value is None, which will not make the TextObject resolve
9
- class TwoHopFlattener(BaseTransformer):
10
+ class AttachPropertyFromTargetToSource(BaseTransformer):
10
11
  """
11
- Transformer that will flatten the distance between a source node, an intermediate connecting node, and a
12
- target property that is connected to the intermediate node.
13
- The transformation result is that the target property is attached directly to the source node, instead of having
14
- to go via the intermediate node.
15
- The user can also provide a flag to decide if the intermediate node should be removed from the graph or not
16
- after connecting the target property to the source node.
12
+ Transformer that considers a TargetNode and SourceNode relationship, to extract a property that is attached to
13
+ the TargetNode, and attaches it to the SourceNode instead, while also deleting the edge between
14
+ the SourceNode and TargetNode.
15
+ This means that you no longer have to go via the SourceNode to TargetNode to extract
16
+ the desired property from TargetNode, you can get it directly from the SourceNode instead.
17
+ Further, there are two ways of defining the predicate for the new property to attach to
18
+ the SourceNode. The predicate that is used will either be the old predicate between the SourceNode and TargetNode,
19
+ or, the TargetNode may hold a property with a value for the new predicate to use.
20
+ In this case, the user must specify the name of this predicate property connected to the TargetNode.
21
+ Consider the following example for illustration:
22
+
23
+ Ex. AttachPropertyFromTargetToSource
24
+ Graph before transformation:
25
+
26
+ :SourceNode a :SourceType .
27
+ :SourceNode :sourceProperty :TargetNode .
28
+
29
+ :TargetNode a :TargetType .
30
+ :TargetNode :propertyWhichValueWeWant 'Target Value' .
31
+ :TargetNode :propertyWhichValueWeMightWantAsNameForNewProperty 'PropertyName'
32
+
33
+ Use case A after transformation - attach new property to SourceNode using old predicate:
34
+
35
+ :SourceNode a :SourceType .
36
+ :SourceNode :sourceProperty 'Target Value' .
37
+
38
+ Use case B after transformation - extract new predicate from one of the properties of the TargetNode:
39
+
40
+ :SourceNode a :SourceType .
41
+ :SourceNode :PropertyName 'Target Value' .
17
42
 
18
- Ex. TwoHopFlattener:
19
43
 
20
- Graph before flattening (with deletion of intermediate node):
21
- node(A, rdf:type(Pump)) -(predicate("vendor"))>
22
- node(B, rdf:type(TextObject)) -(predicate("value"))> Literal("CompanyX")
44
+ The user can provide a flag to decide if the intermediate target node should be removed from the graph or not
45
+ after connecting the target property to the source node. The example illustrates this.
46
+ The default however is False.
23
47
 
24
- Graph after flattening nodes with destination_node_type = rdf:type(TextObject), property_predicate = :value,
25
- and property_name = "value":
48
+ If delete_target_node is not set, the expected number of triples after this transformation should be the same as
49
+ before the transformation.
26
50
 
27
- node(A, rdf:type(Pump)) -(predicate("vendor"))> Literal("CompanyX")
51
+ If delete_target_node is set, the expected number of triples should be:
52
+ #triples_before - #target_nodes * #target_nodes_properties
53
+
54
+ Number of triples after operation from above example: 5 - 1*3 = 2
28
55
 
29
56
  Args:
30
- destination_node_type: RDF.type of edge Node
31
- property_predicate: Predicate to use when resolving the value from the edge node
32
- property_name: name of the property that the intermediate node is pointing to
33
- delete_connecting_node: bool if the intermediate Node and Edge between source Node
34
- and target property should be deleted. Defaults to True.
57
+ target_node_type: RDF.type of edge Node
58
+ namespace: RDF Namespace to use when querying the graph
59
+ target_property: str with name of the property that holds the value attached to the intermediate node
60
+ target_property_holding_new_property_name: Optional str of the property name that holds
61
+ the new predicate to use when attaching the new property to the source node.
62
+ delete_target_node: bool if the intermediate Node and Edge between source Node
63
+ and target property should be deleted. Defaults to False.
35
64
  """
36
65
 
37
- description: str = "Prunes the graph of specified node types that do not have connections to other nodes."
38
- _query_template: str = """SELECT ?sourceNode ?property ?destinationNode ?value WHERE {{
39
- ?sourceNode ?property ?destinationNode .
40
- ?destinationNode a <{destination_node_type}> .
41
- ?destinationNode <{property_predicate}> ?{property_name} . }}"""
66
+ description: str = "Attaches a target property from a target node that is connected to a source node."
67
+
68
+ _query_template_use_case_a: str = """
69
+ SELECT ?sourceNode ?sourceProperty ?targetNode ?newSourceProperty ?newSourcePropertyValue WHERE {{
70
+ ?sourceNode ?sourceProperty ?targetNode .
71
+ BIND( <{target_property}> as ?newSourceProperty ) .
72
+ ?targetNode a <{target_node_type}> .
73
+ ?targetNode <{target_property}> ?newSourcePropertyValue . }}"""
74
+
75
+ _query_template_use_case_b: str = """
76
+ SELECT ?sourceNode ?sourceProperty ?targetNode ?newSourceProperty ?newSourcePropertyValue WHERE {{
77
+ ?sourceNode ?sourceProperty ?targetNode .
78
+ ?targetNode a <{target_node_type}> .
79
+ ?targetNode <{target_property_holding_new_property_name}> ?newSourceProperty .
80
+ ?targetNode <{target_property}> ?newSourcePropertyValue . }}"""
42
81
 
43
82
  def __init__(
44
83
  self,
45
- destination_node_type: URIRef,
46
- property_predicate: Namespace,
47
- property_name: str,
48
- delete_connecting_node: bool = True,
84
+ target_node_type: URIRef,
85
+ namespace: Namespace,
86
+ target_property: str,
87
+ target_property_holding_new_property_name: str | None = None,
88
+ delete_target_node: bool = False,
49
89
  ):
50
- self.destination_node_type = destination_node_type
51
- self.property_predicate = property_predicate
52
- self.property_name = property_name
53
- self.delete_connecting_node = delete_connecting_node
54
-
55
- def transform(self, graph: Graph) -> None:
56
- nodes_to_delete: list[Identifier] = []
57
-
58
- graph_traversals = list(
59
- graph.query(
60
- self._query_template.format(
61
- destination_node_type=self.destination_node_type,
62
- property_predicate=self.property_predicate,
63
- property_name=self.property_name,
64
- )
90
+ self.target_node_type = target_node_type
91
+ self.namespace = namespace
92
+ self.target_property = self.namespace[target_property]
93
+ self.delete_target_node = delete_target_node
94
+ self.target_property_holding_new_property_name = target_property_holding_new_property_name
95
+
96
+ def transform(self, graph) -> None:
97
+ nodes_to_delete: list[tuple] = []
98
+
99
+ if self.target_property_holding_new_property_name is not None:
100
+ target_property_holding_new_property_name = self.namespace[self.target_property_holding_new_property_name]
101
+ query = self._query_template_use_case_b.format(
102
+ target_node_type=self.target_node_type,
103
+ target_property_holding_new_property_name=target_property_holding_new_property_name,
104
+ target_property=self.target_property,
105
+ )
106
+ else:
107
+ query = self._query_template_use_case_a.format(
108
+ target_node_type=self.target_node_type,
109
+ target_property=self.target_property,
65
110
  )
66
- )
67
-
68
- for path in graph_traversals:
69
- if isinstance(path, ResultRow):
70
- source_node, predicate, destination_node, property_value = path.asdict().values()
71
-
72
- # Create new connection from source node to value
73
- graph.add((source_node, predicate, property_value))
74
- nodes_to_delete.append(destination_node)
75
-
76
- if self.delete_connecting_node:
77
- for node in nodes_to_delete:
78
- # Remove edge triples to node
79
- graph.remove((None, None, node))
80
- # Remove node triple
81
- graph.remove((node, None, None))
111
+
112
+ for (
113
+ source_node,
114
+ old_predicate,
115
+ target_node,
116
+ new_predicate_value,
117
+ new_property_value,
118
+ ) in graph.query(query):
119
+ if self.target_property_holding_new_property_name is not None:
120
+ # Ensure new predicate is URI compliant as we are creating a new predicate
121
+ new_predicate_value_string = sentence_or_string_to_camel(str(new_predicate_value))
122
+ predicate = as_neat_compliant_uri(self.namespace[new_predicate_value_string])
123
+ else:
124
+ predicate = old_predicate
125
+
126
+ # Create new connection from source node to value
127
+ graph.add((source_node, predicate, new_property_value))
128
+ # Remove old relationship between source node and destination node
129
+ graph.remove((source_node, old_predicate, target_node))
130
+
131
+ nodes_to_delete.append(target_node)
132
+
133
+ if self.delete_target_node:
134
+ for target_node in nodes_to_delete:
135
+ # Remove triples with edges to target_node
136
+ graph.remove((None, None, target_node))
137
+ # Remove target node triple and its properties
138
+ graph.remove((target_node, None, None))
82
139
 
83
140
 
84
141
  class PruneDanglingNodes(BaseTransformer):
@@ -12,6 +12,7 @@ from cognite.client.data_classes._base import (
12
12
  from cognite.client.data_classes.data_modeling import (
13
13
  DataModelApplyList,
14
14
  DataModelId,
15
+ SpaceApply,
15
16
  ViewApplyList,
16
17
  )
17
18
  from cognite.client.exceptions import CogniteAPIError
@@ -200,6 +201,32 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
200
201
  loader.resource_name for loader, categorized in categorized_items_by_loader.items() if categorized.to_update
201
202
  )
202
203
 
204
+ deleted_by_name: dict[str, UploadResult] = {}
205
+ if not is_failing:
206
+ # Deletion is done in reverse order to take care of dependencies
207
+ for loader, items in reversed(categorized_items_by_loader.items()):
208
+ issue_list = IssueList()
209
+
210
+ if items.resource_name == client.loaders.data_models.resource_name:
211
+ warning_list = self._validate(list(items.item_ids()), client)
212
+ issue_list.extend(warning_list)
213
+
214
+ results = UploadResult(loader.resource_name, issues=issue_list) # type: ignore[var-annotated]
215
+ if dry_run:
216
+ results.deleted.update(items.to_delete_ids)
217
+ else:
218
+ if items.to_delete_ids:
219
+ try:
220
+ deleted = loader.delete(items.to_delete_ids)
221
+ except MultiCogniteAPIError as e:
222
+ results.deleted.update([loader.get_id(item) for item in e.success])
223
+ results.failed_deleted.update([loader.get_id(item) for item in e.failed])
224
+ for error in e.errors:
225
+ results.error_messages.append(f"Failed to delete {loader.resource_name}: {error!s}")
226
+ else:
227
+ results.deleted.update(deleted)
228
+ deleted_by_name[loader.resource_name] = results
229
+
203
230
  for loader, items in categorized_items_by_loader.items():
204
231
  issue_list = IssueList()
205
232
 
@@ -221,28 +248,21 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
221
248
 
222
249
  results.unchanged.update(items.unchanged_ids)
223
250
  results.skipped.update(items.to_skip_ids)
251
+ if delete_results := deleted_by_name.get(loader.resource_name):
252
+ results.deleted.update(delete_results.deleted)
253
+ results.failed_deleted.update(delete_results.failed_deleted)
254
+ results.error_messages.extend(delete_results.error_messages)
255
+
224
256
  if dry_run:
225
257
  if self.existing in ["update", "force"]:
226
258
  # Assume all changed are successful
227
259
  results.changed.update(items.to_update_ids)
228
260
  elif self.existing == "skip":
229
261
  results.skipped.update(items.to_update_ids)
230
- results.deleted.update(items.to_delete_ids)
231
262
  results.created.update(items.to_create_ids)
232
263
  yield results
233
264
  continue
234
265
 
235
- if items.to_delete_ids:
236
- try:
237
- deleted = loader.delete(items.to_delete_ids)
238
- except MultiCogniteAPIError as e:
239
- results.deleted.update([loader.get_id(item) for item in e.success])
240
- results.failed_deleted.update([loader.get_id(item) for item in e.failed])
241
- for error in e.errors:
242
- results.error_messages.append(f"Failed to delete {loader.resource_name}: {error!s}")
243
- else:
244
- results.deleted.update(deleted)
245
-
246
266
  if items.to_create:
247
267
  try:
248
268
  created = loader.create(items.to_create)
@@ -308,7 +328,9 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
308
328
  cdf_item = cdf_item_by_id.get(item_id)
309
329
  if cdf_item is None:
310
330
  categorized.to_create.append(item)
311
- elif is_redeploying or self.existing == "recreate":
331
+ elif (is_redeploying or self.existing == "recreate") and not isinstance(item, SpaceApply):
332
+ # Spaces are not deleted, instead they are updated. Deleting a space is an expensive operation
333
+ # and are seldom needed. If you need to delete the space, it should be done in a different operation.
312
334
  if not self.drop_data and loader.has_data(item_id):
313
335
  categorized.to_skip.append(cdf_item)
314
336
  else:
@@ -147,11 +147,13 @@ class BaseMetadata(SchemaModel):
147
147
  Metadata model for data model
148
148
  """
149
149
 
150
- role: ClassVar[RoleTypes]
151
- aspect: ClassVar[DataModelAspect]
152
- space: SpaceType = Field(alias="prefix")
153
- external_id: DataModelExternalIdType = Field(alias="externalId")
154
- version: VersionType
150
+ role: ClassVar[RoleTypes] = Field(description="Role of the person creating the data model")
151
+ aspect: ClassVar[DataModelAspect] = Field(description="Aspect of the data model")
152
+ space: SpaceType = Field(alias="prefix", description="The space where the data model is defined")
153
+ external_id: DataModelExternalIdType = Field(
154
+ alias="externalId", description="External identifier for the data model"
155
+ )
156
+ version: VersionType = Field(description="Version of the data model")
155
157
 
156
158
  name: str | None = Field(
157
159
  None,
@@ -160,21 +162,23 @@ class BaseMetadata(SchemaModel):
160
162
  max_length=255,
161
163
  )
162
164
 
163
- description: str | None = Field(None, min_length=1, max_length=1024)
165
+ description: str | None = Field(
166
+ None, min_length=1, max_length=1024, description="Short description of the data model"
167
+ )
164
168
 
165
169
  creator: StrListType = Field(
166
170
  description=(
167
- "List of contributors to the data model creation, "
171
 "List of contributors (comma separated) to the data model creation, "
168
172
  "typically information architects are considered as contributors."
169
173
  ),
170
174
  )
171
175
 
172
176
  created: datetime = Field(
173
- description=("Date of the data model creation"),
177
+ description="Date of the data model creation",
174
178
  )
175
179
 
176
180
  updated: datetime = Field(
177
- description=("Date of the data model update"),
181
+ description="Date of the data model update",
178
182
  )
179
183
 
180
184
  @field_validator("*", mode="before")
@@ -94,20 +94,60 @@ def _metadata(context: Any) -> DMSMetadata | None:
94
94
 
95
95
 
96
96
  class DMSProperty(SheetRow):
97
- view: ViewEntityType = Field(alias="View")
98
- view_property: DmsPropertyType = Field(alias="View Property")
99
- name: str | None = Field(alias="Name", default=None)
100
- description: str | None = Field(alias="Description", default=None)
101
- connection: Literal["direct"] | ReverseConnectionEntity | EdgeEntity | None = Field(None, alias="Connection")
102
- value_type: DataType | ViewEntity | DMSUnknownEntity = Field(alias="Value Type")
103
- nullable: bool | None = Field(default=None, alias="Nullable")
104
- immutable: bool | None = Field(default=None, alias="Immutable")
105
- is_list: bool | None = Field(default=None, alias="Is List")
106
- default: str | int | dict | None = Field(None, alias="Default")
107
- container: ContainerEntityType | None = Field(None, alias="Container")
108
- container_property: DmsPropertyType | None = Field(None, alias="Container Property")
109
- index: StrListType | None = Field(None, alias="Index")
110
- constraint: StrListType | None = Field(None, alias="Constraint")
97
+ view: ViewEntityType = Field(alias="View", description="The ViewId this property belongs to")
98
+ view_property: DmsPropertyType = Field(alias="View Property", description="The property identifier.")
99
+ name: str | None = Field(alias="Name", default=None, description="Human readable name of the property")
100
+ description: str | None = Field(alias="Description", default=None, description="Short description of the property")
101
+ connection: Literal["direct"] | ReverseConnectionEntity | EdgeEntity | None = Field(
102
+ None,
103
+ alias="Connection",
104
+ description="Only applies to connection between views. "
105
+ "It specifies how the connection should be implemented in CDF.",
106
+ )
107
+ value_type: DataType | ViewEntity | DMSUnknownEntity = Field(
108
+ alias="Value Type",
109
+ description="Value type that the property can hold. "
110
+ "It takes either subset of CDF primitive types or a View id",
111
+ )
112
+ nullable: bool | None = Field(
113
+ default=None,
114
+ alias="Nullable",
115
+ description="Used to indicate whether the property is required or not. Only applies to primitive type.",
116
+ )
117
+ immutable: bool | None = Field(
118
+ default=None,
119
+ alias="Immutable",
120
+ description="Used to indicate whether the property can only be set once. Only applies to primitive type.",
121
+ )
122
+ is_list: bool | None = Field(
123
+ default=None,
124
+ alias="Is List",
125
+ description="Used to indicate whether the property holds single or multiple values (list). "
126
+ "Only applies to primitive types.",
127
+ )
128
+ default: str | int | dict | None = Field(
129
+ None, alias="Default", description="Specifies default value for the property."
130
+ )
131
+ container: ContainerEntityType | None = Field(
132
+ None,
133
+ alias="Container",
134
+ description="Specifies container where the property is stored. Only applies to primitive type.",
135
+ )
136
+ container_property: DmsPropertyType | None = Field(
137
+ None,
138
+ alias="Container Property",
139
+ description="Specifies property in the container where the property is stored. Only applies to primitive type.",
140
+ )
141
+ index: StrListType | None = Field(
142
+ None,
143
+ alias="Index",
144
+ description="The names of the indexes (comma separated) that should be created for the property.",
145
+ )
146
+ constraint: StrListType | None = Field(
147
+ None,
148
+ alias="Constraint",
149
+ description="The names of the uniqueness constraints (comma separated) that should be created for the property.",
150
+ )
111
151
  logical: URIRefType | None = Field(
112
152
  None,
113
153
  alias="Logical",
@@ -192,11 +232,21 @@ class DMSProperty(SheetRow):
192
232
 
193
233
 
194
234
  class DMSContainer(SheetRow):
195
- container: ContainerEntityType = Field(alias="Container")
196
- name: str | None = Field(alias="Name", default=None)
197
- description: str | None = Field(alias="Description", default=None)
198
- constraint: ContainerEntityList | None = Field(None, alias="Constraint")
199
- used_for: Literal["node", "edge", "all"] | None = Field("all", alias="Used For")
235
+ container: ContainerEntityType = Field(
236
+ alias="Container", description="Container id, strongly advised to PascalCase usage."
237
+ )
238
+ name: str | None = Field(
239
+ alias="Name", default=None, description="Human readable name of the container being defined."
240
+ )
241
+ description: str | None = Field(
242
+ alias="Description", default=None, description="Short description of the container being defined."
243
+ )
244
+ constraint: ContainerEntityList | None = Field(
245
+ None, alias="Constraint", description="List of required (comma separated) constraints for the container"
246
+ )
247
+ used_for: Literal["node", "edge", "all"] | None = Field(
248
+ "all", alias="Used For", description="Whether the container is used for nodes, edges or all."
249
+ )
200
250
 
201
251
  def _identifier(self) -> tuple[Hashable, ...]:
202
252
  return (self.container,)
@@ -240,12 +290,22 @@ class DMSContainer(SheetRow):
240
290
 
241
291
 
242
292
  class DMSView(SheetRow):
243
- view: ViewEntityType = Field(alias="View")
244
- name: str | None = Field(alias="Name", default=None)
245
- description: str | None = Field(alias="Description", default=None)
246
- implements: ViewEntityList | None = Field(None, alias="Implements")
247
- filter_: HasDataFilter | NodeTypeFilter | RawFilter | None = Field(None, alias="Filter")
248
- in_model: bool = Field(True, alias="In Model")
293
+ view: ViewEntityType = Field(alias="View", description="View id, strongly advised to PascalCase usage.")
294
+ name: str | None = Field(alias="Name", default=None, description="Human readable name of the view being defined.")
295
+ description: str | None = Field(
296
+ alias="Description", default=None, description="Short description of the view being defined "
297
+ )
298
+ implements: ViewEntityList | None = Field(
299
+ None,
300
+ alias="Implements",
301
+ description="List of parent view ids (comma separated) which the view being defined implements.",
302
+ )
303
+ filter_: HasDataFilter | NodeTypeFilter | RawFilter | None = Field(
304
+ None, alias="Filter", description="Explicitly define the filter for the view."
305
+ )
306
+ in_model: bool = Field(
307
+ True, alias="In Model", description="Indicates whether the view being defined is a part of the data model."
308
+ )
249
309
  logical: URIRefType | None = Field(
250
310
  None,
251
311
  alias="Logical",
@@ -292,10 +352,14 @@ class DMSView(SheetRow):
292
352
 
293
353
 
294
354
  class DMSNode(SheetRow):
295
- node: DMSNodeEntity = Field(alias="Node")
296
- usage: Literal["type", "collection"] = Field(alias="Usage")
297
- name: str | None = Field(alias="Name", default=None)
298
- description: str | None = Field(alias="Description", default=None)
355
+ node: DMSNodeEntity = Field(alias="Node", description="The type definition of the node.")
356
+ usage: Literal["type", "collection"] = Field(
357
+ alias="Usage", description="What the usage of the node is in the data model."
358
+ )
359
+ name: str | None = Field(alias="Name", default=None, description="Human readable name of the node being defined.")
360
+ description: str | None = Field(
361
+ alias="Description", default=None, description="Short description of the node being defined."
362
+ )
299
363
 
300
364
  def _identifier(self) -> tuple[Hashable, ...]:
301
365
  return (self.node,)
@@ -316,10 +380,10 @@ class DMSNode(SheetRow):
316
380
 
317
381
 
318
382
  class DMSEnum(SheetRow):
319
- collection: ClassEntityType = Field(alias="Collection")
320
- value: str = Field(alias="Value")
321
- name: str | None = Field(alias="Name", default=None)
322
- description: str | None = Field(alias="Description", default=None)
383
+ collection: ClassEntityType = Field(alias="Collection", description="The collection this enum belongs to.")
384
+ value: str = Field(alias="Value", description="The value of the enum.")
385
+ name: str | None = Field(alias="Name", default=None, description="Human readable name of the enum.")
386
+ description: str | None = Field(alias="Description", default=None, description="Short description of the enum.")
323
387
 
324
388
  def _identifier(self) -> tuple[Hashable, ...]:
325
389
  return self.collection, self.value
@@ -332,12 +396,20 @@ class DMSEnum(SheetRow):
332
396
 
333
397
 
334
398
  class DMSRules(BaseRules):
335
- metadata: DMSMetadata = Field(alias="Metadata")
336
- properties: SheetList[DMSProperty] = Field(alias="Properties")
337
- views: SheetList[DMSView] = Field(alias="Views")
338
- containers: SheetList[DMSContainer] | None = Field(None, alias="Containers")
339
- enum: SheetList[DMSEnum] | None = Field(None, alias="Enum")
340
- nodes: SheetList[DMSNode] | None = Field(None, alias="Nodes")
399
+ metadata: DMSMetadata = Field(alias="Metadata", description="Contains information about the data model.")
400
+ properties: SheetList[DMSProperty] = Field(
401
+ alias="Properties", description="Contains the properties of the data model."
402
+ )
403
+ views: SheetList[DMSView] = Field(alias="Views", description="Contains the views of the data model.")
404
+ containers: SheetList[DMSContainer] | None = Field(
405
+ None,
406
+ alias="Containers",
407
+ description="Contains the definition containers that are the physical storage of the data model.",
408
+ )
409
+ enum: SheetList[DMSEnum] | None = Field(None, alias="Enum", description="Contains the definition of enum values.")
410
+ nodes: SheetList[DMSNode] | None = Field(
411
+ None, alias="Nodes", description="Contains the definition of the node types."
412
+ )
341
413
 
342
414
  @field_validator("views")
343
415
  def matching_version_and_space(cls, value: SheetList[DMSView], info: ValidationInfo) -> SheetList[DMSView]: