cognite-neat 0.81.0__py3-none-any.whl → 0.81.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. See the package registry's advisory page for more details.

cognite/neat/_version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.81.0"
1
+ __version__ = "0.81.2"
@@ -1,10 +1,9 @@
1
1
  from collections.abc import Iterable
2
- from datetime import datetime
2
+ from datetime import datetime, timezone
3
3
  from pathlib import Path
4
4
  from typing import cast
5
5
  from urllib.parse import quote
6
6
 
7
- import pytz
8
7
  from cognite.client import CogniteClient
9
8
  from cognite.client.data_classes import Asset, AssetList
10
9
  from rdflib import RDF, Literal, Namespace
@@ -49,7 +48,7 @@ class AssetsExtractor(BaseExtractor):
49
48
  @classmethod
50
49
  def _asset2triples(cls, asset: Asset, namespace: Namespace) -> list[Triple]:
51
50
  """Converts an asset to triples."""
52
- id_ = namespace[str(asset.id)]
51
+ id_ = namespace[f"Asset_{asset.id}"]
53
52
 
54
53
  # Set rdf type
55
54
  triples: list[Triple] = [(id_, RDF.type, namespace["Asset"])]
@@ -72,14 +71,14 @@ class AssetsExtractor(BaseExtractor):
72
71
  (
73
72
  id_,
74
73
  namespace.created_time,
75
- Literal(datetime.fromtimestamp(asset.created_time / 1000, pytz.utc)),
74
+ Literal(datetime.fromtimestamp(asset.created_time / 1000, timezone.utc)),
76
75
  )
77
76
  )
78
77
  triples.append(
79
78
  (
80
79
  id_,
81
80
  namespace.last_updated_time,
82
- Literal(datetime.fromtimestamp(asset.last_updated_time / 1000, pytz.utc)),
81
+ Literal(datetime.fromtimestamp(asset.last_updated_time / 1000, timezone.utc)),
83
82
  )
84
83
  )
85
84
 
@@ -87,7 +86,7 @@ class AssetsExtractor(BaseExtractor):
87
86
  for label in asset.labels:
88
87
  # external_id can create ill-formed URIs, so we create websafe URIs
89
88
  # since labels do not have internal ids, we use the external_id as the id
90
- triples.append((id_, namespace.label, namespace[quote(label.dump()["externalId"])]))
89
+ triples.append((id_, namespace.label, namespace[f"Label_{quote(label.dump()['externalId'])}"]))
91
90
 
92
91
  if asset.metadata:
93
92
  for key, value in asset.metadata.items():
@@ -96,12 +95,12 @@ class AssetsExtractor(BaseExtractor):
96
95
 
97
96
  # Create connections:
98
97
  if asset.parent_id:
99
- triples.append((id_, namespace.parent, namespace[str(asset.parent_id)]))
98
+ triples.append((id_, namespace.parent, namespace[f"Asset_{asset.parent_id}"]))
100
99
 
101
100
  if asset.root_id:
102
- triples.append((id_, namespace.root, namespace[str(asset.root_id)]))
101
+ triples.append((id_, namespace.root, namespace[f"Asset_{asset.root_id}"]))
103
102
 
104
103
  if asset.data_set_id:
105
- triples.append((id_, namespace.dataset, namespace[str(asset.data_set_id)]))
104
+ triples.append((id_, namespace.dataset, namespace[f"Dataset_{asset.data_set_id}"]))
106
105
 
107
106
  return triples
@@ -1,9 +1,8 @@
1
1
  from collections.abc import Iterable
2
- from datetime import datetime
2
+ from datetime import datetime, timezone
3
3
  from pathlib import Path
4
4
  from typing import cast
5
5
 
6
- import pytz
7
6
  from cognite.client import CogniteClient
8
7
  from cognite.client.data_classes import Event, EventList
9
8
  from pydantic import AnyHttpUrl, ValidationError
@@ -44,7 +43,7 @@ class EventsExtractor(BaseExtractor):
44
43
 
45
44
  @classmethod
46
45
  def _event2triples(cls, event: Event, namespace: Namespace) -> list[Triple]:
47
- id_ = namespace[str(event.id)]
46
+ id_ = namespace[f"Event_{event.id}"]
48
47
 
49
48
  # Set rdf type
50
49
  triples: list[Triple] = [(id_, RDF.type, namespace.Event)]
@@ -77,7 +76,7 @@ class EventsExtractor(BaseExtractor):
77
76
 
78
77
  if event.created_time:
79
78
  triples.append(
80
- (id_, namespace.created_time, Literal(datetime.fromtimestamp(event.created_time / 1000, pytz.utc)))
79
+ (id_, namespace.created_time, Literal(datetime.fromtimestamp(event.created_time / 1000, timezone.utc)))
81
80
  )
82
81
 
83
82
  if event.last_updated_time:
@@ -85,7 +84,7 @@ class EventsExtractor(BaseExtractor):
85
84
  (
86
85
  id_,
87
86
  namespace.last_updated_time,
88
- Literal(datetime.fromtimestamp(event.last_updated_time / 1000, pytz.utc)),
87
+ Literal(datetime.fromtimestamp(event.last_updated_time / 1000, timezone.utc)),
89
88
  )
90
89
  )
91
90
 
@@ -94,7 +93,7 @@ class EventsExtractor(BaseExtractor):
94
93
  (
95
94
  id_,
96
95
  namespace.start_time,
97
- Literal(datetime.fromtimestamp(event.start_time / 1000, pytz.utc)),
96
+ Literal(datetime.fromtimestamp(event.start_time / 1000, timezone.utc)),
98
97
  )
99
98
  )
100
99
 
@@ -103,15 +102,15 @@ class EventsExtractor(BaseExtractor):
103
102
  (
104
103
  id_,
105
104
  namespace.end_time,
106
- Literal(datetime.fromtimestamp(event.end_time / 1000, pytz.utc)),
105
+ Literal(datetime.fromtimestamp(event.end_time / 1000, timezone.utc)),
107
106
  )
108
107
  )
109
108
 
110
109
  if event.data_set_id:
111
- triples.append((id_, namespace.data_set_id, namespace[str(event.data_set_id)]))
110
+ triples.append((id_, namespace.data_set_id, namespace[f"Dataset_{event.data_set_id}"]))
112
111
 
113
112
  if event.asset_ids:
114
113
  for asset_id in event.asset_ids:
115
- triples.append((id_, namespace.asset, namespace[str(asset_id)]))
114
+ triples.append((id_, namespace.asset, namespace[f"Asset_{asset_id}"]))
116
115
 
117
116
  return triples
@@ -1,10 +1,9 @@
1
1
  from collections.abc import Iterable
2
- from datetime import datetime
2
+ from datetime import datetime, timezone
3
3
  from pathlib import Path
4
4
  from typing import cast
5
5
  from urllib.parse import quote
6
6
 
7
- import pytz
8
7
  from cognite.client import CogniteClient
9
8
  from cognite.client.data_classes import FileMetadata, FileMetadataList
10
9
  from pydantic import AnyHttpUrl, ValidationError
@@ -45,7 +44,7 @@ class FilesExtractor(BaseExtractor):
45
44
 
46
45
  @classmethod
47
46
  def _file2triples(cls, file: FileMetadata, namespace: Namespace) -> list[Triple]:
48
- id_ = namespace[str(file.id)]
47
+ id_ = namespace[f"File_{file.id}"]
49
48
 
50
49
  # Set rdf type
51
50
  triples: list[Triple] = [(id_, RDF.type, namespace.File)]
@@ -81,7 +80,7 @@ class FilesExtractor(BaseExtractor):
81
80
  (
82
81
  id_,
83
82
  namespace.source_created_time,
84
- Literal(datetime.fromtimestamp(file.source_created_time / 1000, pytz.utc)),
83
+ Literal(datetime.fromtimestamp(file.source_created_time / 1000, timezone.utc)),
85
84
  )
86
85
  )
87
86
  if file.source_modified_time:
@@ -89,17 +88,17 @@ class FilesExtractor(BaseExtractor):
89
88
  (
90
89
  id_,
91
90
  namespace.source_created_time,
92
- Literal(datetime.fromtimestamp(file.source_modified_time / 1000, pytz.utc)),
91
+ Literal(datetime.fromtimestamp(file.source_modified_time / 1000, timezone.utc)),
93
92
  )
94
93
  )
95
94
  if file.uploaded_time:
96
95
  triples.append(
97
- (id_, namespace.uploaded_time, Literal(datetime.fromtimestamp(file.uploaded_time / 1000, pytz.utc)))
96
+ (id_, namespace.uploaded_time, Literal(datetime.fromtimestamp(file.uploaded_time / 1000, timezone.utc)))
98
97
  )
99
98
 
100
99
  if file.created_time:
101
100
  triples.append(
102
- (id_, namespace.created_time, Literal(datetime.fromtimestamp(file.created_time / 1000, pytz.utc)))
101
+ (id_, namespace.created_time, Literal(datetime.fromtimestamp(file.created_time / 1000, timezone.utc)))
103
102
  )
104
103
 
105
104
  if file.last_updated_time:
@@ -107,7 +106,7 @@ class FilesExtractor(BaseExtractor):
107
106
  (
108
107
  id_,
109
108
  namespace.last_updated_time,
110
- Literal(datetime.fromtimestamp(file.last_updated_time / 1000, pytz.utc)),
109
+ Literal(datetime.fromtimestamp(file.last_updated_time / 1000, timezone.utc)),
111
110
  )
112
111
  )
113
112
 
@@ -115,17 +114,17 @@ class FilesExtractor(BaseExtractor):
115
114
  for label in file.labels:
116
115
  # external_id can create ill-formed URIs, so we create websafe URIs
117
116
  # since labels do not have internal ids, we use the external_id as the id
118
- triples.append((id_, namespace.label, namespace[quote(label.dump()["externalId"])]))
117
+ triples.append((id_, namespace.label, namespace[f"Label_{quote(label.dump()['externalId'])}"]))
119
118
 
120
119
  if file.security_categories:
121
120
  for category in file.security_categories:
122
121
  triples.append((id_, namespace.security_categories, Literal(category)))
123
122
 
124
123
  if file.data_set_id:
125
- triples.append((id_, namespace.data_set_id, namespace[str(file.data_set_id)]))
124
+ triples.append((id_, namespace.data_set_id, namespace[f"Dataset_{file.data_set_id}"]))
126
125
 
127
126
  if file.asset_ids:
128
127
  for asset_id in file.asset_ids:
129
- triples.append((id_, namespace.asset, namespace[str(asset_id)]))
128
+ triples.append((id_, namespace.asset, namespace[f"Asset_{asset_id}"]))
130
129
 
131
130
  return triples
@@ -1,10 +1,9 @@
1
1
  from collections.abc import Iterable
2
- from datetime import datetime
2
+ from datetime import datetime, timezone
3
3
  from pathlib import Path
4
4
  from typing import cast
5
5
  from urllib.parse import quote
6
6
 
7
- import pytz
8
7
  from cognite.client import CogniteClient
9
8
  from cognite.client.data_classes import LabelDefinition, LabelDefinitionList
10
9
  from rdflib import RDF, Literal, Namespace
@@ -45,7 +44,7 @@ class LabelsExtractor(BaseExtractor):
45
44
 
46
45
  @classmethod
47
46
  def _labels2triples(cls, label: LabelDefinition, namespace: Namespace) -> list[Triple]:
48
- id_ = namespace[quote(cast(str, label.external_id))]
47
+ id_ = namespace[f"Label_{quote(label.dump()['externalId'])}"]
49
48
 
50
49
  # Set rdf type
51
50
  triples: list[Triple] = [(id_, RDF.type, namespace.Label)]
@@ -63,10 +62,10 @@ class LabelsExtractor(BaseExtractor):
63
62
 
64
63
  if label.created_time:
65
64
  triples.append(
66
- (id_, namespace.created_time, Literal(datetime.fromtimestamp(label.created_time / 1000, pytz.utc)))
65
+ (id_, namespace.created_time, Literal(datetime.fromtimestamp(label.created_time / 1000, timezone.utc)))
67
66
  )
68
67
 
69
68
  if label.data_set_id:
70
- triples.append((id_, namespace.data_set_id, namespace[str(label.data_set_id)]))
69
+ triples.append((id_, namespace.data_set_id, namespace[f"Dataset_{label.data_set_id}"]))
71
70
 
72
71
  return triples
@@ -1,11 +1,10 @@
1
1
  import uuid
2
2
  from collections.abc import Iterable
3
- from datetime import datetime
3
+ from datetime import datetime, timezone
4
4
  from pathlib import Path
5
5
  from typing import cast
6
6
  from urllib.parse import quote
7
7
 
8
- import pytz
9
8
  from cognite.client import CogniteClient
10
9
  from cognite.client.data_classes import Relationship, RelationshipList
11
10
  from rdflib import RDF, Literal, Namespace
@@ -49,27 +48,27 @@ class RelationshipsExtractor(BaseExtractor):
49
48
  """Converts an asset to triples."""
50
49
 
51
50
  # relationships do not have an internal id, so we generate one
52
- id_ = namespace[str(uuid.uuid4())]
51
+ id_ = namespace[f"Relationship_{uuid.uuid4()}"]
53
52
 
54
53
  # Set rdf type
55
54
  triples: list[Triple] = [(id_, RDF.type, namespace["Relationship"])]
56
55
 
57
56
  # Set source and target types
58
- if relationship.source_type:
57
+ if source_type := relationship.source_type:
59
58
  triples.append(
60
59
  (
61
60
  id_,
62
61
  namespace.source_type,
63
- namespace[relationship.source_type.title()],
62
+ namespace[source_type.title()],
64
63
  )
65
64
  )
66
65
 
67
- if relationship.target_type:
66
+ if target_type := relationship.target_type:
68
67
  triples.append(
69
68
  (
70
69
  id_,
71
70
  namespace.target_type,
72
- namespace[relationship.target_type.title()],
71
+ namespace[target_type.title()],
73
72
  )
74
73
  )
75
74
 
@@ -100,7 +99,7 @@ class RelationshipsExtractor(BaseExtractor):
100
99
  (
101
100
  id_,
102
101
  namespace.start_time,
103
- Literal(datetime.fromtimestamp(relationship.start_time / 1000, pytz.utc)),
102
+ Literal(datetime.fromtimestamp(relationship.start_time / 1000, timezone.utc)),
104
103
  )
105
104
  )
106
105
 
@@ -109,7 +108,7 @@ class RelationshipsExtractor(BaseExtractor):
109
108
  (
110
109
  id_,
111
110
  namespace.end_time,
112
- Literal(datetime.fromtimestamp(relationship.end_time / 1000, pytz.utc)),
111
+ Literal(datetime.fromtimestamp(relationship.end_time / 1000, timezone.utc)),
113
112
  )
114
113
  )
115
114
 
@@ -118,7 +117,7 @@ class RelationshipsExtractor(BaseExtractor):
118
117
  (
119
118
  id_,
120
119
  namespace.created_time,
121
- Literal(datetime.fromtimestamp(relationship.created_time / 1000, pytz.utc)),
120
+ Literal(datetime.fromtimestamp(relationship.created_time / 1000, timezone.utc)),
122
121
  )
123
122
  )
124
123
 
@@ -127,7 +126,7 @@ class RelationshipsExtractor(BaseExtractor):
127
126
  (
128
127
  id_,
129
128
  namespace.last_updated_time,
130
- Literal(datetime.fromtimestamp(relationship.last_updated_time / 1000, pytz.utc)),
129
+ Literal(datetime.fromtimestamp(relationship.last_updated_time / 1000, timezone.utc)),
131
130
  )
132
131
  )
133
132
 
@@ -144,10 +143,10 @@ class RelationshipsExtractor(BaseExtractor):
144
143
  for label in relationship.labels:
145
144
  # external_id can create ill-formed URIs, so we create websafe URIs
146
145
  # since labels do not have internal ids, we use the external_id as the id
147
- triples.append((id_, namespace.label, namespace[quote(label.dump()["externalId"])]))
146
+ triples.append((id_, namespace.label, namespace[f"Label_{quote(label.dump()['externalId'])}"]))
148
147
 
149
148
  # Create connection
150
149
  if relationship.data_set_id:
151
- triples.append((id_, namespace.dataset, namespace[str(relationship.data_set_id)]))
150
+ triples.append((id_, namespace.dataset, namespace[f"Dataset_{relationship.data_set_id}"]))
152
151
 
153
152
  return triples
@@ -1,9 +1,8 @@
1
1
  from collections.abc import Iterable
2
- from datetime import datetime
2
+ from datetime import datetime, timezone
3
3
  from pathlib import Path
4
4
  from typing import cast
5
5
 
6
- import pytz
7
6
  from cognite.client import CogniteClient
8
7
  from cognite.client.data_classes import Sequence, SequenceList
9
8
  from pydantic import AnyHttpUrl, ValidationError
@@ -44,7 +43,7 @@ class SequencesExtractor(BaseExtractor):
44
43
 
45
44
  @classmethod
46
45
  def _sequence2triples(cls, sequence: Sequence, namespace: Namespace) -> list[Triple]:
47
- id_ = namespace[str(sequence.id)]
46
+ id_ = namespace[f"Sequence_{sequence.id}"]
48
47
 
49
48
  # Set rdf type
50
49
  triples: list[Triple] = [(id_, RDF.type, namespace.Sequence)]
@@ -71,7 +70,11 @@ class SequencesExtractor(BaseExtractor):
71
70
 
72
71
  if sequence.created_time:
73
72
  triples.append(
74
- (id_, namespace.created_time, Literal(datetime.fromtimestamp(sequence.created_time / 1000, pytz.utc)))
73
+ (
74
+ id_,
75
+ namespace.created_time,
76
+ Literal(datetime.fromtimestamp(sequence.created_time / 1000, timezone.utc)),
77
+ )
75
78
  )
76
79
 
77
80
  if sequence.last_updated_time:
@@ -79,14 +82,14 @@ class SequencesExtractor(BaseExtractor):
79
82
  (
80
83
  id_,
81
84
  namespace.last_updated_time,
82
- Literal(datetime.fromtimestamp(sequence.last_updated_time / 1000, pytz.utc)),
85
+ Literal(datetime.fromtimestamp(sequence.last_updated_time / 1000, timezone.utc)),
83
86
  )
84
87
  )
85
88
 
86
89
  if sequence.data_set_id:
87
- triples.append((id_, namespace.data_set_id, namespace[str(sequence.data_set_id)]))
90
+ triples.append((id_, namespace.data_set_id, namespace[f"Dataset_{sequence.data_set_id}"]))
88
91
 
89
92
  if sequence.asset_id:
90
- triples.append((id_, namespace.asset, namespace[str(sequence.asset_id)]))
93
+ triples.append((id_, namespace.asset, namespace[f"Asset_{sequence.asset_id}"]))
91
94
 
92
95
  return triples
@@ -1,9 +1,8 @@
1
1
  from collections.abc import Iterable
2
- from datetime import datetime
2
+ from datetime import datetime, timezone
3
3
  from pathlib import Path
4
4
  from typing import cast
5
5
 
6
- import pytz
7
6
  from cognite.client import CogniteClient
8
7
  from cognite.client.data_classes import TimeSeries, TimeSeriesList
9
8
  from pydantic import AnyHttpUrl, ValidationError
@@ -46,7 +45,7 @@ class TimeSeriesExtractor(BaseExtractor):
46
45
 
47
46
  @classmethod
48
47
  def _timeseries2triples(cls, timeseries: TimeSeries, namespace: Namespace) -> list[Triple]:
49
- id_ = namespace[str(timeseries.id)]
48
+ id_ = namespace[f"TimeSeries_{timeseries.id}"]
50
49
 
51
50
  # Set rdf type
52
51
  triples: list[Triple] = [(id_, RDF.type, namespace.TimeSeries)]
@@ -86,7 +85,11 @@ class TimeSeriesExtractor(BaseExtractor):
86
85
 
87
86
  if timeseries.created_time:
88
87
  triples.append(
89
- (id_, namespace.created_time, Literal(datetime.fromtimestamp(timeseries.created_time / 1000, pytz.utc)))
88
+ (
89
+ id_,
90
+ namespace.created_time,
91
+ Literal(datetime.fromtimestamp(timeseries.created_time / 1000, timezone.utc)),
92
+ )
90
93
  )
91
94
 
92
95
  if timeseries.last_updated_time:
@@ -94,7 +97,7 @@ class TimeSeriesExtractor(BaseExtractor):
94
97
  (
95
98
  id_,
96
99
  namespace.last_updated_time,
97
- Literal(datetime.fromtimestamp(timeseries.last_updated_time / 1000, pytz.utc)),
100
+ Literal(datetime.fromtimestamp(timeseries.last_updated_time / 1000, timezone.utc)),
98
101
  )
99
102
  )
100
103
 
@@ -110,9 +113,9 @@ class TimeSeriesExtractor(BaseExtractor):
110
113
  triples.append((id_, namespace.unit_external_id, Literal(timeseries.unit_external_id)))
111
114
 
112
115
  if timeseries.data_set_id:
113
- triples.append((id_, namespace.dataset, namespace[str(timeseries.data_set_id)]))
116
+ triples.append((id_, namespace.dataset, namespace[f"Dataset_{timeseries.data_set_id}"]))
114
117
 
115
118
  if timeseries.asset_id:
116
- triples.append((id_, namespace.asset, namespace[str(timeseries.asset_id)]))
119
+ triples.append((id_, namespace.asset, namespace[f"Asset_{timeseries.asset_id}"]))
117
120
 
118
121
  return triples
@@ -31,6 +31,7 @@ __all__ = [
31
31
  "ReverseRelationMissingOtherSideWarning",
32
32
  "NodeTypeFilterOnParentViewWarning",
33
33
  "MissingViewInModelWarning",
34
+ "ViewSizeWarning",
34
35
  "ChangingContainerError",
35
36
  "ChangingViewError",
36
37
  ]
@@ -44,6 +45,30 @@ class DMSSchemaError(NeatValidationError, ABC): ...
44
45
  class DMSSchemaWarning(ValidationWarning, ABC): ...
45
46
 
46
47
 
48
+ @dataclass(frozen=True)
49
+ class ViewSizeWarning(DMSSchemaWarning):
50
+ description = (
51
+ "The number of properties in the {view} view is {count} which is more than "
52
+ "the recommended limit of {limit} properties. This can lead to performance issues."
53
+ )
54
+ fix = "Reduce the size of the view"
55
+ error_name: ClassVar[str] = "ViewSizeWarning"
56
+
57
+ view_id: dm.ViewId
58
+ limit: int
59
+ count: int
60
+
61
+ def message(self) -> str:
62
+ return self.description.format(view=repr(self.view_id), count=self.count, limit=self.limit)
63
+
64
+ def dump(self) -> dict[str, Any]:
65
+ output = super().dump()
66
+ output["view_id"] = self.view_id.dump()
67
+ output["limit"] = self.limit
68
+ output["count"] = self.count
69
+ return output
70
+
71
+
47
72
  @dataclass(frozen=True)
48
73
  class IncompleteSchemaError(DMSSchemaError):
49
74
  description = "This error is raised when the schema is claimed to be complete but missing some components"
@@ -0,0 +1 @@
1
+ DMS_CONTAINER_SIZE_LIMIT = 100
@@ -6,6 +6,7 @@ from cognite.client import data_modeling as dm
6
6
  from cognite.neat.rules import issues
7
7
  from cognite.neat.rules.issues import IssueList
8
8
  from cognite.neat.rules.models._base import DataModelType, ExtensionCategory, SchemaCompleteness
9
+ from cognite.neat.rules.models._constants import DMS_CONTAINER_SIZE_LIMIT
9
10
  from cognite.neat.rules.models.data_types import DataType
10
11
  from cognite.neat.rules.models.entities import ContainerEntity
11
12
  from cognite.neat.rules.models.wrapped_entities import RawFilter
@@ -34,7 +35,7 @@ class DMSPostValidation:
34
35
  self._validate_raw_filter()
35
36
  self._consistent_container_properties()
36
37
 
37
- self._referenced_views_and_containers_are_existing()
38
+ self._referenced_views_and_containers_are_existing_and_proper_size()
38
39
  if self.metadata.schema_ is SchemaCompleteness.extended:
39
40
  self._validate_extension()
40
41
  if self.metadata.schema_ is SchemaCompleteness.partial:
@@ -118,14 +119,16 @@ class DMSPostValidation:
118
119
  prop.constraint = prop.constraint or constraint_definition
119
120
  self.issue_list.extend(errors)
120
121
 
121
- def _referenced_views_and_containers_are_existing(self) -> None:
122
+ def _referenced_views_and_containers_are_existing_and_proper_size(self) -> None:
122
123
  defined_views = {view.view.as_id() for view in self.views}
123
124
  if self.metadata.schema_ is SchemaCompleteness.extended and self.rules.last:
124
125
  defined_views |= {view.view.as_id() for view in self.rules.last.views}
125
126
 
126
- errors: list[issues.NeatValidationError] = []
127
+ property_count_by_view: dict[dm.ViewId, int] = defaultdict(int)
128
+ errors: list[issues.ValidationIssue] = []
127
129
  for prop_no, prop in enumerate(self.properties):
128
- if prop.view and (view_id := prop.view.as_id()) not in defined_views:
130
+ view_id = prop.view.as_id()
131
+ if view_id not in defined_views:
129
132
  errors.append(
130
133
  issues.spreadsheet.NonExistingViewError(
131
134
  column="View",
@@ -137,6 +140,17 @@ class DMSPostValidation:
137
140
  url=None,
138
141
  )
139
142
  )
143
+ else:
144
+ property_count_by_view[view_id] += 1
145
+ for view_id, count in property_count_by_view.items():
146
+ if count > DMS_CONTAINER_SIZE_LIMIT:
147
+ errors.append(
148
+ issues.dms.ViewSizeWarning(
149
+ view_id=view_id,
150
+ limit=DMS_CONTAINER_SIZE_LIMIT,
151
+ count=count,
152
+ )
153
+ )
140
154
  if self.metadata.schema_ is SchemaCompleteness.complete:
141
155
  defined_containers = {container.container.as_id() for container in self.containers or []}
142
156
  if self.metadata.data_model_type == DataModelType.solution and self.rules.reference:
@@ -8,6 +8,7 @@ from cognite.neat.rules.models._base import (
8
8
  SchemaCompleteness,
9
9
  SheetList,
10
10
  )
11
+ from cognite.neat.rules.models._constants import DMS_CONTAINER_SIZE_LIMIT
11
12
  from cognite.neat.rules.models.data_types import DataType
12
13
  from cognite.neat.rules.models.domain import DomainRules
13
14
  from cognite.neat.rules.models.entities import (
@@ -41,6 +42,7 @@ class _InformationRulesConverter:
41
42
  self.last_classes = {class_.class_: class_ for class_ in self.rules.last.classes}
42
43
  else:
43
44
  self.last_classes = {}
45
+ self.property_count_by_container: dict[ContainerEntity, int] = defaultdict(int)
44
46
 
45
47
  def as_domain_rules(self) -> DomainRules:
46
48
  raise NotImplementedError("DomainRules not implemented yet")
@@ -81,7 +83,6 @@ class _InformationRulesConverter:
81
83
  last_dms_rules = self.rules.last.as_dms_architect_rules() if self.rules.last else None
82
84
  ref_dms_rules = self.rules.reference.as_dms_architect_rules() if self.rules.reference else None
83
85
 
84
- containers: list[DMSContainer] = []
85
86
  class_by_entity = {cls_.class_: cls_ for cls_ in self.rules.classes}
86
87
  if self.rules.last:
87
88
  for cls_ in self.rules.last.classes:
@@ -93,6 +94,7 @@ class _InformationRulesConverter:
93
94
  if rule_set:
94
95
  existing_containers.update({c.container for c in rule_set.containers or []})
95
96
 
97
+ containers: list[DMSContainer] = []
96
98
  for container_entity, class_entities in referenced_containers.items():
97
99
  if container_entity in existing_containers:
98
100
  continue
@@ -227,9 +229,14 @@ class _InformationRulesConverter:
227
229
  # the existing container in the last schema
228
230
  container_entity = prop.class_.as_container_entity(default_space)
229
231
  container_entity.suffix = self._bump_suffix(container_entity.suffix)
230
- return container_entity, prop.property_
231
232
  else:
232
- return prop.class_.as_container_entity(default_space), prop.property_
233
+ container_entity = prop.class_.as_container_entity(default_space)
234
+
235
+ while self.property_count_by_container[container_entity] >= DMS_CONTAINER_SIZE_LIMIT:
236
+ container_entity.suffix = self._bump_suffix(container_entity.suffix)
237
+
238
+ self.property_count_by_container[container_entity] += 1
239
+ return container_entity, prop.property_
233
240
 
234
241
  def _get_view_implements(self, cls_: InformationClass, metadata: InformationMetadata) -> list[ViewEntity]:
235
242
  if isinstance(cls_.reference, ReferenceEntity) and cls_.reference.prefix != metadata.prefix:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: cognite-neat
3
- Version: 0.81.0
3
+ Version: 0.81.2
4
4
  Summary: Knowledge graph transformation
5
5
  Home-page: https://cognite-neat.readthedocs-hosted.com/
6
6
  License: Apache-2.0
@@ -1,5 +1,5 @@
1
1
  cognite/neat/__init__.py,sha256=v-rRiDOgZ3sQSMQKq0vgUQZvpeOkoHFXissAx6Ktg84,61
2
- cognite/neat/_version.py,sha256=bQOEtESWMjrPZ4AptZehAfsFqrl052tpBH0z8LpC4Ao,23
2
+ cognite/neat/_version.py,sha256=i58sqP22jVmmo52bMeY-RXsH7kTVeecUQSQxkE5ZJX8,23
3
3
  cognite/neat/app/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
4
  cognite/neat/app/api/asgi/metrics.py,sha256=nxFy7L5cChTI0a-zkCiJ59Aq8yLuIJp5c9Dg0wRXtV0,152
5
5
  cognite/neat/app/api/configuration.py,sha256=2U5M6M252swvQPQyooA1EBzFUZNtcTmuSaywfJDgckM,4232
@@ -56,13 +56,13 @@ cognite/neat/graph/exceptions.py,sha256=R6pyOH774n9w2x_X_nrUr8OMAdjJMf_XPIqAvxIQ
56
56
  cognite/neat/graph/extractors/__init__.py,sha256=ozXL6ZLK36wp3uX4UACRVs6rbvynQg2JQlDgL1UM1Wk,1025
57
57
  cognite/neat/graph/extractors/_base.py,sha256=TOXDnlqske8DgnJwA0THDVRgmR79Acjm56yF0E-2w7I,356
58
58
  cognite/neat/graph/extractors/_classic_cdf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
59
- cognite/neat/graph/extractors/_classic_cdf/_assets.py,sha256=S5QB_38ysVodGRMqr_SWYYaUtkUCS6a6L2b5D1T-888,3812
60
- cognite/neat/graph/extractors/_classic_cdf/_events.py,sha256=cYd-A7bvRw2S-FDvvE58PPDNE7uhoq2Lhu9En2i6E58,3961
61
- cognite/neat/graph/extractors/_classic_cdf/_files.py,sha256=8CpqZl8pLBRNJ6oxxp2YLfCupxlXJQ6h0ymUlI1GzH8,4783
62
- cognite/neat/graph/extractors/_classic_cdf/_labels.py,sha256=GcMPoecniy3g59enKD71F3fghvnN4K3uj1Z9bo2ZKIE,2367
63
- cognite/neat/graph/extractors/_classic_cdf/_relationships.py,sha256=5kClA5zBlhyPT6hfanLP-upLvMcE6mLU4AhkRp49NYQ,4985
64
- cognite/neat/graph/extractors/_classic_cdf/_sequences.py,sha256=ov-n8cBEC73AMO1xam2GUDHv-7SyOEWXWRxLXh9flyY,3298
65
- cognite/neat/graph/extractors/_classic_cdf/_timeseries.py,sha256=xlnJ4fKvCJawZO6l6EHpx36RRAafd3BdYWS0ajNnGVM,4449
59
+ cognite/neat/graph/extractors/_classic_cdf/_assets.py,sha256=8FQvJqi8nclkQJ7YmSo4yNqs9uExaoCn_whMW8cIAx0,3855
60
+ cognite/neat/graph/extractors/_classic_cdf/_events.py,sha256=Z0vPcyOz4mCwY0Dqa5wAQZjczO1dbTUGM0X4Y10NLGQ,3995
61
+ cognite/neat/graph/extractors/_classic_cdf/_files.py,sha256=-6nCkXUCAnDsv4eDFDEiQ-U4SGhmW1VLxZJFUcszqjU,4831
62
+ cognite/neat/graph/extractors/_classic_cdf/_labels.py,sha256=wm7JFmsk7sHsOVpTsGBE0wargIuHD09Xu-OHK_Bm20g,2386
63
+ cognite/neat/graph/extractors/_classic_cdf/_relationships.py,sha256=n7gISeyhLjiaWYLWWRj20jmaYgdvJBdYSiZ0G8ZW6mk,5035
64
+ cognite/neat/graph/extractors/_classic_cdf/_sequences.py,sha256=o4yxkf81FGFrKkflvlyDYie05fTYsT_LcRFM63OTVCI,3406
65
+ cognite/neat/graph/extractors/_classic_cdf/_timeseries.py,sha256=KTYmL8vhXijlmkN1UFQrGpaCllpRekr1y55SoLhlLbg,4559
66
66
  cognite/neat/graph/extractors/_dexpi.py,sha256=N_xaI3wxBdMBePikEEMW-HhMijSmnwQNIqQJA_WUcbY,8526
67
67
  cognite/neat/graph/extractors/_mock_graph_generator.py,sha256=gziG2FFsLk-HmA9uxAeT9RCjVpFxjkCTLiC4tq2zgvw,14961
68
68
  cognite/neat/graph/extractors/_rdf_file.py,sha256=w4-XgPgNsmZOkNxjO1ZQCcopTntmmtxfDBkQxn1se6E,463
@@ -204,7 +204,7 @@ cognite/neat/rules/importers/_spreadsheet2rules.py,sha256=nKSJyZGoTho0bqQ_5_1XB9
204
204
  cognite/neat/rules/importers/_yaml2rules.py,sha256=F0uksSz1A3po5OlRM2152_w5j8D9oYTLB9NFTkSMlWI,4275
205
205
  cognite/neat/rules/issues/__init__.py,sha256=c12m0HAHHzF6oR8lKbULE3TxOPimTi9s1O9IIrtgh0g,549
206
206
  cognite/neat/rules/issues/base.py,sha256=x2YLCfmqtPlFLoURq3qHaprXCpFaQdf0iWkql-EMyps,2446
207
- cognite/neat/rules/issues/dms.py,sha256=CKztcpNu9E_ygbAmiODOhaYKPX6o9eaXeiod7Ak-kNY,23617
207
+ cognite/neat/rules/issues/dms.py,sha256=eZmbQhdo97SufNLKJu4QWlrhZCxmngiWTwPWIOx7GSA,24400
208
208
  cognite/neat/rules/issues/fileread.py,sha256=ao199mtvhPSW0IA8ZQZ0RzuLIIipYtL0jp6fLqxb4_c,5748
209
209
  cognite/neat/rules/issues/formatters.py,sha256=_ag2bJ9hncOj8pAGJvTTEPs9kTtxbD7vkqvS9Zcnizc,3385
210
210
  cognite/neat/rules/issues/importing.py,sha256=uSk4TXo_CO3bglBZkaiWekXLXXd31UWIZE95clVSLz4,13417
@@ -212,6 +212,7 @@ cognite/neat/rules/issues/spreadsheet.py,sha256=jBEczqon1G0H_mCfdCCffWdRLHO5ER8S
212
212
  cognite/neat/rules/issues/spreadsheet_file.py,sha256=YCp0Pk_TsiqYuOPdWpjUpre-zvi2c5_MvrC_dxw10YY,4964
213
213
  cognite/neat/rules/models/__init__.py,sha256=aqhQUidHYgOk5_iqdi6s72s2g8qyMRFXShYzh-ctNpw,782
214
214
  cognite/neat/rules/models/_base.py,sha256=7GUCflYZ7CDVyRZTYd4CYQJr7tPnMefd-1B9UktaWpY,11194
215
+ cognite/neat/rules/models/_constants.py,sha256=zPREgHT79_4FMg58QlaXc7A8XKRJrjP5SUgh63jDnTk,31
215
216
  cognite/neat/rules/models/_rdfpath.py,sha256=RoHnfWufjnDtwJh7UUzWKoJz8luvX7Gb5SDQORfkQTE,11030
216
217
  cognite/neat/rules/models/_types/__init__.py,sha256=l1tGxzE7ezNHIL72AoEvNHN2IFuitxOLxiHJG__s6t4,305
217
218
  cognite/neat/rules/models/_types/_base.py,sha256=2GhLUE1ukV8X8SGL_JDxpbWGZyAvOnSqAE6JmDh5wbI,929
@@ -224,11 +225,11 @@ cognite/neat/rules/models/dms/_rules.py,sha256=XTIEWG49VjNs_bICGlgMd6uk4hseY1H6U
224
225
  cognite/neat/rules/models/dms/_rules_input.py,sha256=apDDTQll9UAyYL5gS2vDxHsujWrGBilTp7lK2kzJWO8,13467
225
226
  cognite/neat/rules/models/dms/_schema.py,sha256=byMG67i80a4sSQS_0k8YGrDvh7whio4iLbmPEIy_P44,49514
226
227
  cognite/neat/rules/models/dms/_serializer.py,sha256=iqp2zyyf8jEcU-R3PERuN8nu248xIqyxiWj4owAn92g,6406
227
- cognite/neat/rules/models/dms/_validation.py,sha256=nPSyfM1vGZ7d9Uv_2vF2HvMetygtehXW7eNtPD6eW8E,13937
228
+ cognite/neat/rules/models/dms/_validation.py,sha256=5mk9L99FSwC8Ok7weEjnFJ_OZnmqMWUc6XFMTfkqfDw,14549
228
229
  cognite/neat/rules/models/domain.py,sha256=wZ-DeIPFnacbNlxSrRuLzUpnhHdTpzNc22z0sDfisi4,2880
229
230
  cognite/neat/rules/models/entities.py,sha256=lkLsKg8U3Xto30PCB85ScDpv2SPRVq1ukVEQHzH53_g,18868
230
231
  cognite/neat/rules/models/information/__init__.py,sha256=HR6g8xgyU53U7Ck8pPdbT70817Q4NC1r1pCRq5SA8iw,291
231
- cognite/neat/rules/models/information/_converter.py,sha256=JN63_G5bygdL5WCz-q0_ygiU0NHkzUxm5mZ3WD8yUes,11029
232
+ cognite/neat/rules/models/information/_converter.py,sha256=r0a2uyzv8m82xzAkYt_-ZXdMN5u46SA_mn95Oo7ng-s,11424
232
233
  cognite/neat/rules/models/information/_rules.py,sha256=ZVTOn5fEB-AbrXL8A6SN9DwOmF9FhgyS7FzibrkT6ZM,13546
233
234
  cognite/neat/rules/models/information/_rules_input.py,sha256=xmcQQl2vBYSG_IbxOwb6x4CdN3nIg_TY2-3RAeGDYic,10418
234
235
  cognite/neat/rules/models/information/_serializer.py,sha256=yti9I_xJruxrib66YIBInhze___Io-oPTQH6uWDumPE,3503
@@ -293,8 +294,8 @@ cognite/neat/workflows/steps_registry.py,sha256=fkTX14ZA7_gkUYfWIlx7A1XbCidvqR23
293
294
  cognite/neat/workflows/tasks.py,sha256=dqlJwKAb0jlkl7abbY8RRz3m7MT4SK8-7cntMWkOYjw,788
294
295
  cognite/neat/workflows/triggers.py,sha256=_BLNplzoz0iic367u1mhHMHiUrCwP-SLK6_CZzfODX0,7071
295
296
  cognite/neat/workflows/utils.py,sha256=gKdy3RLG7ctRhbCRwaDIWpL9Mi98zm56-d4jfHDqP1E,453
296
- cognite_neat-0.81.0.dist-info/LICENSE,sha256=W8VmvFia4WHa3Gqxq1Ygrq85McUNqIGDVgtdvzT-XqA,11351
297
- cognite_neat-0.81.0.dist-info/METADATA,sha256=4djs7mouWW6qiEmId9NcrSav--3Di_PmsMw_4gDirrw,9290
298
- cognite_neat-0.81.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
299
- cognite_neat-0.81.0.dist-info/entry_points.txt,sha256=61FPqiWb25vbqB0KI7znG8nsg_ibLHBvTjYnkPvNFso,50
300
- cognite_neat-0.81.0.dist-info/RECORD,,
297
+ cognite_neat-0.81.2.dist-info/LICENSE,sha256=W8VmvFia4WHa3Gqxq1Ygrq85McUNqIGDVgtdvzT-XqA,11351
298
+ cognite_neat-0.81.2.dist-info/METADATA,sha256=bx-287mvnhJUXa-KNK2sLiZXFn0vaIVUviebyKXeamk,9290
299
+ cognite_neat-0.81.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
300
+ cognite_neat-0.81.2.dist-info/entry_points.txt,sha256=61FPqiWb25vbqB0KI7znG8nsg_ibLHBvTjYnkPvNFso,50
301
+ cognite_neat-0.81.2.dist-info/RECORD,,