cognite-neat 0.119.1__py3-none-any.whl → 0.119.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (32)
  1. cognite/neat/_constants.py +34 -70
  2. cognite/neat/_graph/extractors/__init__.py +0 -6
  3. cognite/neat/_graph/loaders/_rdf2dms.py +5 -5
  4. cognite/neat/_graph/queries/__init__.py +1 -1
  5. cognite/neat/_graph/queries/_base.py +2 -456
  6. cognite/neat/_graph/queries/_queries.py +16 -0
  7. cognite/neat/_graph/queries/_select.py +440 -0
  8. cognite/neat/_graph/queries/_update.py +37 -0
  9. cognite/neat/_issues/errors/_external.py +4 -2
  10. cognite/neat/_rules/exporters/_rules2excel.py +240 -107
  11. cognite/neat/_rules/importers/_yaml2rules.py +7 -1
  12. cognite/neat/_rules/models/_base_rules.py +16 -1
  13. cognite/neat/_rules/models/dms/_validation.py +11 -2
  14. cognite/neat/_rules/transformers/_converters.py +16 -6
  15. cognite/neat/_session/_drop.py +2 -2
  16. cognite/neat/_session/_explore.py +4 -4
  17. cognite/neat/_session/_prepare.py +5 -5
  18. cognite/neat/_session/_read.py +6 -0
  19. cognite/neat/_session/_set.py +3 -3
  20. cognite/neat/_session/_show.py +1 -1
  21. cognite/neat/_session/_template.py +21 -2
  22. cognite/neat/_state/README.md +23 -0
  23. cognite/neat/_store/_graph_store.py +5 -5
  24. cognite/neat/_version.py +1 -1
  25. {cognite_neat-0.119.1.dist-info → cognite_neat-0.119.3.dist-info}/METADATA +37 -2
  26. {cognite_neat-0.119.1.dist-info → cognite_neat-0.119.3.dist-info}/RECORD +29 -28
  27. cognite/neat/_graph/extractors/_dexpi.py +0 -234
  28. cognite/neat/_graph/extractors/_iodd.py +0 -403
  29. cognite/neat/_graph/transformers/_iodd.py +0 -30
  30. {cognite_neat-0.119.1.dist-info → cognite_neat-0.119.3.dist-info}/LICENSE +0 -0
  31. {cognite_neat-0.119.1.dist-info → cognite_neat-0.119.3.dist-info}/WHEEL +0 -0
  32. {cognite_neat-0.119.1.dist-info → cognite_neat-0.119.3.dist-info}/entry_points.txt +0 -0
cognite/neat/_rules/exporters/_rules2excel.py

@@ -1,8 +1,7 @@
 from __future__ import annotations
 
 import itertools
-import json
-from datetime import datetime, timezone
+from datetime import datetime
 from pathlib import Path
 from types import GenericAlias
 from typing import Any, ClassVar, Literal, cast, get_args
@@ -13,17 +12,17 @@ from openpyxl.styles import Alignment, Border, Font, PatternFill, Side
 from openpyxl.worksheet.worksheet import Worksheet
 from rdflib import Namespace
 
+from cognite.neat._constants import COGNITE_CONCEPTS
 from cognite.neat._rules._constants import get_internal_properties
 from cognite.neat._rules._shared import VerifiedRules
 from cognite.neat._rules.models import (
-    ExtensionCategory,
-    SchemaCompleteness,
     SheetRow,
 )
-from cognite.neat._rules.models.data_types import _DATA_TYPE_BY_DMS_TYPE
-from cognite.neat._rules.models.dms import DMSMetadata
+from cognite.neat._rules.models._base_rules import BaseMetadata, RoleTypes
+from cognite.neat._rules.models.data_types import (
+    _DATA_TYPE_BY_DMS_TYPE,
+)
 from cognite.neat._rules.models.dms._rules import DMSRules
-from cognite.neat._rules.models.information import InformationMetadata
 from cognite.neat._rules.models.information._rules import InformationRules
 from cognite.neat._utils.spreadsheet import (
     find_column_with_value,
@@ -107,6 +106,51 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
             data.close()
         return None
 
+    def template(self, role: RoleTypes, filepath: Path | None = None) -> None | Workbook:
+        """This method will create an spreadsheet template for data modeling depending on the role.
+
+        Args:
+            role: The role for which the template is created. Can be either "dms" or "information".
+            filepath: The path to the file where the template will be saved.
+
+        """
+        workbook = Workbook()
+        # Remove default sheet named "Sheet"
+        workbook.remove(workbook["Sheet"])
+
+        rules_model = DMSRules if role == RoleTypes.dms else InformationRules
+
+        headers_by_sheet = rules_model.headers_by_sheet(by_alias=True)
+        headers_by_sheet.pop("Metadata")
+
+        self._write_metadata_sheet(
+            workbook,
+            cast(BaseMetadata, rules_model.model_fields["metadata"].annotation).default().model_dump(),
+        )
+
+        for sheet_name, headers in headers_by_sheet.items():
+            if sheet_name in ("Metadata", "Prefixes", "Reference", "Last"):
+                continue
+            sheet = self._create_sheet_with_header(workbook, headers, sheet_name)
+            self._style_sheet_header(sheet, headers)
+
+        self._adjust_column_widths(workbook)
+        self._hide_internal_columns(workbook)
+
+        if role == RoleTypes.dms:
+            self._add_dms_drop_downs(workbook)
+        else:
+            self._add_info_drop_downs(workbook)
+
+        if filepath:
+            try:
+                workbook.save(filepath)
+            finally:
+                workbook.close()
+            return None
+
+        return workbook
+
     def export(self, rules: VerifiedRules) -> Workbook:
         workbook = Workbook()
         # Remove default sheet named "Sheet"
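
The new template() method gives a direct way to produce an empty, styled modeling spreadsheet. A minimal usage sketch, assuming ExcelExporter is importable from the module in the file list above and can be constructed with its defaults:

    from pathlib import Path

    from cognite.neat._rules.exporters._rules2excel import ExcelExporter
    from cognite.neat._rules.models._base_rules import RoleTypes

    exporter = ExcelExporter()
    # With a filepath, the template is saved to disk and None is returned
    exporter.template(RoleTypes.dms, Path("dms_template.xlsx"))
    # Without one, the openpyxl Workbook is returned for further editing
    workbook = exporter.template(RoleTypes.information)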
@@ -129,23 +173,35 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
         self._adjust_column_widths(workbook)
 
         if self.hide_internal_columns:
-            for sheet in workbook.sheetnames:
-                if sheet.lower() == "metadata":
-                    continue
-                ws = workbook[sheet]
-                for col in get_internal_properties():
-                    column_letter = find_column_with_value(ws, col)
-                    if column_letter:
-                        ws.column_dimensions[column_letter].hidden = True
+            self._hide_internal_columns(workbook)
 
         # Only add drop downs if the rules are DMSRules
         if self.add_drop_downs and isinstance(rules, DMSRules):
-            self._add_drop_downs(workbook)
+            self._add_dms_drop_downs(workbook)
+        elif self.add_drop_downs and isinstance(rules, InformationRules):
+            self._add_info_drop_downs(workbook)
 
         return workbook
 
-    def _add_drop_downs(self, workbook: Workbook, no_rows: int = 100) -> None:
-        """Adds drop down menus to specific columns for fast and accurate data entry.
+    def _hide_internal_columns(self, workbook: Workbook) -> None:
+        """Hides internal columns in workbook sheets.
+
+        Args:
+            workbook: Workbook representation of the Excel file.
+
+        """
+        for sheet in workbook.sheetnames:
+            if sheet.lower() == "metadata":
+                continue
+            ws = workbook[sheet]
+            for col in get_internal_properties():
+                column_letter = find_column_with_value(ws, col)
+                if column_letter:
+                    ws.column_dimensions[column_letter].hidden = True
+
+    def _add_info_drop_downs(self, workbook: Workbook, no_rows: int = 100) -> None:
+        """Adds drop down menus to specific columns for fast and accurate data entry
+        in the Information rules.
 
         Args:
             workbook: Workbook representation of the Excel file.
@@ -160,14 +216,101 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
            as and Excel file. Probably, the validation is not copied to the new column,
            but instead reference to the data validation object is added.
        """
+        self._make_helper_info_sheet(workbook, no_rows)
 
-        self._make_helper_sheet(workbook)
+        # We need create individual data validation and cannot re-use the same one due
+        # the internals of openpyxl
+        dv_classes = generate_data_validation(self._helper_sheet_name, "A", no_header_rows=0, no_rows=no_rows)
+        dv_value_types = generate_data_validation(self._helper_sheet_name, "B", no_header_rows=0, no_rows=no_rows)
+        dv_implements = generate_data_validation(
+            self._helper_sheet_name,
+            "C",
+            no_header_rows=0,
+            no_rows=no_rows + len(COGNITE_CONCEPTS),
+        )
+
+        workbook["Properties"].add_data_validation(dv_classes)
+        workbook["Properties"].add_data_validation(dv_value_types)
+        workbook["Classes"].add_data_validation(dv_implements)
+
+        # we multiply no_rows with 100 since a view can have max 100 properties per view
+        if column := find_column_with_value(workbook["Properties"], "Class"):
+            dv_classes.add(f"{column}{3}:{column}{no_rows * 100}")
+
+        if column := find_column_with_value(workbook["Properties"], "Value Type"):
+            dv_value_types.add(f"{column}{3}:{column}{no_rows * 100}")
+
+        if column := find_column_with_value(workbook["Classes"], "Implements"):
+            dv_implements.add(f"{column}{3}:{column}{no_rows}")
+
+    def _make_helper_info_sheet(self, workbook: Workbook, no_rows: int) -> None:
+        """This helper Information sheet is used as source of data for drop down menus creation"""
+        workbook.create_sheet(title=self._helper_sheet_name)
+
+        for dtype_counter, dtype in enumerate(_DATA_TYPE_BY_DMS_TYPE.values()):
+            # skip types which require special handling or are surpassed by CDM
+            if dtype.xsd in ["enum", "timeseries", "sequence", "file", "json"]:
+                continue
+            workbook[self._helper_sheet_name].cell(row=dtype_counter + 1, column=2, value=dtype.xsd)
+
+        # Add Cognite Core Data Views:
+        for concept_counter, concept in enumerate(COGNITE_CONCEPTS):
+            workbook[self._helper_sheet_name].cell(
+                row=concept_counter + 1,
+                column=3,
+                value=f"cdf_cdm:{concept}(version=v1)",
+            )
+
+        for i in range(no_rows):
+            workbook[self._helper_sheet_name].cell(
+                row=i + 1,
+                column=1,
+                value=f'=IF(ISBLANK(Classes!A{i + 3}), "", Classes!A{i + 3})',
+            )
+            workbook[self._helper_sheet_name].cell(
+                row=dtype_counter + i + 2,
+                column=2,
+                value=f'=IF(ISBLANK(Classes!A{i + 3}), "", Classes!A{i + 3})',
+            )
+            workbook[self._helper_sheet_name].cell(
+                row=concept_counter + i + 2,
+                column=3,
+                value=f'=IF(ISBLANK(Classes!A{i + 3}), "", Classes!A{i + 3})',
+            )
+
+        workbook[self._helper_sheet_name].sheet_state = "hidden"
+
+    def _add_dms_drop_downs(self, workbook: Workbook, no_rows: int = 100) -> None:
+        """Adds drop down menus to specific columns for fast and accurate data entry
+        in the DMS rules.
+
+        Args:
+            workbook: Workbook representation of the Excel file.
+            no_rows: number of rows to add drop down menus. Defaults to 100*100.
+
+        !!! note "Why no_rows=100?"
+            Maximum number of views per data model is 100, thus this value is set accordingly
+
+        !!! note "Why defining individual data validation per desired column?
+            This is due to the internal working of openpyxl. Adding same validation to
+            different column leads to unexpected behavior when the openpyxl workbook is exported
+            as and Excel file. Probably, the validation is not copied to the new column,
+            but instead reference to the data validation object is added.
+        """
+
+        self._make_helper_dms_sheet(workbook, no_rows)
 
         # We need create individual data validation and cannot re-use the same one due
         # the internals of openpyxl
         dv_views = generate_data_validation(self._helper_sheet_name, "A", no_header_rows=0, no_rows=no_rows)
-        dv_containers = generate_data_validation(self._helper_sheet_name, "b", no_header_rows=0, no_rows=no_rows)
+        dv_containers = generate_data_validation(self._helper_sheet_name, "B", no_header_rows=0, no_rows=no_rows)
         dv_value_types = generate_data_validation(self._helper_sheet_name, "C", no_header_rows=0, no_rows=no_rows)
+        dv_implements = generate_data_validation(
+            self._helper_sheet_name,
+            "F",
+            no_header_rows=0,
+            no_rows=no_rows + len(COGNITE_CONCEPTS),
+        )
 
         dv_immutable = generate_data_validation(self._helper_sheet_name, "D", no_header_rows=0, no_rows=3)
         dv_in_model = generate_data_validation(self._helper_sheet_name, "D", no_header_rows=0, no_rows=3)
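
The docstring's note about one DataValidation per column matches how openpyxl behaves: a DataValidation object records the ranges it is attached to, so re-using one instance across columns can serialize incorrectly. A standalone openpyxl sketch of the pattern that generate_data_validation wraps (sheet, option, and range names here are illustrative):

    from openpyxl import Workbook
    from openpyxl.worksheet.datavalidation import DataValidation

    wb = Workbook()
    ws = wb.active
    # Hidden helper sheet holding the allowed values for the drop down
    helper = wb.create_sheet("_helper")
    for row, option in enumerate(["pump", "valve", "motor"], start=1):
        helper.cell(row=row, column=1, value=option)
    helper.sheet_state = "hidden"
    # One list-type validation object per target column
    dv = DataValidation(type="list", formula1="=_helper!$A$1:$A$3", allow_blank=True)
    ws.add_data_validation(dv)  # register the object with the worksheet first
    dv.add("C3:C100")  # then attach it to the target cell range
    wb.save("validated.xlsx")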
@@ -178,6 +321,7 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
         workbook["Properties"].add_data_validation(dv_value_types)
         workbook["Properties"].add_data_validation(dv_immutable)
         workbook["Views"].add_data_validation(dv_in_model)
+        workbook["Views"].add_data_validation(dv_implements)
         workbook["Containers"].add_data_validation(dv_used_for)
 
         # we multiply no_rows with 100 since a view can have max 100 properties per view
@@ -196,22 +340,30 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
         if column := find_column_with_value(workbook["Views"], "In Model"):
             dv_in_model.add(f"{column}{3}:{column}{no_rows}")
 
+        if column := find_column_with_value(workbook["Views"], "Implements"):
+            dv_implements.add(f"{column}{3}:{column}{no_rows}")
+
         if column := find_column_with_value(workbook["Containers"], "Used For"):
             dv_used_for.add(f"{column}{3}:{column}{no_rows}")
 
-    def _make_helper_sheet(self, workbook: Workbook) -> None:
-        """This helper sheet is used as source of data for drop down menus creation
-
-        !!! note "Why 100 rows?"
-            The number of rows is set to 100 since this is the maximum number of views
-            per data model.
-        """
+    def _make_helper_dms_sheet(self, workbook: Workbook, no_rows: int) -> None:
+        """This helper DMS sheet is used as source of data for drop down menus creation"""
         workbook.create_sheet(title=self._helper_sheet_name)
 
-        for counter, dtype in enumerate(_DATA_TYPE_BY_DMS_TYPE):
-            workbook[self._helper_sheet_name].cell(row=counter + 1, column=3, value=dtype)
+        for dtype_counter, dtype in enumerate(_DATA_TYPE_BY_DMS_TYPE):
+            if dtype in ["enum", "timeseries", "sequence", "file", "json"]:
+                continue
+            workbook[self._helper_sheet_name].cell(row=dtype_counter + 1, column=3, value=dtype)
 
-        for i in range(100):
+        # Add Cognite Core Data Views:
+        for concept_counter, concept in enumerate(COGNITE_CONCEPTS):
+            workbook[self._helper_sheet_name].cell(
+                row=concept_counter + 1,
+                column=6,
+                value=f"cdf_cdm:{concept}(version=v1)",
+            )
+
+        for i in range(no_rows):
             workbook[self._helper_sheet_name].cell(
                 row=i + 1,
                 column=1,
@@ -223,10 +375,15 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
                 value=f'=IF(ISBLANK(Containers!A{i + 3}), "", Containers!A{i + 3})',
             )
             workbook[self._helper_sheet_name].cell(
-                row=counter + i + 2,
+                row=dtype_counter + i + 2,
                 column=3,
                 value=f'=IF(ISBLANK(Views!A{i + 3}), "", Views!A{i + 3})',
             )
+            workbook[self._helper_sheet_name].cell(
+                row=concept_counter + i + 2,
+                column=6,
+                value=f'=IF(ISBLANK(Views!A{i + 3}), "", Views!A{i + 3})',
+            )
 
         for i, value in enumerate([True, False, ""]):
             workbook[self._helper_sheet_name].cell(row=i + 1, column=4, value=cast(bool | str, value))
@@ -236,6 +393,56 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
 
         workbook[self._helper_sheet_name].sheet_state = "hidden"
 
+    def _create_sheet_with_header(
+        self,
+        workbook: Workbook,
+        headers: list[str],
+        sheet_name: str,
+        sheet_prefix: str = "",
+    ) -> Worksheet:
+        """Creates an empty sheet with the given headers.
+
+        Args:
+            workbook: The workbook to add the sheet to.
+            headers: The headers to add to the sheet.
+            sheet_name: The name of the sheet.
+            sheet_prefix: The prefix to add to the sheet name, if any.
+        """
+
+        sheet = workbook.create_sheet(f"{sheet_prefix}{sheet_name}")
+        main_header = self._main_header_by_sheet_name[sheet_name]
+        sheet.append([main_header] + [""] * (len(headers) - 1))
+
+        if headers[0] == "Neat ID":
+            # Move the Neat ID to the end of the columns
+            headers = headers[1:] + ["Neat ID"]
+
+        # Append the headers to the sheet
+        sheet.append(headers)
+
+        return sheet
+
+    def _style_sheet_header(self, sheet: Worksheet, headers: list[str]) -> None:
+        """Styles the sheet with the given headers.
+
+        Args:
+            sheet: The sheet to style.
+            headers: The headers to style.
+        """
+        if self._styling_level > 0:
+            # This freezes all rows above the given row
+            sheet.freeze_panes = sheet["A3"]
+
+            sheet["A1"].alignment = Alignment(horizontal="left")
+
+        if self._styling_level > 1:
+            # Make the header row bold, larger, and colored
+            for cell, *_ in sheet.iter_cols(min_row=1, max_row=1, min_col=1, max_col=len(headers)):
+                cell.font = Font(bold=True, size=20)
+                cell.fill = PatternFill(fgColor="FFC000", patternType="solid")
+            for cell in sheet["2"]:
+                cell.font = Font(bold=True, size=14)
+
     def _write_sheets(
         self,
         workbook: Workbook,
@@ -246,16 +453,8 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
         for sheet_name, headers in rules.headers_by_sheet(by_alias=True).items():
             if sheet_name in ("Metadata", "Prefixes", "Reference", "Last"):
                 continue
-            sheet = workbook.create_sheet(f"{sheet_prefix}{sheet_name}")
-
-            main_header = self._main_header_by_sheet_name[sheet_name]
-            sheet.append([main_header] + [""] * (len(headers) - 1))
 
-            if headers[0] == "Neat ID":
-                # Move the Neat ID to the end of the columns
-                headers = headers[1:] + ["Neat ID"]
-
-            sheet.append(headers)
+            sheet = self._create_sheet_with_header(workbook, headers, sheet_name, sheet_prefix)
             fill_colors = itertools.cycle(["CADCFC", "FFFFFF"])
             fill_color = next(fill_colors)
@@ -292,19 +491,7 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
                     cell.border = Border(left=side, right=side, top=side, bottom=side)
             last_class = class_
 
-        if self._styling_level > 0:
-            # This freezes all rows above the given row
-            sheet.freeze_panes = sheet["A3"]
-
-            sheet["A1"].alignment = Alignment(horizontal="left")
-
-        if self._styling_level > 1:
-            # Make the header row bold, larger, and colored
-            for cell, *_ in sheet.iter_cols(min_row=1, max_row=1, min_col=1, max_col=len(headers)):
-                cell.font = Font(bold=True, size=20)
-                cell.fill = PatternFill(fgColor="FFC000", patternType="solid")
-            for cell in sheet["2"]:
-                cell.font = Font(bold=True, size=14)
+        self._style_sheet_header(sheet, headers)
 
     def _write_metadata_sheet(self, workbook: Workbook, metadata: dict[str, Any], sheet_prefix: str = "") -> None:
         # Excel does not support timezone in datetime strings
@@ -362,57 +549,3 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
                 max(current, max_length + 0.5), MAX_COLUMN_WIDTH
             )
         return None
-
-
-class _MetadataCreator:
-    creator_name = "<YOUR NAME>"
-
-    def __init__(
-        self,
-        action: Literal["create", "update"],
-        new_model_id: tuple[str, str] | None = None,
-    ):
-        self.action = action
-        self.new_model_id = new_model_id or ("YOUR_SPACE", "YOUR_EXTERNAL_ID")
-
-    def create(self, metadata: InformationMetadata | DMSMetadata) -> dict[str, Any]:
-        now = datetime.now(timezone.utc).replace(microsecond=0, tzinfo=None)
-        if self.action == "update":
-            output = json.loads(metadata.model_dump_json(by_alias=True))
-            # This is the same for Information and DMS
-            output["updated"] = now.isoformat()
-            output["schema"] = SchemaCompleteness.extended.value
-            output["extension"] = ExtensionCategory.addition.value
-            if value := output.get("creator"):
-                output["creator"] = f"{value}, {self.creator_name}"
-            else:
-                output["creator"] = self.creator_name
-            return output
-
-        new_metadata = self._create_new_info(now)
-        if isinstance(metadata, DMSMetadata):
-            from cognite.neat._rules.transformers._converters import _InformationRulesConverter
-
-            output_metadata: DMSMetadata | InformationMetadata = _InformationRulesConverter._convert_metadata_to_dms(
-                new_metadata
-            )
-        elif isinstance(metadata, InformationMetadata):
-            output_metadata = new_metadata
-        else:
-            raise ValueError(f"Bug in Neat: Unknown metadata type: {type(metadata)}")
-
-        created = json.loads(output_metadata.model_dump_json(by_alias=True))
-        created.pop("extension", None)
-        return created
-
-    def _create_new_info(self, now: datetime) -> InformationMetadata:
-        return InformationMetadata(
-            space=self.new_model_id[0],
-            external_id=self.new_model_id[1],
-            description=None,
-            version="1",
-            created=now,
-            updated=now,
-            creator=[self.creator_name],
-            name=self.new_model_id[1],
-        )
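
The export() entry point itself keeps its signature; a short usage sketch, assuming a verified rules object (for example a DMSRules instance) named rules is already at hand:

    exporter = ExcelExporter()
    workbook = exporter.export(rules)  # openpyxl Workbook with styled sheets
    workbook.save("rules.xlsx")
    workbook.close()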
cognite/neat/_rules/importers/_yaml2rules.py

@@ -8,6 +8,7 @@ from cognite.neat._issues.errors import (
     FileMissingRequiredFieldError,
     FileNotAFileError,
     FileNotFoundNeatError,
+    FileReadError,
     FileTypeUnexpectedError,
 )
 from cognite.neat._issues.warnings import NeatValueWarning
@@ -55,7 +56,12 @@ class YAMLImporter(BaseImporter[T_InputRules]):
             return cls({}, [FileNotAFileError(filepath)])
         elif filepath.suffix not in [".yaml", ".yml"]:
             return cls({}, [FileTypeUnexpectedError(filepath, frozenset([".yaml", ".yml"]))])
-        return cls(yaml.safe_load(filepath.read_text()), filepaths=[filepath], source_name=source_name)
+        try:
+            data = yaml.safe_load(filepath.read_text())
+        except yaml.YAMLError as exc:
+            return cls({}, [FileReadError(filepath, f"Invalid YAML: {exc!s}")])
+
+        return cls(data, filepaths=[filepath], source_name=source_name)
 
     def to_rules(self) -> ReadRules[T_InputRules]:
         if self._read_issues.has_errors or not self.raw_data:
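
The new try/except turns a malformed file into a FileReadError issue instead of an uncaught exception. The underlying PyYAML behavior, as a standalone sketch:

    import yaml

    broken = "key: [unclosed"
    try:
        yaml.safe_load(broken)
    except yaml.YAMLError as exc:
        # yaml.YAMLError is the base class for scanner and parser errors,
        # so this catches both tokenizer and structural failures
        print(f"Invalid YAML: {exc!s}")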
cognite/neat/_rules/models/_base_rules.py

@@ -233,6 +233,21 @@ class BaseMetadata(SchemaModel):
     def as_identifier(self) -> str:
         return repr(self.as_data_model_id())
 
+    @classmethod
+    def default(cls) -> "BaseMetadata":
+        """Returns a default instance of the metadata model."""
+        now = datetime.now()
+        return cls(
+            space="pleaseUpdateMe",
+            external_id="PleaseUpdateMe",
+            version="v1",
+            name="Please Update Me",
+            description="Please Update Me",
+            creator=["NEAT"],
+            created=now,
+            updated=now,
+        )
+
 
 class BaseRules(SchemaModel, ABC):
     """
@@ -263,7 +278,7 @@ class BaseRules(SchemaModel, ABC):
             annotation = annotation.__args__[0]
 
         try:
-            if isinstance(annotation, types.GenericAlias) and get_origin(annotation) is SheetList:
+            if isinstance(annotation, types.GenericAlias) and get_origin(annotation).__name__ == SheetList.__name__:
                 # We know that this is a SheetList, so we can safely access the annotation
                 # which is the concrete type of the SheetEntity.
                 model_fields = get_args(annotation)[0].model_fields  # type: ignore[union-attr]
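
This default() classmethod is what template() above calls via model_fields["metadata"].annotation to pre-fill the Metadata sheet with placeholders. A sketch of direct use, assuming DMSMetadata subclasses BaseMetadata with no extra required fields:

    from cognite.neat._rules.models.dms import DMSMetadata

    metadata = DMSMetadata.default()
    print(metadata.space)        # "pleaseUpdateMe"
    print(metadata.external_id)  # "PleaseUpdateMe"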
cognite/neat/_rules/models/dms/_validation.py

@@ -127,7 +127,7 @@ class DMSValidation:
         }
 
         if missing_views or missing_containers:
-            raise CDFMissingResourcesError(resources=f"{missing_views.union(missing_containers)}")
+            raise CDFMissingResourcesError(containers=tuple(missing_containers), views=tuple(missing_views))
 
         # Setup data structures for validation
         dms_schema = self._rules.as_schema()
@@ -577,7 +577,16 @@ class DMSValidation:
                 if not (
                     isinstance(target_property, dm.MappedPropertyApply | dm.MappedProperty)
                     # The direct relation is pointing to the view_id or one of its parents
-                    and (target_property.source == view_id or target_property.source in parents_by_view[view_id])
+                    and (
+                        (target_property.source == view_id or target_property.source in parents_by_view[view_id])
+                        # This is a hack that users use to create a multi value direct relations. It works by setting
+                        # the source of a direct relation to None. Then, you can have multiple reverse direct relations
+                        # through this property. In Search this will give you a multi value direct relation.
+                        # Thus, we must allow it here. Note that the missing source in the direct relation will give the
+                        # user a DirectRelationMissingSourceWarning so they know they are doing a not
+                        # recommended modeling pattern.
+                        or target_property.source is None
+                    )
                 ):
                     issue_list.append(
                         ReversedConnectionNotFeasibleError(
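
The workaround described in the new comment hinges on a direct relation whose source view is left unset. A hedged sketch using the cognite-sdk data modeling classes (space, container, and property names are illustrative):

    from cognite.client import data_modeling as dm

    # A direct relation property with source=None: several reverse direct
    # relations can then go through it, at the cost of a
    # DirectRelationMissingSourceWarning during validation.
    untyped_relation = dm.MappedPropertyApply(
        container=dm.ContainerId("my_space", "MyContainer"),
        container_property_identifier="relatedTo",
        source=None,
    )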
cognite/neat/_rules/transformers/_converters.py

@@ -19,8 +19,7 @@ from rdflib import Namespace
 from cognite.neat._client import NeatClient
 from cognite.neat._client.data_classes.data_modeling import ContainerApplyDict, ViewApplyDict
 from cognite.neat._constants import (
-    COGNITE_CORE_CONCEPTS,
-    COGNITE_CORE_FEATURES,
+    COGNITE_CONCEPTS,
     COGNITE_MODELS,
     COGNITE_SPACES,
     DMS_CONTAINER_PROPERTY_SIZE_LIMIT,
@@ -2011,9 +2010,7 @@ class _SubsetEditableCDMRules(VerifiedRulesTransformer[DMSRules, DMSRules]):
     """
 
     def __init__(self, views: set[ViewEntity]):
-        if not_in_cognite_core := {view.external_id for view in views} - COGNITE_CORE_CONCEPTS.union(
-            COGNITE_CORE_FEATURES
-        ):
+        if not_in_cognite_core := {view.external_id for view in views} - set(COGNITE_CONCEPTS):
             raise NeatValueError(
                 f"Concept(s) {', '.join(not_in_cognite_core)} is/are not part of the Cognite Core Data Model. Aborting."
             )
@@ -2220,11 +2217,13 @@ class AddCogniteProperties(RulesTransformer[ReadRules[InformationInputRules], Re
 
     Args:
         client: The client is used to look up the properties of the parent classes.
+        dummy_property: A dummy property is added to the user defined concepts
 
     """
 
-    def __init__(self, client: NeatClient) -> None:
+    def __init__(self, client: NeatClient, dummy_property: str | None = None) -> None:
         self._client = client
+        self._dummy_property = dummy_property
 
     @property
     def description(self) -> str:
@@ -2268,6 +2267,17 @@ class AddCogniteProperties(RulesTransformer[ReadRules[InformationInputRules], Re
                 new_properties.append(new_prop)
                 properties_by_class[class_entity][prop.property_] = new_prop
 
+            if self._dummy_property:
+                new_properties.append(
+                    InformationInputProperty(
+                        class_=class_entity,
+                        property_=f"{to_camel_case(class_entity.suffix)}{self._dummy_property}",
+                        value_type=String(),
+                        min_count=0,
+                        max_count=1,
+                    )
+                )
+
         new_classes: list[InformationInputClass] = input_.classes.copy()
         existing_classes = {cls.class_ for cls in input_.classes}
         for class_entity, view in views_by_class_entity.items():
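
With the new dummy_property argument, each user-defined concept gains one optional string property named after the class. A usage sketch, assuming a connected NeatClient and that the transformer is applied through the base class's transform method (variable names are hypothetical):

    transformer = AddCogniteProperties(client, dummy_property="GUID")
    # For a class "MyPump", this appends an optional string property "myPumpGUID"
    transformed_rules = transformer.transform(input_rules)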
cognite/neat/_session/_drop.py

@@ -45,14 +45,14 @@ class DropAPI:
         # Temporary solution until we agree on the form of specifying named graphs
         # it will default to the default named graph
         named_graph = self._state.instances.store.default_named_graph
-        uri_type_type = dict((v, k) for k, v in self._state.instances.store.queries.types(named_graph).items())
+        uri_type_type = dict((v, k) for k, v in self._state.instances.store.queries.select.types(named_graph).items())
         selected_uri_by_type: dict[URIRef, str] = {}
         for type_item in type_list:
             if type_item not in uri_type_type:
                 print(f"Type {type_item} not found.")
             selected_uri_by_type[uri_type_type[type_item]] = type_item
 
-        result = self._state.instances.store.queries.drop_types(list(selected_uri_by_type.keys()))
+        result = self._state.instances.store.queries.update.drop_types(list(selected_uri_by_type.keys()))
 
         for type_uri, count in result.items():
             print(f"Dropped {count} instances of type {selected_uri_by_type[type_uri]}")
cognite/neat/_session/_explore.py

@@ -21,19 +21,19 @@ class ExploreAPI:
 
     def types(self) -> pd.DataFrame:
         """List all the types of instances in the session."""
-        return pd.DataFrame(self._state.instances.store.queries.types_with_instance_and_property_count())
+        return pd.DataFrame(self._state.instances.store.queries.select.types_with_instance_and_property_count())
 
     def properties(self) -> pd.DataFrame:
         """List all the properties of a type of instances in the session."""
-        return pd.DataFrame(self._state.instances.store.queries.properties_with_count())
+        return pd.DataFrame(self._state.instances.store.queries.select.properties_with_count())
 
     def instance_with_properties(self, type: str) -> dict[str, set[str]]:
         """List all the instances of a type with their properties."""
-        available_types = self._state.instances.store.queries.list_types(remove_namespace=False)
+        available_types = self._state.instances.store.queries.select.list_types(remove_namespace=False)
         uri_by_type = {remove_namespace_from_uri(t[0]): t[0] for t in available_types}
         if type not in uri_by_type:
             raise NeatSessionError(
                 f"Type {type} not found. Available types are: {humanize_collection(uri_by_type.keys())}"
             )
         type_uri = cast(URIRef, uri_by_type[type])
-        return self._state.instances.store.queries.instances_with_properties(type_uri, remove_namespace=True)
+        return self._state.instances.store.queries.select.instances_with_properties(type_uri, remove_namespace=True)
cognite/neat/_session/_prepare.py

@@ -117,8 +117,8 @@ class InstancePrepareAPI:
         self._state.instances.store.transform(transformer)
 
     def _get_type_and_property_uris(self, type_: str, property_: str) -> tuple[URIRef, URIRef]:
-        type_uri = self._state.instances.store.queries.type_uri(type_)
-        property_uri = self._state.instances.store.queries.property_uri(property_)
+        type_uri = self._state.instances.store.queries.select.type_uri(type_)
+        property_uri = self._state.instances.store.queries.select.property_uri(property_)
 
         if not type_uri:
             raise NeatValueError(f"Type {type_} does not exist in the graph.")
@@ -132,7 +132,7 @@
                 f"{property_} has multiple ids found in the graph: {humanize_collection(property_uri)}."
             )
 
-        if not self._state.instances.store.queries.type_with_property(type_uri[0], property_uri[0]):
+        if not self._state.instances.store.queries.select.type_with_property(type_uri[0], property_uri[0]):
             raise NeatValueError(f"Property {property_} is not defined for type {type_}.")
         return type_uri[0], property_uri[0]
 
@@ -210,7 +210,7 @@
             except NeatValueError as e:
                 raise NeatSessionError(f"Cannot convert to type: {e}") from None
         else:
-            subject_predicate = self._state.instances.store.queries.property_uri(source[1])[0]
+            subject_predicate = self._state.instances.store.queries.select.property_uri(source[1])[0]
 
         transformer = LiteralToEntity(subject_type, subject_predicate, type, new_property)
         self._state.instances.store.transform(transformer)
@@ -241,7 +241,7 @@
             except NeatValueError as e:
                 raise NeatSessionError(f"Cannot convert to data type: {e}") from None
         else:
-            subject_predicate = self._state.instances.store.queries.property_uri(source[1])[0]
+            subject_predicate = self._state.instances.store.queries.select.property_uri(source[1])[0]
             transformer = ConnectionToLiteral(subject_type, subject_predicate)
             self._state.instances.store.transform(transformer)
 