cognite-neat 0.123.43__py3-none-any.whl → 0.125.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. Click here for more details.

Files changed (44) hide show
  1. cognite/neat/_data_model/importers/__init__.py +2 -1
  2. cognite/neat/_data_model/importers/_table_importer/__init__.py +0 -0
  3. cognite/neat/_data_model/importers/_table_importer/data_classes.py +141 -0
  4. cognite/neat/_data_model/importers/_table_importer/importer.py +76 -0
  5. cognite/neat/_data_model/importers/_table_importer/source.py +89 -0
  6. cognite/neat/_data_model/models/dms/__init__.py +12 -1
  7. cognite/neat/_data_model/models/dms/_base.py +1 -1
  8. cognite/neat/_data_model/models/dms/_constraints.py +5 -2
  9. cognite/neat/_data_model/models/dms/_data_types.py +26 -10
  10. cognite/neat/_data_model/models/dms/_indexes.py +6 -3
  11. cognite/neat/_data_model/models/dms/_types.py +17 -0
  12. cognite/neat/_data_model/models/dms/_view_property.py +14 -25
  13. cognite/neat/_data_model/models/entities/__init__.py +2 -1
  14. cognite/neat/_data_model/models/entities/_parser.py +32 -0
  15. cognite/neat/_exceptions.py +17 -0
  16. cognite/neat/_session/__init__.py +0 -0
  17. cognite/neat/_session/_session.py +33 -0
  18. cognite/neat/_session/_state_machine/__init__.py +23 -0
  19. cognite/neat/_session/_state_machine/_base.py +27 -0
  20. cognite/neat/_session/_state_machine/_states.py +150 -0
  21. cognite/neat/_utils/text.py +22 -0
  22. cognite/neat/_utils/useful_types.py +4 -0
  23. cognite/neat/_utils/validation.py +63 -30
  24. cognite/neat/_version.py +1 -1
  25. cognite/neat/v0/core/_data_model/_constants.py +1 -0
  26. cognite/neat/v0/core/_data_model/exporters/_data_model2excel.py +3 -3
  27. cognite/neat/v0/core/_data_model/importers/_dms2data_model.py +4 -3
  28. cognite/neat/v0/core/_data_model/importers/_spreadsheet2data_model.py +85 -5
  29. cognite/neat/v0/core/_data_model/models/entities/__init__.py +2 -0
  30. cognite/neat/v0/core/_data_model/models/entities/_single_value.py +14 -0
  31. cognite/neat/v0/core/_data_model/models/entities/_types.py +10 -0
  32. cognite/neat/v0/core/_data_model/models/physical/_exporter.py +3 -11
  33. cognite/neat/v0/core/_data_model/models/physical/_unverified.py +61 -12
  34. cognite/neat/v0/core/_data_model/models/physical/_validation.py +8 -4
  35. cognite/neat/v0/core/_data_model/models/physical/_verified.py +86 -15
  36. cognite/neat/v0/core/_data_model/transformers/_converters.py +11 -4
  37. cognite/neat/v0/core/_store/_instance.py +33 -0
  38. cognite/neat/v0/core/_utils/spreadsheet.py +17 -3
  39. cognite/neat/v0/session/_base.py +2 -0
  40. cognite/neat/v0/session/_diff.py +51 -0
  41. {cognite_neat-0.123.43.dist-info → cognite_neat-0.125.0.dist-info}/METADATA +1 -1
  42. {cognite_neat-0.123.43.dist-info → cognite_neat-0.125.0.dist-info}/RECORD +44 -32
  43. {cognite_neat-0.123.43.dist-info → cognite_neat-0.125.0.dist-info}/WHEEL +0 -0
  44. {cognite_neat-0.123.43.dist-info → cognite_neat-0.125.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,3 +1,4 @@
1
1
  from ._base import DMSImporter
2
+ from ._table_importer.importer import DMSTableImporter
2
3
 
3
- __all__ = ["DMSImporter"]
4
+ __all__ = ["DMSImporter", "DMSTableImporter"]
@@ -0,0 +1,141 @@
1
+ from collections.abc import Mapping
2
+ from typing import Annotated, cast
3
+
4
+ from pydantic import AliasGenerator, BaseModel, BeforeValidator, Field, model_validator
5
+ from pydantic.alias_generators import to_camel
6
+
7
+ from cognite.neat._data_model.models.entities import ParsedEntity, parse_entities, parse_entity
8
+ from cognite.neat._utils.text import title_case
9
+ from cognite.neat._utils.useful_types import CellValueType
10
+
11
+
12
+ def parse_entity_str(v: str) -> ParsedEntity:
13
+ try:
14
+ return parse_entity(v)
15
+ except ValueError as e:
16
+ raise ValueError(f"Invalid entity syntax: {e}") from e
17
+
18
+
19
+ def parse_entities_str(v: str) -> list[ParsedEntity] | None:
20
+ try:
21
+ return parse_entities(v)
22
+ except ValueError as e:
23
+ raise ValueError(f"Invalid entity list syntax: {e}") from e
24
+
25
+
26
+ Entity = Annotated[ParsedEntity, BeforeValidator(parse_entity_str, str)]
27
+ EntityList = Annotated[list[ParsedEntity], BeforeValidator(parse_entities_str, str)]
28
+
29
+
30
+ class TableObj(
31
+ BaseModel,
32
+ extra="ignore",
33
+ alias_generator=AliasGenerator(
34
+ validation_alias=title_case,
35
+ serialization_alias=to_camel,
36
+ ),
37
+ ): ...
38
+
39
+
40
+ class MetadataValue(TableObj):
41
+ key: str
42
+ value: CellValueType
43
+
44
+
45
+ class DMSProperty(TableObj):
46
+ view: Entity
47
+ view_property: str
48
+ name: str | None = None
49
+ description: str | None = None
50
+ connection: Entity | None
51
+ value_type: Entity
52
+ min_count: int | None
53
+ max_count: int | None
54
+ immutable: bool | None = None
55
+ default: CellValueType | None = None
56
+ auto_increment: bool | None = None
57
+ container: Entity | None = None
58
+ container_property: str | None = None
59
+ container_property_name: str | None = None
60
+ container_property_description: str | None = None
61
+ index: EntityList | None = None
62
+ constraint: EntityList | None = None
63
+
64
+
65
+ class DMSView(TableObj):
66
+ view: Entity
67
+ name: str | None = None
68
+ description: str | None = None
69
+ implements: EntityList | None = None
70
+ filter: str | None = None
71
+ in_model: bool | None = None
72
+
73
+
74
+ class DMSContainer(TableObj):
75
+ container: Entity
76
+ name: str | None = None
77
+ description: str | None = None
78
+ constraint: EntityList | None = None
79
+ used_for: str | None = None
80
+
81
+
82
+ class DMSEnum(TableObj):
83
+ collection: str
84
+ value: str
85
+ name: str | None = None
86
+ description: str | None = None
87
+
88
+
89
+ class DMSNode(TableObj):
90
+ node: Entity
91
+
92
+
93
+ class TableDMS(TableObj):
94
+ metadata: list[MetadataValue]
95
+ properties: list[DMSProperty]
96
+ views: list[DMSView]
97
+ containers: list[DMSContainer] = Field(default_factory=list)
98
+ enum: list[DMSEnum] = Field(default_factory=list)
99
+ nodes: list[DMSNode] = Field(default_factory=list)
100
+
101
+ @model_validator(mode="before")
102
+ def _title_case_keys(
103
+ cls, data: dict[str, list[dict[str, CellValueType]]]
104
+ ) -> dict[str, list[dict[str, CellValueType]]]:
105
+ if isinstance(data, dict):
106
+ # We are case-insensitive on the table names.
107
+ return {title_case(k): v for k, v in data.items()}
108
+ return data
109
+
110
+
111
+ DMS_API_MAPPING: Mapping[str, Mapping[str, str]] = {
112
+ "Views": {
113
+ "space": "View",
114
+ "externalId": "View",
115
+ "version": "View",
116
+ **{
117
+ cast(str, field_.serialization_alias): cast(str, field_.validation_alias)
118
+ for field_id, field_ in DMSView.model_fields.items()
119
+ if field_id != "View"
120
+ },
121
+ },
122
+ "Containers": {
123
+ "space": "Container",
124
+ "externalId": "Container",
125
+ **{
126
+ cast(str, field_.serialization_alias): cast(str, field_.validation_alias)
127
+ for field_id, field_ in DMSContainer.model_fields.items()
128
+ if field_id != "Container"
129
+ },
130
+ },
131
+ "Properties": {
132
+ "space": "View",
133
+ "externalId": "View",
134
+ "property": "ViewProperty",
135
+ **{
136
+ cast(str, field_.serialization_alias): cast(str, field_.validation_alias)
137
+ for field_id, field_ in DMSProperty.model_fields.items()
138
+ if field_id not in ("View", "ViewProperty")
139
+ },
140
+ },
141
+ }
@@ -0,0 +1,76 @@
1
+ from collections.abc import Mapping
2
+ from typing import ClassVar, cast
3
+
4
+ from pydantic import ValidationError
5
+
6
+ from cognite.neat._data_model.importers._base import DMSImporter
7
+ from cognite.neat._data_model.models.dms import (
8
+ RequestSchema,
9
+ )
10
+ from cognite.neat._exceptions import DataModelImportError
11
+ from cognite.neat._issues import ModelSyntaxError
12
+ from cognite.neat._utils.useful_types import CellValueType
13
+ from cognite.neat._utils.validation import as_json_path, humanize_validation_error
14
+
15
+ from .data_classes import TableDMS
16
+
17
+
18
+ class DMSTableImporter(DMSImporter):
19
+ """Imports DMS from a table structure.
20
+
21
+ The tables are expected to be a dictionary where the keys are the table names and the values
22
+ are lists of dictionaries representing the rows in the table.
23
+ """
24
+
25
+ # We can safely cast as we know the validation_alias is always set to a str.
26
+ REQUIRED_SHEETS = tuple(
27
+ cast(str, field_.validation_alias) for field_ in TableDMS.model_fields.values() if field_.is_required()
28
+ )
29
+ REQUIRED_SHEET_MESSAGES: ClassVar[Mapping[str, str]] = {
30
+ f"Missing required column: {sheet!r}": f"Missing required sheet: {sheet!r}" for sheet in REQUIRED_SHEETS
31
+ }
32
+
33
+ def __init__(self, tables: dict[str, list[dict[str, CellValueType]]]) -> None:
34
+ self._table = tables
35
+
36
+ def to_data_model(self) -> RequestSchema:
37
+ raise NotImplementedError()
38
+
39
+ def _read_tables(self) -> TableDMS:
40
+ try:
41
+ # Check tables, columns, data type and entity syntax.
42
+ table = TableDMS.model_validate(self._table)
43
+ except ValidationError as e:
44
+ errors = self._create_error_messages(e)
45
+ raise DataModelImportError(errors) from None
46
+ return table
47
+
48
+ def _create_error_messages(self, error: ValidationError) -> list[ModelSyntaxError]:
49
+ errors: list[ModelSyntaxError] = []
50
+ seen: set[str] = set()
51
+ for message in humanize_validation_error(
52
+ error,
53
+ humanize_location=self._location,
54
+ field_name="column",
55
+ missing_required_descriptor="missing",
56
+ ):
57
+ # Replace messages about missing required columns with missing required sheets.
58
+ message = self.REQUIRED_SHEET_MESSAGES.get(message, message)
59
+ if message in seen:
60
+ # We treat all rows as the same, so we get duplicated errors for each row.
61
+ continue
62
+ seen.add(message)
63
+ errors.append(ModelSyntaxError(message=message))
64
+ return errors
65
+
66
+ @staticmethod
67
+ def _location(loc: tuple[str | int, ...]) -> str:
68
+ if isinstance(loc[0], str) and len(loc) == 2: # Sheet + row.
69
+ # We skip the row as we treat all rows as the same. For example, if a required column is missing in one
70
+ # row, it is missing in all rows.
71
+ return f"{loc[0]} sheet"
72
+ elif len(loc) == 3 and isinstance(loc[0], str) and isinstance(loc[1], int) and isinstance(loc[2], str):
73
+ # This means there is something wrong in a specific cell.
74
+ return f"{loc[0]} sheet row {loc[1] + 1} column {loc[2]!r}"
75
+ # This should be unreachable as the TableDMS model only has 2 levels.
76
+ return as_json_path(loc)
@@ -0,0 +1,89 @@
1
+ from collections.abc import Mapping
2
+ from dataclasses import dataclass, field
3
+
4
+ from .data_classes import DMS_API_MAPPING
5
+
6
+
7
+ @dataclass
8
+ class SpreadsheetReadContext:
9
+ """This class is used to store information about the source spreadsheet.
10
+
11
+ It is used to adjust row numbers to account for header rows and empty rows
12
+ such that the error/warning messages are accurate.
13
+ """
14
+
15
+ header_row: int = 1
16
+ empty_rows: list[int] = field(default_factory=list)
17
+ skipped_rows: list[int] = field(default_factory=list)
18
+ is_one_indexed: bool = True
19
+
20
+ def __post_init__(self) -> None:
21
+ self.empty_rows.sort()
22
+ self.skipped_rows.sort()
23
+
24
+ def adjusted_row_number(self, row_no: int) -> int:
25
+ output = row_no
26
+ for empty_row in self.empty_rows:
27
+ if empty_row <= output:
28
+ output += 1
29
+ else:
30
+ break
31
+
32
+ for skipped_rows in self.skipped_rows:
33
+ if skipped_rows <= output:
34
+ output += 1
35
+ else:
36
+ break
37
+
38
+ return output + self.header_row + (1 if self.is_one_indexed else 0)
39
+
40
+
41
+ @dataclass
42
+ class TableSource:
43
+ source: str
44
+ table_read: dict[str, SpreadsheetReadContext] = field(default_factory=dict)
45
+
46
+ def location(self, path: tuple[int | str, ...]) -> str:
47
+ table_id: str | None = None
48
+ row_no: int | None = None
49
+ column: str | None = None
50
+ if len(path) >= 1 and isinstance(path[0], str):
51
+ table_id = path[0]
52
+ if len(path) >= 2 and isinstance(path[1], int):
53
+ row_no = path[1]
54
+ if len(path) >= 3 and isinstance(path[2], str):
55
+ column = path[2]
56
+ column = self.field_to_column(table_id, column)
57
+ if isinstance(row_no, int):
58
+ row_no = self.adjust_row_number(table_id, row_no)
59
+ location_parts = []
60
+ if table_id is not None:
61
+ location_parts.append(f"table {table_id!r}")
62
+ if row_no is not None:
63
+ location_parts.append(f"row {row_no}")
64
+ if column is not None:
65
+ location_parts.append(f"column {column!r}")
66
+ if len(path) > 4:
67
+ location_parts.append("-> " + ".".join(str(p) for p in path[3:]))
68
+
69
+ return " ".join(location_parts)
70
+
71
+ def adjust_row_number(self, table_id: str | None, row_no: int) -> int:
72
+ table_read = table_id and self.table_read.get(table_id)
73
+ if table_read:
74
+ return table_read.adjusted_row_number(row_no)
75
+ return row_no + 1 # Convert to 1-indexed if no table read info is available
76
+
77
+ @classmethod
78
+ def field_to_column(cls, table_id: str | None, field_: str) -> str:
79
+ """Maps the field name used in the DMS API to the column named used by Neat."""
80
+ mapping = cls.field_mapping(table_id)
81
+ if mapping is not None:
82
+ return mapping.get(field_, field_)
83
+ return field_
84
+
85
+ @classmethod
86
+ def field_mapping(cls, table_id: str | int | None) -> Mapping[str, str] | None:
87
+ if not isinstance(table_id, str):
88
+ return None
89
+ return DMS_API_MAPPING.get(table_id)
@@ -1,6 +1,7 @@
1
1
  from cognite.neat._data_model.models.dms._base import Resource, WriteableResource
2
2
  from cognite.neat._data_model.models.dms._constraints import (
3
3
  Constraint,
4
+ ConstraintAdapter,
4
5
  ConstraintDefinition,
5
6
  RequiresConstraintDefinition,
6
7
  UniquenessConstraintDefinition,
@@ -14,10 +15,14 @@ from cognite.neat._data_model.models.dms._container import (
14
15
  from cognite.neat._data_model.models.dms._data_types import (
15
16
  BooleanProperty,
16
17
  DataType,
18
+ DataTypeAdapter,
17
19
  DateProperty,
18
20
  DirectNodeRelation,
19
21
  EnumProperty,
22
+ EnumValue,
20
23
  FileCDFExternalIdReference,
24
+ Float32Property,
25
+ Float64Property,
21
26
  FloatProperty,
22
27
  Int32Property,
23
28
  Int64Property,
@@ -29,7 +34,7 @@ from cognite.neat._data_model.models.dms._data_types import (
29
34
  TimeseriesCDFExternalIdReference,
30
35
  TimestampProperty,
31
36
  )
32
- from cognite.neat._data_model.models.dms._indexes import BtreeIndex, Index, IndexDefinition, InvertedIndex
37
+ from cognite.neat._data_model.models.dms._indexes import BtreeIndex, Index, IndexAdapter, IndexDefinition, InvertedIndex
33
38
  from cognite.neat._data_model.models.dms._space import Space, SpaceRequest, SpaceResponse
34
39
 
35
40
  from ._data_model import DataModelRequest, DataModelResponse
@@ -68,6 +73,7 @@ __all__ = [
68
73
  "BtreeIndex",
69
74
  "ConnectionPropertyDefinition",
70
75
  "Constraint",
76
+ "ConstraintAdapter",
71
77
  "ConstraintDefinition",
72
78
  "ConstraintOrIndexState",
73
79
  "Container",
@@ -80,12 +86,17 @@ __all__ = [
80
86
  "DataModelRequest",
81
87
  "DataModelResponse",
82
88
  "DataType",
89
+ "DataTypeAdapter",
83
90
  "DateProperty",
84
91
  "DirectNodeRelation",
85
92
  "EnumProperty",
93
+ "EnumValue",
86
94
  "FileCDFExternalIdReference",
95
+ "Float32Property",
96
+ "Float64Property",
87
97
  "FloatProperty",
88
98
  "Index",
99
+ "IndexAdapter",
89
100
  "IndexDefinition",
90
101
  "Int32Property",
91
102
  "Int64Property",
@@ -20,7 +20,7 @@ class Resource(BaseModelObject):
20
20
  T_Resource = TypeVar("T_Resource", bound=Resource)
21
21
 
22
22
 
23
- class WriteableResource(Generic[T_Resource], Resource, ABC):
23
+ class WriteableResource(Resource, Generic[T_Resource], ABC):
24
24
  @abstractmethod
25
25
  def as_request(self) -> T_Resource:
26
26
  """Convert the response model to a request model by removing read-only fields."""
@@ -1,10 +1,11 @@
1
1
  from abc import ABC
2
2
  from typing import Annotated, Literal
3
3
 
4
- from pydantic import Field
4
+ from pydantic import Field, TypeAdapter
5
5
 
6
6
  from ._base import BaseModelObject
7
7
  from ._references import ContainerReference
8
+ from ._types import Bool
8
9
 
9
10
 
10
11
  class ConstraintDefinition(BaseModelObject, ABC):
@@ -16,7 +17,7 @@ class UniquenessConstraintDefinition(ConstraintDefinition):
16
17
  properties: list[str] = Field(
17
18
  description="List of properties included in the constraint.", min_length=1, max_length=10
18
19
  )
19
- by_space: bool | None = Field(default=None, description="Whether to make the constraint space-specific.")
20
+ by_space: Bool | None = Field(default=None, description="Whether to make the constraint space-specific.")
20
21
 
21
22
 
22
23
  class RequiresConstraintDefinition(ConstraintDefinition):
@@ -28,3 +29,5 @@ Constraint = Annotated[
28
29
  UniquenessConstraintDefinition | RequiresConstraintDefinition,
29
30
  Field(discriminator="constraint_type"),
30
31
  ]
32
+
33
+ ConstraintAdapter: TypeAdapter[Constraint] = TypeAdapter(Constraint)
@@ -1,9 +1,8 @@
1
+ import re
1
2
  from abc import ABC
2
3
  from typing import Annotated, Literal
3
4
 
4
- from pydantic import Field, field_validator
5
-
6
- from cognite.neat._utils.text import humanize_collection
5
+ from pydantic import Field, TypeAdapter, field_validator
7
6
 
8
7
  from ._base import BaseModelObject
9
8
  from ._constants import ENUM_VALUE_IDENTIFIER_PATTERN, FORBIDDEN_ENUM_VALUES, INSTANCE_ID_PATTERN
@@ -111,6 +110,7 @@ class DirectNodeRelation(ListablePropertyTypeDefinition):
111
110
 
112
111
  class EnumValue(BaseModelObject):
113
112
  name: str | None = Field(
113
+ None,
114
114
  max_length=255,
115
115
  description="The name of the enum value.",
116
116
  )
@@ -121,6 +121,9 @@ class EnumValue(BaseModelObject):
121
121
  )
122
122
 
123
123
 
124
+ _ENUM_KEY = re.compile(ENUM_VALUE_IDENTIFIER_PATTERN)
125
+
126
+
124
127
  class EnumProperty(PropertyTypeDefinition):
125
128
  type: Literal["enum"] = "enum"
126
129
  unknown_value: str | None = Field(
@@ -129,22 +132,33 @@ class EnumProperty(PropertyTypeDefinition):
129
132
  "provide forward-compatibility, Specifying what value to use if the client does not "
130
133
  "recognize the returned value. It is not possible to ingest the unknown value, "
131
134
  "but it must be part of the allowed values.",
135
+ min_length=1,
136
+ max_length=128,
137
+ pattern=ENUM_VALUE_IDENTIFIER_PATTERN,
132
138
  )
133
139
  values: dict[str, EnumValue] = Field(
134
140
  description="A set of all possible values for the enum property.",
135
141
  min_length=1,
136
142
  max_length=32,
137
- pattern=ENUM_VALUE_IDENTIFIER_PATTERN,
138
143
  )
139
144
 
140
145
  @field_validator("values", mode="after")
141
146
  def _valid_enum_value(cls, val: dict[str, EnumValue]) -> dict[str, EnumValue]:
142
- invalid_enum_values = set(val.keys()).intersection(FORBIDDEN_ENUM_VALUES)
143
- if invalid_enum_values:
144
- raise ValueError(
145
- "Enum values cannot be any of the following reserved values: "
146
- f"{humanize_collection(invalid_enum_values)}"
147
- )
147
+ errors: list[str] = []
148
+ for key in val.keys():
149
+ if not _ENUM_KEY.match(key):
150
+ errors.append(
151
+ f"Enum value {key!r} is not valid. Enum values must match "
152
+ f"the pattern: {ENUM_VALUE_IDENTIFIER_PATTERN}"
153
+ )
154
+ if len(key) > 128 or len(key) < 1:
155
+ errors.append(f"Enum value {key!r} must be between 1 and 128 characters long.")
156
+ if key.lower() in FORBIDDEN_ENUM_VALUES:
157
+ errors.append(
158
+ f"Enum value {key!r} cannot be any of the following reserved values: {FORBIDDEN_ENUM_VALUES}"
159
+ )
160
+ if errors:
161
+ raise ValueError(";".join(errors))
148
162
  return val
149
163
 
150
164
 
@@ -165,3 +179,5 @@ DataType = Annotated[
165
179
  | EnumProperty,
166
180
  Field(discriminator="type"),
167
181
  ]
182
+
183
+ DataTypeAdapter: TypeAdapter[DataType] = TypeAdapter(DataType)
@@ -1,9 +1,10 @@
1
1
  from abc import ABC
2
2
  from typing import Annotated, Literal
3
3
 
4
- from pydantic import Field
4
+ from pydantic import Field, TypeAdapter
5
5
 
6
6
  from ._base import BaseModelObject
7
+ from ._types import Bool
7
8
 
8
9
 
9
10
  class IndexDefinition(BaseModelObject, ABC):
@@ -13,8 +14,8 @@ class IndexDefinition(BaseModelObject, ABC):
13
14
 
14
15
  class BtreeIndex(IndexDefinition):
15
16
  index_type: Literal["btree"] = "btree"
16
- by_space: bool | None = Field(default=None, description="Whether to make the index space-specific.")
17
- cursorable: bool | None = Field(
17
+ by_space: Bool | None = Field(default=None, description="Whether to make the index space-specific.")
18
+ cursorable: Bool | None = Field(
18
19
  default=None, description="Whether the index can be used for cursor-based pagination."
19
20
  )
20
21
 
@@ -24,3 +25,5 @@ class InvertedIndex(IndexDefinition):
24
25
 
25
26
 
26
27
  Index = Annotated[BtreeIndex | InvertedIndex, Field(discriminator="index_type")]
28
+
29
+ IndexAdapter: TypeAdapter[Index] = TypeAdapter(Index)
@@ -0,0 +1,17 @@
1
+ from typing import Annotated, Any
2
+
3
+ from pydantic import BeforeValidator
4
+
5
+
6
+ def str_as_bool(value: Any) -> Any:
7
+ if isinstance(value, str):
8
+ val = value.lower()
9
+ if val in {"true", "1", "yes"}:
10
+ return True
11
+ if val in {"false", "0", "no"}:
12
+ return False
13
+ # All other cases are handled by Pydantic's built-in bool validator
14
+ return value
15
+
16
+
17
+ Bool = Annotated[bool, BeforeValidator(str_as_bool, str)]
@@ -1,7 +1,7 @@
1
1
  from abc import ABC
2
2
  from typing import Annotated, Literal
3
3
 
4
- from pydantic import Field, Json
4
+ from pydantic import Field, Json, TypeAdapter
5
5
 
6
6
  from ._base import BaseModelObject, Resource, WriteableResource
7
7
  from ._constants import CONTAINER_AND_VIEW_PROPERTIES_IDENTIFIER_PATTERN
@@ -130,53 +130,40 @@ class ReverseDirectRelationProperty(ConnectionPropertyDefinition, ABC):
130
130
  description="The node(s) containing the direct relation property can be read "
131
131
  "through the view specified in 'source'."
132
132
  )
133
+ through: ContainerDirectReference | ViewDirectReference = Field(
134
+ description="The view of the node containing the direct relation property."
135
+ )
133
136
 
134
137
 
135
138
  class SingleReverseDirectRelationPropertyRequest(ReverseDirectRelationProperty):
136
139
  connection_type: Literal["single_reverse_direct_relation"] = "single_reverse_direct_relation"
137
- # The API support through as either ViewDirectReference or ContainerDirectReference. However, in Neat
138
- # we only use ContainerDirectReference. This is for simplicity and it improves performance as the server
139
- # does not have to resolve the view to a container first.
140
- through: ContainerDirectReference = Field(
141
- description="The view of the node containing the direct relation property."
142
- )
140
+
141
+
142
+ class MultiReverseDirectRelationPropertyRequest(ReverseDirectRelationProperty):
143
+ connection_type: Literal["multi_reverse_direct_relation"] = "multi_reverse_direct_relation"
143
144
 
144
145
 
145
146
  class SingleReverseDirectRelationPropertyResponse(
146
147
  ReverseDirectRelationProperty, WriteableResource[SingleReverseDirectRelationPropertyRequest]
147
148
  ):
148
149
  connection_type: Literal["single_reverse_direct_relation"] = "single_reverse_direct_relation"
149
- through: ContainerDirectReference | ViewDirectReference = Field(
150
- description="The view of the node containing the direct relation property."
150
+ target_list: bool = Field(
151
+ description="Whether or not this reverse direct relation targets a list of direct relations.",
151
152
  )
152
153
 
153
154
  def as_request(self) -> SingleReverseDirectRelationPropertyRequest:
154
- if isinstance(self.through, ViewDirectReference):
155
- raise TypeError("Cannot convert to request when 'through' is a ViewDirectReference.")
156
155
  return SingleReverseDirectRelationPropertyRequest.model_validate(self.model_dump(by_alias=True))
157
156
 
158
157
 
159
- class MultiReverseDirectRelationPropertyRequest(ReverseDirectRelationProperty):
160
- connection_type: Literal["multi_reverse_direct_relation"] = "multi_reverse_direct_relation"
161
- # The API support through as either ViewDirectReference or ContainerDirectReference. However, in Neat
162
- # we only use ContainerDirectReference. This is for simplicity and it improves performance as the server
163
- # does not have to resolve the view to a container first.
164
- through: ContainerDirectReference = Field(
165
- description="The view of the node containing the direct relation property."
166
- )
167
-
168
-
169
158
  class MultiReverseDirectRelationPropertyResponse(
170
159
  ReverseDirectRelationProperty, WriteableResource[MultiReverseDirectRelationPropertyRequest]
171
160
  ):
172
161
  connection_type: Literal["multi_reverse_direct_relation"] = "multi_reverse_direct_relation"
173
- through: ContainerDirectReference | ViewDirectReference = Field(
174
- description="The view of the node containing the direct relation property."
162
+ target_list: bool = Field(
163
+ description="Whether or not this reverse direct relation targets a list of direct relations.",
175
164
  )
176
165
 
177
166
  def as_request(self) -> MultiReverseDirectRelationPropertyRequest:
178
- if isinstance(self.through, ViewDirectReference):
179
- raise TypeError("Cannot convert to request when 'through' is a ViewDirectReference.")
180
167
  return MultiReverseDirectRelationPropertyRequest.model_validate(self.model_dump(by_alias=True))
181
168
 
182
169
 
@@ -196,3 +183,5 @@ ViewResponseProperty = Annotated[
196
183
  | ViewCorePropertyResponse,
197
184
  Field(discriminator="connection_type"),
198
185
  ]
186
+
187
+ ViewRequestPropertyAdapter: TypeAdapter[ViewRequestProperty] = TypeAdapter(ViewRequestProperty)
@@ -19,7 +19,7 @@ from ._data_types import (
19
19
  Timeseries,
20
20
  )
21
21
  from ._identifiers import URI, NameSpace
22
- from ._parser import ParsedEntity, parse_entity
22
+ from ._parser import ParsedEntity, parse_entities, parse_entity
23
23
 
24
24
  __all__ = [
25
25
  "URI",
@@ -45,5 +45,6 @@ __all__ = [
45
45
  "Undefined",
46
46
  "Unknown",
47
47
  "UnknownEntity",
48
+ "parse_entities",
48
49
  "parse_entity",
49
50
  ]
@@ -1,4 +1,6 @@
1
+ import re
1
2
  from dataclasses import dataclass
3
+ from typing import Literal
2
4
 
3
5
  SPECIAL_CHARACTERS = ":()=,"
4
6
 
@@ -11,6 +13,18 @@ class ParsedEntity:
11
13
  suffix: str
12
14
  properties: dict[str, str]
13
15
 
16
+ def __str__(self) -> str:
17
+ props_str = ""
18
+ if self.properties:
19
+ joined = ",".join(f"{k}={v}" for k, v in sorted(self.properties.items(), key=lambda x: x[0]))
20
+ props_str = f"({joined})"
21
+ if self.prefix:
22
+ return f"{self.prefix}:{self.suffix}{props_str}"
23
+ return f"{self.suffix}{props_str}"
24
+
25
+ def __hash__(self) -> int:
26
+ return hash(str(self))
27
+
14
28
 
15
29
  class _EntityParser:
16
30
  """A parser for entity strings in the format 'prefix:suffix(prop1=val1,prop2=val2)'."""
@@ -192,3 +206,21 @@ def parse_entity(entity_string: str) -> ParsedEntity:
192
206
  """
193
207
  parser = _EntityParser(entity_string)
194
208
  return parser.parse()
209
+
210
+
211
+ def parse_entities(entities_str: str, separator: Literal[","] = ",") -> list[ParsedEntity] | None:
212
+ """Parse a comma-separated string of entities.
213
+
214
+ Args:
215
+ entities_str: A comma-separated string of entities.
216
+ separator: The separator used to split entities.
217
+ Returns: A list of `ParsedEntity` objects or None if the input string is empty.
218
+ """
219
+ if not entities_str.strip():
220
+ return None
221
+ if separator != ",":
222
+ raise ValueError("Only ',' is supported as a separator currently.")
223
+ # Regex to split on the separator but ignore separators within parentheses
224
+ pattern = rf"{separator}(?![^()]*\))"
225
+ parts = re.split(pattern, entities_str)
226
+ return [parse_entity(part.strip()) for part in parts if part.strip()]