industrial-model 0.1.21__tar.gz → 0.1.22__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. {industrial_model-0.1.21 → industrial_model-0.1.22}/PKG-INFO +21 -1
  2. {industrial_model-0.1.21 → industrial_model-0.1.22}/README.md +20 -0
  3. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/__init__.py +2 -1
  4. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/__init__.py +24 -13
  5. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/aggregation_mapper.py +2 -6
  6. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/filter_mapper.py +2 -6
  7. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/optimizer.py +6 -12
  8. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/query_mapper.py +9 -27
  9. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/query_result_mapper.py +16 -41
  10. industrial_model-0.1.22/industrial_model/cognite_adapters/search_mapper.py +50 -0
  11. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/upsert_mapper.py +6 -15
  12. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/utils.py +2 -6
  13. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/view_mapper.py +2 -6
  14. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/engines/async_engine.py +19 -4
  15. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/engines/engine.py +19 -1
  16. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/models/entities.py +1 -3
  17. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/models/schemas.py +3 -8
  18. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/queries/models.py +2 -6
  19. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/statements/__init__.py +42 -12
  20. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/statements/expressions.py +8 -24
  21. {industrial_model-0.1.21 → industrial_model-0.1.22}/pyproject.toml +2 -2
  22. {industrial_model-0.1.21 → industrial_model-0.1.22}/tests/hubs.py +1 -2
  23. {industrial_model-0.1.21 → industrial_model-0.1.22}/tests/models.py +8 -0
  24. {industrial_model-0.1.21 → industrial_model-0.1.22}/tests/test_schema.py +1 -3
  25. industrial_model-0.1.22/tests/tests_adapter.py +42 -0
  26. industrial_model-0.1.22/tests/tests_search.py +28 -0
  27. {industrial_model-0.1.21 → industrial_model-0.1.22}/tests/tests_upsert_mapper.py +1 -3
  28. {industrial_model-0.1.21 → industrial_model-0.1.22}/uv.lock +1 -1
  29. industrial_model-0.1.21/tests/tests_adapter.py +0 -42
  30. {industrial_model-0.1.21 → industrial_model-0.1.22}/.gitignore +0 -0
  31. {industrial_model-0.1.21 → industrial_model-0.1.22}/.python-version +0 -0
  32. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/models.py +0 -0
  33. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/sort_mapper.py +0 -0
  34. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/config.py +0 -0
  35. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/constants.py +0 -0
  36. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/engines/__init__.py +0 -0
  37. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/models/__init__.py +0 -0
  38. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/models/base.py +0 -0
  39. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/py.typed +0 -0
  40. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/queries/__init__.py +0 -0
  41. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/queries/params.py +0 -0
  42. {industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/utils.py +0 -0
  43. {industrial_model-0.1.21 → industrial_model-0.1.22}/scripts/build.sh +0 -0
  44. {industrial_model-0.1.21 → industrial_model-0.1.22}/scripts/format.sh +0 -0
  45. {industrial_model-0.1.21 → industrial_model-0.1.22}/scripts/lint.sh +0 -0
  46. {industrial_model-0.1.21 → industrial_model-0.1.22}/scripts/publish.sh +0 -0
  47. {industrial_model-0.1.21 → industrial_model-0.1.22}/tests/__init__.py +0 -0
  48. {industrial_model-0.1.21 → industrial_model-0.1.22}/tests/cognite-sdk-config.yaml +0 -0
  49. {industrial_model-0.1.21 → industrial_model-0.1.22}/tests/tests_aggregate.py +0 -0
{industrial_model-0.1.21 → industrial_model-0.1.22}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: industrial-model
-Version: 0.1.21
+Version: 0.1.22
 Summary: Industrial Model ORM
 Author-email: Lucas Alves <lucasrosaalves@gmail.com>
 Classifier: Programming Language :: Python
@@ -63,6 +63,7 @@ from industrial_model import (
     col,
     or_,
     select,
+    search,
 )

 # Define entities (view instances)
@@ -245,6 +246,25 @@ person = engine.query_all_pages(statement)[0]

 engine.delete([person])

+
+# 10. Search
+
+# Notes:
+# External ID searches work as prefix searches.
+# This method does not include edges or direct relations in the result model.
+# Filter does not support nested properties.
+
+class Entity(ViewInstance):
+    view_config = ViewInstanceConfig(
+        view_external_id="Person"
+    )
+    name: str
+
+
+statement = search(Entity).query_by("Lucas", [Entity.name])
+person = engine.search(statement)
+
+
 ```

 ---
{industrial_model-0.1.21 → industrial_model-0.1.22}/README.md

@@ -43,6 +43,7 @@ from industrial_model import (
     col,
     or_,
     select,
+    search,
 )

 # Define entities (view instances)
@@ -225,6 +226,25 @@ person = engine.query_all_pages(statement)[0]

 engine.delete([person])

+
+# 10. Search
+
+# Notes:
+# External ID searches work as prefix searches.
+# This method does not include edges or direct relations in the result model.
+# Filter does not support nested properties.
+
+class Entity(ViewInstance):
+    view_config = ViewInstanceConfig(
+        view_external_id="Person"
+    )
+    name: str
+
+
+statement = search(Entity).query_by("Lucas", [Entity.name])
+person = engine.search(statement)
+
+
 ```

 ---
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/__init__.py

@@ -13,7 +13,7 @@ from .models import (
     ViewInstanceConfig,
     WritableViewInstance,
 )
-from .statements import aggregate, and_, col, not_, or_, select
+from .statements import aggregate, and_, col, not_, or_, search, select

 __all__ = [
     "aggregate",
@@ -23,6 +23,7 @@ __all__ = [
     "col",
     "not_",
     "select",
+    "search",
     "ViewInstance",
     "InstanceId",
     "TViewInstance",
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/__init__.py

@@ -16,7 +16,11 @@ from industrial_model.models import (
     TViewInstance,
     TWritableViewInstance,
 )
-from industrial_model.statements import AggregationStatement, Statement
+from industrial_model.statements import (
+    AggregationStatement,
+    SearchStatement,
+    Statement,
+)

 from .aggregation_mapper import AggregationMapper
 from .optimizer import QueryOptimizer
@@ -24,6 +28,7 @@ from .query_mapper import QueryMapper
 from .query_result_mapper import (
     QueryResultMapper,
 )
+from .search_mapper import SearchMapper
 from .upsert_mapper import UpsertMapper
 from .utils import (
     append_nodes_and_edges,
@@ -34,9 +39,7 @@ from .view_mapper import ViewMapper


 class CogniteAdapter:
-    def __init__(
-        self, cognite_client: CogniteClient, data_model_id: DataModelId
-    ):
+    def __init__(self, cognite_client: CogniteClient, data_model_id: DataModelId):
         self._cognite_client = cognite_client

         view_mapper = ViewMapper(cognite_client, data_model_id)
@@ -45,6 +48,20 @@ class CogniteAdapter:
         self._result_mapper = QueryResultMapper(view_mapper)
         self._upsert_mapper = UpsertMapper(view_mapper)
         self._aggregation_mapper = AggregationMapper(view_mapper)
+        self._search_mapper = SearchMapper(view_mapper)
+
+    def search(self, statement: SearchStatement[TViewInstance]) -> list[dict[str, Any]]:
+        search_query = self._search_mapper.map(statement)
+        data = self._cognite_client.data_modeling.instances.search(
+            view=search_query.view.as_id(),
+            query=search_query.query,
+            filter=search_query.filter,
+            properties=search_query.query_properties,
+            limit=search_query.limit,
+            sort=search_query.sort,
+        )
+
+        return self._result_mapper.nodes_to_dict(data)

     def query(
         self, statement: Statement[TViewInstance], all_pages: bool
@@ -111,18 +128,14 @@ class CogniteAdapter:
         operation = self._upsert_mapper.map(entries)

         for node_chunk in operation.chunk_nodes():
-            logger.debug(
-                f"Upserting {len(node_chunk)} nodes (replace={replace})"
-            )
+            logger.debug(f"Upserting {len(node_chunk)} nodes (replace={replace})")
             self._cognite_client.data_modeling.instances.apply(
                 nodes=node_chunk,
                 replace=replace,
             )

         for edge_chunk in operation.chunk_edges():
-            logger.debug(
-                f"Upserting {len(edge_chunk)} edges (replace={replace})"
-            )
+            logger.debug(f"Upserting {len(edge_chunk)} edges (replace={replace})")
             self._cognite_client.data_modeling.instances.apply(
                 edges=edge_chunk,
                 replace=replace,
@@ -151,9 +164,7 @@ class CogniteAdapter:
         if not new_query:
             return None

-        new_query_result = self._cognite_client.data_modeling.instances.query(
-            new_query
-        )
+        new_query_result = self._cognite_client.data_modeling.instances.query(new_query)

         result = map_nodes_and_edges(new_query_result, new_query)

{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/aggregation_mapper.py

@@ -44,9 +44,7 @@ class AggregationMapper:
         root_view = self._view_mapper.get_view(root_node)

         filters_ = (
-            filters.And(
-                *self._filter_mapper.map(statement.where_clauses, root_view)
-            )
+            filters.And(*self._filter_mapper.map(statement.where_clauses, root_view))
             if statement.where_clauses
             else None
         )
@@ -63,9 +61,7 @@
             metric_aggregation = Count(statement.aggregation_property.property)

         if metric_aggregation is None:
-            raise ValueError(
-                f"Unsupported aggregate function: {statement.aggregate_}"
-            )
+            raise ValueError(f"Unsupported aggregate function: {statement.aggregate_}")
         return AggregationQuery(
             view=root_view,
             metric_aggregation=metric_aggregation,
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/filter_mapper.py

@@ -73,9 +73,7 @@ class FilterMapper:
         elif expression.operator == "<=":
             return cdf_filters.Range(property_ref, lte=value_)
         elif expression.operator == "nested":
-            target_view = self._get_nested_target_view(
-                expression.property, root_view
-            )
+            target_view = self._get_nested_target_view(expression.property, root_view)

             assert isinstance(value_, Expression)

@@ -91,9 +89,7 @@
             return cdf_filters.ContainsAll(property_ref, value_)
         elif expression.operator == "containsAny":
             return cdf_filters.ContainsAny(property_ref, value_)
-        raise NotImplementedError(
-            f"Operator {expression.operator} not implemented"
-        )
+        raise NotImplementedError(f"Operator {expression.operator} not implemented")

     def _get_nested_target_view(self, property: str, root_view: View) -> View:
         view_definiton = root_view.properties[property]
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/optimizer.py

@@ -16,9 +16,7 @@ SPACE_PROPERTY = "space"


 class QueryOptimizer:
-    def __init__(
-        self, cognite_client: CogniteClient, data_model_id: DataModelId
-    ):
+    def __init__(self, cognite_client: CogniteClient, data_model_id: DataModelId):
         self._all_spaces = data_model_id.instance_spaces
         self._cognite_client = cognite_client
         self._lock = Lock()
@@ -36,9 +34,7 @@
             return

        filter_spaces = (
-            self._find_spaces(instance_spaces_prefix)
-            if instance_spaces_prefix
-            else []
+            self._find_spaces(instance_spaces_prefix) if instance_spaces_prefix else []
        )
        if instance_spaces:
            filter_spaces.extend(instance_spaces)
@@ -48,9 +44,9 @@

     def _has_space_filter(self, where_clauses: list[Expression]) -> bool:
         for where_clause in where_clauses:
-            if isinstance(
-                where_clause, BoolExpression
-            ) and self._has_space_filter(where_clause.filters):
+            if isinstance(where_clause, BoolExpression) and self._has_space_filter(
+                where_clause.filters
+            ):
                 return True
             elif (
                 isinstance(where_clause, LeafExpression)
@@ -64,9 +60,7 @@
         all_spaces = self._load_spaces()

         return [
-            space
-            for space in all_spaces
-            if space.startswith(instance_spaces_prefix)
+            space for space in all_spaces if space.startswith(instance_spaces_prefix)
         ]

     def _load_spaces(self) -> list[str]:
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/query_mapper.py

@@ -43,13 +43,9 @@ class QueryMapper:
         root_view = self._view_mapper.get_view(root_node)
         root_view_id = root_view.as_id()

-        filters_: list[filters.Filter] = [
-            filters.HasData(views=[root_view_id])
-        ]
+        filters_: list[filters.Filter] = [filters.HasData(views=[root_view_id])]

-        filters_.extend(
-            self._filter_mapper.map(statement.where_clauses, root_view)
-        )
+        filters_.extend(self._filter_mapper.map(statement.where_clauses, root_view))

         with_: dict[str, ResultSetExpression] = {
             root_node: NodeResultSetExpression(
@@ -60,9 +56,7 @@
         }
         select_: dict[str, Select] = {}

-        relations = get_schema_properties(
-            statement.entity, NESTED_SEP, root_node
-        )
+        relations = get_schema_properties(statement.entity, NESTED_SEP, root_node)

         properties = self._include_statements(
             root_node, root_view, relations, with_, select_
@@ -76,9 +70,7 @@

     def _get_select(self, view_id: ViewId, properties: list[str]) -> Select:
         return (
-            Select(
-                sources=[SourceSelector(source=view_id, properties=properties)]
-            )
+            Select(sources=[SourceSelector(source=view_id, properties=properties)])
             if properties
             else Select()
         )
@@ -118,9 +110,7 @@
                     through=view.as_property_ref(property_name),
                     limit=MAX_LIMIT,
                 )
-                select_[property_key] = self._get_select(
-                    property.source, props
-                )
+                select_[property_key] = self._get_select(property.source, props)

             elif (
                 isinstance(property, MultiReverseDirectRelation)
@@ -138,27 +128,21 @@
                 with_[property_key] = NodeResultSetExpression(
                     from_=key,
                     direction="inwards",
-                    through=property.source.as_property_ref(
-                        property.through.property
-                    ),
+                    through=property.source.as_property_ref(property.through.property),
                     limit=MAX_LIMIT,
                 )

                 if property.through.property not in props:
                     props.append(property.through.property)

-                select_[property_key] = self._get_select(
-                    property.source, props
-                )
+                select_[property_key] = self._get_select(property.source, props)
             elif isinstance(property, EdgeConnection) and property.source:
                 edge_property_key = f"{property_key}{NESTED_SEP}{EDGE_MARKER}"

                 with_[edge_property_key] = EdgeResultSetExpression(
                     from_=key,
                     max_distance=1,
-                    filter=filters.Equals(
-                        ["edge", "type"], property.type.dump()
-                    ),
+                    filter=filters.Equals(["edge", "type"], property.type.dump()),
                     direction=property.direction,
                     limit=MAX_LIMIT,
                 )
@@ -176,8 +160,6 @@
                     with_,
                     select_,
                 )
-                select_[property_key] = self._get_select(
-                    property.source, props
-                )
+                select_[property_key] = self._get_select(property.source, props)

         return select_properties
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/query_result_mapper.py

@@ -7,6 +7,7 @@ from cognite.client.data_classes.data_modeling import (
     EdgeConnection,
     MappedProperty,
     Node,
+    NodeList,
     View,
 )
 from cognite.client.data_classes.data_modeling.views import (
@@ -41,9 +42,7 @@ class QueryResultMapper:
         self, root_node: str, query_result: dict[str, list[Node | Edge]]
     ) -> list[dict[str, Any]]:
         if root_node not in query_result:
-            raise ValueError(
-                f"{root_node} is not available in the query result"
-            )
+            raise ValueError(f"{root_node} is not available in the query result")

         root_view = self._view_mapper.get_view(root_node)

@@ -51,14 +50,13 @@
         if not values:
             return []

-        data = [
-            node
-            for nodes in values.values()
-            for node in self._nodes_to_dict(nodes)
-        ]
+        data = [node for nodes in values.values() for node in self.nodes_to_dict(nodes)]

         return data

+    def nodes_to_dict(self, nodes: list[Node] | NodeList[Node]) -> list[dict[str, Any]]:
+        return [self._node_to_dict(node) for node in nodes]
+
     def _map_node_property(
         self,
         key: str,
@@ -79,9 +77,7 @@

         entry = properties.get(result_property_key)
         if not isinstance(entry, dict):
-            raise ValueError(
-                f"Invalid result property key {result_property_key}"
-            )
+            raise ValueError(f"Invalid result property key {result_property_key}")

         return entry.get("space", ""), entry.get("externalId", "")

@@ -124,15 +120,11 @@
             if not node_entries:
                 continue

-            entry_data = self._nodes_to_dict(node_entries)
-            properties[mapping_key] = (
-                entry_data if is_list else entry_data[0]
-            )
+            entry_data = self.nodes_to_dict(node_entries)
+            properties[mapping_key] = entry_data if is_list else entry_data[0]
             edge_entries = mapping_edges.get(element_key)
             if edge_entries:
-                edges_mapping[mapping_key] = self._edges_to_model(
-                    edge_entries
-                )
+                edges_mapping[mapping_key] = self._edges_to_model(edge_entries)
             properties["_edges"] = edges_mapping

         node.properties[view_id] = properties
@@ -155,9 +147,7 @@
         nodes: dict[tuple[str, str], list[Node]] | None = None
         edges: dict[tuple[str, str], list[Edge]] | None = None
         is_list = False
-        connection_type: ConnectionTypeEnum = (
-            ConnectionTypeEnum.DIRECT_RELATION
-        )
+        connection_type: ConnectionTypeEnum = ConnectionTypeEnum.DIRECT_RELATION

         if isinstance(property, MappedProperty) and property.source:
             nodes = self._map_node_property(
@@ -167,10 +157,7 @@
             )
             is_list = False
             connection_type = ConnectionTypeEnum.DIRECT_RELATION
-        elif (
-            isinstance(property, SingleReverseDirectRelation)
-            and property.source
-        ):
+        elif isinstance(property, SingleReverseDirectRelation) and property.source:
             nodes = self._map_node_property(
                 property_key,
                 self._view_mapper.get_view(property.source.external_id),
@@ -179,10 +166,7 @@
             )
             is_list = False
             connection_type = ConnectionTypeEnum.REVERSE_DIRECT_RELATION
-        elif (
-            isinstance(property, MultiReverseDirectRelation)
-            and property.source
-        ):
+        elif isinstance(property, MultiReverseDirectRelation) and property.source:
             nodes = self._map_node_property(
                 property_key,
                 self._view_mapper.get_view(property.source.external_id),
@@ -231,12 +215,8 @@
             return None, None

         visited: set[tuple[str, str]] = set()
-        nodes_result: defaultdict[tuple[str, str], list[Node]] = defaultdict(
-            list
-        )
-        edges_result: defaultdict[tuple[str, str], list[Edge]] = defaultdict(
-            list
-        )
+        nodes_result: defaultdict[tuple[str, str], list[Node]] = defaultdict(list)
+        edges_result: defaultdict[tuple[str, str], list[Edge]] = defaultdict(list)
         for edge in query_result[edge_key]:
             identify = (edge.space, edge.external_id)
             if not isinstance(edge, Edge) or identify in visited:
@@ -257,17 +237,12 @@

         return dict(nodes_result), dict(edges_result)

-    def _nodes_to_dict(self, nodes: list[Node]) -> list[dict[str, Any]]:
-        return [self._node_to_dict(node) for node in nodes]
-
     def _edges_to_model(self, edges: list[Edge]) -> list[EdgeContainer]:
         return [EdgeContainer.model_validate(edge) for edge in edges]

     def _node_to_dict(self, node: Node) -> dict[str, Any]:
         entry = node.dump()
-        properties: dict[str, dict[str, dict[str, Any]]] = (
-            entry.pop("properties") or {}
-        )
+        properties: dict[str, dict[str, dict[str, Any]]] = entry.pop("properties") or {}
         for space_mapping in properties.values():
             for view_mapping in space_mapping.values():
                 entry.update(view_mapping)
industrial_model-0.1.22/industrial_model/cognite_adapters/search_mapper.py

@@ -0,0 +1,50 @@
+from dataclasses import dataclass
+
+import cognite.client.data_classes.filters as filters
+from cognite.client.data_classes.data_modeling import InstanceSort, View
+
+from industrial_model.models import TViewInstance
+from industrial_model.statements import SearchStatement
+
+from .filter_mapper import (
+    FilterMapper,
+)
+from .sort_mapper import SortMapper
+from .view_mapper import ViewMapper
+
+
+@dataclass
+class SearchQuery:
+    view: View
+    filter: filters.Filter | None
+    query: str | None
+    query_properties: list[str] | None
+    limit: int
+    sort: list[InstanceSort]
+
+
+class SearchMapper:
+    def __init__(self, view_mapper: ViewMapper):
+        self._view_mapper = view_mapper
+        self._filter_mapper = FilterMapper(view_mapper)
+        self._sort_mapper = SortMapper()
+
+    def map(self, statement: SearchStatement[TViewInstance]) -> SearchQuery:
+        root_node = statement.entity.get_view_external_id()
+
+        root_view = self._view_mapper.get_view(root_node)
+
+        filters_ = self._filter_mapper.map(statement.where_clauses, root_view)
+
+        sort_clauses = self._sort_mapper.map(statement.sort_clauses, root_view)
+        for item in sort_clauses:
+            item.nulls_first = None
+
+        return SearchQuery(
+            view=root_view,
+            filter=filters.And(*filters_) if filters_ else None,
+            query=statement.query,
+            query_properties=statement.query_properties,
+            limit=statement.limit_,
+            sort=sort_clauses,
+        )
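
For orientation, SearchQuery above is a plain dataclass; a hand-assembled instance sketching the shape SearchMapper.map produces might look like the following (person_view and the literal values are illustrative assumptions, not values taken from this release):

```python
# Illustrative sketch only: the view normally comes from ViewMapper.get_view(...)
# and limit from statement.limit_; the concrete values below are assumed.
example = SearchQuery(
    view=person_view,           # a cognite View resolved for the entity
    filter=None,                # filters.And(*mapped_filters) when where() was used
    query="Lucas",              # free-text term set via SearchStatement.query_by
    query_properties=["name"],  # property names the query is matched against
    limit=1000,                 # assumed value of statement.limit_
    sort=[],                    # InstanceSort items with nulls_first reset to None
)
```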
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/upsert_mapper.py

@@ -31,19 +31,14 @@ class UpsertMapper:
         edges_to_delete: dict[tuple[str, str], EdgeContainer] = {}

         for instance in instances:
-            entry_nodes, entry_edges, entry_edges_to_delete = (
-                self._map_instance(instance)
+            entry_nodes, entry_edges, entry_edges_to_delete = self._map_instance(
+                instance
             )

             nodes[instance.as_tuple()] = entry_nodes
-            edges.update(
-                {(item.space, item.external_id): item for item in entry_edges}
-            )
+            edges.update({(item.space, item.external_id): item for item in entry_edges})
             edges_to_delete.update(
-                {
-                    (item.space, item.external_id): item
-                    for item in entry_edges_to_delete
-                }
+                {(item.space, item.external_id): item for item in entry_edges_to_delete}
             )

         return UpsertOperation(
@@ -76,9 +71,7 @@
                 if isinstance(entry, datetime.datetime)
                 else entry
            )
-            elif isinstance(property, EdgeConnection) and isinstance(
-                entry, list
-            ):
+            elif isinstance(property, EdgeConnection) and isinstance(entry, list):
                possible_entries = self._map_edges(instance, property, entry)

                previous_edges = {
@@ -104,9 +97,7 @@
         node = NodeApply(
             external_id=instance.external_id,
             space=instance.space,
-            sources=[
-                NodeOrEdgeData(source=view.as_id(), properties=properties)
-            ],
+            sources=[NodeOrEdgeData(source=view.as_id(), properties=properties)],
         )

         return node, edges, edges_to_delete
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/utils.py

@@ -43,9 +43,7 @@ def get_property_ref(
 def get_cognite_instance_ids(
     instance_ids: list[TViewInstance],
 ) -> list[dict[str, str]]:
-    return [
-        get_cognite_instance_id(instance_id) for instance_id in instance_ids
-    ]
+    return [get_cognite_instance_id(instance_id) for instance_id in instance_ids]


 def get_cognite_instance_id(instance_id: TViewInstance) -> dict[str, str]:
@@ -115,9 +113,7 @@ def _create_query(
     valid_keys = parent.union(children)
     valid_keys.add(cursor_key)

-    with_.update(
-        {k: v for k, v in previous_query.with_.items() if k in valid_keys}
-    )
+    with_.update({k: v for k, v in previous_query.with_.items() if k in valid_keys})
     select_.update(
         {k: v for k, v in previous_query.select.items() if k in valid_keys}
     )
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/cognite_adapters/view_mapper.py

@@ -9,9 +9,7 @@ from industrial_model.config import DataModelId


 class ViewMapper:
-    def __init__(
-        self, cognite_client: CogniteClient, data_model_id: DataModelId
-    ):
+    def __init__(self, cognite_client: CogniteClient, data_model_id: DataModelId):
         self._cognite_client = cognite_client
         self._data_model_id = data_model_id
         self._views_as_dict: dict[str, View] | None = None
@@ -26,9 +24,7 @@
     def get_view(self, view_external_id: str) -> View:
         views = self._load_views()
         if view_external_id not in views:
-            raise ValueError(
-                f"View {view_external_id} is not available in data model"
-            )
+            raise ValueError(f"View {view_external_id} is not available in data model")

         return views[view_external_id]

{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/engines/async_engine.py

@@ -8,7 +8,11 @@ from industrial_model.models import (
     TWritableViewInstance,
     ValidationMode,
 )
-from industrial_model.statements import AggregationStatement, Statement
+from industrial_model.statements import (
+    AggregationStatement,
+    SearchStatement,
+    Statement,
+)
 from industrial_model.utils import run_async

 from .engine import Engine
@@ -22,6 +26,19 @@ class AsyncEngine:
     ):
         self._engine = Engine(cognite_client, data_model_id)

+    async def search_async(
+        self,
+        statement: SearchStatement[TViewInstance],
+        validation_mode: ValidationMode = "raiseOnError",
+    ) -> list[TViewInstance]:
+        """
+        Note:
+        External ID searches work as prefix searches.
+        This method does not include edges or direct relations in the result model.
+        Filter does not support nested properties.
+        """
+        return await run_async(self._engine.search, statement, validation_mode)
+
     async def query_async(
         self,
         statement: Statement[TViewInstance],
@@ -34,9 +51,7 @@
         statement: Statement[TViewInstance],
         validation_mode: ValidationMode = "raiseOnError",
     ) -> list[TViewInstance]:
-        return await run_async(
-            self._engine.query_all_pages, statement, validation_mode
-        )
+        return await run_async(self._engine.query_all_pages, statement, validation_mode)

     async def aggregate_async(
         self, statement: AggregationStatement[TAggregatedViewInstance]
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/engines/engine.py

@@ -12,7 +12,11 @@ from industrial_model.models import (
     TWritableViewInstance,
     ValidationMode,
 )
-from industrial_model.statements import AggregationStatement, Statement
+from industrial_model.statements import (
+    AggregationStatement,
+    SearchStatement,
+    Statement,
+)


 class Engine:
@@ -23,6 +27,20 @@ class Engine:
     ):
         self._cognite_adapter = CogniteAdapter(cognite_client, data_model_id)

+    def search(
+        self,
+        statement: SearchStatement[TViewInstance],
+        validation_mode: ValidationMode = "raiseOnError",
+    ) -> list[TViewInstance]:
+        """
+        Note:
+        External ID searches work as prefix searches.
+        This method does not include edges or direct relations in the result model.
+        Filter does not support nested properties.
+        """
+        data = self._cognite_adapter.search(statement)
+        return self._validate_data(statement.entity, data, validation_mode)
+
     def query(
         self,
         statement: Statement[TViewInstance],
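
Putting these pieces together, the synchronous and asynchronous engines expose search in the same way. A minimal usage sketch, assuming an already constructed Engine (and AsyncEngine) and an entity class along the lines of the README example; client and data-model configuration are assumptions not shown in this diff:

```python
# Sketch based on the README additions in this release.
from industrial_model import ViewInstance, ViewInstanceConfig, search


class Person(ViewInstance):
    view_config = ViewInstanceConfig(view_external_id="Person")
    name: str


statement = search(Person).query_by("Lucas", [Person.name]).limit(100)

# Engine.search validates the raw dicts returned by CogniteAdapter.search into
# Person instances (edges and direct relations are not populated by search).
people = engine.search(statement)

# AsyncEngine offers the same call off-loaded via run_async:
# people = await async_engine.search_async(statement)
```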
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/models/entities.py

@@ -88,9 +88,7 @@ class AggregatedViewInstance(RootModel, metaclass=DBModelMetaclass):


 TViewInstance = TypeVar("TViewInstance", bound=ViewInstance)
-TWritableViewInstance = TypeVar(
-    "TWritableViewInstance", bound=WritableViewInstance
-)
+TWritableViewInstance = TypeVar("TWritableViewInstance", bound=WritableViewInstance)
 TAggregatedViewInstance = TypeVar(
     "TAggregatedViewInstance", bound=AggregatedViewInstance
 )
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/models/schemas.py

@@ -38,8 +38,7 @@ def get_parent_and_children_nodes(
     for key in keys:
         key_parts = key.split(NESTED_SEP)
         parent_paths = {
-            NESTED_SEP.join(key_parts[:i])
-            for i in range(len(key_parts) - 1, 0, -1)
+            NESTED_SEP.join(key_parts[:i]) for i in range(len(key_parts) - 1, 0, -1)
         }

         valid_paths: set[str] = set()
@@ -142,17 +141,13 @@ def _flatten_dict_keys(
 ) -> list[str]:
     paths: set[str] = set()
     for key, value in data.items():
-        full_key = (
-            f"{parent_key}{nested_separator}{key}" if parent_key else key
-        )
+        full_key = f"{parent_key}{nested_separator}{key}" if parent_key else key
         paths.add(full_key)
         if isinstance(value, dict) and value:
             paths.update(_flatten_dict_keys(value, full_key, nested_separator))
         elif isinstance(value, str):
             paths.add(f"{full_key}{nested_separator}{value}")
         elif isinstance(value, list | set):
-            paths.update(
-                [f"{full_key}{nested_separator}{item}" for item in value]
-            )
+            paths.update([f"{full_key}{nested_separator}{item}" for item in value])

     return sorted(paths)
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/queries/models.py

@@ -5,9 +5,7 @@ from .params import NestedQueryParam, QueryParam, SortParam


 class BaseQuery(RootModel):
-    def to_statement(
-        self, entity: type[TViewInstance]
-    ) -> Statement[TViewInstance]:
+    def to_statement(self, entity: type[TViewInstance]) -> Statement[TViewInstance]:
         statement = Statement(entity)

         for key, item in self.__class__.model_fields.items():
@@ -27,9 +25,7 @@ class BasePaginatedQuery(BaseQuery):
     limit: int = 1000
     cursor: str | None = None

-    def to_statement(
-        self, entity: type[TViewInstance]
-    ) -> Statement[TViewInstance]:
+    def to_statement(self, entity: type[TViewInstance]) -> Statement[TViewInstance]:
         statement = super().to_statement(entity)
         statement.limit(self.limit)
         statement.cursor(self.cursor)
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/statements/__init__.py

@@ -15,6 +15,7 @@ from .expressions import (
 )

 T = TypeVar("T")
+AggregateTypes = Literal["count", "avg", "min", "max", "sum"]


 def _create_column(property: str | Column | Any) -> Column:
@@ -22,14 +23,13 @@ def _create_column(property: str | Column | Any) -> Column:


 @dataclass
-class Statement(Generic[T]):
+class BaseStatement(Generic[T]):
     entity: type[T] = field(init=True)
     where_clauses: list[Expression] = field(init=False, default_factory=list)
     sort_clauses: list[tuple[Column, SORT_DIRECTION]] = field(
         init=False, default_factory=list
     )
     limit_: int = field(init=False, default=DEFAULT_LIMIT)
-    cursor_: str | None = field(init=False, default=None)

     def where(self, *expressions: bool | Expression) -> Self:
         for expression in expressions:
@@ -37,15 +37,17 @@ class Statement(Generic[T]):
             self.where_clauses.append(expression)
         return self

+    def limit(self, limit: int) -> Self:
+        self.limit_ = limit
+        return self
+

     def asc(self, property: str | Column | Any) -> Self:
         return self.sort(property, "ascending")

     def desc(self, property: str | Column | Any) -> Self:
         return self.sort(property, "descending")
-    def sort(
-        self, property: str | Column | Any, direction: SORT_DIRECTION
-    ) -> Self:
+    def sort(self, property: str | Column | Any, direction: SORT_DIRECTION) -> Self:
         self.sort_clauses.append(
             (
                 _create_column(property),
@@ -54,16 +56,40 @@ class Statement(Generic[T]):
             )
         )
         return self

-    def limit(self, limit: int) -> Self:
-        self.limit_ = limit
-        return self
+
+@dataclass
+class Statement(BaseStatement[T]):
+    cursor_: str | None = field(init=False, default=None)

     def cursor(self, cursor: str | None) -> Self:
         self.cursor_ = cursor
         return self


-AggregateTypes = Literal["count", "avg", "min", "max", "sum"]
+@dataclass
+class SearchStatement(BaseStatement[T]):
+    query: str | None = field(init=False, default=None)
+    query_properties: list[str] | None = field(init=False, default=None)
+
+    def query_by(
+        self,
+        query: str,
+        query_properties: list[Column | str | Any] | None = None,
+    ) -> Self:
+        self.query = query
+        self.query_properties = (
+            [
+                prop
+                if isinstance(prop, str)
+                else prop.property
+                if isinstance(prop, Column)
+                else str(prop)
+                for prop in query_properties
+            ]
+            if query_properties
+            else None
+        )
+        return self


 @dataclass
@@ -71,9 +97,7 @@ class AggregationStatement(Generic[T]):
     entity: type[T] = field(init=True)
     aggregate: AggregateTypes = field(init=True)

-    aggregation_property: Column = field(
-        init=False, default=Column("externalId")
-    )
+    aggregation_property: Column = field(init=False, default=Column("externalId"))
     where_clauses: list[Expression] = field(init=False, default_factory=list)
     limit_: int = field(init=False, default=-1)

@@ -103,11 +127,17 @@ def aggregate(
     return AggregationStatement(entity=entity, aggregate=aggregate)


+def search(entity: type[T]) -> SearchStatement[T]:
+    return SearchStatement(entity)
+
+
 __all__ = [
     "aggregate",
     "AggregationStatement",
     "Statement",
     "select",
+    "search",
+    "SearchStatement",
     "Column",
     "col",
     "Expression",
{industrial_model-0.1.21 → industrial_model-0.1.22}/industrial_model/statements/expressions.py

@@ -9,9 +9,7 @@ from industrial_model.constants import (
 )

 RANGE_SUPPORTED_VALUES = str | int | float
-LIST_SUPPORTED_VALUES = (
-    list[str] | list[int] | list[float] | list[dict[str, str]]
-)
+LIST_SUPPORTED_VALUES = list[str] | list[int] | list[float] | list[dict[str, str]]
 SUPPORTED_VALUES = (
     RANGE_SUPPORTED_VALUES | bool | dict[str, str] | LIST_SUPPORTED_VALUES
 )
@@ -49,9 +47,7 @@ class LeafExpression(Expression):
 class Column:
     def __init__(self, property: Any):
         assert isinstance(property, str | Column)
-        property_ = (
-            property.property if isinstance(property, Column) else property
-        )
+        property_ = property.property if isinstance(property, Column) else property
         self.property: str = property_

     def __hash__(self) -> int:
@@ -120,23 +116,17 @@ class Column:
             filters=[self._compare(operator, other)],  # type: ignore
         )

-    def nested_(
-        self, expression: bool | LeafExpression | BoolExpression
-    ) -> bool:
+    def nested_(self, expression: bool | LeafExpression | BoolExpression) -> bool:
         if not isinstance(expression, Expression):
             raise ValueError("Invalid expression")

         return self._compare("nested", expression)

-    def _compare(
-        self, operator: LEAF_EXPRESSION_OPERATORS, value: Any
-    ) -> bool:
+    def _compare(self, operator: LEAF_EXPRESSION_OPERATORS, value: Any) -> bool:
         if isinstance(value, Column):
             raise ValueError("can not compare two columns in a graphdb")

-        return LeafExpression(
-            property=self.property, operator=operator, value=value
-        )  # type: ignore
+        return LeafExpression(property=self.property, operator=operator, value=value)  # type: ignore


 def _unwrap_expressions(
@@ -150,21 +140,15 @@ def _unwrap_expressions(


 def and_(*expressions: bool | LeafExpression | BoolExpression) -> bool:
-    return BoolExpression(
-        operator="and", filters=_unwrap_expressions(*expressions)
-    )  # type: ignore
+    return BoolExpression(operator="and", filters=_unwrap_expressions(*expressions))  # type: ignore


 def or_(*expressions: bool | LeafExpression | BoolExpression) -> bool:
-    return BoolExpression(
-        operator="or", filters=_unwrap_expressions(*expressions)
-    )  # type: ignore
+    return BoolExpression(operator="or", filters=_unwrap_expressions(*expressions))  # type: ignore


 def not_(*expressions: bool | LeafExpression | BoolExpression) -> bool:
-    return BoolExpression(
-        operator="not", filters=_unwrap_expressions(*expressions)
-    )  # type: ignore
+    return BoolExpression(operator="not", filters=_unwrap_expressions(*expressions))  # type: ignore


 def col(property: Any) -> Column:
{industrial_model-0.1.21 → industrial_model-0.1.22}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "industrial-model"
-version = "0.1.21"
+version = "0.1.22"
 description = "Industrial Model ORM"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -36,7 +36,7 @@ dev-dependencies = [
 strict = true

 [tool.ruff]
-line-length = 79
+line-length = 88

 [tool.ruff.lint]
 select = [
{industrial_model-0.1.21 → industrial_model-0.1.22}/tests/hubs.py

@@ -15,8 +15,7 @@ def generate_config(
 ) -> dict[str, dict[str, Any]]:
     load_dotenv(override=True)
     file_path = Path(
-        config_file_path
-        or f"{os.path.dirname(__file__)}/cognite-sdk-config.yaml"
+        config_file_path or f"{os.path.dirname(__file__)}/cognite-sdk-config.yaml"
     )
     env_sub_template = Template(file_path.read_text())
     file_env_parsed = env_sub_template.substitute(dict(os.environ))
{industrial_model-0.1.21 → industrial_model-0.1.22}/tests/models.py

@@ -52,6 +52,14 @@ class Event(ViewInstance):
     ]


+class SearchEvent(ViewInstance):
+    view_config = ViewInstanceConfig(view_external_id="OEEEvent")
+
+    start_date_time: datetime.datetime
+    event_definition: str
+    ref_site: InstanceId
+
+
 class Msdp(ViewInstance):
     view_config = ViewInstanceConfig(
         view_external_id="OEEMSDP",
{industrial_model-0.1.21 → industrial_model-0.1.22}/tests/test_schema.py

@@ -29,9 +29,7 @@ class SuperNestedModel(DescribableEntity):
 def test_get_schema_properties() -> None:
     for entity, expected_schema in _get_test_schema().items():
         schema = get_schema_properties(entity, SEP)
-        assert schema == expected_schema, (
-            f"Expected {expected_schema}, got {schema}"
-        )
+        assert schema == expected_schema, f"Expected {expected_schema}, got {schema}"


 def _get_test_schema() -> dict[type[BaseModel], list[str]]:
industrial_model-0.1.22/tests/tests_adapter.py

@@ -0,0 +1,42 @@
+import datetime
+import json
+
+from industrial_model import col, select
+
+from .hubs import generate_engine
+from .models import DescribableEntity, Event, Msdp
+
+if __name__ == "__main__":
+    adapter = generate_engine()
+
+    filter = (
+        col(Event.start_date_time).gt_(datetime.datetime(2025, 3, 1))
+        & col(Event.ref_site).nested_(DescribableEntity.external_id == "STS-CLK")
+        & (col(Event.start_date_time) < datetime.datetime(2025, 6, 1))
+    )
+
+    statement = select(Event).limit(100).where(filter).asc(Event.start_date_time)
+
+    result = [
+        item.model_dump(mode="json") for item in adapter.query_all_pages(statement)
+    ]
+    print(len(result))
+    json.dump(result, open("events.json", "w"), indent=2)
+
+    result_paginated = adapter.query(statement)
+    print(len(result_paginated.data))
+    json.dump(
+        result_paginated.model_dump(mode="json"),
+        open("events_paginated.json", "w"),
+        indent=2,
+    )
+
+    statement_msdp = (
+        select(Msdp).limit(2500).where(Msdp.effective_date >= datetime.date(2022, 5, 1))
+    )
+
+    result_msdp = [
+        item.model_dump(mode="json") for item in adapter.query_all_pages(statement_msdp)
+    ]
+    print(len(result_msdp))
+    json.dump(result_msdp, open("msdp.json", "w"), indent=2)
industrial_model-0.1.22/tests/tests_search.py

@@ -0,0 +1,28 @@
+import datetime
+import json
+
+from industrial_model import col, search
+
+from .hubs import generate_engine
+from .models import Event, SearchEvent
+
+if __name__ == "__main__":
+    adapter = generate_engine()
+
+    filter = col(SearchEvent.start_date_time).gt_(datetime.datetime(2025, 3, 1)) & (
+        col(SearchEvent.start_date_time) < datetime.datetime(2025, 6, 1)
+    )
+
+    statement = (
+        search(SearchEvent)
+        .limit(-1)
+        .where(filter)
+        .desc(Event.start_date_time)
+        .query_by("not runn", [SearchEvent.event_definition])
+    )
+
+    items = adapter.search(statement)
+
+    result = [item.model_dump(mode="json") for item in items]
+    print(len(result))
+    json.dump(result, open("search.json", "w"), indent=2)
{industrial_model-0.1.21 → industrial_model-0.1.22}/tests/tests_upsert_mapper.py

@@ -17,9 +17,7 @@ if __name__ == "__main__":
     item = engine.query(statement).data[0]

     item.ref_oee_event_detail.clear()
-    item.ref_oee_event_detail.append(
-        EventDetail(external_id="test", space="test")
-    )
+    item.ref_oee_event_detail.append(EventDetail(external_id="test", space="test"))

     operation = upsert_mapper.map([item])

{industrial_model-0.1.21 → industrial_model-0.1.22}/uv.lock

@@ -215,7 +215,7 @@ wheels = [

 [[package]]
 name = "industrial-model"
-version = "0.1.21"
+version = "0.1.22"
 source = { editable = "." }
 dependencies = [
     { name = "anyio" },
industrial_model-0.1.21/tests/tests_adapter.py

@@ -1,42 +0,0 @@
-import datetime
-import json
-
-from industrial_model import col, select
-
-from .hubs import generate_engine
-from .models import DescribableEntity, Event, Msdp
-
-if __name__ == "__main__":
-    adapter = generate_engine()
-
-    filter = (
-        col(Event.start_date_time).gt_(datetime.datetime(2025, 3, 1))
-        & col(Event.ref_site).nested_(
-            DescribableEntity.external_id == "STS-CLK"
-        )
-        & (col(Event.start_date_time) < datetime.datetime(2025, 6, 1))
-    )
-
-    statement = (
-        select(Event).limit(2500).where(filter).asc(Event.start_date_time)
-    )
-
-    result = [
-        item.model_dump(mode="json")
-        for item in adapter.query_all_pages(statement)
-    ]
-    print(len(result))
-    json.dump(result, open("events.json", "w"), indent=2)
-
-    statement_msdp = (
-        select(Msdp)
-        .limit(2500)
-        .where(Msdp.effective_date >= datetime.date(2022, 5, 1))
-    )
-
-    result_msdp = [
-        item.model_dump(mode="json")
-        for item in adapter.query_all_pages(statement_msdp)
-    ]
-    print(len(result_msdp))
-    json.dump(result_msdp, open("msdp.json", "w"), indent=2)