cognite-neat 0.75.8__py3-none-any.whl → 0.76.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registries. It is provided for informational purposes only.

Potentially problematic release: this version of cognite-neat might be problematic.

Files changed (99)
  1. cognite/neat/_version.py +1 -1
  2. cognite/neat/app/api/configuration.py +4 -9
  3. cognite/neat/app/api/routers/configuration.py +2 -1
  4. cognite/neat/app/api/routers/crud.py +5 -5
  5. cognite/neat/app/api/routers/data_exploration.py +3 -1
  6. cognite/neat/app/api/routers/rules.py +3 -3
  7. cognite/neat/app/api/routers/workflows.py +3 -3
  8. cognite/neat/app/ui/neat-app/build/asset-manifest.json +3 -3
  9. cognite/neat/app/ui/neat-app/build/index.html +1 -1
  10. cognite/neat/app/ui/neat-app/build/static/js/{main.4345d42f.js → main.ec7f72e2.js} +3 -3
  11. cognite/neat/app/ui/neat-app/build/static/js/{main.4345d42f.js.map → main.ec7f72e2.js.map} +1 -1
  12. cognite/neat/config.py +147 -12
  13. cognite/neat/constants.py +1 -0
  14. cognite/neat/graph/exceptions.py +1 -2
  15. cognite/neat/legacy/graph/exceptions.py +1 -2
  16. cognite/neat/legacy/graph/extractors/_mock_graph_generator.py +1 -2
  17. cognite/neat/legacy/graph/loaders/_asset_loader.py +8 -13
  18. cognite/neat/legacy/graph/loaders/_base.py +2 -4
  19. cognite/neat/legacy/graph/loaders/_exceptions.py +1 -3
  20. cognite/neat/legacy/graph/loaders/core/rdf_to_assets.py +4 -8
  21. cognite/neat/legacy/graph/loaders/core/rdf_to_relationships.py +2 -4
  22. cognite/neat/legacy/graph/loaders/rdf_to_dms.py +2 -4
  23. cognite/neat/legacy/graph/loaders/validator.py +1 -1
  24. cognite/neat/legacy/graph/transformations/transformer.py +1 -2
  25. cognite/neat/legacy/rules/exporters/_rules2dms.py +1 -2
  26. cognite/neat/legacy/rules/exporters/_validation.py +4 -8
  27. cognite/neat/legacy/rules/importers/_base.py +0 -4
  28. cognite/neat/legacy/rules/importers/_dms2rules.py +0 -2
  29. cognite/neat/legacy/rules/models/rdfpath.py +1 -2
  30. cognite/neat/legacy/workflows/examples/Export_DMS/workflow.yaml +89 -0
  31. cognite/neat/legacy/workflows/examples/Export_Rules_to_Ontology/workflow.yaml +152 -0
  32. cognite/neat/legacy/workflows/examples/Extract_DEXPI_Graph_and_Export_Rules/workflow.yaml +139 -0
  33. cognite/neat/legacy/workflows/examples/Extract_RDF_Graph_and_Generate_Assets/workflow.yaml +270 -0
  34. cognite/neat/legacy/workflows/examples/Import_DMS/workflow.yaml +65 -0
  35. cognite/neat/legacy/workflows/examples/Ontology_to_Data_Model/workflow.yaml +116 -0
  36. cognite/neat/legacy/workflows/examples/Validate_Rules/workflow.yaml +67 -0
  37. cognite/neat/legacy/workflows/examples/Validate_Solution_Model/workflow.yaml +64 -0
  38. cognite/neat/legacy/workflows/examples/Visualize_Data_Model_Using_Mock_Graph/workflow.yaml +95 -0
  39. cognite/neat/legacy/workflows/examples/Visualize_Semantic_Data_Model/workflow.yaml +111 -0
  40. cognite/neat/rules/exporters/_models.py +3 -0
  41. cognite/neat/rules/exporters/_rules2dms.py +46 -4
  42. cognite/neat/rules/exporters/_rules2excel.py +2 -11
  43. cognite/neat/rules/exporters/_validation.py +6 -8
  44. cognite/neat/rules/importers/_base.py +8 -4
  45. cognite/neat/rules/importers/_dms2rules.py +321 -129
  46. cognite/neat/rules/importers/_dtdl2rules/dtdl_importer.py +2 -4
  47. cognite/neat/rules/importers/_dtdl2rules/spec.py +2 -4
  48. cognite/neat/rules/importers/_owl2rules/_owl2rules.py +2 -4
  49. cognite/neat/rules/importers/_spreadsheet2rules.py +18 -16
  50. cognite/neat/rules/importers/_yaml2rules.py +2 -4
  51. cognite/neat/rules/issues/base.py +3 -0
  52. cognite/neat/rules/issues/dms.py +144 -58
  53. cognite/neat/rules/issues/fileread.py +41 -0
  54. cognite/neat/rules/issues/formatters.py +3 -1
  55. cognite/neat/rules/issues/importing.py +155 -0
  56. cognite/neat/rules/issues/spreadsheet.py +12 -9
  57. cognite/neat/rules/models/entities.py +30 -8
  58. cognite/neat/rules/models/rdfpath.py +1 -2
  59. cognite/neat/rules/models/rules/_base.py +5 -6
  60. cognite/neat/rules/models/rules/_dms_architect_rules.py +494 -333
  61. cognite/neat/rules/models/rules/_dms_rules_write.py +43 -52
  62. cognite/neat/rules/models/rules/_dms_schema.py +112 -22
  63. cognite/neat/rules/models/rules/_domain_rules.py +5 -0
  64. cognite/neat/rules/models/rules/_information_rules.py +13 -6
  65. cognite/neat/rules/models/wrapped_entities.py +166 -0
  66. cognite/neat/utils/cdf_loaders/_data_modeling.py +3 -1
  67. cognite/neat/utils/cdf_loaders/_ingestion.py +2 -4
  68. cognite/neat/utils/spreadsheet.py +2 -4
  69. cognite/neat/utils/utils.py +2 -4
  70. cognite/neat/workflows/base.py +5 -5
  71. cognite/neat/workflows/manager.py +32 -22
  72. cognite/neat/workflows/model.py +3 -3
  73. cognite/neat/workflows/steps/lib/__init__.py +0 -7
  74. cognite/neat/workflows/steps/lib/current/__init__.py +6 -0
  75. cognite/neat/workflows/steps/lib/{rules_exporter.py → current/rules_exporter.py} +8 -8
  76. cognite/neat/workflows/steps/lib/{rules_importer.py → current/rules_importer.py} +4 -4
  77. cognite/neat/workflows/steps/lib/io/__init__.py +1 -0
  78. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_contextualization.py +2 -2
  79. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_extractor.py +9 -9
  80. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_loader.py +9 -9
  81. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_store.py +4 -4
  82. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_transformer.py +2 -2
  83. cognite/neat/workflows/steps/lib/{v1 → legacy}/rules_exporter.py +15 -17
  84. cognite/neat/workflows/steps/lib/{v1 → legacy}/rules_importer.py +7 -7
  85. cognite/neat/workflows/steps/step_model.py +5 -9
  86. cognite/neat/workflows/steps_registry.py +20 -11
  87. {cognite_neat-0.75.8.dist-info → cognite_neat-0.76.0.dist-info}/METADATA +1 -1
  88. {cognite_neat-0.75.8.dist-info → cognite_neat-0.76.0.dist-info}/RECORD +98 -86
  89. cognite/neat/app/api/data_classes/configuration.py +0 -121
  90. /cognite/neat/app/ui/neat-app/build/static/js/{main.4345d42f.js.LICENSE.txt → main.ec7f72e2.js.LICENSE.txt} +0 -0
  91. /cognite/neat/workflows/steps/lib/{graph_extractor.py → current/graph_extractor.py} +0 -0
  92. /cognite/neat/workflows/steps/lib/{graph_loader.py → current/graph_loader.py} +0 -0
  93. /cognite/neat/workflows/steps/lib/{graph_store.py → current/graph_store.py} +0 -0
  94. /cognite/neat/workflows/steps/lib/{rules_validator.py → current/rules_validator.py} +0 -0
  95. /cognite/neat/workflows/steps/lib/{io_steps.py → io/io_steps.py} +0 -0
  96. /cognite/neat/workflows/steps/lib/{v1 → legacy}/__init__.py +0 -0
  97. {cognite_neat-0.75.8.dist-info → cognite_neat-0.76.0.dist-info}/LICENSE +0 -0
  98. {cognite_neat-0.75.8.dist-info → cognite_neat-0.76.0.dist-info}/WHEEL +0 -0
  99. {cognite_neat-0.75.8.dist-info → cognite_neat-0.76.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,166 @@
+from abc import ABC, abstractmethod
+from collections.abc import Collection
+from functools import total_ordering
+from typing import Any, ClassVar, TypeVar
+
+from cognite.client import data_modeling as dm
+from cognite.client.data_classes.data_modeling import ContainerId, NodeId
+from pydantic import BaseModel, model_serializer, model_validator
+
+from cognite.neat.rules.models.entities import ContainerEntity, DMSNodeEntity, Entity
+
+
+@total_ordering
+class WrappedEntity(BaseModel, ABC):
+    name: ClassVar[str]
+    _inner_cls: ClassVar[type[Entity]]
+    inner: list[Entity] | None
+
+    @classmethod
+    def load(cls: "type[T_WrappedEntity]", data: Any) -> "T_WrappedEntity":
+        if isinstance(data, cls):
+            return data
+        return cls.model_validate(data)
+
+    @model_validator(mode="before")
+    def _load(cls, data: Any) -> dict:
+        if isinstance(data, dict):
+            return data
+        elif not isinstance(data, str):
+            raise ValueError(f"Cannot load {cls.__name__} from {data}")
+        elif not data.casefold().startswith(cls.name.casefold()):
+            raise ValueError(f"Expected {cls.name} but got {data}")
+        result = cls._parse(data)
+        return result
+
+    @classmethod
+    def _parse(cls, data: str) -> dict:
+        if data.casefold() == cls.name.casefold():
+            return {"inner": None}
+        inner = data[len(cls.name) :].removeprefix("(").removesuffix(")")
+        return {"inner": [cls._inner_cls.load(entry.strip()) for entry in inner.split(",")]}
+
+    @model_serializer(when_used="unless-none", return_type=str)
+    def as_str(self) -> str:
+        return str(self)
+
+    def __str__(self):
+        return self.id
+
+    @property
+    def id(self) -> str:
+        inner = self.as_tuple()[1:]
+        return f"{self.name}({','.join(inner)})"
+
+    @property
+    def is_empty(self) -> bool:
+        return self.inner is None or (isinstance(self.inner, list) and not self.inner)
+
+    def dump(self) -> str:
+        return str(self)
+
+    def as_tuple(self) -> tuple[str, ...]:
+        entities: list[str] = [str(inner) for inner in self.inner or []]
+        return self.name, *entities
+
+    def __lt__(self, other: object) -> bool:
+        if not isinstance(other, WrappedEntity):
+            return NotImplemented
+        return self.as_tuple() < other.as_tuple()
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, WrappedEntity):
+            return NotImplemented
+        return self.as_tuple() == other.as_tuple()
+
+    def __hash__(self) -> int:
+        return hash(str(self))
+
+    def __repr__(self) -> str:
+        return self.id
+
+
+T_WrappedEntity = TypeVar("T_WrappedEntity", bound=WrappedEntity)
+
+
+class DMSFilter(WrappedEntity):
+    @abstractmethod
+    def as_dms_filter(self, default: Any | None = None) -> dm.filters.Filter:
+        raise NotImplementedError
+
+    @classmethod
+    def from_dms_filter(cls, filter: dm.Filter) -> "DMSFilter":
+        dumped = filter.dump()
+        if (body := dumped.get(dm.filters.Equals._filter_name)) and (value := body.get("value")):
+            space = value.get("space")
+            external_id = value.get("externalId")
+            if space is not None and external_id is not None:
+                return NodeTypeFilter(inner=[DMSNodeEntity(space=space, externalId=external_id)])
+        elif (body := dumped.get(dm.filters.In._filter_name)) and (values := body.get("values")):
+            return NodeTypeFilter(
+                inner=[
+                    DMSNodeEntity(space=entry["space"], externalId=entry["externalId"])
+                    for entry in values
+                    if isinstance(entry, dict) and "space" in entry and "externalId" in entry
+                ]
+            )
+        elif body := dumped.get(dm.filters.HasData._filter_name):
+            return HasDataFilter(
+                inner=[
+                    ContainerEntity(space=entry["space"], externalId=entry["externalId"])
+                    for entry in body
+                    if isinstance(entry, dict) and "space" in entry and "externalId" in entry
+                ]
+            )
+
+        raise ValueError(f"Cannot convert {filter._filter_name} to {cls.__name__}")
+
+
+class NodeTypeFilter(DMSFilter):
+    name: ClassVar[str] = "nodeType"
+    _inner_cls: ClassVar[type[DMSNodeEntity]] = DMSNodeEntity
+    inner: list[DMSNodeEntity] | None = None  # type: ignore[assignment]
+
+    @property
+    def nodes(self) -> list[NodeId]:
+        return [node.as_id() for node in self.inner or []]
+
+    def as_dms_filter(self, default: Collection[NodeId] | None = None) -> dm.Filter:
+        if self.inner is not None:
+            node_ids = [node.as_id() for node in self.inner]
+        elif default is not None:
+            node_ids = list(default)
+        else:
+            raise ValueError("Empty nodeType filter, please provide a default node.")
+        if len(node_ids) == 1:
+            return dm.filters.Equals(
+                ["node", "type"], {"space": node_ids[0].space, "externalId": node_ids[0].external_id}
+            )
+        else:
+            return dm.filters.In(
+                ["node", "type"],
+                [
+                    {"space": node.space, "externalId": node.external_id}
+                    for node in sorted(node_ids, key=lambda node: node.as_tuple())
+                ],
+            )
+
+
+class HasDataFilter(DMSFilter):
+    name: ClassVar[str] = "hasData"
+    _inner_cls: ClassVar[type[ContainerEntity]] = ContainerEntity
+    inner: list[ContainerEntity] | None = None  # type: ignore[assignment]
+
+    def as_dms_filter(self, default: Collection[ContainerId] | None = None) -> dm.Filter:
+        containers: list[ContainerId]
+        if self.inner:
+            containers = [container.as_id() for container in self.inner]
+        elif default:
+            containers = list(default)
+        else:
+            raise ValueError("Empty hasData filter, please provide a default containers.")
+
+        return dm.filters.HasData(
+            # Sorting to ensure deterministic order
+            containers=sorted(containers, key=lambda container: container.as_tuple())  # type: ignore[union-attr]
+        )
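
Note on the new module: WrappedEntity gives every filter a compact string round-trip. load() accepts either an existing instance or a string such as "hasData(...)"; _parse() strips the name and parentheses and splits the inner entities on commas; dump() serializes back to the same "name(inner,...)" form. A minimal usage sketch under those rules (the exact string syntax accepted by DMSNodeEntity.load is not shown in this diff and is an assumption):

from cognite.neat.rules.models.entities import DMSNodeEntity
from cognite.neat.rules.models.wrapped_entities import HasDataFilter, NodeTypeFilter

# Keyword construction mirrors the calls in from_dms_filter above.
node_filter = NodeTypeFilter(inner=[DMSNodeEntity(space="my_space", externalId="my_node")])

# One node yields dm.filters.Equals on ["node", "type"]; several nodes
# yield dm.filters.In with the values sorted for deterministic output.
dms_filter = node_filter.as_dms_filter()

# dump() returns the "name(inner,...)" string form, e.g. "nodeType(...)".
serialized = node_filter.dump()

# A bare filter name parses to inner=None, i.e. an empty filter that
# requires a default when converted with as_dms_filter().
empty = HasDataFilter.load("hasData")
assert empty.is_empty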
@@ -60,7 +60,9 @@ class DataModelingLoader(
         except CogniteAPIError as e:
             failed_items = {failed.as_id() for failed in e.failed if hasattr(failed, "as_id")}
             to_redeploy = [
-                item for item in items if item.as_id() in failed_items and item.as_id() not in tried_force_deploy  # type: ignore[attr-defined]
+                item
+                for item in items
+                if item.as_id() in failed_items and item.as_id() not in tried_force_deploy  # type: ignore[attr-defined]
             ]
             if not to_redeploy:
                 # Avoid infinite loop
@@ -81,12 +81,10 @@ class RawTableLoader(ResourceLoader[RawTableID, RawTableWrite, RawTable, RawTabl
         return item.as_id()

     @overload
-    def _groupby_database(self, items: Sequence[RawTableWrite]) -> Iterable[tuple[str, Iterable[RawTableWrite]]]:
-        ...
+    def _groupby_database(self, items: Sequence[RawTableWrite]) -> Iterable[tuple[str, Iterable[RawTableWrite]]]: ...

     @overload
-    def _groupby_database(self, items: SequenceNotStr[RawTableID]) -> Iterable[tuple[str, Iterable[RawTableID]]]:
-        ...
+    def _groupby_database(self, items: SequenceNotStr[RawTableID]) -> Iterable[tuple[str, Iterable[RawTableID]]]: ...

     def _groupby_database(
         self, items: Sequence[RawTableWrite] | SequenceNotStr[RawTableID]
@@ -37,8 +37,7 @@ def read_individual_sheet(
    sheet_name: str,
    return_read_info: Literal[True],
    expected_headers: list[str] | None = None,
-) -> tuple[list[dict], SpreadsheetRead]:
-    ...
+) -> tuple[list[dict], SpreadsheetRead]: ...


 @overload
@@ -47,8 +46,7 @@ def read_individual_sheet(
    sheet_name: str,
    return_read_info: Literal[False] = False,
    expected_headers: list[str] | None = None,
-) -> list[dict]:
-    ...
+) -> list[dict]: ...


 def read_individual_sheet(
@@ -74,13 +74,11 @@ def _get_cognite_client(config: CogniteClientConfig, credentials: CredentialProv


 @overload
-def remove_namespace(*URI: URIRef | str, special_separator: str = "#_") -> str:
-    ...
+def remove_namespace(*URI: URIRef | str, special_separator: str = "#_") -> str: ...


 @overload
-def remove_namespace(*URI: tuple[URIRef | str, ...], special_separator: str = "#_") -> tuple[str, ...]:
-    ...
+def remove_namespace(*URI: tuple[URIRef | str, ...], special_separator: str = "#_") -> tuple[str, ...]: ...


 def remove_namespace(
@@ -11,8 +11,8 @@ import yaml
 from cognite.client import ClientConfig, CogniteClient
 from prometheus_client import Gauge

-from cognite.neat.app.api.configuration import Config
 from cognite.neat.app.monitoring.metrics import NeatMetricsCollector
+from cognite.neat.config import Config
 from cognite.neat.utils.utils import retry_decorator
 from cognite.neat.workflows import cdf_store, utils
 from cognite.neat.workflows._exceptions import ConfigurationNotSet, InvalidStepOutputException
@@ -45,7 +45,7 @@ class BaseWorkflow:
         self,
         name: str,
         client: CogniteClient,
-        steps_registry: StepsRegistry | None = None,
+        steps_registry: StepsRegistry,
         workflow_steps: list[WorkflowStepDefinition] | None = None,
         default_dataset_id: int | None = None,
     ):
@@ -81,7 +81,7 @@ class BaseWorkflow:
         self.auto_workflow_cleanup = False
         self.step_classes = None
         self.data: dict[str, DataContract | FlowMessage | CdfStore | CogniteClient | None] = {}
-        self.steps_registry: StepsRegistry = steps_registry or StepsRegistry()
+        self.steps_registry: StepsRegistry = steps_registry

     def start(self, sync=False, is_ephemeral=False, **kwargs) -> FlowMessage | None:
         """Starts workflow execution.sync=True will block until workflow is completed and
@@ -223,7 +223,7 @@ class BaseWorkflow:

     def copy(self) -> "BaseWorkflow":
         """Create a copy of the workflow"""
-        new_instance = self.__class__(self.name, self.cdf_client)
+        new_instance = self.__class__(self.name, self.cdf_client, self.steps_registry)
         new_instance.workflow_steps = self.workflow_steps
         new_instance.configs = self.configs
         new_instance.set_task_builder(self.task_builder)
@@ -345,7 +345,7 @@ class BaseWorkflow:
             raise Exception(f"Workflow {self.name} is not running , step {step_name} is skipped")
         self.state = WorkflowState.RUNNING_WAITING
         timeout = 3000000.0
-        if "wait_timeout" in step.params and step.params["wait_timeout"]:
+        if step.params.get("wait_timeout"):
             timeout = float(step.params["wait_timeout"])
         # reporting workflow execution before waiting for event
         logging.info(f"Workflow {self.name} is waiting for event")
@@ -4,15 +4,15 @@ import shutil
 import sys
 import time
 import traceback
-from pathlib import Path

 from cognite.client import CogniteClient
 from prometheus_client import Gauge
 from pydantic import BaseModel

+from cognite.neat.config import Config
 from cognite.neat.workflows import BaseWorkflow
 from cognite.neat.workflows.base import WorkflowDefinition
-from cognite.neat.workflows.model import FlowMessage, InstanceStartMethod, WorkflowState
+from cognite.neat.workflows.model import FlowMessage, InstanceStartMethod, WorkflowState, WorkflowStepDefinition
 from cognite.neat.workflows.steps_registry import StepsRegistry
 from cognite.neat.workflows.tasks import WorkflowTaskBuilder

@@ -28,32 +28,21 @@ class WorkflowStartStatus(BaseModel, arbitrary_types_allowed=True):
 class WorkflowManager:
     """Workflow manager is responsible for loading, saving and managing workflows
     client: CogniteClient
-    registry_storage_type: str = "file"
-    workflows_storage_path: Path = Path("workflows")
-    rules_storage_path: Path = Path("rules")
-    data_set_id: int = None,
+    config: Config
     """

-    def __init__(
-        self,
-        client: CogniteClient | None = None,
-        registry_storage_type: str = "file",
-        workflows_storage_path: Path | None = None,
-        rules_storage_path: Path | None = None,
-        data_store_path: Path | None = None,
-        data_set_id: int | None = None,
-    ):
+    def __init__(self, client: CogniteClient, config: Config):
         self.client = client
-        self.data_set_id = data_set_id
-        self.data_store_path = data_store_path
+        self.data_set_id = config.cdf_default_dataset_id
+        self.data_store_path = config.data_store_path
         self.workflow_registry: dict[str, BaseWorkflow] = {}
         self.ephemeral_instance_registry: dict[str, BaseWorkflow] = {}
-        self.workflows_storage_type = registry_storage_type
-        # todo use pathlib
-        self.workflows_storage_path = workflows_storage_path if workflows_storage_path else Path("workflows")
-        self.rules_storage_path = rules_storage_path if rules_storage_path else Path("rules")
+        self.workflows_storage_type = config.workflows_store_type
+        self.config = config
+        self.workflows_storage_path = config.workflows_store_path
+        self.rules_storage_path = config.rules_store_path
         self.task_builder = WorkflowTaskBuilder(client, self)
-        self.steps_registry = StepsRegistry(self.data_store_path)
+        self.steps_registry = StepsRegistry(self.config)
         self.steps_registry.load_step_classes()

     def update_cdf_client(self, client: CogniteClient):
@@ -206,10 +195,21 @@ class WorkflowManager:
         self, workflow_name: str, step_id: str = "", flow_msg: FlowMessage | None = None, sync: bool | None = None
     ) -> WorkflowStartStatus:
         retrieved = self.get_workflow(workflow_name)
+
         if retrieved is None:
             return WorkflowStartStatus(
                 workflow_instance=None, is_success=False, status_text="Workflow not found in registry"
             )
+
+        if self._is_workflow_made_of_mixed_steps(retrieved.workflow_steps):
+            retrieved.state = WorkflowState.FAILED
+            return WorkflowStartStatus(
+                workflow_instance=None,
+                is_success=False,
+                status_text="Workflow consists of both legacy and current steps. "
+                "Please update the workflow to use only current steps.",
+            )
+
         workflow = retrieved
         retrieved_step = workflow.get_trigger_step(step_id)
         if retrieved_step is None:
@@ -280,3 +280,13 @@ class WorkflowManager:
         return WorkflowStartStatus(
             workflow_instance=None, is_success=False, status_text="Unsupported workflow start method"
         )
+
+    def _is_workflow_made_of_mixed_steps(self, steps: list[WorkflowStepDefinition]):
+        legacy_steps = 0
+        current_steps = 0
+        for step in steps:
+            if step.method in self.steps_registry.categorized_steps["legacy"]:
+                legacy_steps += 1
+            if step.method in self.steps_registry.categorized_steps["current"]:
+                current_steps += 1
+        return legacy_steps > 0 and current_steps > 0
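
Taken together, the manager now takes a single Config object instead of a handful of loose constructor arguments, and start_workflow_instance fails fast when a workflow mixes legacy and current steps. A hedged wiring sketch (the attribute names come from the diff above; building Config and CogniteClient with defaults is an assumption, real deployments load them from file or environment):

from cognite.client import CogniteClient
from cognite.neat.config import Config
from cognite.neat.workflows.manager import WorkflowManager

manager = WorkflowManager(client=CogniteClient(), config=Config())

# Storage locations are now derived from config: workflows_store_path,
# rules_store_path, data_store_path, staging_path, cdf_default_dataset_id.

status = manager.start_workflow_instance("my_workflow")
if not status.is_success:
    # e.g. "Workflow consists of both legacy and current steps. ..."
    print(status.status_text)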
@@ -45,9 +45,9 @@ class FlowMessage(BaseModel):
     headers: dict[str, str] | None = None  # The headers of the message
     output_text: str | None = None  # The output text of the step that is captured in the execution log
     error_text: str | None = None  # The error text of the step that is captured in the execution log
-    next_step_ids: list[
-        str
-    ] | None = None  # If set, the workflow will skip default route and go to the next step in the list
+    next_step_ids: list[str] | None = (
+        None  # If set, the workflow will skip default route and go to the next step in the list
+    )
     step_execution_status: StepExecutionStatus = StepExecutionStatus.UNKNOWN  # The status of the step execution


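The reflowed comment above doubles as the routing contract: a step can bypass the default route by returning a FlowMessage with explicit next-step ids. A minimal sketch, assuming FlowMessage has no required fields beyond those shown in this hunk (the step id is hypothetical):

from cognite.neat.workflows.model import FlowMessage

# Returned from a step's run(); "clean_graph" is a hypothetical step id.
msg = FlowMessage(
    output_text="Extraction done, skipping the default route",
    next_step_ids=["clean_graph"],
)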
@@ -1,7 +0,0 @@
1
- from .io_steps import * # noqa
2
- from .rules_importer import * # noqa
3
- from .rules_validator import * # noqa
4
- from .rules_exporter import * # noqa
5
- from .graph_store import * # noqa
6
- from .graph_loader import * # noqa
7
- from .graph_extractor import * # noqa
@@ -0,0 +1,6 @@
+from .rules_importer import *  # noqa
+from .rules_validator import *  # noqa
+from .rules_exporter import *  # noqa
+from .graph_store import *  # noqa
+from .graph_loader import *  # noqa
+from .graph_extractor import *  # noqa
@@ -101,7 +101,7 @@ class DeleteDataModelFromCDF(Step):
             report_lines.append("\n\n# ERRORS\n\n")
             report_lines.extend(errors)

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         report_file = "dms_component_creation_report.txt"
         report_full_path = output_dir / report_file
@@ -203,7 +203,7 @@ class RulesToDMS(Step):
            existing_handling=existing_components_handling,
        )

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         file_name = (
             input_rules.metadata.external_id
@@ -223,7 +223,7 @@ class RulesToDMS(Step):
             report_lines.append("\n\n# ERRORS\n\n")
             report_lines.extend(errors)

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         report_file = "dms_component_creation_report.txt"
         report_full_path = output_dir / report_file
@@ -348,7 +348,7 @@ class RulesToOntology(Step):
                step_execution_status=StepExecutionStatus.ABORT_AND_FAIL,
            )

-        default_path = self.data_store_path / "staging" / _get_default_file_name(rules, "ontology", "ttl")
+        default_path = self.config.staging_path / _get_default_file_name(rules, "ontology", "ttl")

         storage_path = (
             self.data_store_path / Path(self.configs["File path"]) if self.configs["File path"] else default_path
@@ -399,7 +399,7 @@ class RulesToSHACL(Step):
                step_execution_status=StepExecutionStatus.ABORT_AND_FAIL,
            )

-        default_path = self.data_store_path / "staging" / _get_default_file_name(rules, "shacl", "ttl")
+        default_path = self.config.staging_path / _get_default_file_name(rules, "shacl", "ttl")

         storage_path = (
             self.data_store_path / Path(self.configs["File path"]) if self.configs["File path"] else default_path
@@ -450,7 +450,7 @@ class RulesToSemanticDataModel(Step):
                step_execution_status=StepExecutionStatus.ABORT_AND_FAIL,
            )

-        default_path = self.data_store_path / "staging" / _get_default_file_name(rules, "semantic-data-model", "ttl")
+        default_path = self.config.staging_path / _get_default_file_name(rules, "semantic-data-model", "ttl")

         storage_path = (
             self.data_store_path / Path(self.configs["File path"]) if self.configs["File path"] else default_path
@@ -513,7 +513,7 @@ class RulesToCDFTransformations(Step):
         dms_exporter = exporters.DMSExporter(
             export_pipeline=True, instance_space=instance_space, export_components=["spaces"]
         )
-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         file_name = (
             input_rules.metadata.external_id
@@ -533,7 +533,7 @@ class RulesToCDFTransformations(Step):
             report_lines.append("\n\n# ERRORS\n\n")
             report_lines.extend(errors)

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         report_file = "pipeline_creation_report.txt"
         report_full_path = output_dir / report_file
@@ -75,7 +75,7 @@ class ExcelToRules(Step):
         rules, issues = excel_importer.to_rules(errors="continue", role=role_enum)

         if rules is None:
-            output_dir = self.data_store_path / Path("staging")
+            output_dir = self.config.staging_path
             report_writer = FORMATTER_BY_NAME[self.configs["Report formatter"]]()
             report_writer.write_to_file(issues, file_or_dir_path=output_dir)
             report_file = report_writer.default_file_name
@@ -137,7 +137,7 @@ class OntologyToRules(Step):
         make_compliant = self.configs.get("Make compliant", "True") == "True"

         if file_name:
-            rules_file_path = Path(self.data_store_path) / "rules" / file_name
+            rules_file_path = self.config.rules_store_path / file_name
         elif full_path:
             rules_file_path = full_path
         else:
@@ -154,7 +154,7 @@ class OntologyToRules(Step):
         rules, issues = ontology_importer.to_rules(errors="continue", role=role_enum)

         if rules is None:
-            output_dir = self.data_store_path / Path("staging")
+            output_dir = self.config.staging_path
             report_writer = FORMATTER_BY_NAME[self.configs["Report formatter"]]()
             report_writer.write_to_file(issues, file_or_dir_path=output_dir)
             report_file = report_writer.default_file_name
@@ -226,7 +226,7 @@ class DMSToRules(Step):
         rules, issues = dms_importer.to_rules(errors="continue", role=role_enum)

         if rules is None:
-            output_dir = self.data_store_path / Path("staging")
+            output_dir = self.config.staging_path
             report_writer = FORMATTER_BY_NAME[self.configs["Report formatter"]]()
             report_writer.write_to_file(issues, file_or_dir_path=output_dir)
             report_file = report_writer.default_file_name
@@ -0,0 +1 @@
+from .io_steps import *  # noqa
@@ -7,11 +7,11 @@ from cognite.neat.workflows.steps.data_contracts import SolutionGraph, SourceGra
 from cognite.neat.workflows.steps.step_model import Configurable, Step

 __all__ = ["SimpleGraphEntityMatcher"]
-CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [VERSION 1]"
+CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [LEGACY]"


 class SimpleGraphEntityMatcher(Step):
-    version = "private-alpha"
+    version = "legacy"
     description = "The step matches entities in the graph and creates links based on provided configurations"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
@@ -29,7 +29,7 @@ __all__ = [
     "ExtractGraphFromDexpiFile",
 ]

-CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [VERSION 1]"
+CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [LEGACY]"


 class ExtractGraphFromRdfFile(Step):
@@ -38,7 +38,7 @@ class ExtractGraphFromRdfFile(Step):
     """

     description = "This step extract instances from a file into the source graph. The file must be in RDF format."
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -91,7 +91,7 @@ class ExtractGraphFromDexpiFile(Step):
     """

     description = "This step converts DEXPI P&ID (XML) into Knowledge Graph"
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -130,7 +130,7 @@ class ExtractGraphFromGraphCapturingSheet(Step):
     """

     description = "This step extracts nodes and edges from graph capturing spreadsheet and load them into graph"
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -187,7 +187,7 @@ class ExtractGraphFromMockGraph(Step):
     """

     description = "This step generate mock graph based on the defined classes and target number of instances"
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -239,7 +239,7 @@ class ExtractGraphFromRulesInstanceSheet(Step):

     description = "This step extracts instances from Rules object and loads them into the graph."
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -276,7 +276,7 @@ class ExtractGraphFromRulesDataModel(Step):

     description = "This step extracts data model from rules file and loads it into source graph."
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     def run(  # type: ignore[override, syntax]
         self, transformation_rules: RulesData, source_graph: SourceGraph
@@ -321,7 +321,7 @@ class ExtractGraphFromJsonFile(Step):

     description = "This step extracts instances from json file and loads them into a graph store"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
             name="file_name", value="data_dump.json", label="Full path to the file containing data dump in JSON format"
@@ -504,7 +504,7 @@ class ExtractGraphFromAvevaPiAssetFramework(Step):

     description = "This step extracts instances from Aveva PI AF and loads them into a graph store"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
             name="file_name",