cognite-neat 0.75.8__py3-none-any.whl → 0.75.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cognite-neat has been flagged as possibly problematic.

Files changed (88)
  1. cognite/neat/_version.py +1 -1
  2. cognite/neat/app/api/configuration.py +4 -9
  3. cognite/neat/app/api/routers/configuration.py +2 -1
  4. cognite/neat/app/api/routers/crud.py +5 -5
  5. cognite/neat/app/api/routers/data_exploration.py +3 -1
  6. cognite/neat/app/api/routers/rules.py +3 -3
  7. cognite/neat/app/api/routers/workflows.py +3 -3
  8. cognite/neat/app/ui/neat-app/build/asset-manifest.json +3 -3
  9. cognite/neat/app/ui/neat-app/build/index.html +1 -1
  10. cognite/neat/app/ui/neat-app/build/static/js/{main.4345d42f.js → main.ec7f72e2.js} +3 -3
  11. cognite/neat/app/ui/neat-app/build/static/js/{main.4345d42f.js.map → main.ec7f72e2.js.map} +1 -1
  12. cognite/neat/config.py +147 -12
  13. cognite/neat/constants.py +1 -0
  14. cognite/neat/graph/exceptions.py +1 -2
  15. cognite/neat/legacy/graph/exceptions.py +1 -2
  16. cognite/neat/legacy/graph/extractors/_mock_graph_generator.py +1 -2
  17. cognite/neat/legacy/graph/loaders/_asset_loader.py +8 -13
  18. cognite/neat/legacy/graph/loaders/_base.py +2 -4
  19. cognite/neat/legacy/graph/loaders/_exceptions.py +1 -3
  20. cognite/neat/legacy/graph/loaders/core/rdf_to_assets.py +4 -8
  21. cognite/neat/legacy/graph/loaders/core/rdf_to_relationships.py +2 -4
  22. cognite/neat/legacy/graph/loaders/rdf_to_dms.py +2 -4
  23. cognite/neat/legacy/graph/loaders/validator.py +1 -1
  24. cognite/neat/legacy/graph/transformations/transformer.py +1 -2
  25. cognite/neat/legacy/rules/exporters/_rules2dms.py +1 -2
  26. cognite/neat/legacy/rules/exporters/_validation.py +4 -8
  27. cognite/neat/legacy/rules/importers/_base.py +0 -4
  28. cognite/neat/legacy/rules/importers/_dms2rules.py +0 -2
  29. cognite/neat/legacy/rules/models/rdfpath.py +1 -2
  30. cognite/neat/legacy/workflows/examples/Export_DMS/workflow.yaml +89 -0
  31. cognite/neat/legacy/workflows/examples/Export_Rules_to_Ontology/workflow.yaml +152 -0
  32. cognite/neat/legacy/workflows/examples/Extract_DEXPI_Graph_and_Export_Rules/workflow.yaml +139 -0
  33. cognite/neat/legacy/workflows/examples/Extract_RDF_Graph_and_Generate_Assets/workflow.yaml +270 -0
  34. cognite/neat/legacy/workflows/examples/Import_DMS/workflow.yaml +65 -0
  35. cognite/neat/legacy/workflows/examples/Ontology_to_Data_Model/workflow.yaml +116 -0
  36. cognite/neat/legacy/workflows/examples/Validate_Rules/workflow.yaml +67 -0
  37. cognite/neat/legacy/workflows/examples/Validate_Solution_Model/workflow.yaml +64 -0
  38. cognite/neat/legacy/workflows/examples/Visualize_Data_Model_Using_Mock_Graph/workflow.yaml +95 -0
  39. cognite/neat/legacy/workflows/examples/Visualize_Semantic_Data_Model/workflow.yaml +111 -0
  40. cognite/neat/rules/exporters/_rules2excel.py +2 -2
  41. cognite/neat/rules/exporters/_validation.py +6 -8
  42. cognite/neat/rules/importers/_base.py +2 -4
  43. cognite/neat/rules/importers/_dms2rules.py +2 -4
  44. cognite/neat/rules/importers/_dtdl2rules/dtdl_importer.py +2 -4
  45. cognite/neat/rules/importers/_dtdl2rules/spec.py +2 -4
  46. cognite/neat/rules/importers/_owl2rules/_owl2rules.py +2 -4
  47. cognite/neat/rules/importers/_spreadsheet2rules.py +4 -8
  48. cognite/neat/rules/importers/_yaml2rules.py +2 -4
  49. cognite/neat/rules/issues/dms.py +2 -4
  50. cognite/neat/rules/issues/formatters.py +3 -1
  51. cognite/neat/rules/models/entities.py +1 -2
  52. cognite/neat/rules/models/rdfpath.py +1 -2
  53. cognite/neat/rules/models/rules/_dms_architect_rules.py +2 -1
  54. cognite/neat/rules/models/rules/_dms_rules_write.py +11 -22
  55. cognite/neat/utils/cdf_loaders/_data_modeling.py +3 -1
  56. cognite/neat/utils/cdf_loaders/_ingestion.py +2 -4
  57. cognite/neat/utils/spreadsheet.py +2 -4
  58. cognite/neat/utils/utils.py +2 -4
  59. cognite/neat/workflows/base.py +5 -5
  60. cognite/neat/workflows/manager.py +32 -22
  61. cognite/neat/workflows/model.py +3 -3
  62. cognite/neat/workflows/steps/lib/__init__.py +0 -7
  63. cognite/neat/workflows/steps/lib/current/__init__.py +6 -0
  64. cognite/neat/workflows/steps/lib/{rules_exporter.py → current/rules_exporter.py} +8 -8
  65. cognite/neat/workflows/steps/lib/{rules_importer.py → current/rules_importer.py} +4 -4
  66. cognite/neat/workflows/steps/lib/io/__init__.py +1 -0
  67. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_contextualization.py +2 -2
  68. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_extractor.py +9 -9
  69. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_loader.py +9 -9
  70. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_store.py +4 -4
  71. cognite/neat/workflows/steps/lib/{v1 → legacy}/graph_transformer.py +2 -2
  72. cognite/neat/workflows/steps/lib/{v1 → legacy}/rules_exporter.py +15 -17
  73. cognite/neat/workflows/steps/lib/{v1 → legacy}/rules_importer.py +7 -7
  74. cognite/neat/workflows/steps/step_model.py +5 -9
  75. cognite/neat/workflows/steps_registry.py +20 -11
  76. {cognite_neat-0.75.8.dist-info → cognite_neat-0.75.9.dist-info}/METADATA +1 -1
  77. {cognite_neat-0.75.8.dist-info → cognite_neat-0.75.9.dist-info}/RECORD +87 -76
  78. cognite/neat/app/api/data_classes/configuration.py +0 -121
  79. /cognite/neat/app/ui/neat-app/build/static/js/{main.4345d42f.js.LICENSE.txt → main.ec7f72e2.js.LICENSE.txt} +0 -0
  80. /cognite/neat/workflows/steps/lib/{graph_extractor.py → current/graph_extractor.py} +0 -0
  81. /cognite/neat/workflows/steps/lib/{graph_loader.py → current/graph_loader.py} +0 -0
  82. /cognite/neat/workflows/steps/lib/{graph_store.py → current/graph_store.py} +0 -0
  83. /cognite/neat/workflows/steps/lib/{rules_validator.py → current/rules_validator.py} +0 -0
  84. /cognite/neat/workflows/steps/lib/{io_steps.py → io/io_steps.py} +0 -0
  85. /cognite/neat/workflows/steps/lib/{v1 → legacy}/__init__.py +0 -0
  86. {cognite_neat-0.75.8.dist-info → cognite_neat-0.75.9.dist-info}/LICENSE +0 -0
  87. {cognite_neat-0.75.8.dist-info → cognite_neat-0.75.9.dist-info}/WHEEL +0 -0
  88. {cognite_neat-0.75.8.dist-info → cognite_neat-0.75.9.dist-info}/entry_points.txt +0 -0
cognite/neat/rules/models/rules/_dms_architect_rules.py
@@ -654,7 +654,8 @@ class _DMSExporter:
         views_not_in_model = {view.view.as_id() for view in rules.views if not view.in_model}
         data_model = rules.metadata.as_data_model()
         data_model.views = sorted(
-            [view_id for view_id in views.as_ids() if view_id not in views_not_in_model], key=lambda v: v.as_tuple()  # type: ignore[union-attr]
+            [view_id for view_id in views.as_ids() if view_id not in views_not_in_model],
+            key=lambda v: v.as_tuple(),  # type: ignore[union-attr]
         )

         spaces = self._create_spaces(rules.metadata, containers, views, data_model)
cognite/neat/rules/models/rules/_dms_rules_write.py
@@ -89,18 +89,15 @@ class DMSPropertyWrite:

     @classmethod
     @overload
-    def load(cls, data: None) -> None:
-        ...
+    def load(cls, data: None) -> None: ...

     @classmethod
     @overload
-    def load(cls, data: dict[str, Any]) -> "DMSPropertyWrite":
-        ...
+    def load(cls, data: dict[str, Any]) -> "DMSPropertyWrite": ...

     @classmethod
     @overload
-    def load(cls, data: list[dict[str, Any]]) -> list["DMSPropertyWrite"]:
-        ...
+    def load(cls, data: list[dict[str, Any]]) -> list["DMSPropertyWrite"]: ...

     @classmethod
     def load(
@@ -179,18 +176,15 @@ class DMSContainerWrite:

     @classmethod
     @overload
-    def load(cls, data: None) -> None:
-        ...
+    def load(cls, data: None) -> None: ...

     @classmethod
     @overload
-    def load(cls, data: dict[str, Any]) -> "DMSContainerWrite":
-        ...
+    def load(cls, data: dict[str, Any]) -> "DMSContainerWrite": ...

     @classmethod
     @overload
-    def load(cls, data: list[dict[str, Any]]) -> list["DMSContainerWrite"]:
-        ...
+    def load(cls, data: list[dict[str, Any]]) -> list["DMSContainerWrite"]: ...

     @classmethod
     def load(
@@ -242,18 +236,15 @@ class DMSViewWrite:

     @classmethod
     @overload
-    def load(cls, data: None) -> None:
-        ...
+    def load(cls, data: None) -> None: ...

     @classmethod
     @overload
-    def load(cls, data: dict[str, Any]) -> "DMSViewWrite":
-        ...
+    def load(cls, data: dict[str, Any]) -> "DMSViewWrite": ...

     @classmethod
     @overload
-    def load(cls, data: list[dict[str, Any]]) -> list["DMSViewWrite"]:
-        ...
+    def load(cls, data: list[dict[str, Any]]) -> list["DMSViewWrite"]: ...

     @classmethod
     def load(cls, data: dict[str, Any] | list[dict[str, Any]] | None) -> "DMSViewWrite | list[DMSViewWrite] | None":
@@ -306,13 +297,11 @@ class DMSRulesWrite:

     @classmethod
     @overload
-    def load(cls, data: dict[str, Any]) -> "DMSRulesWrite":
-        ...
+    def load(cls, data: dict[str, Any]) -> "DMSRulesWrite": ...

     @classmethod
     @overload
-    def load(cls, data: None) -> None:
-        ...
+    def load(cls, data: None) -> None: ...

     @classmethod
     def load(cls, data: dict | None) -> "DMSRulesWrite | None":
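These hunks are purely stylistic: each @overload stub's `...` body moves onto the signature line, the style newer formatters such as Black apply to dummy implementations. A minimal sketch of the pattern, using a hypothetical Model class rather than anything from the package:

from typing import Any, overload

class Model:
    def __init__(self, **kwargs: Any) -> None:
        self.data = kwargs

@overload
def load(data: None) -> None: ...
@overload
def load(data: dict[str, Any]) -> Model: ...
def load(data: dict[str, Any] | None) -> Model | None:
    # Single runtime implementation; the overloads above only guide type checkers
    return None if data is None else Model(**data)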
cognite/neat/utils/cdf_loaders/_data_modeling.py
@@ -60,7 +60,9 @@ class DataModelingLoader(
         except CogniteAPIError as e:
             failed_items = {failed.as_id() for failed in e.failed if hasattr(failed, "as_id")}
             to_redeploy = [
-                item for item in items if item.as_id() in failed_items and item.as_id() not in tried_force_deploy  # type: ignore[attr-defined]
+                item
+                for item in items
+                if item.as_id() in failed_items and item.as_id() not in tried_force_deploy  # type: ignore[attr-defined]
             ]
             if not to_redeploy:
                 # Avoid infinite loop
cognite/neat/utils/cdf_loaders/_ingestion.py
@@ -81,12 +81,10 @@ class RawTableLoader(ResourceLoader[RawTableID, RawTableWrite, RawTable, RawTabl
         return item.as_id()

     @overload
-    def _groupby_database(self, items: Sequence[RawTableWrite]) -> Iterable[tuple[str, Iterable[RawTableWrite]]]:
-        ...
+    def _groupby_database(self, items: Sequence[RawTableWrite]) -> Iterable[tuple[str, Iterable[RawTableWrite]]]: ...

     @overload
-    def _groupby_database(self, items: SequenceNotStr[RawTableID]) -> Iterable[tuple[str, Iterable[RawTableID]]]:
-        ...
+    def _groupby_database(self, items: SequenceNotStr[RawTableID]) -> Iterable[tuple[str, Iterable[RawTableID]]]: ...

     def _groupby_database(
         self, items: Sequence[RawTableWrite] | SequenceNotStr[RawTableID]
cognite/neat/utils/spreadsheet.py
@@ -37,8 +37,7 @@ def read_individual_sheet(
     sheet_name: str,
     return_read_info: Literal[True],
    expected_headers: list[str] | None = None,
-) -> tuple[list[dict], SpreadsheetRead]:
-    ...
+) -> tuple[list[dict], SpreadsheetRead]: ...


 @overload
@@ -47,8 +46,7 @@ def read_individual_sheet(
     sheet_name: str,
     return_read_info: Literal[False] = False,
     expected_headers: list[str] | None = None,
-) -> list[dict]:
-    ...
+) -> list[dict]: ...


 def read_individual_sheet(
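Beyond the one-line stub style, these overloads use Literal to narrow the return type on a boolean flag: with return_read_info=True the function returns data plus read metadata, otherwise just the data. A hedged sketch of the same idea with a hypothetical read function:

from typing import Literal, overload

@overload
def read(path: str, with_info: Literal[True]) -> tuple[list[dict], str]: ...
@overload
def read(path: str, with_info: Literal[False] = False) -> list[dict]: ...
def read(path: str, with_info: bool = False) -> list[dict] | tuple[list[dict], str]:
    rows: list[dict] = [{"sheet": path}]  # stand-in for real parsing
    # Type checkers pick the overload matching the literal flag value
    return (rows, f"read from {path}") if with_info else rows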
cognite/neat/utils/utils.py
@@ -74,13 +74,11 @@ def _get_cognite_client(config: CogniteClientConfig, credentials: CredentialProv


 @overload
-def remove_namespace(*URI: URIRef | str, special_separator: str = "#_") -> str:
-    ...
+def remove_namespace(*URI: URIRef | str, special_separator: str = "#_") -> str: ...


 @overload
-def remove_namespace(*URI: tuple[URIRef | str, ...], special_separator: str = "#_") -> tuple[str, ...]:
-    ...
+def remove_namespace(*URI: tuple[URIRef | str, ...], special_separator: str = "#_") -> tuple[str, ...]: ...


 def remove_namespace(
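Judging by the overloads, a single URI yields a plain string while several URIs yield a tuple. The example values and the exact stripping behavior below are assumptions, not taken from the package:

from cognite.neat.utils.utils import remove_namespace

# Assumed behavior: strip the namespace and keep the local name
remove_namespace("http://example.org/ns#Pump")  # -> "Pump"
remove_namespace("http://example.org/ns#Pump", "http://example.org/ns#Valve")  # -> ("Pump", "Valve")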
cognite/neat/workflows/base.py
@@ -11,8 +11,8 @@ import yaml
 from cognite.client import ClientConfig, CogniteClient
 from prometheus_client import Gauge

-from cognite.neat.app.api.configuration import Config
 from cognite.neat.app.monitoring.metrics import NeatMetricsCollector
+from cognite.neat.config import Config
 from cognite.neat.utils.utils import retry_decorator
 from cognite.neat.workflows import cdf_store, utils
 from cognite.neat.workflows._exceptions import ConfigurationNotSet, InvalidStepOutputException
@@ -45,7 +45,7 @@ class BaseWorkflow:
         self,
         name: str,
         client: CogniteClient,
-        steps_registry: StepsRegistry | None = None,
+        steps_registry: StepsRegistry,
         workflow_steps: list[WorkflowStepDefinition] | None = None,
         default_dataset_id: int | None = None,
     ):
@@ -81,7 +81,7 @@ class BaseWorkflow:
         self.auto_workflow_cleanup = False
         self.step_classes = None
         self.data: dict[str, DataContract | FlowMessage | CdfStore | CogniteClient | None] = {}
-        self.steps_registry: StepsRegistry = steps_registry or StepsRegistry()
+        self.steps_registry: StepsRegistry = steps_registry

     def start(self, sync=False, is_ephemeral=False, **kwargs) -> FlowMessage | None:
         """Starts workflow execution.sync=True will block until workflow is completed and
@@ -223,7 +223,7 @@ class BaseWorkflow:

     def copy(self) -> "BaseWorkflow":
         """Create a copy of the workflow"""
-        new_instance = self.__class__(self.name, self.cdf_client)
+        new_instance = self.__class__(self.name, self.cdf_client, self.steps_registry)
         new_instance.workflow_steps = self.workflow_steps
         new_instance.configs = self.configs
         new_instance.set_task_builder(self.task_builder)
@@ -345,7 +345,7 @@ class BaseWorkflow:
             raise Exception(f"Workflow {self.name} is not running , step {step_name} is skipped")
         self.state = WorkflowState.RUNNING_WAITING
         timeout = 3000000.0
-        if "wait_timeout" in step.params and step.params["wait_timeout"]:
+        if step.params.get("wait_timeout"):
            timeout = float(step.params["wait_timeout"])
         # reporting workflow execution before waiting for event
         logging.info(f"Workflow {self.name} is waiting for event")
cognite/neat/workflows/manager.py
@@ -4,15 +4,15 @@ import shutil
 import sys
 import time
 import traceback
-from pathlib import Path

 from cognite.client import CogniteClient
 from prometheus_client import Gauge
 from pydantic import BaseModel

+from cognite.neat.config import Config
 from cognite.neat.workflows import BaseWorkflow
 from cognite.neat.workflows.base import WorkflowDefinition
-from cognite.neat.workflows.model import FlowMessage, InstanceStartMethod, WorkflowState
+from cognite.neat.workflows.model import FlowMessage, InstanceStartMethod, WorkflowState, WorkflowStepDefinition
 from cognite.neat.workflows.steps_registry import StepsRegistry
 from cognite.neat.workflows.tasks import WorkflowTaskBuilder

@@ -28,32 +28,21 @@ class WorkflowStartStatus(BaseModel, arbitrary_types_allowed=True):
 class WorkflowManager:
     """Workflow manager is responsible for loading, saving and managing workflows
     client: CogniteClient
-    registry_storage_type: str = "file"
-    workflows_storage_path: Path = Path("workflows")
-    rules_storage_path: Path = Path("rules")
-    data_set_id: int = None,
+    config: Config
     """

-    def __init__(
-        self,
-        client: CogniteClient | None = None,
-        registry_storage_type: str = "file",
-        workflows_storage_path: Path | None = None,
-        rules_storage_path: Path | None = None,
-        data_store_path: Path | None = None,
-        data_set_id: int | None = None,
-    ):
+    def __init__(self, client: CogniteClient, config: Config):
         self.client = client
-        self.data_set_id = data_set_id
-        self.data_store_path = data_store_path
+        self.data_set_id = config.cdf_default_dataset_id
+        self.data_store_path = config.data_store_path
         self.workflow_registry: dict[str, BaseWorkflow] = {}
         self.ephemeral_instance_registry: dict[str, BaseWorkflow] = {}
-        self.workflows_storage_type = registry_storage_type
-        # todo use pathlib
-        self.workflows_storage_path = workflows_storage_path if workflows_storage_path else Path("workflows")
-        self.rules_storage_path = rules_storage_path if rules_storage_path else Path("rules")
+        self.workflows_storage_type = config.workflows_store_type
+        self.config = config
+        self.workflows_storage_path = config.workflows_store_path
+        self.rules_storage_path = config.rules_store_path
         self.task_builder = WorkflowTaskBuilder(client, self)
-        self.steps_registry = StepsRegistry(self.data_store_path)
+        self.steps_registry = StepsRegistry(self.config)
         self.steps_registry.load_step_classes()

     def update_cdf_client(self, client: CogniteClient):
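WorkflowManager now takes a single Config object in place of six loosely related arguments, and the dataset id, storage paths, and steps registry are all derived from it. A minimal sketch of the new call shape; how Config is actually constructed (typically from a NEAT config file) is an assumption here:

from cognite.client import CogniteClient
from cognite.neat.config import Config
from cognite.neat.workflows.manager import WorkflowManager

config = Config()          # assumed default construction
client = CogniteClient()   # assumes client credentials are configured elsewhere
manager = WorkflowManager(client=client, config=config)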
cognite/neat/workflows/manager.py
@@ -206,10 +195,21 @@ class WorkflowManager:
         self, workflow_name: str, step_id: str = "", flow_msg: FlowMessage | None = None, sync: bool | None = None
     ) -> WorkflowStartStatus:
         retrieved = self.get_workflow(workflow_name)
+
         if retrieved is None:
             return WorkflowStartStatus(
                 workflow_instance=None, is_success=False, status_text="Workflow not found in registry"
             )
+
+        if self._is_workflow_made_of_mixed_steps(retrieved.workflow_steps):
+            retrieved.state = WorkflowState.FAILED
+            return WorkflowStartStatus(
+                workflow_instance=None,
+                is_success=False,
+                status_text="Workflow consists of both legacy and current steps. "
+                "Please update the workflow to use only current steps.",
+            )
+
         workflow = retrieved
         retrieved_step = workflow.get_trigger_step(step_id)
         if retrieved_step is None:
@@ -280,3 +280,13 @@ class WorkflowManager:
         return WorkflowStartStatus(
             workflow_instance=None, is_success=False, status_text="Unsupported workflow start method"
         )
+
+    def _is_workflow_made_of_mixed_steps(self, steps: list[WorkflowStepDefinition]):
+        legacy_steps = 0
+        current_steps = 0
+        for step in steps:
+            if step.method in self.steps_registry.categorized_steps["legacy"]:
+                legacy_steps += 1
+            if step.method in self.steps_registry.categorized_steps["current"]:
+                current_steps += 1
+        return legacy_steps > 0 and current_steps > 0
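The new guard refuses to start a workflow that mixes legacy and current steps. A stand-in illustration of the check; the registry contents here are hypothetical:

categorized_steps = {
    "legacy": {"ConfigureDefaultGraphStores"},
    "current": {"ExcelToRules"},
}
step_methods = ["ExcelToRules", "ConfigureDefaultGraphStores"]
has_legacy = any(m in categorized_steps["legacy"] for m in step_methods)
has_current = any(m in categorized_steps["current"] for m in step_methods)
assert has_legacy and has_current  # such a workflow now fails at start-up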
cognite/neat/workflows/model.py
@@ -45,9 +45,9 @@ class FlowMessage(BaseModel):
     headers: dict[str, str] | None = None  # The headers of the message
     output_text: str | None = None  # The output text of the step that is captured in the execution log
     error_text: str | None = None  # The error text of the step that is captured in the execution log
-    next_step_ids: list[
-        str
-    ] | None = None  # If set, the workflow will skip default route and go to the next step in the list
+    next_step_ids: list[str] | None = (
+        None  # If set, the workflow will skip default route and go to the next step in the list
+    )
     step_execution_status: StepExecutionStatus = StepExecutionStatus.UNKNOWN  # The status of the step execution
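Only the formatting of next_step_ids changed, but the field is worth a note: when set, it overrides the default route. A hedged usage sketch, where the step id is hypothetical and the remaining FlowMessage fields are assumed to keep their defaults:

from cognite.neat.workflows.model import FlowMessage

msg = FlowMessage(
    output_text="Rules validated",
    next_step_ids=["RulesToDMS"],  # hypothetical step id; skips the default route
)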
 
cognite/neat/workflows/steps/lib/__init__.py
@@ -1,7 +0,0 @@
-from .io_steps import *  # noqa
-from .rules_importer import *  # noqa
-from .rules_validator import *  # noqa
-from .rules_exporter import *  # noqa
-from .graph_store import *  # noqa
-from .graph_loader import *  # noqa
-from .graph_extractor import *  # noqa

cognite/neat/workflows/steps/lib/current/__init__.py
@@ -0,0 +1,6 @@
+from .rules_importer import *  # noqa
+from .rules_validator import *  # noqa
+from .rules_exporter import *  # noqa
+from .graph_store import *  # noqa
+from .graph_loader import *  # noqa
+from .graph_extractor import *  # noqa
cognite/neat/workflows/steps/lib/current/rules_exporter.py
@@ -101,7 +101,7 @@ class DeleteDataModelFromCDF(Step):
             report_lines.append("\n\n# ERRORS\n\n")
             report_lines.extend(errors)

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         report_file = "dms_component_creation_report.txt"
         report_full_path = output_dir / report_file
@@ -203,7 +203,7 @@ class RulesToDMS(Step):
             existing_handling=existing_components_handling,
         )

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         file_name = (
             input_rules.metadata.external_id
@@ -223,7 +223,7 @@ class RulesToDMS(Step):
             report_lines.append("\n\n# ERRORS\n\n")
             report_lines.extend(errors)

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         report_file = "dms_component_creation_report.txt"
         report_full_path = output_dir / report_file
@@ -348,7 +348,7 @@ class RulesToOntology(Step):
                 step_execution_status=StepExecutionStatus.ABORT_AND_FAIL,
             )

-        default_path = self.data_store_path / "staging" / _get_default_file_name(rules, "ontology", "ttl")
+        default_path = self.config.staging_path / _get_default_file_name(rules, "ontology", "ttl")

         storage_path = (
             self.data_store_path / Path(self.configs["File path"]) if self.configs["File path"] else default_path
@@ -399,7 +399,7 @@ class RulesToSHACL(Step):
                 step_execution_status=StepExecutionStatus.ABORT_AND_FAIL,
             )

-        default_path = self.data_store_path / "staging" / _get_default_file_name(rules, "shacl", "ttl")
+        default_path = self.config.staging_path / _get_default_file_name(rules, "shacl", "ttl")

         storage_path = (
             self.data_store_path / Path(self.configs["File path"]) if self.configs["File path"] else default_path
@@ -450,7 +450,7 @@ class RulesToSemanticDataModel(Step):
                 step_execution_status=StepExecutionStatus.ABORT_AND_FAIL,
             )

-        default_path = self.data_store_path / "staging" / _get_default_file_name(rules, "semantic-data-model", "ttl")
+        default_path = self.config.staging_path / _get_default_file_name(rules, "semantic-data-model", "ttl")

         storage_path = (
             self.data_store_path / Path(self.configs["File path"]) if self.configs["File path"] else default_path
@@ -513,7 +513,7 @@ class RulesToCDFTransformations(Step):
         dms_exporter = exporters.DMSExporter(
             export_pipeline=True, instance_space=instance_space, export_components=["spaces"]
         )
-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         file_name = (
             input_rules.metadata.external_id
@@ -533,7 +533,7 @@ class RulesToCDFTransformations(Step):
             report_lines.append("\n\n# ERRORS\n\n")
             report_lines.extend(errors)

-        output_dir = self.data_store_path / Path("staging")
+        output_dir = self.config.staging_path
         output_dir.mkdir(parents=True, exist_ok=True)
         report_file = "pipeline_creation_report.txt"
         report_full_path = output_dir / report_file
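Every occurrence of self.data_store_path / Path("staging") above becomes self.config.staging_path, which centralizes the staging location on the Config object. A plausible sketch of such a property; the real definition lives in cognite/neat/config.py and may differ:

from pathlib import Path

class Config:
    data_store_path: Path = Path("data")

    @property
    def staging_path(self) -> Path:
        # Assumed equivalent of the "<data_store_path>/staging" previously inlined in each step
        return self.data_store_path / "staging"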
cognite/neat/workflows/steps/lib/current/rules_importer.py
@@ -75,7 +75,7 @@ class ExcelToRules(Step):
         rules, issues = excel_importer.to_rules(errors="continue", role=role_enum)

         if rules is None:
-            output_dir = self.data_store_path / Path("staging")
+            output_dir = self.config.staging_path
             report_writer = FORMATTER_BY_NAME[self.configs["Report formatter"]]()
             report_writer.write_to_file(issues, file_or_dir_path=output_dir)
             report_file = report_writer.default_file_name
@@ -137,7 +137,7 @@ class OntologyToRules(Step):
         make_compliant = self.configs.get("Make compliant", "True") == "True"

         if file_name:
-            rules_file_path = Path(self.data_store_path) / "rules" / file_name
+            rules_file_path = self.config.rules_store_path / file_name
         elif full_path:
             rules_file_path = full_path
         else:
@@ -154,7 +154,7 @@ class OntologyToRules(Step):
         rules, issues = ontology_importer.to_rules(errors="continue", role=role_enum)

         if rules is None:
-            output_dir = self.data_store_path / Path("staging")
+            output_dir = self.config.staging_path
             report_writer = FORMATTER_BY_NAME[self.configs["Report formatter"]]()
             report_writer.write_to_file(issues, file_or_dir_path=output_dir)
             report_file = report_writer.default_file_name
@@ -226,7 +226,7 @@ class DMSToRules(Step):
         rules, issues = dms_importer.to_rules(errors="continue", role=role_enum)

         if rules is None:
-            output_dir = self.data_store_path / Path("staging")
+            output_dir = self.config.staging_path
             report_writer = FORMATTER_BY_NAME[self.configs["Report formatter"]]()
             report_writer.write_to_file(issues, file_or_dir_path=output_dir)
             report_file = report_writer.default_file_name
cognite/neat/workflows/steps/lib/io/__init__.py
@@ -0,0 +1 @@
+from .io_steps import *  # noqa
cognite/neat/workflows/steps/lib/legacy/graph_contextualization.py
@@ -7,11 +7,11 @@ from cognite.neat.workflows.steps.data_contracts import SolutionGraph, SourceGra
 from cognite.neat.workflows.steps.step_model import Configurable, Step

 __all__ = ["SimpleGraphEntityMatcher"]
-CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [VERSION 1]"
+CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [LEGACY]"


 class SimpleGraphEntityMatcher(Step):
-    version = "private-alpha"
+    version = "legacy"
     description = "The step matches entities in the graph and creates links based on provided configurations"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
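Across the legacy step modules that follow, the "[VERSION 1]" category suffix becomes "[LEGACY]" and step versions change from "private-alpha" to "legacy". Worked out for this module, the CATEGORY expression evaluates as:

name = "cognite.neat.workflows.steps.lib.legacy.graph_contextualization"
category = name.split(".")[-1].replace("_", " ").title() + " [LEGACY]"
assert category == "Graph Contextualization [LEGACY]"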
cognite/neat/workflows/steps/lib/legacy/graph_extractor.py
@@ -29,7 +29,7 @@ __all__ = [
     "ExtractGraphFromDexpiFile",
 ]

-CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [VERSION 1]"
+CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [LEGACY]"


 class ExtractGraphFromRdfFile(Step):
@@ -38,7 +38,7 @@ class ExtractGraphFromRdfFile(Step):
     """

     description = "This step extract instances from a file into the source graph. The file must be in RDF format."
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -91,7 +91,7 @@ class ExtractGraphFromDexpiFile(Step):
     """

     description = "This step converts DEXPI P&ID (XML) into Knowledge Graph"
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -130,7 +130,7 @@ class ExtractGraphFromGraphCapturingSheet(Step):
     """

     description = "This step extracts nodes and edges from graph capturing spreadsheet and load them into graph"
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -187,7 +187,7 @@ class ExtractGraphFromMockGraph(Step):
     """

     description = "This step generate mock graph based on the defined classes and target number of instances"
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -239,7 +239,7 @@ class ExtractGraphFromRulesInstanceSheet(Step):

     description = "This step extracts instances from Rules object and loads them into the graph."
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     configurables: ClassVar[list[Configurable]] = [
         Configurable(
@@ -276,7 +276,7 @@ class ExtractGraphFromRulesDataModel(Step):

     description = "This step extracts data model from rules file and loads it into source graph."
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     def run(  # type: ignore[override, syntax]
         self, transformation_rules: RulesData, source_graph: SourceGraph
@@ -321,7 +321,7 @@ class ExtractGraphFromJsonFile(Step):

     description = "This step extracts instances from json file and loads them into a graph store"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
             name="file_name", value="data_dump.json", label="Full path to the file containing data dump in JSON format"
@@ -504,7 +504,7 @@ class ExtractGraphFromAvevaPiAssetFramework(Step):

     description = "This step extracts instances from Aveva PI AF and loads them into a graph store"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
             name="file_name",
cognite/neat/workflows/steps/lib/legacy/graph_loader.py
@@ -52,7 +52,7 @@ __all__ = [
     "LoadGraphToRdfFile",
 ]

-CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [VERSION 1]"
+CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [LEGACY]"


 class LoadLabelsToCDF(Step):
@@ -62,7 +62,7 @@ class LoadLabelsToCDF(Step):

     description = "This step creates default NEAT labels in CDF"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(name="data_set_id", value="", label=("CDF dataset id to which the labels will be added."))
     ]
@@ -176,7 +176,7 @@ class LoadGraphToRdfFile(Step):

     description = "The step generates nodes and edges from the graph"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
             name="graph_name",
@@ -227,7 +227,7 @@ class LoadNodesToCDF(Step):

     description = "This step uploads nodes to CDF"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     def run(self, cdf_client: CogniteClient, nodes: Nodes) -> FlowMessage:  # type: ignore[override, syntax]
         if nodes.nodes:
@@ -244,7 +244,7 @@ class LoadEdgesToCDF(Step):

     description = "This step uploads edges to CDF"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     def run(self, cdf_client: CogniteClient, edges: Edges) -> FlowMessage:  # type: ignore[override, syntax]
         if edges.edges:
@@ -263,7 +263,7 @@ class GenerateAssetsFromGraph(Step):
         "The step generates assets from the graph ,categorizes them and stores them in CategorizedAssets object"
     )
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(name="data_set_id", value="", label=("CDF dataset id to which the labels will be added.")),
         Configurable(
@@ -469,7 +469,7 @@ class LoadAssetsToCDF(Step):

     description = "This step uploads categorized assets to CDF"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     def run(  # type: ignore[override]
         self, cdf_client: CogniteClient, categorized_assets: CategorizedAssets, flow_msg: FlowMessage
@@ -528,7 +528,7 @@ class GenerateRelationshipsFromGraph(Step):

     description = "This step generates relationships from the graph and saves them to CategorizedRelationships object"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(name="data_set_id", value="", label=("CDF dataset id to which the labels will be added.")),
         Configurable(
@@ -597,7 +597,7 @@ class LoadRelationshipsToCDF(Step):

     description = "This step uploads relationships to CDF"
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"

     def run(  # type: ignore[override, syntax]
         self, client: CogniteClient, categorized_relationships: CategorizedRelationships
cognite/neat/workflows/steps/lib/legacy/graph_store.py
@@ -11,7 +11,7 @@ from cognite.neat.workflows.steps.step_model import Configurable, Step

 __all__ = ["ResetGraphStores", "ConfigureGraphStore"]

-CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [VERSION 1]"
+CATEGORY = __name__.split(".")[-1].replace("_", " ").title() + " [LEGACY]"


 class ConfigureDefaultGraphStores(Step):
@@ -21,7 +21,7 @@ class ConfigureDefaultGraphStores(Step):

     description = "This step initializes the source and solution graph stores."
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
             name="source_rdf_store.type",
@@ -154,7 +154,7 @@ class ResetGraphStores(Step):

     description = "This step resets graph stores to their initial state (clears all data)."
     category = CATEGORY
-    version = "private-alpha"
+    version = "legacy"
     configurables: ClassVar[list[Configurable]] = [
         Configurable(
             name="graph_name",
@@ -186,7 +186,7 @@ class ConfigureGraphStore(Step):
     """

     description = "This step initializes the source and solution graph stores."
-    version = "private-alpha"
+    version = "legacy"
     category = CATEGORY
     configurables: ClassVar[list[Configurable]] = [
         Configurable(