py-data-engine 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data_engine/__init__.py +37 -0
- data_engine/application/__init__.py +39 -0
- data_engine/application/actions.py +42 -0
- data_engine/application/catalog.py +151 -0
- data_engine/application/control.py +213 -0
- data_engine/application/details.py +73 -0
- data_engine/application/runtime.py +449 -0
- data_engine/application/workspace.py +62 -0
- data_engine/authoring/__init__.py +14 -0
- data_engine/authoring/builder.py +31 -0
- data_engine/authoring/execution/__init__.py +6 -0
- data_engine/authoring/execution/app.py +6 -0
- data_engine/authoring/execution/context.py +82 -0
- data_engine/authoring/execution/continuous.py +176 -0
- data_engine/authoring/execution/grouped.py +106 -0
- data_engine/authoring/execution/logging.py +83 -0
- data_engine/authoring/execution/polling.py +135 -0
- data_engine/authoring/execution/runner.py +210 -0
- data_engine/authoring/execution/single.py +171 -0
- data_engine/authoring/flow.py +361 -0
- data_engine/authoring/helpers.py +160 -0
- data_engine/authoring/model.py +59 -0
- data_engine/authoring/primitives.py +430 -0
- data_engine/authoring/services.py +42 -0
- data_engine/devtools/__init__.py +3 -0
- data_engine/devtools/project_ast_map.py +503 -0
- data_engine/docs/__init__.py +1 -0
- data_engine/docs/sphinx_source/_static/custom.css +13 -0
- data_engine/docs/sphinx_source/api.rst +42 -0
- data_engine/docs/sphinx_source/conf.py +37 -0
- data_engine/docs/sphinx_source/guides/app-runtime-and-workspaces.md +397 -0
- data_engine/docs/sphinx_source/guides/authoring-flow-modules.md +215 -0
- data_engine/docs/sphinx_source/guides/configuring-flows.md +185 -0
- data_engine/docs/sphinx_source/guides/core-concepts.md +208 -0
- data_engine/docs/sphinx_source/guides/database-methods.md +107 -0
- data_engine/docs/sphinx_source/guides/duckdb-helpers.md +462 -0
- data_engine/docs/sphinx_source/guides/flow-context.md +538 -0
- data_engine/docs/sphinx_source/guides/flow-methods.md +206 -0
- data_engine/docs/sphinx_source/guides/getting-started.md +271 -0
- data_engine/docs/sphinx_source/guides/project-inventory.md +5683 -0
- data_engine/docs/sphinx_source/guides/project-map.md +118 -0
- data_engine/docs/sphinx_source/guides/recipes.md +268 -0
- data_engine/docs/sphinx_source/index.rst +22 -0
- data_engine/domain/__init__.py +92 -0
- data_engine/domain/actions.py +69 -0
- data_engine/domain/catalog.py +128 -0
- data_engine/domain/details.py +214 -0
- data_engine/domain/diagnostics.py +56 -0
- data_engine/domain/errors.py +104 -0
- data_engine/domain/inspection.py +99 -0
- data_engine/domain/logs.py +118 -0
- data_engine/domain/operations.py +172 -0
- data_engine/domain/operator.py +72 -0
- data_engine/domain/runs.py +155 -0
- data_engine/domain/runtime.py +279 -0
- data_engine/domain/source_state.py +17 -0
- data_engine/domain/support.py +54 -0
- data_engine/domain/time.py +23 -0
- data_engine/domain/workspace.py +159 -0
- data_engine/flow_modules/__init__.py +1 -0
- data_engine/flow_modules/flow_module_compiler.py +179 -0
- data_engine/flow_modules/flow_module_loader.py +201 -0
- data_engine/helpers/__init__.py +25 -0
- data_engine/helpers/duckdb.py +705 -0
- data_engine/hosts/__init__.py +1 -0
- data_engine/hosts/daemon/__init__.py +23 -0
- data_engine/hosts/daemon/app.py +221 -0
- data_engine/hosts/daemon/bootstrap.py +69 -0
- data_engine/hosts/daemon/client.py +465 -0
- data_engine/hosts/daemon/commands.py +64 -0
- data_engine/hosts/daemon/composition.py +310 -0
- data_engine/hosts/daemon/constants.py +15 -0
- data_engine/hosts/daemon/entrypoints.py +97 -0
- data_engine/hosts/daemon/lifecycle.py +191 -0
- data_engine/hosts/daemon/manager.py +272 -0
- data_engine/hosts/daemon/ownership.py +126 -0
- data_engine/hosts/daemon/runtime_commands.py +188 -0
- data_engine/hosts/daemon/runtime_control.py +31 -0
- data_engine/hosts/daemon/server.py +84 -0
- data_engine/hosts/daemon/shared_state.py +147 -0
- data_engine/hosts/daemon/state_sync.py +101 -0
- data_engine/platform/__init__.py +1 -0
- data_engine/platform/identity.py +35 -0
- data_engine/platform/local_settings.py +146 -0
- data_engine/platform/theme.py +259 -0
- data_engine/platform/workspace_models.py +190 -0
- data_engine/platform/workspace_policy.py +333 -0
- data_engine/runtime/__init__.py +1 -0
- data_engine/runtime/file_watch.py +185 -0
- data_engine/runtime/ledger_models.py +116 -0
- data_engine/runtime/runtime_db.py +938 -0
- data_engine/runtime/shared_state.py +523 -0
- data_engine/services/__init__.py +49 -0
- data_engine/services/daemon.py +64 -0
- data_engine/services/daemon_state.py +40 -0
- data_engine/services/flow_catalog.py +102 -0
- data_engine/services/flow_execution.py +48 -0
- data_engine/services/ledger.py +85 -0
- data_engine/services/logs.py +65 -0
- data_engine/services/runtime_binding.py +105 -0
- data_engine/services/runtime_execution.py +126 -0
- data_engine/services/runtime_history.py +62 -0
- data_engine/services/settings.py +58 -0
- data_engine/services/shared_state.py +28 -0
- data_engine/services/theme.py +59 -0
- data_engine/services/workspace_provisioning.py +224 -0
- data_engine/services/workspaces.py +74 -0
- data_engine/ui/__init__.py +3 -0
- data_engine/ui/cli/__init__.py +19 -0
- data_engine/ui/cli/app.py +161 -0
- data_engine/ui/cli/commands_doctor.py +178 -0
- data_engine/ui/cli/commands_run.py +80 -0
- data_engine/ui/cli/commands_start.py +100 -0
- data_engine/ui/cli/commands_workspace.py +97 -0
- data_engine/ui/cli/dependencies.py +44 -0
- data_engine/ui/cli/parser.py +56 -0
- data_engine/ui/gui/__init__.py +25 -0
- data_engine/ui/gui/app.py +116 -0
- data_engine/ui/gui/bootstrap.py +487 -0
- data_engine/ui/gui/bootstrapper.py +140 -0
- data_engine/ui/gui/cache_models.py +23 -0
- data_engine/ui/gui/control_support.py +185 -0
- data_engine/ui/gui/controllers/__init__.py +6 -0
- data_engine/ui/gui/controllers/flows.py +439 -0
- data_engine/ui/gui/controllers/runtime.py +245 -0
- data_engine/ui/gui/dialogs/__init__.py +12 -0
- data_engine/ui/gui/dialogs/messages.py +88 -0
- data_engine/ui/gui/dialogs/previews.py +222 -0
- data_engine/ui/gui/helpers/__init__.py +62 -0
- data_engine/ui/gui/helpers/inspection.py +81 -0
- data_engine/ui/gui/helpers/lifecycle.py +112 -0
- data_engine/ui/gui/helpers/scroll.py +28 -0
- data_engine/ui/gui/helpers/theming.py +87 -0
- data_engine/ui/gui/icons/dark_light.svg +12 -0
- data_engine/ui/gui/icons/documentation.svg +1 -0
- data_engine/ui/gui/icons/failed.svg +3 -0
- data_engine/ui/gui/icons/group.svg +4 -0
- data_engine/ui/gui/icons/home.svg +2 -0
- data_engine/ui/gui/icons/manual.svg +2 -0
- data_engine/ui/gui/icons/poll.svg +2 -0
- data_engine/ui/gui/icons/schedule.svg +4 -0
- data_engine/ui/gui/icons/settings.svg +2 -0
- data_engine/ui/gui/icons/started.svg +3 -0
- data_engine/ui/gui/icons/success.svg +3 -0
- data_engine/ui/gui/icons/view-log.svg +3 -0
- data_engine/ui/gui/icons.py +50 -0
- data_engine/ui/gui/launcher.py +48 -0
- data_engine/ui/gui/presenters/__init__.py +72 -0
- data_engine/ui/gui/presenters/docs.py +140 -0
- data_engine/ui/gui/presenters/logs.py +58 -0
- data_engine/ui/gui/presenters/runtime_projection.py +29 -0
- data_engine/ui/gui/presenters/sidebar.py +88 -0
- data_engine/ui/gui/presenters/steps.py +148 -0
- data_engine/ui/gui/presenters/workspace.py +39 -0
- data_engine/ui/gui/presenters/workspace_binding.py +75 -0
- data_engine/ui/gui/presenters/workspace_settings.py +182 -0
- data_engine/ui/gui/preview_models.py +37 -0
- data_engine/ui/gui/render_support.py +241 -0
- data_engine/ui/gui/rendering/__init__.py +12 -0
- data_engine/ui/gui/rendering/artifacts.py +95 -0
- data_engine/ui/gui/rendering/icons.py +50 -0
- data_engine/ui/gui/runtime.py +47 -0
- data_engine/ui/gui/state_support.py +193 -0
- data_engine/ui/gui/support.py +214 -0
- data_engine/ui/gui/surface.py +209 -0
- data_engine/ui/gui/theme.py +720 -0
- data_engine/ui/gui/widgets/__init__.py +34 -0
- data_engine/ui/gui/widgets/config.py +41 -0
- data_engine/ui/gui/widgets/logs.py +62 -0
- data_engine/ui/gui/widgets/panels.py +507 -0
- data_engine/ui/gui/widgets/sidebar.py +130 -0
- data_engine/ui/gui/widgets/steps.py +84 -0
- data_engine/ui/tui/__init__.py +5 -0
- data_engine/ui/tui/app.py +222 -0
- data_engine/ui/tui/bootstrap.py +475 -0
- data_engine/ui/tui/bootstrapper.py +117 -0
- data_engine/ui/tui/controllers/__init__.py +6 -0
- data_engine/ui/tui/controllers/flows.py +349 -0
- data_engine/ui/tui/controllers/runtime.py +167 -0
- data_engine/ui/tui/runtime.py +34 -0
- data_engine/ui/tui/state_support.py +141 -0
- data_engine/ui/tui/support.py +63 -0
- data_engine/ui/tui/theme.py +204 -0
- data_engine/ui/tui/widgets.py +123 -0
- data_engine/views/__init__.py +109 -0
- data_engine/views/actions.py +80 -0
- data_engine/views/artifacts.py +58 -0
- data_engine/views/flow_display.py +69 -0
- data_engine/views/logs.py +54 -0
- data_engine/views/models.py +96 -0
- data_engine/views/presentation.py +133 -0
- data_engine/views/runs.py +62 -0
- data_engine/views/state.py +39 -0
- data_engine/views/status.py +13 -0
- data_engine/views/text.py +109 -0
- py_data_engine-0.1.0.dist-info/METADATA +330 -0
- py_data_engine-0.1.0.dist-info/RECORD +200 -0
- py_data_engine-0.1.0.dist-info/WHEEL +5 -0
- py_data_engine-0.1.0.dist-info/entry_points.txt +2 -0
- py_data_engine-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
"""Flow catalog loading services."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Callable
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from data_engine.authoring.builder import Flow, _title_case_words
|
|
9
|
+
from data_engine.domain import FlowCatalogEntry, default_flow_state, flow_category
|
|
10
|
+
from data_engine.authoring.model import FlowValidationError
|
|
11
|
+
from data_engine.flow_modules.flow_module_loader import FlowModuleDefinition, discover_flow_module_definitions
|
|
12
|
+
from data_engine.platform.workspace_models import path_display
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _flow_paths(flow: Flow) -> tuple[str, str]:
    """Return display strings for a flow's source and target roots."""
    # A flow without a trigger has no source; mirror_spec may lack a root.
    source = None if flow.trigger is None else getattr(flow.trigger, "source", None)
    target = getattr(flow.mirror_spec, "root", None)
    return (path_display(source), path_display(target))
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _flow_interval(flow: Flow) -> str:
|
|
23
|
+
trigger = flow.trigger
|
|
24
|
+
if trigger is None:
|
|
25
|
+
return "-"
|
|
26
|
+
if getattr(trigger, "interval", None) is not None:
|
|
27
|
+
return str(trigger.interval)
|
|
28
|
+
times = getattr(trigger, "times", ())
|
|
29
|
+
if times:
|
|
30
|
+
return ", ".join(str(value) for value in times)
|
|
31
|
+
if getattr(trigger, "time", None) is not None:
|
|
32
|
+
return str(trigger.time)
|
|
33
|
+
return "-"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def flow_catalog_entry_from_flow(flow: Flow, *, description: str | None) -> FlowCatalogEntry:
    """Build one valid catalog entry from an executable flow definition."""
    source_root, target_root = _flow_paths(flow)
    step_labels = tuple(step.label for step in flow.steps)
    flow_mode = flow.mode
    # Prefer the explicit label; otherwise derive a title from the flow name.
    title = flow.label or _title_case_words(flow.name or "", empty="Flow")
    return FlowCatalogEntry(
        name=flow.name,
        group=flow.group,
        title=title,
        description=description or "",
        source_root=source_root,
        target_root=target_root,
        mode=flow_mode,
        interval=_flow_interval(flow),
        operations=" -> ".join(step_labels) or "(no steps)",
        operation_items=step_labels,
        state=default_flow_state(flow_mode),
        valid=True,
        category=flow_category(flow_mode),
    )
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _invalid_entry(definition: FlowModuleDefinition, exc: Exception) -> FlowCatalogEntry:
    """Build one placeholder catalog entry for a flow module that failed to build."""
    placeholder = "(not set)"
    return FlowCatalogEntry(
        name=definition.name,
        group=None,
        title=_title_case_words(definition.name, empty="Flow"),
        description=definition.description or "",
        source_root=placeholder,
        target_root=placeholder,
        mode="manual",
        interval="-",
        operations="Unavailable",
        operation_items=(),
        state="invalid",
        valid=False,
        category="manual",
        # Surface the build failure text so operator UIs can display it.
        error=str(exc),
    )
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class FlowCatalogService:
    """Own flow catalog loading through an explicit discovery dependency."""

    def __init__(
        self,
        *,
        discover_definitions_func: Callable[..., tuple[FlowModuleDefinition, ...]] = discover_flow_module_definitions,
    ) -> None:
        # Discovery is injected so tests can substitute a fake.
        self._discover_definitions = discover_definitions_func

    def load_entries(self, *, workspace_root: Path | None = None) -> tuple[FlowCatalogEntry, ...]:
        """Return discovered flow catalog entries for the requested workspace root."""
        definitions = self._discover_definitions(data_root=workspace_root)
        if not definitions:
            raise FlowValidationError("No flow modules discovered.")
        entries: list[FlowCatalogEntry] = []
        for definition in definitions:
            try:
                entry = flow_catalog_entry_from_flow(definition.build(), description=definition.description)
            except Exception as exc:
                # Broken modules still get a visible (invalid) catalog row.
                entry = _invalid_entry(definition, exc)
            entries.append(entry)
        return tuple(sorted(entries, key=lambda item: item.name))
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
__all__ = ["FlowCatalogService", "flow_catalog_entry_from_flow"]
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"""Executable flow loading services."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Callable
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
from data_engine.flow_modules.flow_module_loader import discover_flow_module_definitions, load_flow_module_definition
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from data_engine.authoring.flow import Flow
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _default_load_flow(name: str, *, data_root: Path | None = None) -> "Flow":
    """Load the named flow module definition and build its executable flow."""
    definition = load_flow_module_definition(name, data_root=data_root)
    return definition.build()
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _default_discover_flows(*, data_root: Path | None = None) -> tuple["Flow", ...]:
    """Build executable flows for every discovered flow module definition."""
    definitions = discover_flow_module_definitions(data_root=data_root)
    return tuple(definition.build() for definition in definitions)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class FlowExecutionService:
    """Own executable flow loading through an explicit loader dependency."""

    def __init__(
        self,
        *,
        load_flow_func: Callable[..., "Flow"] = _default_load_flow,
        discover_flows_func: Callable[..., tuple["Flow", ...]] = _default_discover_flows,
    ) -> None:
        # Both loaders are injected so tests can substitute fakes.
        self._load_flow = load_flow_func
        self._discover_flows = discover_flows_func

    def load_flow(self, name: str, *, workspace_root: Path | None = None) -> "Flow":
        """Return one executable flow definition by name."""
        return self._load_flow(name, data_root=workspace_root)

    def load_flows(self, names: tuple[str, ...], *, workspace_root: Path | None = None) -> tuple["Flow", ...]:
        """Return executable flow definitions for the requested names."""
        loaded = [self.load_flow(name, workspace_root=workspace_root) for name in names]
        return tuple(loaded)

    def discover_flows(self, *, workspace_root: Path | None = None) -> tuple["Flow", ...]:
        """Return all executable flow definitions for the requested workspace root."""
        return self._discover_flows(data_root=workspace_root)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
__all__ = ["FlowExecutionService"]
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
"""Runtime ledger services."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Callable
|
|
7
|
+
|
|
8
|
+
from data_engine.platform.workspace_policy import RuntimeLayoutPolicy
|
|
9
|
+
from data_engine.runtime.runtime_db import RuntimeLedger
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class LedgerService:
    """Own workspace-local runtime ledger access and client session bookkeeping."""

    def __init__(
        self,
        open_ledger_func: Callable[[Path], RuntimeLedger] | None = None,
        *,
        runtime_layout_policy: RuntimeLayoutPolicy | None = None,
    ) -> None:
        # Default to the standard runtime layout and the workspace-local opener.
        self.runtime_layout_policy = runtime_layout_policy or RuntimeLayoutPolicy()
        self._open_ledger_func = open_ledger_func or self._open_default_ledger

    def _open_default_ledger(self, workspace_root: Path) -> RuntimeLedger:
        # Resolve the runtime-db location beneath the workspace root.
        layout = self.runtime_layout_policy.resolve_paths(data_root=workspace_root)
        return RuntimeLedger(layout.runtime_db_path)

    def open_for_workspace(self, workspace_root: Path) -> RuntimeLedger:
        """Open the configured runtime ledger for one workspace root."""
        normalized_root = Path(workspace_root).expanduser().resolve()
        return self._open_ledger_func(normalized_root)

    def close(self, ledger: RuntimeLedger) -> None:
        """Close one runtime ledger connection."""
        ledger.close()

    def register_client_session(
        self,
        ledger: RuntimeLedger,
        *,
        client_id: str,
        workspace_id: str,
        client_kind: str,
        pid: int,
    ) -> None:
        """Register or refresh one active local client session."""
        ledger.upsert_client_session(
            client_id=client_id,
            workspace_id=workspace_id,
            client_kind=client_kind,
            pid=pid,
        )

    def remove_client_session(self, ledger: RuntimeLedger, client_id: str) -> None:
        """Remove one active local client session row."""
        ledger.remove_client_session(client_id)

    def purge_process_client_sessions(
        self,
        ledger: RuntimeLedger,
        *,
        workspace_id: str,
        client_kind: str,
        pid: int,
    ) -> None:
        """Remove all client sessions for one workspace/client-kind/process tuple."""
        ledger.remove_client_sessions_for_process(
            workspace_id=workspace_id,
            client_kind=client_kind,
            pid=pid,
        )

    def count_live_client_sessions(
        self,
        ledger: RuntimeLedger,
        workspace_id: str,
        *,
        exclude_client_id: str | None = None,
    ) -> int:
        """Return the number of currently live client sessions for one workspace."""
        # Only forward the exclusion filter when the caller supplied one.
        if exclude_client_id is not None:
            return ledger.count_live_client_sessions(workspace_id, exclude_client_id=exclude_client_id)
        return ledger.count_live_client_sessions(workspace_id)
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
__all__ = ["LedgerService"]
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"""Operator log history services."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
|
|
7
|
+
from data_engine.domain import FlowLogEntry, FlowRunState
|
|
8
|
+
from data_engine.domain.logs import parse_runtime_message
|
|
9
|
+
from data_engine.runtime.runtime_db import RuntimeLedger
|
|
10
|
+
from data_engine.views.logs import FlowLogStore
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class LogService:
    """Own operator log-store construction and log history queries."""

    def create_store(self, runtime_ledger: RuntimeLedger | None = None) -> FlowLogStore:
        """Create one log store backed by the given runtime ledger."""
        store = FlowLogStore(self._hydrate_entries(runtime_ledger))
        # Remember the ledger on the store so reload() can re-hydrate later.
        store._runtime_ledger = runtime_ledger
        return store

    def reload(self, store: FlowLogStore) -> None:
        """Reload one log store from its attached runtime ledger."""
        ledger = getattr(store, "_runtime_ledger", None)
        hydrated = self._hydrate_entries(ledger)
        store.clear()
        for item in hydrated:
            store.append_entry(item)

    def append_entry(self, store: FlowLogStore, entry: FlowLogEntry) -> None:
        """Append one log entry to the current store."""
        store.append_entry(entry)

    def clear_flow(self, store: FlowLogStore, flow_name: str | None) -> None:
        """Clear one flow's visible log history from the current store."""
        store.clear_flow(flow_name)

    def all_entries(self, store: FlowLogStore) -> tuple[FlowLogEntry, ...]:
        """Return every entry currently held in the store."""
        return tuple(store._entries)

    def entries_for_flow(self, store: FlowLogStore, flow_name: str | None) -> tuple[FlowLogEntry, ...]:
        """Return flow-scoped entries for one selected flow."""
        return store.entries_for_flow(flow_name)

    def runs_for_flow(self, store: FlowLogStore, flow_name: str | None) -> tuple[FlowRunState, ...]:
        """Return grouped run history for one selected flow."""
        return store.runs_for_flow(flow_name)

    def _hydrate_entries(self, runtime_ledger: RuntimeLedger | None) -> tuple[FlowLogEntry, ...]:
        """Build in-memory flow log entries from one runtime ledger."""
        if runtime_ledger is None:
            return ()
        hydrated: list[FlowLogEntry] = []
        for record in runtime_ledger.list_logs():
            # Records without a flow name are system-level messages.
            kind = "system" if record.flow_name is None else "flow"
            hydrated.append(
                FlowLogEntry(
                    line=FlowLogEntry.format_runtime_message(record.message),
                    kind=kind,
                    event=parse_runtime_message(record.message),
                    flow_name=record.flow_name,
                    created_at_utc=datetime.fromisoformat(record.created_at_utc),
                )
            )
        return tuple(hydrated)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
__all__ = ["LogService"]
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
"""Workspace runtime binding services for operator surfaces."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from os import getpid
|
|
7
|
+
|
|
8
|
+
from data_engine.hosts.daemon.manager import WorkspaceDaemonManager
|
|
9
|
+
from data_engine.platform.workspace_models import WorkspacePaths
|
|
10
|
+
from data_engine.runtime.runtime_db import RuntimeLedger
|
|
11
|
+
from data_engine.services.daemon_state import DaemonStateService
|
|
12
|
+
from data_engine.services.ledger import LedgerService
|
|
13
|
+
from data_engine.services.logs import LogService
|
|
14
|
+
from data_engine.views.logs import FlowLogStore
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass(frozen=True)
class WorkspaceRuntimeBinding:
    """Concrete runtime resources bound to one selected workspace.

    Immutable value object grouping the per-workspace handles that the
    binding service opens together and that operator surfaces hold while
    a workspace is selected.
    """

    # Resolved path set identifying the selected workspace.
    workspace_paths: WorkspacePaths
    # Open ledger handle for this workspace's runtime database.
    runtime_ledger: RuntimeLedger
    # Log store created from (and attached to) the runtime ledger.
    log_store: FlowLogStore
    # Daemon manager created for the same workspace paths.
    daemon_manager: WorkspaceDaemonManager
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class WorkspaceRuntimeBindingService:
    """Own concrete runtime binding lifecycle for GUI/TUI surfaces."""

    def __init__(
        self,
        *,
        ledger_service: LedgerService,
        log_service: LogService,
        daemon_state_service: DaemonStateService,
    ) -> None:
        self.ledger_service = ledger_service
        self.log_service = log_service
        self.daemon_state_service = daemon_state_service

    def open_binding(self, workspace_paths: WorkspacePaths) -> WorkspaceRuntimeBinding:
        """Open one concrete runtime binding for a workspace selection."""
        ledger = self.ledger_service.open_for_workspace(workspace_paths.workspace_root)
        return WorkspaceRuntimeBinding(
            workspace_paths=workspace_paths,
            runtime_ledger=ledger,
            log_store=self.log_service.create_store(ledger),
            daemon_manager=self.daemon_state_service.create_manager(workspace_paths),
        )

    def close_binding(self, binding: WorkspaceRuntimeBinding) -> None:
        """Close one concrete runtime binding."""
        self.ledger_service.close(binding.runtime_ledger)

    def register_client_session(
        self,
        binding: WorkspaceRuntimeBinding,
        *,
        client_id: str,
        client_kind: str,
        pid: int | None = None,
    ) -> None:
        """Register or refresh one local client session for the binding workspace."""
        # Default to this process's pid when the caller does not supply one.
        effective_pid = getpid() if pid is None else pid
        self.ledger_service.register_client_session(
            binding.runtime_ledger,
            client_id=client_id,
            workspace_id=binding.workspace_paths.workspace_id,
            client_kind=client_kind,
            pid=effective_pid,
        )

    def remove_client_session(self, binding: WorkspaceRuntimeBinding, client_id: str) -> None:
        """Remove one active local client session row."""
        self.ledger_service.remove_client_session(binding.runtime_ledger, client_id)

    def purge_process_client_sessions(
        self,
        binding: WorkspaceRuntimeBinding,
        *,
        client_kind: str,
        pid: int | None = None,
    ) -> None:
        """Remove all client sessions for this workspace/client-kind/process tuple."""
        effective_pid = getpid() if pid is None else pid
        self.ledger_service.purge_process_client_sessions(
            binding.runtime_ledger,
            workspace_id=binding.workspace_paths.workspace_id,
            client_kind=client_kind,
            pid=effective_pid,
        )

    def count_live_client_sessions(
        self,
        binding: WorkspaceRuntimeBinding,
        *,
        exclude_client_id: str | None = None,
    ) -> int:
        """Return the number of live local client sessions for the binding workspace."""
        return self.ledger_service.count_live_client_sessions(
            binding.runtime_ledger,
            binding.workspace_paths.workspace_id,
            exclude_client_id=exclude_client_id,
        )
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
__all__ = ["WorkspaceRuntimeBinding", "WorkspaceRuntimeBindingService"]
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
"""Runtime execution services for flow runs and grouped engine runs."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Callable
|
|
6
|
+
from threading import Event
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
from data_engine.authoring.execution import _FlowRuntime, _GroupedFlowRuntime
|
|
10
|
+
from data_engine.runtime.runtime_db import RuntimeLedger
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from data_engine.authoring.flow import Flow
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class RuntimeExecutionService:
    """Own executable runtime construction for manual and grouped runs.

    Single-flow entry points funnel through ``_run_single`` and the grouped
    entry points share one construction path, so the runtime keyword set is
    defined exactly once per runtime kind.  Runtime types are injected for
    testability.
    """

    def __init__(
        self,
        *,
        flow_runtime_type: type[_FlowRuntime] = _FlowRuntime,
        grouped_runtime_type: type[_GroupedFlowRuntime] = _GroupedFlowRuntime,
    ) -> None:
        self._flow_runtime_type = flow_runtime_type
        self._grouped_runtime_type = grouped_runtime_type

    def _run_single(
        self,
        flow: "Flow",
        *,
        continuous: bool,
        runtime_ledger: RuntimeLedger | None,
        flow_stop_event: Event | None,
    ) -> object:
        # Shared construction path for one-shot and continuous single-flow runs.
        runtime = self._flow_runtime_type(
            (flow,),
            continuous=continuous,
            flow_stop_event=flow_stop_event,
            runtime_ledger=runtime_ledger,
        )
        return runtime.run()

    def run_once(
        self,
        flow: "Flow",
        *,
        runtime_ledger: RuntimeLedger | None = None,
        flow_stop_event: Event | None = None,
    ) -> object:
        """Run one flow as a one-shot execution."""
        return self._run_single(
            flow,
            continuous=False,
            runtime_ledger=runtime_ledger,
            flow_stop_event=flow_stop_event,
        )

    def preview(
        self,
        flow: "Flow",
        *,
        use: str | None = None,
        runtime_ledger: RuntimeLedger | None = None,
    ) -> object:
        """Preview one flow through the one-shot runtime path.

        No stop event is threaded through previews; the runtime is built
        without one, exactly as before.
        """
        runtime = self._flow_runtime_type(
            (flow,),
            continuous=False,
            runtime_ledger=runtime_ledger,
        )
        return runtime.preview(use=use)

    def run_manual(
        self,
        flow: "Flow",
        *,
        runtime_ledger: RuntimeLedger,
        flow_stop_event: Event,
    ) -> object:
        """Run one flow as a manual one-shot execution."""
        return self.run_once(
            flow,
            runtime_ledger=runtime_ledger,
            flow_stop_event=flow_stop_event,
        )

    def run_continuous(
        self,
        flow: "Flow",
        *,
        runtime_ledger: RuntimeLedger | None = None,
        flow_stop_event: Event | None = None,
    ) -> object:
        """Run one flow continuously."""
        return self._run_single(
            flow,
            continuous=True,
            runtime_ledger=runtime_ledger,
            flow_stop_event=flow_stop_event,
        )

    def run_grouped(
        self,
        flows: tuple["Flow", ...],
        *,
        runtime_ledger: RuntimeLedger,
        runtime_stop_event: Event,
        flow_stop_event: Event,
    ) -> object:
        """Run grouped automated flows continuously.

        Identical to :meth:`run_grouped_continuous` except every control is
        required, so simply delegate.
        """
        return self.run_grouped_continuous(
            flows,
            runtime_ledger=runtime_ledger,
            runtime_stop_event=runtime_stop_event,
            flow_stop_event=flow_stop_event,
        )

    def run_grouped_continuous(
        self,
        flows: tuple["Flow", ...],
        *,
        runtime_ledger: RuntimeLedger | None = None,
        runtime_stop_event: Event | None = None,
        flow_stop_event: Event | None = None,
    ) -> object:
        """Run grouped automated flows continuously with optional runtime controls."""
        runtime = self._grouped_runtime_type(
            flows,
            continuous=True,
            runtime_stop_event=runtime_stop_event,
            flow_stop_event=flow_stop_event,
            runtime_ledger=runtime_ledger,
        )
        return runtime.run()
|
|
106
|
+
|
|
107
|
+
def run_grouped_continuous(
|
|
108
|
+
self,
|
|
109
|
+
flows: tuple["Flow", ...],
|
|
110
|
+
*,
|
|
111
|
+
runtime_ledger: RuntimeLedger | None = None,
|
|
112
|
+
runtime_stop_event: Event | None = None,
|
|
113
|
+
flow_stop_event: Event | None = None,
|
|
114
|
+
) -> object:
|
|
115
|
+
"""Run grouped automated flows continuously with optional runtime controls."""
|
|
116
|
+
runtime = self._grouped_runtime_type(
|
|
117
|
+
flows,
|
|
118
|
+
continuous=True,
|
|
119
|
+
runtime_stop_event=runtime_stop_event,
|
|
120
|
+
flow_stop_event=flow_stop_event,
|
|
121
|
+
runtime_ledger=runtime_ledger,
|
|
122
|
+
)
|
|
123
|
+
return runtime.run()
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
__all__ = ["RuntimeExecutionService"]
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"""Runtime history query services."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from data_engine.domain import FlowCatalogLike, FlowLogEntry, FlowRunState, StepOutputIndex
|
|
8
|
+
from data_engine.runtime.runtime_db import RuntimeLedger
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class RuntimeHistoryService:
    """Own persisted run/step history queries used by operator surfaces."""

    def rebuild_step_outputs(
        self,
        ledger: RuntimeLedger,
        flow_cards: dict[str, FlowCatalogLike],
    ) -> StepOutputIndex:
        """Rebuild latest successful per-step output paths for visible flows.

        For each flow card, scan its recorded runs and keep the first
        successful, still-existing output path seen per step label.
        NOTE(review): "latest" relies on ``ledger.list_runs`` yielding runs
        newest-first — confirm the ledger's ordering guarantee.

        Args:
            ledger: Runtime ledger providing ``list_runs`` / ``list_step_runs``.
            flow_cards: Visible flow cards keyed by flow name; only step labels
                present in each card's ``operation_items`` are considered.

        Returns:
            A ``StepOutputIndex`` built from the recovered per-flow mappings.
        """
        rebuilt: dict[str, dict[str, Path]] = {}
        for flow_name, card in flow_cards.items():
            outputs: dict[str, Path] = {}
            for run in ledger.list_runs(flow_name=flow_name):
                for step_run in ledger.list_step_runs(run.run_id):
                    if step_run.status != "success" or not step_run.output_path:
                        continue
                    if step_run.step_label not in card.operation_items or step_run.step_label in outputs:
                        continue
                    output_path = Path(str(step_run.output_path))
                    # Only report outputs that still exist on disk.
                    if output_path.exists():
                        outputs[step_run.step_label] = output_path
                        if len(outputs) == len(card.operation_items):
                            break
                # Fix: also stop scanning older runs once every step label is
                # resolved. The original only broke out of the step loop and
                # kept issuing redundant ledger queries for the remaining runs.
                if len(outputs) == len(card.operation_items):
                    break
            rebuilt[flow_name] = outputs
        return StepOutputIndex.from_mapping(rebuilt)

    def error_text_for_entry(
        self,
        ledger: RuntimeLedger,
        run_group: FlowRunState,
        entry: FlowLogEntry,
    ) -> tuple[str, str | None]:
        """Return one user-facing error title and persisted error text for a failed entry.

        Prefers the failed step's own error text (titled ``"<step> Error"``);
        falls back to the run-level error text (titled ``"Run Error"``) when
        no matching failed step record is found.

        Args:
            ledger: Runtime ledger providing ``list_runs`` / ``list_step_runs``.
            run_group: Run state whose ``key`` is ``(flow_name, run_id)``.
            entry: Log entry whose optional ``event`` may carry a ``step_name``.

        Returns:
            ``(title, detail_text)``; ``detail_text`` may be ``None`` when no
            persisted error text exists for the run.
        """
        run_id = run_group.key[1]
        event = entry.event
        detail_text: str | None = None
        title = "Run Error"
        if event is not None and event.step_name is not None:
            # First look for the specific failed step record.
            for step_run in ledger.list_step_runs(run_id):
                if step_run.step_label == event.step_name and step_run.status == "failed":
                    detail_text = step_run.error_text
                    title = f"{event.step_name} Error"
                    break
        if detail_text is None:
            # Fall back to the run-level error text for this run id.
            for run in ledger.list_runs(flow_name=run_group.key[0]):
                if run.run_id == run_id:
                    detail_text = run.error_text
                    break
        return title, detail_text
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# Explicit public API of this module.
__all__ = ["RuntimeHistoryService"]
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
"""Machine-local settings services."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Callable
|
|
7
|
+
|
|
8
|
+
from data_engine.platform.local_settings import LocalSettingsStore, default_settings_db_path
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class SettingsService:
    """Own machine-local settings persistence for operator surfaces.

    A thin facade over a ``LocalSettingsStore``; every accessor delegates
    directly to the wrapped store.
    """

    def __init__(self, store: LocalSettingsStore) -> None:
        # The wrapped machine-local settings store.
        self._store = store

    @classmethod
    def default_store(cls, *, app_root: Path | None = None) -> LocalSettingsStore:
        """Open the default local settings store rooted at *app_root*."""
        return LocalSettingsStore(default_settings_db_path(app_root=app_root))

    @classmethod
    def open_default(
        cls,
        *,
        app_root: Path | None = None,
        store_factory: Callable[[Path | None], LocalSettingsStore] | None = None,
    ) -> "SettingsService":
        """Build a service over the default store for the current app root.

        A custom *store_factory* may be injected (e.g. for tests); otherwise
        the class-level default store opener is used.
        """
        factory = store_factory or (lambda root: cls.default_store(app_root=root))
        return cls(factory(app_root))

    def workspace_collection_root(self) -> Path | None:
        """Return the saved workspace collection root override, if any."""
        return self._store.workspace_collection_root()

    def set_workspace_collection_root(self, value: Path | str | None) -> None:
        """Save the workspace collection root override to the local store."""
        self._store.set_workspace_collection_root(value)

    def default_workspace_id(self) -> str | None:
        """Return the saved default workspace id, if any."""
        return self._store.default_workspace_id()

    def set_default_workspace_id(self, value: str | None) -> None:
        """Save the default workspace id to the local store."""
        self._store.set_default_workspace_id(value)

    def runtime_root(self) -> Path | None:
        """Return the saved runtime/artifact root override, if any."""
        return self._store.runtime_root()

    def set_runtime_root(self, value: Path | str | None) -> None:
        """Save the runtime/artifact root override to the local store."""
        self._store.set_runtime_root(value)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
# Explicit public API of this module.
__all__ = ["SettingsService"]
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"""Shared workspace snapshot and hydration services."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from data_engine.platform.workspace_models import WorkspacePaths
|
|
8
|
+
from data_engine.runtime.runtime_db import RuntimeLedger
|
|
9
|
+
from data_engine.runtime.shared_state import hydrate_local_runtime_state, lease_is_stale, read_lease_metadata
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class SharedStateService:
    """Own lease-based shared snapshot hydration for operator surfaces.

    Thin delegation layer over the module-level shared-state helpers, giving
    operator surfaces one injectable object instead of free functions.
    """

    def hydrate_local_runtime(self, paths: WorkspacePaths, ledger: RuntimeLedger) -> None:
        """Replace the local runtime ledger's contents from the shared workspace snapshots."""
        hydrate_local_runtime_state(paths, ledger)

    def read_lease_metadata(self, paths: WorkspacePaths) -> dict[str, Any] | None:
        """Return the current workspace lease metadata when present, else ``None``."""
        metadata = read_lease_metadata(paths)
        return metadata

    def lease_is_stale(self, paths: WorkspacePaths, *, stale_after_seconds: float) -> bool:
        """Report whether the workspace lease metadata is older than *stale_after_seconds*."""
        return lease_is_stale(paths, stale_after_seconds=stale_after_seconds)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
# Explicit public API of this module.
__all__ = ["SharedStateService"]
|