nostromo 0.dev0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2024 Danila Ganchar
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,26 @@
1
+ Metadata-Version: 2.1
2
+ Name: nostromo
3
+ Version: 0.dev0
4
+ Summary: pipeline builder, runner, process manager, background jobs, job scheduling
5
+ License: MIT
6
+ Author: Danila Ganchar
7
+ Requires-Python: >=3.9,<4.0
8
+ Classifier: License :: OSI Approved :: MIT License
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: Programming Language :: Python :: 3.9
11
+ Classifier: Programming Language :: Python :: 3.10
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Classifier: Programming Language :: Python :: 3.13
15
+ Requires-Dist: bigtree (>=0.22.3,<0.23.0)
16
+ Requires-Dist: inject (==4.1)
17
+ Requires-Dist: psutil (>=6.1.0,<7.0.0)
18
+ Requires-Dist: pypyr (==5.9.1)
19
+ Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
20
+ Requires-Dist: redis (==5.0.1)
21
+ Requires-Dist: saq[redis] (>=0.18.3,<0.19.0)
22
+ Requires-Dist: setuptools (>=70.2.0,<71.0.0)
23
+ Requires-Dist: textual (==0.85.2)
24
+ Description-Content-Type: text/markdown
25
+
26
+
File without changes
File without changes
File without changes
@@ -0,0 +1,52 @@
1
+ import json
2
+ from dataclasses import dataclass, field
3
+ from datetime import datetime, timedelta, UTC
4
+ from typing import List, Union
5
+
6
+
7
@dataclass
class PipelineTaskGroup:
    """A named group of tasks within a pipeline."""

    name: str
    # Forward reference: PipelineTask is declared below. The original
    # one-armed Union['PipelineTask'] was redundant.
    children: List['PipelineTask'] = field(default_factory=list)


@dataclass
class PipelineTask:
    """A single pipeline task; children may be nested groups or tasks."""

    name: str
    children: List[Union['PipelineTaskGroup', 'PipelineTask']] = field(default_factory=list)
17
+
18
+
19
+ class PipelineRunStatus:
20
+ KILLED = 'killed'
21
+ RUNNING = 'running'
22
+
23
+
24
+ @dataclass
25
+ class Pipeline:
26
+ name: str
27
+ pipeline_dir: str = ''
28
+ schedule: str = ''
29
+
30
+
31
+ @dataclass
32
+ class PipelineRun:
33
+ pipeline_name: str
34
+ logs_dir: str
35
+ started_at: datetime
36
+ finished_at: Union[datetime, str] = ''
37
+ pid: int = 0
38
+ status: str = PipelineRunStatus.RUNNING
39
+
40
+ @property
41
+ def duration(self) -> timedelta:
42
+ if self.finished_at:
43
+ return self.finished_at - self.started_at
44
+ return datetime.now(UTC) - self.started_at
45
+
46
+ def to_json(self) -> str:
47
+ data = self.__dict__
48
+ for key in ('started_at', 'finished_at'):
49
+ value = getattr(self, key)
50
+ if value:
51
+ data[key] = value.isoformat()
52
+ return json.dumps(data)
File without changes
@@ -0,0 +1,32 @@
1
+ from abc import ABCMeta, abstractmethod
2
+ from typing import Protocol
3
+
4
+
5
class ExecutorProtocol(Protocol, metaclass=ABCMeta):
    """Interface for the executor that relays scheduler state, commands
    and pipeline actions between the UI and a backend (e.g. Redis).
    """

    @abstractmethod
    async def is_scheduler_running(self) -> bool:
        """Return whether the scheduler is currently flagged as running."""
        pass

    @abstractmethod
    async def set_scheduler_running(self, is_scheduler_running: bool) -> None:
        """Persist the scheduler running flag."""
        pass

    @abstractmethod
    async def send_to_executor(self, cmd: str):
        """Hand a command string to the executor backend."""
        pass

    @abstractmethod
    async def wait_and_exec_commands(self):
        """Wait for incoming commands and execute them as they arrive."""
        pass

    @abstractmethod
    async def action_pause_pipeline(self):
        """Pause a pipeline."""
        pass

    @abstractmethod
    async def action_kill_pipeline(self):
        """Kill a pipeline."""
        pass

    @abstractmethod
    async def wait_and_exec_actions(self):
        """Wait for incoming actions and execute them as they arrive."""
        pass
@@ -0,0 +1,45 @@
1
+ import abc
2
+ from pathlib import Path
3
+ from typing import Protocol
4
+
5
+
6
class PathProtocol(Protocol, metaclass=abc.ABCMeta):
    """Interface describing the nostromo on-disk directory layout."""

    @property
    @abc.abstractmethod
    def root(self) -> Path:
        """Nostromo home directory."""
        pass

    @property
    @abc.abstractmethod
    def scripts(self) -> Path:
        """Directory holding user scripts."""
        pass

    @property
    @abc.abstractmethod
    def env_file(self) -> Path:
        """Path to the environment variables file."""
        pass

    @property
    @abc.abstractmethod
    def storage(self) -> Path:
        """Internal storage directory."""
        pass

    @property
    @abc.abstractmethod
    def pipelines(self) -> Path:
        """Directory containing pipeline folders."""
        pass

    @property
    @abc.abstractmethod
    def logs(self) -> Path:
        """Root directory for all logs."""
        pass

    @property
    @abc.abstractmethod
    def pipelines_logs(self) -> Path:
        """Directory for pipeline run logs."""
        pass

    @property
    @abc.abstractmethod
    def scheduler_logs(self) -> Path:
        """Directory for scheduler logs."""
        pass
@@ -0,0 +1,46 @@
1
+ import abc
2
+ from typing import Protocol, List, Dict
3
+
4
+ from ..models.pipeline import Pipeline, PipelineTask, PipelineTaskGroup, PipelineRun
5
+
6
+
7
class PipelinesProtocol(Protocol, metaclass=abc.ABCMeta):
    """Interface for discovering, inspecting, running and killing pipelines."""

    @property
    @abc.abstractmethod
    def entrypoints(self) -> List[str]:
        """Entrypoint names available on this host."""
        pass

    @abc.abstractmethod
    def get_scripts_by_entrypoint(self, entrypoint: str) -> List[str]:
        """Scripts/targets runnable with the given entrypoint."""
        pass

    @abc.abstractmethod
    def get_nostromo_env(self) -> Dict[str, str]:
        """Environment variables exposed to pipelines."""
        pass

    @abc.abstractmethod
    def get_task_names(self, pipeline: Pipeline) -> List[str]:
        """Names of the pipeline's tasks."""
        pass

    @abc.abstractmethod
    def get_group_names(self, pipeline: Pipeline) -> List[str]:
        """Names of the pipeline's task groups."""
        pass

    @abc.abstractmethod
    def add_task_to_group(self, group_name: str, task: PipelineTask) -> PipelineTaskGroup:
        """Attach a task to the named group and return the group."""
        pass

    # The five methods below were plain stubs in the original, so the
    # protocol never enforced them on implementations; they are abstract now.
    @abc.abstractmethod
    def get_pipelines(self) -> List[Pipeline]:
        """All pipelines found in the pipelines directory."""
        pass

    @abc.abstractmethod
    async def run_pipeline_by_name(self, name: str) -> PipelineRun:
        """Start the named pipeline and return its run record."""
        pass

    @abc.abstractmethod
    def get_pipeline_by_name(self, name: str) -> Pipeline:
        """Load a single pipeline by name."""
        pass

    @abc.abstractmethod
    async def get_last_pipeline_run(self, pipeline_name: str) -> PipelineRun:
        """Return a stored run record for the pipeline, if any."""
        pass

    @abc.abstractmethod
    async def kill_pipeline(self, run: PipelineRun) -> PipelineRun:
        """Kill a running pipeline and return the updated run record."""
        pass
@@ -0,0 +1,24 @@
1
+ import abc
2
+ from typing import Protocol
3
+
4
+ from textual.scroll_view import ScrollView
5
+
6
+
7
+ class UILogProtocol(Protocol, metaclass=abc.ABCMeta):
8
+ @property
9
+ @abc.abstractmethod
10
+ def log(self) -> ScrollView:
11
+ pass
12
+
13
+ @abc.abstractmethod
14
+ def info(self, message: str, title: str = '') -> None:
15
+ pass
16
+
17
+ @abc.abstractmethod
18
+ def warning(self, message: str, title: str = '') -> None:
19
+ pass
20
+
21
+ @abc.abstractmethod
22
+ def error(self, message: str, title: str = '') -> None:
23
+ pass
24
+
File without changes
@@ -0,0 +1,15 @@
1
+ import inject
2
+ from textual.app import ComposeResult
3
+ from textual.screen import Screen
4
+ from textual.widgets import Footer
5
+
6
+ from nostromo.protocols.ui_log import UILogProtocol
7
+
8
+
9
class LogsScreen(Screen):
    """Full-screen view of the application log (closed with ctrl+l)."""

    # Injected UI log service; its `.log` property is the widget to render.
    _log: UILogProtocol = inject.attr(UILogProtocol)
    BINDINGS = [('ctrl+l', 'app.pop_screen', 'Close Logs')]

    def compose(self) -> ComposeResult:
        """Render the log widget plus a footer showing key bindings."""
        yield self._log.log
        yield Footer()
@@ -0,0 +1,11 @@
1
+ from textual.app import ComposeResult
2
+ from textual.screen import Screen
3
+
4
+ from nostromo.widgets.pipeline_builder import PipelineBuilderWidget
5
+
6
+
7
class PipelineBuilderScreen(Screen):
    """Screen hosting the pipeline builder form (closed with ctrl+n)."""

    BINDINGS = [('ctrl+n', 'app.pop_screen', 'Close Pipeline Builder')]

    def compose(self) -> ComposeResult:
        """Render the pipeline builder widget."""
        yield PipelineBuilderWidget()
File without changes
@@ -0,0 +1,55 @@
1
+ import abc
2
+ import os
3
+ from pathlib import Path, PosixPath
4
+
5
+ from nostromo.protocols.path import PathProtocol
6
+
7
+
8
class LocalPath(PathProtocol, metaclass=abc.ABCMeta):
    """Filesystem layout rooted at the local nostromo home directory.

    The root defaults to ``~/.nostromo`` and can be overridden with the
    ``NOSTROMO_HOME`` environment variable; an empty value falls back to
    the default, matching the original falsy check.
    """

    def __init__(self):
        # `or` (not a .get default) so an empty NOSTROMO_HOME also falls back.
        nostromo_path = os.environ.get('NOSTROMO_HOME') or '~/.nostromo'

        # Path (instead of hard-coded PosixPath) keeps the class portable;
        # the `/` operator replaces the os.path.join(str(...)) round-trips.
        self._root = Path(nostromo_path).expanduser()

        self._scripts = self._root / 'scripts'
        self._storage = self._root / '.storage'
        self._env_file = self._storage / 'env.yml'
        self._pipelines = self._storage / 'pipelines'

        self._logs = self._storage / 'logs'
        self._pipelines_logs = self._logs / 'pipelines'
        self._scheduler_logs = self._logs / 'scheduler'

    @property
    def root(self) -> Path:
        """Nostromo home directory."""
        return self._root

    @property
    def scripts(self) -> Path:
        """Directory holding user scripts."""
        return self._scripts

    @property
    def env_file(self) -> Path:
        """Path to the environment variables file (env.yml)."""
        return self._env_file

    @property
    def storage(self) -> Path:
        """Internal storage directory (.storage)."""
        return self._storage

    @property
    def pipelines(self) -> Path:
        """Directory containing pipeline folders."""
        return self._pipelines

    @property
    def logs(self) -> Path:
        """Root directory for all logs."""
        return self._logs

    @property
    def pipelines_logs(self) -> Path:
        """Directory for pipeline run logs."""
        return self._pipelines_logs

    @property
    def scheduler_logs(self) -> Path:
        """Directory for scheduler logs."""
        return self._scheduler_logs
@@ -0,0 +1,176 @@
1
+ import codecs
2
+ import json
3
+ import os
4
+ import subprocess
5
+ from datetime import datetime, UTC, timezone
6
+ from typing import List, Dict
7
+
8
+ import inject
9
+ import psutil
10
+ import yaml
11
+ from dotenv import dotenv_values
12
+ from psutil import NoSuchProcess
13
+ from redis.asyncio import Redis
14
+
15
+ from ..models.pipeline import Pipeline, PipelineTask, PipelineTaskGroup, PipelineRun, PipelineRunStatus
16
+ from ..protocols.path import PathProtocol
17
+ from ..protocols.pipelines import PipelinesProtocol
18
+
19
+
20
class PypyrPipelines(PipelinesProtocol):
    """PipelinesProtocol implementation built on pypyr.

    Pipelines live in folders under ``paths.pipelines`` (each folder has a
    ``.pipeline.yaml``); run records are persisted as JSON in Redis.
    """

    _paths = inject.attr(PathProtocol)

    def __init__(self, redis: Redis):
        self._redis = redis

    @property
    def logs_dt_format(self) -> str:
        """Timestamp format shared by log directory names and Redis run keys."""
        return '%Y-%m-%d-%H_%M_%S'

    @property
    def entrypoints(self) -> List[str]:
        """Subset of ('sh', 'pyenv', 'docker') installed on this host."""
        entrypoints = []
        for entrypoint in (
            'sh',
            'pyenv',
            'docker',
        ):
            try:
                # `which` exits non-zero (CalledProcessError) when missing.
                subprocess.check_output(['which', entrypoint])
                entrypoints.append(entrypoint)
            except subprocess.CalledProcessError:
                continue

        return entrypoints

    def get_task_names(self, pipeline: Pipeline) -> List[str]:
        pass  # TODO: not implemented yet

    def get_group_names(self, pipeline: Pipeline) -> List[str]:
        pass  # TODO: not implemented yet

    def add_task_to_group(self, group_name: str, task: PipelineTask) -> PipelineTaskGroup:
        pass  # TODO: not implemented yet

    def get_scripts_by_entrypoint(self, entrypoint: str) -> List[str]:
        """List runnable targets: docker images, or *.py / *.sh scripts."""
        result = []
        if entrypoint == 'docker':
            # `docker images --format=json` emits one JSON document per line.
            images = subprocess.check_output(['docker', 'images', '--format=json'])
            images = images.decode().split('\n')

            for image_str in images:
                if not image_str:
                    continue

                image = json.loads(image_str)
                result.append(f'{image["Repository"]}:{image["Tag"]}')
        else:
            endswith = '.py' if entrypoint in ('pyenv', ) else '.sh'
            for file in os.scandir(self._paths.scripts):
                if file.name.endswith(endswith):
                    result.append(file.name)

        return sorted(result)

    def get_nostromo_env(self) -> Dict[str, str]:
        """Env-file values plus NOSTROMO_HOME, sorted by variable name."""
        config = dotenv_values(str(self._paths.env_file))
        # Bug fix: both the key and the environ lookup were misspelled
        # 'NOSTOMO_HOME' (missing R); LocalPath reads 'NOSTROMO_HOME'.
        config['NOSTROMO_HOME'] = os.environ.get('NOSTROMO_HOME')
        return dict(sorted(config.items(), key=lambda x: x[0]))

    def _load_pipeline_from_yml(self, folder_name: str) -> Pipeline:
        """Build a Pipeline from <pipelines>/<folder_name>/.pipeline.yaml."""
        full_dir = os.path.join(self._paths.pipelines, folder_name)
        with codecs.open(os.path.join(full_dir, '.pipeline.yaml')) as stream:
            params = yaml.safe_load(stream) or {}
            params['pipeline_dir'] = full_dir
            params['name'] = folder_name
            return Pipeline(**params)

    def get_pipelines(self) -> List[Pipeline]:
        """Load every pipeline folder under the pipelines directory."""
        return [
            self._load_pipeline_from_yml(p)
            for p in os.listdir(self._paths.pipelines)
        ]

    def _get_pipeline_logs_dir(self, name: str) -> str:
        """Root log directory for the named pipeline."""
        return os.path.join(str(self._paths.pipelines_logs), name)

    async def get_last_pipeline_run(self, pipeline_name: str) -> 'PipelineRun | None':
        """Return a stored run for the pipeline, or None when there is none.

        NOTE(review): only the first SCAN batch is inspected and its first
        key used, so with several stored runs this is not guaranteed to be
        the most recent one — confirm intended semantics.
        """
        pattern = self._get_pipeline_runs_key(pipeline_name) + '*'
        keys = await self._redis.scan(match=pattern.encode(), count=1)
        if not keys[1]:
            return

        run_key = keys[1][0].decode()
        data = await self._redis.get(run_key)
        if not data:
            return

        record: dict = json.loads(data.decode('utf-8'))
        # Stored datetimes are ISO strings; restore them for PipelineRun.
        for key in ('started_at', 'finished_at'):
            if record.get(key):
                record[key] = datetime.fromisoformat(record[key])

        return PipelineRun(**record)

    async def kill_pipeline(self, run: PipelineRun) -> PipelineRun:
        """Kill the run's process tree and persist the run as killed."""
        try:
            parent = psutil.Process(run.pid)
        except NoSuchProcess:
            # Process already gone — just record the final state.
            return await self._finalize_killed_run(run)

        # Kill children first so they do not outlive the parent.
        for child in parent.children(recursive=True):
            try:
                child.kill()
            except NoSuchProcess:
                pass

        parent.kill()
        # Bug fix: the success path previously returned None despite the
        # declared PipelineRun return type.
        return await self._finalize_killed_run(run)

    async def _finalize_killed_run(self, run: PipelineRun) -> PipelineRun:
        """Mark the run KILLED with a finish time, persist and return it."""
        run.status = PipelineRunStatus.KILLED
        run.finished_at = datetime.now(UTC)
        await self._save_pipeline_run(run)
        return run

    def _get_pipeline_runs_key(self, pipeline_name: str) -> str:
        """Redis key prefix for the pipeline's run records."""
        return f'nostromo:pipeline_run:{pipeline_name}'

    async def _save_pipeline_run(self, run: PipelineRun) -> None:
        """Persist the run under <prefix>:<started_at in logs_dt_format>."""
        key = self._get_pipeline_runs_key(pipeline_name=run.pipeline_name)
        run_dt = run.started_at.strftime(self.logs_dt_format)
        key = f'{key}:{run_dt}'

        await self._redis.set(key, run.to_json())

    def get_pipeline_by_name(self, name: str) -> Pipeline:
        """Load a single pipeline by its folder name."""
        return self._load_pipeline_from_yml(name)

    async def run_pipeline_by_name(self, name: str) -> PipelineRun:
        """Start the named pipeline via pypyr and persist its run record."""
        pipeline = self.get_pipeline_by_name(name)
        pypyr_path = subprocess.check_output(
            'which pypyr',
            shell=True,
            env=os.environ,
            text=True
        ).replace('\n', '')

        run = datetime.now(UTC)
        logs_dir = os.path.join(self._get_pipeline_logs_dir(name), run.strftime(self.logs_dt_format))

        os.makedirs(logs_dir, exist_ok=True)
        # shell=True with values we control (pypyr binary path, our logs dir).
        process = subprocess.Popen(
            f'{pypyr_path} .run pipeline_log_dir={logs_dir}',
            shell=True,
            text=True,
            cwd=pipeline.pipeline_dir,
        )

        pipeline_run = PipelineRun(
            pid=process.pid,
            logs_dir=logs_dir,
            pipeline_name=pipeline.name,
            started_at=run,
        )

        await self._save_pipeline_run(pipeline_run)
        return pipeline_run
@@ -0,0 +1,31 @@
1
+ from redis.asyncio import Redis
2
+ from saq import Queue
3
+
4
+ from ..protocols.executor import ExecutorProtocol
5
+
6
+
7
class RedisExecutor(ExecutorProtocol):
    """Redis/saq-backed ExecutorProtocol implementation (work in progress)."""

    def __init__(self, redis_url: str):
        # saq queue for background jobs plus a raw async Redis client.
        self._queue = Queue.from_url(redis_url)
        self._client = Redis.from_url(redis_url)

    async def send_to_executor(self, cmd: str):
        # Delegates to the abstract base method (whose body is `pass`), so
        # this is effectively an unimplemented stub returning None.
        return await super().send_to_executor(cmd)

    async def wait_and_exec_commands(self):
        # Stub: delegates to the abstract base (no-op).
        return await super().wait_and_exec_commands()

    async def is_scheduler_running(self) -> bool:
        # Stub: delegates to the abstract base (no-op, returns None).
        return await super().is_scheduler_running()

    async def set_scheduler_running(self, is_scheduler_running: bool) -> None:
        # Stub: delegates to the abstract base (no-op).
        return await super().set_scheduler_running(is_scheduler_running)

    async def action_pause_pipeline(self):
        # TODO: not implemented yet.
        pass

    async def action_kill_pipeline(self):
        # TODO: not implemented yet.
        pass

    async def wait_and_exec_actions(self):
        # TODO: not implemented yet.
        pass
@@ -0,0 +1,52 @@
1
+ from datetime import datetime, UTC
2
+
3
+ from textual.notifications import SeverityLevel
4
+ from textual.widgets import RichLog
5
+
6
+ from nostromo.protocols.ui_log import UILogProtocol
7
+
8
+
9
class RichUILogService(UILogProtocol):
    """UILogProtocol implementation writing timestamped lines to a RichLog."""

    def __init__(self, log: RichLog):
        self._log = log

    @property
    def log(self) -> RichLog:
        """The underlying RichLog widget (also used to raise notifications)."""
        return self._log

    def _get_colored_severity(self, severity: SeverityLevel) -> str:
        """Render the severity as an upper-cased, colored Rich markup tag."""
        color = 'blue'
        if severity == 'warning':
            color = 'yellow'
        elif severity == 'error':
            color = 'red'

        # Display 'information' with the shorter label 'INFO'.
        if severity == 'information':
            severity = 'info'

        return f'[{color}]{severity.upper()}[/{color}] '

    def _write(
        self,
        message: str,
        title: str = '',
        severity: SeverityLevel = 'information',
    ) -> None:
        """Write one timestamped log line; also notify when a title is given."""
        dt = datetime.now(UTC).strftime('%Y-%m-%d %H:%M:%S')
        msg = ''.join([
            f'{dt} {self._get_colored_severity(severity)}',
            f'{title}. {message}' if title else message,
        ])

        self.log.write(msg)
        if title:
            self.log.notify(message, title=title, severity=severity, timeout=7)

    def info(self, message: str, title: str = '') -> None:
        """Log at info severity."""
        self._write(message, title)

    def warning(self, message: str, title: str = '') -> None:
        """Log at warning severity."""
        self._write(message, title, 'warning')

    def error(self, message: str, title: str = '', notification: bool = False) -> None:
        """Log at error severity."""
        # NOTE(review): `notification` is accepted but unused and is not part
        # of UILogProtocol.error — confirm whether it can be removed.
        self._write(message, title, 'error')
File without changes
@@ -0,0 +1,122 @@
1
+ from asyncio import sleep
2
+
3
+ import inject
4
+ from rich.text import Text
5
+ from textual import work, events
6
+ from textual.app import ComposeResult
7
+ from textual.widget import Widget
8
+ from textual.widgets import DataTable
9
+
10
+ from nostromo.protocols.pipelines import PipelinesProtocol
11
+ from nostromo.protocols.ui_log import UILogProtocol
12
+
13
+
14
class _PipelinesTable(DataTable):
    """Table of pipelines with run/kill key bindings and a background
    worker that refreshes last-run info once per second.
    """

    DEFAULT_CSS = """
    _PipelinesTable {color: #c7c9ca; border-bottom: #1047A9; padding-bottom: 1} # FFBC40
    _PipelinesTable > .datatable--cursor {background: #1047A9; color: #c7c9ca}
    _PipelinesTable > .datatable--fixed-cursor {background: #1e1e1e; color: #c7c9ca}
    _PipelinesTable > .datatable--fixed {background: #1e1e1e; color: #c7c9ca}
    _PipelinesTable > .datatable--hover {background: #1e1e1e; color: #c7c9ca}
    _PipelinesTable > .datatable--header {background: #1e1e1e; color: #FFBC40}
    """
    _log = inject.attr(UILogProtocol)
    _pipelines = inject.attr(PipelinesProtocol)
    # NOTE(review): ctrl+p points at 'stop_pipeline' but no
    # action_stop_pipeline is defined in this class — confirm the binding.
    BINDINGS = [
        ('ctrl+r', 'run_pipeline()', 'Run Pipeline'),
        ('ctrl+k', 'kill_pipeline()', 'Kill Pipeline'),
        ('ctrl+p', 'stop_pipeline()', 'Pause Pipeline'),
    ]

    def action_run_pipeline(self):
        """Key binding entry point: run the pipeline under the cursor."""
        self._run_pipeline_by_name()

    @work(exclusive=True)
    async def _run_pipeline_by_name(self):
        # The cell at the cursor holds the pipeline name (rows are keyed
        # by pipeline name and the cursor type is 'row').
        pipeline_name = self.get_cell_at(self.cursor_coordinate)
        self.refresh_bindings()
        await self._pipelines.run_pipeline_by_name(pipeline_name)
        self._log.info(pipeline_name, 'Pipeline started')

    def action_kill_pipeline(self):
        """Key binding entry point: kill the pipeline under the cursor."""
        self._kill_pipeline()

    @work(exclusive=True)
    async def _kill_pipeline(self):
        pipeline_name = self.get_cell_at(self.cursor_coordinate)
        self._log.warning(f'Kill pipeline {pipeline_name}')
        last_run = await self._pipelines.get_last_pipeline_run(pipeline_name)
        # A run with finished_at set is no longer alive and cannot be killed.
        if not last_run or last_run.finished_at:
            self._log.error(f'{pipeline_name} is not running', 'Kill Error')
            return

        await self._pipelines.kill_pipeline(last_run)
        self._log.warning(pipeline_name, 'Pipeline killed')

    @property
    def default_opacity(self) -> float:
        """Opacity used while the table is not focused."""
        return 0.7

    def on_mount(self):
        self.styles.opacity = self.default_opacity
        if not self.rows:
            return

        # Start the polling worker only when there is something to track.
        self.track_pipelines()

    def _on_focus(self, event: events.Focus) -> None:
        # Full opacity while focused; dimmed otherwise.
        self.styles.opacity = 1

    def _on_blur(self, event: events.Blur) -> None:
        self.styles.opacity = self.default_opacity

    @work(exclusive=True)
    async def track_pipelines(self):
        """Poll each row's last run and refresh its cells once a second."""
        while True:
            for row in self.rows.values():
                last_run = await self._pipelines.get_last_pipeline_run(row.key.value)
                if not last_run:
                    continue

                finished = last_run.finished_at.strftime('%Y-%m-%d %H:%M:%S') if last_run.finished_at else ''
                self.update_cell(row.key, 'Last Run', last_run.started_at.strftime('%Y-%m-%d %H:%M:%S'))
                # Drop microseconds from the displayed duration.
                self.update_cell(row.key, 'Duration', str(last_run.duration).split('.')[0])
                self.update_cell(row.key, 'Finished', finished)

            await sleep(1)
87
+
88
+
89
class PipelienesContent(Widget):
    """Content widget listing all pipelines in a _PipelinesTable.

    NOTE(review): the class name is misspelled ('Pipelienes'), but it is
    part of the public interface and therefore kept as-is.
    """

    _pipelines = inject.attr(PipelinesProtocol)

    @classmethod
    def get_id(cls):
        """Identifier/title used by the surrounding container."""
        return 'Pipelines'

    def compose(self) -> ComposeResult:
        table = _PipelinesTable()
        table.cursor_type = 'row'
        table.zebra_stripes = True
        # Placeholder cell content so empty columns keep a minimum width
        # until the tracking worker fills them in.
        padding = ' ' * 20

        for col in (
            'Name',
            'Schedule',
            'Last Run',
            'Duration',
            'Finished',
        ):
            table.add_column(Text(col, justify='center'), key=col)

        # Rows are keyed by pipeline name; the tracking worker and the
        # run/kill actions rely on this key.
        for pipeline in self._pipelines.get_pipelines():
            table.add_row(
                pipeline.name,
                pipeline.schedule,
                padding,
                '',
                padding,
                key=pipeline.name,
            )

        table.sort('Name')
        yield table
@@ -0,0 +1,12 @@
1
+ from textual.app import ComposeResult
2
+ from textual.widget import Widget
3
+ from textual.widgets import Label
4
+
5
+
6
class SchedulerContent(Widget):
    """Placeholder content widget for the scheduler view."""

    @classmethod
    def get_id(cls):
        """Identifier/title used by the surrounding container."""
        return 'Scheduler'

    def compose(self) -> ComposeResult:
        # Placeholder UI; no scheduler controls implemented yet.
        yield Label('Scheduler')
File without changes
@@ -0,0 +1,103 @@
1
+ import re
2
+ from asyncio import sleep
3
+ from copy import deepcopy
4
+ from datetime import datetime, UTC
5
+ from typing import List
6
+
7
+ from textual import events, work
8
+ from textual.reactive import reactive
9
+ from textual.widgets import OptionList
10
+
11
+
12
class DefaultOptionList(OptionList):
    """OptionList with type-to-filter behaviour and a single selected value.

    Typing alphanumeric characters builds a filter buffer (shown as the
    only option while matches are looked up); `tab` toggles between the
    collapsed single-option view and the full filtered list.
    """

    DEFAULT_CSS = """
    DefaultOptionList {
        layer: above;
    }
    """
    _options_copy = []            # full option set kept while the list is filtered
    _input_buffer = reactive('')  # characters typed so far for filtering
    _last_input_dt = datetime.now(UTC)  # NOTE(review): written but never read
    value = reactive('')          # prompt of the currently selected option

    def on_mount(self):
        # Collapse to the first option on mount.
        self._options_copy = deepcopy(self._options)
        self.clear_options()
        if self._options_copy:
            self.add_option(self._options_copy[0])
            self.value = self._options_copy[0].prompt

    def on_key(self, event: events.Key) -> None:
        if event.key == 'tab' and len(self._options) > 1:
            self.action_select()

        if re.fullmatch(r'[A-Za-z0-9]', event.key) or event.key == 'backspace':
            self.clear_options()
            if event.key == 'backspace':
                if len(self._input_buffer) == 1:
                    # Deleting the last character clears the filter entirely.
                    self._input_buffer = ''
                    self.action_select()
                    return

                self._input_buffer = self._input_buffer[:-1]
            else:
                self._input_buffer += event.key

            # Show the buffer itself while the (debounced) lookup runs.
            self.add_option(self._input_buffer)
            self._last_input_dt = datetime.now(UTC)
            self._find_options()

    @work(exclusive=True)
    async def _find_options(self):
        # Small debounce so fast typing triggers a single lookup.
        await sleep(0.5)
        self.clear_options()
        for option in self._options_copy:
            prompt = option if isinstance(option, str) else option.prompt
            # Bug fix: the buffer is lowered too — previously an uppercase
            # character in the buffer could never match the lowered prompt.
            if prompt.lower().find(self._input_buffer.lower()) > -1:
                self.add_option(option)

        if self._options:
            self.action_page_down()
        else:
            # No match: keep the raw buffer visible as a pseudo-option.
            self.add_option(self._input_buffer)

        self._input_buffer = ''

    def action_select(self) -> None:
        # collapsed (single option) -> expand, keeping the highlight near
        # the previously selected prompt
        if len(self._options) == 1:
            selected = self.get_option_at_index(0)
            move = True
            self.clear_options()

            for ix, entrypoint in enumerate(self._options_copy):
                self.add_option(entrypoint)
                if move:
                    self.action_page_down()

                prompt = entrypoint if isinstance(entrypoint, str) else entrypoint.prompt
                if prompt == selected.prompt:
                    self.value = prompt
                    move = False

            return

        # expanded with a highlighted option -> collapse onto it
        if self.highlighted:
            option = self.get_option_at_index(self.highlighted)
            self.clear_options()
            self.add_option(option)
            self.value = option.prompt
            return

        # init - show first
        if self._options_copy:
            self.clear_options()
            self.add_option(self._options_copy[0])
            # NOTE(review): unlike on_mount, this assigns the option object
            # itself rather than its prompt — confirm whether `.prompt`
            # was intended here.
            self.value = self._options_copy[0]

    def reset_options(self, options: List[str]):
        """Replace the full option set and collapse to the first entry."""
        self._options_copy = deepcopy(options)
        self.clear_options()
        if options:
            self.add_option(options[0])
@@ -0,0 +1,35 @@
1
+ from textual.app import ComposeResult
2
+ from textual.containers import Horizontal, Vertical
3
+ from textual.widget import Widget
4
+ from textual.widgets import Label, OptionList
5
+
6
+
7
class HorizontalForm(Horizontal):
    """Two-column form: a labels column beside a column of form widgets."""

    DEFAULT_CSS = """
    HorizontalForm Label {
        padding: 1;
        text-align: right;
    }

    DefaultOptionList {
        min-height: 3;
    }
    """

    def __init__(self, *form_items: Widget, name: str | None = None, id: str | None = None, classes: str | None = None,
                 disabled: bool = False) -> None:
        super().__init__(*[], name=name, id=id, classes=classes, disabled=disabled)
        self._form_items = form_items

    def compose(self) -> ComposeResult:
        labels = Vertical(*[Label(w.name) for w in self._form_items])
        # Bug fix: max() over a generator — the previous max(*[...]) raised
        # TypeError when the form had exactly one item (max of a single int).
        labels.styles.max_width = max(len(w.name) for w in self._form_items)
        yield labels

        items = []
        for item in self._form_items:
            if isinstance(item, OptionList):
                # Option lists get an extra inline label next to the widget.
                items.append(Label(item.name))
            items.append(item)

        yield Vertical(*items)
@@ -0,0 +1,63 @@
1
+ import inject
2
+ from textual import on
3
+ from textual.validation import Regex
4
+ from textual.widget import Widget
5
+ from textual.widgets import Input, Button, Footer, Pretty
6
+
7
+ from nostromo.protocols.path import PathProtocol
8
+ from nostromo.protocols.pipelines import PipelinesProtocol
9
+ from nostromo.protocols.ui_log import UILogProtocol
10
+ from nostromo.widgets.default_option_list import DefaultOptionList
11
+ from nostromo.widgets.horizontal_form import HorizontalForm
12
+
13
+
14
class _EntrypointOptions(DefaultOptionList):
    """Entrypoint selector that refreshes the script list on selection."""

    DEFAULT_CSS = """
    _EntrypointOptions {
        layer: above;
    }
    """
    _pipelines = inject.attr(PipelinesProtocol)

    def _reset_scripts_options(self):
        # Sibling widget inside the same HorizontalForm, reached via parents.
        script_options: DefaultOptionList = self.parent.parent.query_one('#ScriptOptions')
        script_options.clear_options()
        options = self._pipelines.get_scripts_by_entrypoint(self.value)
        script_options.reset_options(options)

    def action_select(self) -> None:
        """Select an entrypoint, then rebuild the dependent script options."""
        super().action_select()
        self._reset_scripts_options()
31
+
32
+
33
class PipelineBuilderWidget(Widget):
    """Form for composing a pipeline task: entrypoint, script, task name."""

    _pipelines = inject.attr(PipelinesProtocol)
    _paths = inject.attr(PathProtocol)

    # NOTE(review): this method yields widgets, so the annotation should be
    # ComposeResult (not currently imported in this module), not None.
    def compose(self) -> None:
        yield Footer()
        yield HorizontalForm(
            _EntrypointOptions(*self._pipelines.entrypoints, name='entrypoint'),
            DefaultOptionList(id='ScriptOptions', name='ScriptOptions'),
            Input(placeholder='calculate_events', name='Task Name', validators=[
                Regex('^[a-z0-9_]*$', failure_description='Task name does not match regular expression "^[a-z0-9_]*$"')
            ]),
            # Input(placeholder='--param1=value1 --param2=value2', name='Parameters'),
            RunCmdButton('Run', variant='success', name=''),
        )
        yield Pretty([])

    @on(Input.Changed)
    def show_invalid_reasons(self, event: Input.Changed) -> None:
        """Mirror the task-name validation result into the Pretty widget."""
        # Updating the UI to show the reasons why validation failed
        if not event.validation_result.is_valid:
            self.query_one(Pretty).update(event.validation_result.failure_descriptions)
        else:
            self.query_one(Pretty).update([])
57
+
58
+
59
class RunCmdButton(Button):
    """Submit button for the pipeline builder form.

    Currently only logs a placeholder message; actual command execution
    is not implemented yet.
    """

    _log = inject.attr(UILogProtocol)

    def on_button_pressed(self, event: Button.Pressed) -> None:
        self._log.info('test', 'Run Script')
@@ -0,0 +1,25 @@
1
+ [build-system]
2
+ requires = ["poetry-core==1.9.1"]
3
+ build-backend = "poetry.core.masonry.api"
4
+
5
+
6
+ [tool.poetry]
7
+ name = "nostromo"
8
+ version = "0.dev.0"
9
+ description = "pipeline builder, runner, process manager, background jobs, job scheduling"
10
+ authors = ["Danila Ganchar"]
11
+ license = "MIT"
12
+ readme = "README.md"
13
+
14
+
15
+ [tool.poetry.dependencies]
16
python = "^3.11"  # the code uses datetime.UTC (3.11+) and X | None unions (3.10+); "^3.9" cannot import
17
+ textual = "0.85.2"
18
+ pyyaml = "^6.0.1"
19
+ setuptools = "^70.2.0"
20
+ inject = "4.1"
21
+ pypyr = "5.9.1"
22
+ bigtree = "^0.22.3"
23
+ psutil = "^6.1.0"
24
+ redis = "5.0.1"
25
saq = {extras = ["redis"], version = "^0.18.3"}
python-dotenv = "^1.0"  # imported as `from dotenv import dotenv_values` but was missing from dependencies