FlowerPower 0.9.13.1__py3-none-any.whl → 1.0.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. flowerpower/__init__.py +17 -2
  2. flowerpower/cfg/__init__.py +201 -149
  3. flowerpower/cfg/base.py +122 -24
  4. flowerpower/cfg/pipeline/__init__.py +254 -0
  5. flowerpower/cfg/pipeline/adapter.py +66 -0
  6. flowerpower/cfg/pipeline/run.py +40 -11
  7. flowerpower/cfg/pipeline/schedule.py +69 -79
  8. flowerpower/cfg/project/__init__.py +149 -0
  9. flowerpower/cfg/project/adapter.py +57 -0
  10. flowerpower/cfg/project/job_queue.py +165 -0
  11. flowerpower/cli/__init__.py +92 -37
  12. flowerpower/cli/job_queue.py +878 -0
  13. flowerpower/cli/mqtt.py +32 -1
  14. flowerpower/cli/pipeline.py +559 -406
  15. flowerpower/cli/utils.py +29 -18
  16. flowerpower/flowerpower.py +12 -8
  17. flowerpower/fs/__init__.py +20 -2
  18. flowerpower/fs/base.py +350 -26
  19. flowerpower/fs/ext.py +797 -216
  20. flowerpower/fs/storage_options.py +1097 -55
  21. flowerpower/io/base.py +13 -18
  22. flowerpower/io/loader/__init__.py +28 -0
  23. flowerpower/io/loader/deltatable.py +7 -10
  24. flowerpower/io/metadata.py +1 -0
  25. flowerpower/io/saver/__init__.py +28 -0
  26. flowerpower/io/saver/deltatable.py +4 -3
  27. flowerpower/job_queue/__init__.py +252 -0
  28. flowerpower/job_queue/apscheduler/__init__.py +11 -0
  29. flowerpower/job_queue/apscheduler/_setup/datastore.py +110 -0
  30. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +93 -0
  31. flowerpower/job_queue/apscheduler/manager.py +1063 -0
  32. flowerpower/job_queue/apscheduler/setup.py +524 -0
  33. flowerpower/job_queue/apscheduler/trigger.py +169 -0
  34. flowerpower/job_queue/apscheduler/utils.py +309 -0
  35. flowerpower/job_queue/base.py +382 -0
  36. flowerpower/job_queue/rq/__init__.py +10 -0
  37. flowerpower/job_queue/rq/_trigger.py +37 -0
  38. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +226 -0
  39. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +231 -0
  40. flowerpower/job_queue/rq/manager.py +1449 -0
  41. flowerpower/job_queue/rq/setup.py +150 -0
  42. flowerpower/job_queue/rq/utils.py +69 -0
  43. flowerpower/pipeline/__init__.py +5 -0
  44. flowerpower/pipeline/base.py +118 -0
  45. flowerpower/pipeline/io.py +407 -0
  46. flowerpower/pipeline/job_queue.py +505 -0
  47. flowerpower/pipeline/manager.py +1586 -0
  48. flowerpower/pipeline/registry.py +560 -0
  49. flowerpower/pipeline/runner.py +560 -0
  50. flowerpower/pipeline/visualizer.py +142 -0
  51. flowerpower/plugins/mqtt/__init__.py +12 -0
  52. flowerpower/plugins/mqtt/cfg.py +16 -0
  53. flowerpower/plugins/mqtt/manager.py +789 -0
  54. flowerpower/settings.py +110 -0
  55. flowerpower/utils/logging.py +21 -0
  56. flowerpower/utils/misc.py +57 -9
  57. flowerpower/utils/sql.py +122 -24
  58. flowerpower/utils/templates.py +2 -142
  59. flowerpower-1.0.0b2.dist-info/METADATA +324 -0
  60. flowerpower-1.0.0b2.dist-info/RECORD +94 -0
  61. flowerpower/_web/__init__.py +0 -61
  62. flowerpower/_web/routes/config.py +0 -103
  63. flowerpower/_web/routes/pipelines.py +0 -173
  64. flowerpower/_web/routes/scheduler.py +0 -136
  65. flowerpower/cfg/pipeline/tracker.py +0 -14
  66. flowerpower/cfg/project/open_telemetry.py +0 -8
  67. flowerpower/cfg/project/tracker.py +0 -11
  68. flowerpower/cfg/project/worker.py +0 -19
  69. flowerpower/cli/scheduler.py +0 -309
  70. flowerpower/cli/web.py +0 -44
  71. flowerpower/event_handler.py +0 -23
  72. flowerpower/mqtt.py +0 -609
  73. flowerpower/pipeline.py +0 -2499
  74. flowerpower/scheduler.py +0 -680
  75. flowerpower/tui.py +0 -79
  76. flowerpower/utils/datastore.py +0 -186
  77. flowerpower/utils/eventbroker.py +0 -127
  78. flowerpower/utils/executor.py +0 -58
  79. flowerpower/utils/trigger.py +0 -140
  80. flowerpower-0.9.13.1.dist-info/METADATA +0 -586
  81. flowerpower-0.9.13.1.dist-info/RECORD +0 -76
  82. /flowerpower/{cfg/pipeline/params.py → cli/worker.py} +0 -0
  83. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b2.dist-info}/WHEEL +0 -0
  84. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b2.dist-info}/entry_points.txt +0 -0
  85. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b2.dist-info}/top_level.txt +0 -0
@@ -1,173 +0,0 @@
1
- import os
2
- from pathlib import Path
3
- from typing import Dict, List, Optional, Any
4
-
5
- from fastapi import APIRouter, Request, Form, HTTPException, Depends
6
- from fastapi.responses import HTMLResponse, RedirectResponse
7
- from pydantic import BaseModel
8
-
9
- from ...pipeline import PipelineManager, Pipeline
10
- from .. import templates
11
-
12
- router = APIRouter(prefix="/pipelines", tags=["pipelines"])
13
-
14
- base_dir = os.environ.get("FLOWERPOWER_BASE_DIR", str(Path.cwd()))
15
-
16
- class PipelineRunRequest(BaseModel):
17
- name: str
18
- executor: Optional[str] = None
19
- inputs: Optional[Dict[str, Any]] = None
20
- final_vars: Optional[List[str]] = None
21
- config: Optional[Dict[str, Any]] = None
22
- with_tracker: bool = False
23
- with_opentelemetry: bool = False
24
- with_progressbar: bool = False
25
-
26
- class PipelineScheduleRequest(BaseModel):
27
- name: str
28
- trigger_type: str = "cron"
29
- executor: Optional[str] = None
30
- inputs: Optional[Dict[str, Any]] = None
31
- final_vars: Optional[List[str]] = None
32
- config: Optional[Dict[str, Any]] = None
33
- with_tracker: bool = False
34
- with_opentelemetry: bool = False
35
- with_progressbar: bool = False
36
- paused: bool = False
37
- crontab: Optional[str] = None
38
- overwrite: bool = False
39
-
40
- @router.get("/", response_class=HTMLResponse)
41
- async def list_pipelines(request: Request):
42
- """List all available pipelines"""
43
- pipeline_manager = PipelineManager(base_dir=base_dir)
44
- pipelines = pipeline_manager.list_pipelines() or [] # Ensure we have a list
45
-
46
- return templates.TemplateResponse(
47
- "pipelines/list.html",
48
- {
49
- "request": request,
50
- "pipelines": pipelines,
51
- "base_dir": base_dir
52
- }
53
- )
54
-
55
- @router.get("/{name}", response_class=HTMLResponse)
56
- async def get_pipeline(request: Request, name: str):
57
- """Get detailed pipeline view"""
58
- pipeline = Pipeline(name=name, base_dir=base_dir)
59
- summary = pipeline.get_summary()
60
-
61
- # Get pipeline code
62
- code = None
63
- pipeline_path = Path(base_dir) / "pipelines" / f"{name}.py"
64
- if pipeline_path.exists():
65
- with open(pipeline_path, "r") as f:
66
- code = f.read()
67
-
68
- return templates.TemplateResponse(
69
- "pipelines/detail.html",
70
- {
71
- "request": request,
72
- "pipeline": name,
73
- "summary": summary,
74
- "code": code,
75
- "base_dir": base_dir
76
- }
77
- )
78
-
79
- @router.post("/{name}/run")
80
- async def run_pipeline(request: Request, name: str, run_data: PipelineRunRequest):
81
- """Run a pipeline"""
82
- try:
83
- pipeline = Pipeline(name=name, base_dir=base_dir)
84
- result = pipeline.run(
85
- inputs=run_data.inputs,
86
- final_vars=run_data.final_vars,
87
- config=run_data.config,
88
- executor=run_data.executor,
89
- with_tracker=run_data.with_tracker,
90
- with_opentelemetry=run_data.with_opentelemetry,
91
- with_progressbar=run_data.with_progressbar
92
- )
93
-
94
- return {"success": True, "message": f"Pipeline {name} executed successfully", "result": result}
95
- except Exception as e:
96
- raise HTTPException(status_code=500, detail=str(e))
97
-
98
- @router.post("/{name}/schedule")
99
- async def schedule_pipeline(request: Request, name: str, schedule_data: PipelineScheduleRequest):
100
- """Schedule a pipeline"""
101
- try:
102
- pipeline = Pipeline(name=name, base_dir=base_dir)
103
- kwargs = {}
104
- if schedule_data.crontab:
105
- kwargs["crontab"] = schedule_data.crontab
106
-
107
- schedule_id = pipeline.schedule(
108
- trigger_type=schedule_data.trigger_type,
109
- inputs=schedule_data.inputs,
110
- final_vars=schedule_data.final_vars,
111
- config=schedule_data.config,
112
- executor=schedule_data.executor,
113
- with_tracker=schedule_data.with_tracker,
114
- with_opentelemetry=schedule_data.with_opentelemetry,
115
- with_progressbar=schedule_data.with_progressbar,
116
- paused=schedule_data.paused,
117
- overwrite=schedule_data.overwrite,
118
- **kwargs
119
- )
120
-
121
- return {"success": True, "message": f"Pipeline {name} scheduled successfully", "schedule_id": schedule_id}
122
- except Exception as e:
123
- raise HTTPException(status_code=500, detail=str(e))
124
-
125
- @router.post("/{name}/update-code")
126
- async def update_pipeline_code(
127
- request: Request,
128
- name: str,
129
- code: str = Form(...),
130
- ):
131
- """Update pipeline code"""
132
- try:
133
- pipeline_path = Path(base_dir) / "pipelines" / f"{name}.py"
134
- with open(pipeline_path, "w") as f:
135
- f.write(code)
136
-
137
- return {"success": True, "message": f"Pipeline code updated successfully"}
138
- except Exception as e:
139
- raise HTTPException(status_code=500, detail=str(e))
140
-
141
- @router.get("/{name}/edit", response_class=HTMLResponse)
142
- async def edit_pipeline(request: Request, name: str):
143
- """Edit pipeline code and configuration"""
144
- pipeline = Pipeline(name=name, base_dir=base_dir)
145
- summary = pipeline.get_summary()
146
-
147
- # Get pipeline code
148
- code = None
149
- pipeline_path = Path(base_dir) / "pipelines" / f"{name}.py"
150
- if pipeline_path.exists():
151
- with open(pipeline_path, "r") as f:
152
- code = f.read()
153
-
154
- return templates.TemplateResponse(
155
- "pipelines/edit.html",
156
- {
157
- "request": request,
158
- "pipeline": name,
159
- "summary": summary,
160
- "code": code,
161
- "base_dir": base_dir
162
- }
163
- )
164
-
165
- @router.post("/new")
166
- async def create_pipeline(name: str = Form(...)):
167
- """Create a new pipeline"""
168
- try:
169
- pipeline_manager = PipelineManager(base_dir=base_dir)
170
- pipeline_manager.new(name=name)
171
- return RedirectResponse(url=f"/pipelines/{name}/edit", status_code=303)
172
- except Exception as e:
173
- raise HTTPException(status_code=500, detail=str(e))
@@ -1,136 +0,0 @@
1
- import os
2
- import importlib.util
3
- from pathlib import Path
4
-
5
- from fastapi import APIRouter, Request, HTTPException, BackgroundTasks
6
- from fastapi.responses import HTMLResponse
7
-
8
- from ...pipeline import PipelineManager
9
- from .. import templates
10
-
11
- router = APIRouter(prefix="/scheduler", tags=["scheduler"])
12
-
13
- base_dir = os.environ.get("FLOWERPOWER_BASE_DIR", str(Path.cwd()))
14
-
15
- # Check if scheduler is available
16
- has_scheduler = importlib.util.find_spec("apscheduler") is not None
17
- if not has_scheduler:
18
- # Create minimal router if scheduler not available
19
- @router.get("/", response_class=HTMLResponse)
20
- async def scheduler_not_available(request: Request):
21
- return templates.TemplateResponse(
22
- "scheduler/not_available.html",
23
- {"request": request}
24
- )
25
- else:
26
- from ...scheduler import SchedulerManager
27
-
28
- # Worker process handler
29
- worker_process = None
30
-
31
- def start_worker_process(base_dir):
32
- global worker_process
33
- import subprocess
34
- import sys
35
-
36
- cmd = [
37
- sys.executable,
38
- "-m", "flowerpower",
39
- "scheduler",
40
- "start-worker",
41
- "--base-dir", base_dir
42
- ]
43
- worker_process = subprocess.Popen(cmd)
44
-
45
- def stop_worker_process():
46
- global worker_process
47
- if worker_process:
48
- worker_process.terminate()
49
- worker_process = None
50
-
51
- @router.get("/", response_class=HTMLResponse)
52
- async def scheduler_dashboard(request: Request):
53
- """Scheduler dashboard with schedules and jobs"""
54
- pipeline_manager = PipelineManager(base_dir=base_dir)
55
-
56
- with SchedulerManager(fs=pipeline_manager._fs, role="scheduler") as sm:
57
- schedules = sm.get_schedules(as_dict=True)
58
- jobs = sm.get_jobs(as_dict=True)
59
-
60
- return templates.TemplateResponse(
61
- "scheduler/dashboard.html",
62
- {
63
- "request": request,
64
- "schedules": schedules,
65
- "jobs": jobs,
66
- "worker_running": worker_process is not None,
67
- "base_dir": base_dir
68
- }
69
- )
70
-
71
- @router.post("/start-worker")
72
- async def start_worker(background_tasks: BackgroundTasks):
73
- """Start scheduler worker"""
74
- global worker_process
75
-
76
- if worker_process is not None:
77
- return {"success": False, "message": "Worker is already running"}
78
-
79
- try:
80
- background_tasks.add_task(start_worker_process, base_dir)
81
- return {"success": True, "message": "Worker started successfully"}
82
- except Exception as e:
83
- raise HTTPException(status_code=500, detail=str(e))
84
-
85
- @router.post("/stop-worker")
86
- async def stop_worker():
87
- """Stop scheduler worker"""
88
- global worker_process
89
-
90
- if worker_process is None:
91
- return {"success": False, "message": "No worker is running"}
92
-
93
- try:
94
- stop_worker_process()
95
- return {"success": True, "message": "Worker stopped successfully"}
96
- except Exception as e:
97
- raise HTTPException(status_code=500, detail=str(e))
98
-
99
- @router.post("/pause-schedule/{schedule_id}")
100
- async def pause_schedule(schedule_id: str):
101
- """Pause a scheduled pipeline"""
102
- try:
103
- pipeline_manager = PipelineManager(base_dir=base_dir)
104
-
105
- with SchedulerManager(fs=pipeline_manager._fs, role="scheduler") as sm:
106
- sm.pause_schedule(schedule_id)
107
-
108
- return {"success": True, "message": f"Schedule {schedule_id} paused successfully"}
109
- except Exception as e:
110
- raise HTTPException(status_code=500, detail=str(e))
111
-
112
- @router.post("/resume-schedule/{schedule_id}")
113
- async def resume_schedule(schedule_id: str):
114
- """Resume a paused schedule"""
115
- try:
116
- pipeline_manager = PipelineManager(base_dir=base_dir)
117
-
118
- with SchedulerManager(fs=pipeline_manager._fs, role="scheduler") as sm:
119
- sm.resume_schedule(schedule_id)
120
-
121
- return {"success": True, "message": f"Schedule {schedule_id} resumed successfully"}
122
- except Exception as e:
123
- raise HTTPException(status_code=500, detail=str(e))
124
-
125
- @router.delete("/remove-schedule/{schedule_id}")
126
- async def remove_schedule(schedule_id: str):
127
- """Remove a schedule"""
128
- try:
129
- pipeline_manager = PipelineManager(base_dir=base_dir)
130
-
131
- with SchedulerManager(fs=pipeline_manager._fs, role="scheduler") as sm:
132
- sm.remove_schedule(schedule_id)
133
-
134
- return {"success": True, "message": f"Schedule {schedule_id} removed successfully"}
135
- except Exception as e:
136
- raise HTTPException(status_code=500, detail=str(e))
@@ -1,14 +0,0 @@
1
- from munch import Munch, munchify
2
- from pydantic import Field
3
-
4
- from ..base import BaseConfig
5
-
6
-
7
- class PipelineTrackerConfig(BaseConfig):
8
- project_id: int | None = None
9
- # version: str | None = None
10
- dag_name: str | None = None
11
- tags: dict | Munch = Field(default_factory=dict)
12
-
13
- def model_post_init(self, __context):
14
- self.tags = munchify(self.tags)
@@ -1,8 +0,0 @@
1
- from pydantic import Field
2
-
3
- from ..base import BaseConfig
4
-
5
-
6
- class ProjectOpenTelemetryConfig(BaseConfig):
7
- host: str = Field(default="localhost")
8
- port: int = Field(default=6831)
@@ -1,11 +0,0 @@
1
- from pydantic import Field
2
-
3
- from ..base import BaseConfig
4
-
5
-
6
- class ProjectTrackerConfig(BaseConfig):
7
- username: str | None = Field(default=None)
8
- api_url: str = "http://localhost:8241"
9
- ui_url: str = "http://localhost:8242"
10
- api_key: str | None = Field(default=None)
11
- verify: bool = False
@@ -1,19 +0,0 @@
1
- import datetime as dt
2
-
3
- from munch import Munch, munchify
4
- from pydantic import Field
5
-
6
- from ..base import BaseConfig
7
-
8
-
9
- class ProjectWorkerConfig(BaseConfig):
10
- data_store: dict | Munch = Field(default_factory=dict)
11
- event_broker: dict | Munch = Field(default_factory=dict)
12
- cleanup_interval: int | float | dt.timedelta = Field(default=300) # int in secods
13
- max_concurrent_jobs: int = Field(default=10)
14
-
15
- def model_post_init(self, __context):
16
- if isinstance(self.data_store, dict):
17
- self.data_store = munchify(self.data_store)
18
- if isinstance(self.event_broker, dict):
19
- self.event_broker = munchify(self.event_broker)
@@ -1,309 +0,0 @@
1
- import typer
2
-
3
- from ..scheduler import SchedulerManager # Adjust import as needed
4
- from .utils import \
5
- parse_dict_or_list_param # Assuming you have this utility function
6
-
7
- # Create a Typer app for scheduler commands
8
- app = typer.Typer(help="Scheduler management commands")
9
-
10
-
11
- @app.command()
12
- def start_worker(
13
- name: str | None = None,
14
- base_dir: str | None = None,
15
- background: bool = False,
16
- storage_options: str | None = None,
17
- ):
18
- """
19
- Start a scheduler worker.
20
-
21
- Args:
22
- name: Name of the scheduler
23
- base_dir: Base directory for the scheduler
24
- background: Run in background
25
- storage_options: Storage options as JSON or key=value pairs
26
- """
27
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
28
-
29
- with SchedulerManager(
30
- name, base_dir, storage_options=parsed_storage_options
31
- ) as manager:
32
- manager.start_worker(background=background)
33
-
34
-
35
- @app.command()
36
- def remove_all_schedules(
37
- name: str | None = None,
38
- base_dir: str | None = None,
39
- storage_options: str | None = None,
40
- ):
41
- """
42
- Remove all schedules from the scheduler.
43
-
44
- Args:
45
- name: Name of the scheduler
46
- base_dir: Base directory for the scheduler
47
- storage_options: Storage options as JSON or key=value pairs
48
- """
49
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
50
-
51
- with SchedulerManager(
52
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
53
- ) as manager:
54
- manager.remove_all_schedules()
55
-
56
-
57
- # @app.command()
58
- # def add_schedule(
59
- # name: str | None = None,
60
- # base_dir: str | None = None,
61
- # storage_options: str | None = None,
62
- # trigger_type: str = "cron",
63
- # **kwargs,
64
- # ) -> str:
65
- # """
66
- # Add a schedule to the scheduler.
67
-
68
- # Args:
69
- # name: Name of the scheduler
70
- # base_dir: Base directory for the scheduler
71
- # storage_options: Storage options as JSON or key=value pairs
72
- # trigger_type: Type of trigger (cron, interval, etc.)
73
- # **kwargs: Additional schedule parameters
74
- # """
75
- # parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
76
-
77
- # with SchedulerManager(name, base_dir, role="scheduler") as manager:
78
- # schedule_id = manager.add_schedule(
79
- # storage_options=parsed_storage_options, type=trigger_type, **kwargs
80
- # )
81
-
82
- # typer.echo(f"Schedule added with ID: {schedule_id}")
83
- # return schedule_id
84
-
85
-
86
- # @app.command()
87
- # def add_job(
88
- # name: str | None = None,
89
- # base_dir: str | None = None,
90
- # storage_options: str | None = None,
91
- # **kwargs,
92
- # ) -> str:
93
- # """
94
- # Add a job to the scheduler.
95
-
96
- # Args:
97
- # name: Name of the scheduler
98
- # base_dir: Base directory for the scheduler
99
- # storage_options: Storage options as JSON or key=value pairs
100
- # **kwargs: Job parameters
101
- # """
102
- # parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
103
-
104
- # with SchedulerManager(name, base_dir, role="scheduler") as manager:
105
- # job_id = manager.add_job(storage_options=parsed_storage_options, **kwargs)
106
-
107
- # typer.echo(f"Job added with ID: {job_id}")
108
- # return str(job_id)
109
-
110
-
111
- # @app.command()
112
- # def run_job(
113
- # name: str | None = None,
114
- # base_dir: str | None = None,
115
- # storage_options: str | None = None,
116
- # **kwargs,
117
- # ):
118
- # """
119
- # Run a job and return its result.
120
-
121
- # Args:
122
- # name: Name of the scheduler
123
- # base_dir: Base directory for the scheduler
124
- # storage_options: Storage options as JSON or key=value pairs
125
- # **kwargs: Job parameters
126
- # """
127
- # parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
128
-
129
- # with SchedulerManager(name, base_dir, role="scheduler") as manager:
130
- # result = manager.run_job(storage_options=parsed_storage_options, **kwargs)
131
-
132
- # typer.echo("Job result:")
133
- # typer.echo(result)
134
- # return result
135
-
136
-
137
- @app.command()
138
- def get_schedules(
139
- name: str | None = None,
140
- base_dir: str | None = None,
141
- storage_options: str | None = None,
142
- ):
143
- """
144
- Get all schedules from the scheduler.
145
-
146
- Args:
147
- name: Name of the scheduler
148
- base_dir: Base directory for the scheduler
149
- storage_options: Storage options as JSON or key=value pairs
150
- """
151
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
152
-
153
- with SchedulerManager(
154
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
155
- ) as manager:
156
- schedules = manager.get_schedules()
157
-
158
- typer.echo("Schedules:")
159
- for schedule in schedules:
160
- typer.echo(schedule)
161
- return schedules
162
-
163
-
164
- @app.command()
165
- def get_tasks(
166
- name: str | None = None,
167
- base_dir: str | None = None,
168
- storage_options: str | None = None,
169
- ):
170
- """
171
- Get all tasks from the scheduler.
172
-
173
- Args:
174
- name: Name of the scheduler
175
- base_dir: Base directory for the scheduler
176
- storage_options: Storage options as JSON or key=value pairs
177
- """
178
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
179
-
180
- with SchedulerManager(
181
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
182
- ) as manager:
183
- tasks = manager.get_tasks()
184
-
185
- typer.echo("Tasks:")
186
- for task in tasks:
187
- typer.echo(task)
188
- return tasks
189
-
190
-
191
- @app.command()
192
- def get_jobs(
193
- name: str | None = None,
194
- base_dir: str | None = None,
195
- storage_options: str | None = None,
196
- ):
197
- """
198
- Get all jobs from the scheduler.
199
-
200
- Args:
201
- name: Name of the scheduler
202
- base_dir: Base directory for the scheduler
203
- storage_options: Storage options as JSON or key=value pairs
204
- """
205
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
206
-
207
- with SchedulerManager(
208
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
209
- ) as manager:
210
- jobs = manager.get_jobs()
211
-
212
- typer.echo("Jobs:")
213
- for job in jobs:
214
- typer.echo(job)
215
- return jobs
216
-
217
-
218
- @app.command()
219
- def get_job_result(
220
- job_id: str,
221
- name: str | None = None,
222
- base_dir: str | None = None,
223
- storage_options: str | None = None,
224
- ):
225
- """
226
- Get the result of a specific job.
227
-
228
- Args:
229
- job_id: ID of the job
230
- name: Name of the scheduler
231
- base_dir: Base directory for the scheduler
232
- storage_options: Storage options as JSON or key=value pairs
233
- """
234
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
235
-
236
- with SchedulerManager(
237
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
238
- ) as manager:
239
- result = manager.get_job_result(job_id)
240
-
241
- typer.echo("Job Result:")
242
- typer.echo(result)
243
- return result
244
-
245
-
246
- @app.command()
247
- def show_schedules(
248
- name: str | None = None,
249
- base_dir: str | None = None,
250
- storage_options: str | None = None,
251
- ):
252
- """
253
- Show all schedules in the scheduler.
254
-
255
- Args:
256
- name: Name of the scheduler
257
- base_dir: Base directory for the scheduler
258
- storage_options: Storage options as JSON or key=value pairs
259
- """
260
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
261
-
262
- with SchedulerManager(
263
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
264
- ) as manager:
265
- manager.show_schedules()
266
-
267
-
268
- @app.command()
269
- def show_jobs(
270
- name: str | None = None,
271
- base_dir: str | None = None,
272
- storage_options: str | None = None,
273
- ):
274
- """
275
- Show all jobs in the scheduler.
276
-
277
- Args:
278
- name: Name of the scheduler
279
- base_dir: Base directory for the scheduler
280
- storage_options: Storage options as JSON or key=value pairs
281
- """
282
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
283
-
284
- with SchedulerManager(
285
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
286
- ) as manager:
287
- manager.show_jobs()
288
-
289
-
290
- @app.command()
291
- def show_tasks(
292
- name: str | None = None,
293
- base_dir: str | None = None,
294
- storage_options: str | None = None,
295
- ):
296
- """
297
- Show all tasks in the scheduler.
298
-
299
- Args:
300
- name: Name of the scheduler
301
- base_dir: Base directory for the scheduler
302
- storage_options: Storage options as JSON or key=value pairs
303
- """
304
- parsed_storage_options = parse_dict_or_list_param(storage_options, "dict") or {}
305
-
306
- with SchedulerManager(
307
- name, base_dir, role="scheduler", storage_options=parsed_storage_options
308
- ) as manager:
309
- manager.show_tasks()