orchestrator-core 4.6.5__py3-none-any.whl → 4.7.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
orchestrator/__init__.py CHANGED
@@ -13,7 +13,7 @@
 
 """This is the orchestrator workflow engine."""
 
-__version__ = "4.6.5"
+__version__ = "4.7.0rc1"
 
 
 from structlog import get_logger
@@ -22,6 +22,7 @@ from orchestrator.api.api_v1.endpoints import (
     product_blocks,
     products,
     resource_types,
+    schedules,
     settings,
     subscription_customer_descriptions,
     subscriptions,
@@ -88,6 +89,9 @@ api_router.include_router(
 api_router.include_router(
     ws.router, prefix="/ws", tags=["Core", "Events"]
 )  # Auth on the websocket is handled in the Websocket Manager
+api_router.include_router(
+    schedules.router, prefix="/schedules", tags=["Core", "Schedules"], dependencies=[Depends(authorize)]
+)
 
 if llm_settings.SEARCH_ENABLED:
     from orchestrator.api.api_v1.endpoints import search
@@ -13,6 +13,7 @@
 
 """Module that implements process related API endpoints."""
 
+import asyncio
 import struct
 import zlib
 from http import HTTPStatus
@@ -62,10 +63,12 @@ from orchestrator.utils.enrich_process import enrich_process
 from orchestrator.websocket import (
     WS_CHANNELS,
     broadcast_invalidate_status_counts,
+    broadcast_invalidate_status_counts_async,
     broadcast_process_update_to_websocket,
     websocket_manager,
 )
 from orchestrator.workflow import ProcessStat, ProcessStatus, StepList, Workflow
+from orchestrator.workflows import get_workflow
 from pydantic_forms.types import JSON, State
 
 router = APIRouter()
@@ -175,7 +178,7 @@ def delete(process_id: UUID) -> None:
     status_code=HTTPStatus.CREATED,
     dependencies=[Depends(check_global_lock, use_cache=False)],
 )
-def new_process(
+async def new_process(
     workflow_key: str,
     request: Request,
     json_data: list[dict[str, Any]] | None = Body(...),
@@ -183,8 +186,21 @@ def new_process(
     user_model: OIDCUserModel | None = Depends(authenticate),
 ) -> dict[str, UUID]:
     broadcast_func = api_broadcast_process_data(request)
-    process_id = start_process(
-        workflow_key, user_inputs=json_data, user_model=user_model, user=user, broadcast_func=broadcast_func
+
+    workflow = get_workflow(workflow_key)
+    if not workflow:
+        raise_status(HTTPStatus.NOT_FOUND, "Workflow does not exist")
+
+    if not await workflow.authorize_callback(user_model):
+        raise_status(HTTPStatus.FORBIDDEN, f"User is not authorized to execute '{workflow_key}' workflow")
+
+    process_id = await asyncio.to_thread(
+        start_process,
+        workflow_key,
+        user_inputs=json_data,
+        user_model=user_model,
+        user=user,
+        broadcast_func=broadcast_func,
     )
 
     return {"id": process_id}
@@ -196,14 +212,14 @@ def new_process(
     status_code=HTTPStatus.NO_CONTENT,
     dependencies=[Depends(check_global_lock, use_cache=False)],
 )
-def resume_process_endpoint(
+async def resume_process_endpoint(
     process_id: UUID,
     request: Request,
     json_data: JSON = Body(...),
     user: str = Depends(user_name),
     user_model: OIDCUserModel | None = Depends(authenticate),
 ) -> None:
-    process = _get_process(process_id)
+    process = await asyncio.to_thread(_get_process, process_id)
 
     if not can_be_resumed(process.last_status):
         raise_status(HTTPStatus.CONFLICT, f"Resuming a {process.last_status.lower()} workflow is not possible")
@@ -211,16 +227,16 @@ def resume_process_endpoint(
     pstat = load_process(process)
     auth_resume, auth_retry = get_auth_callbacks(get_steps_to_evaluate_for_rbac(pstat), pstat.workflow)
     if process.last_status == ProcessStatus.SUSPENDED:
-        if auth_resume is not None and not auth_resume(user_model):
+        if auth_resume is not None and not (await auth_resume(user_model)):
            raise_status(HTTPStatus.FORBIDDEN, "User is not authorized to resume step")
     elif process.last_status in (ProcessStatus.FAILED, ProcessStatus.WAITING):
-        if auth_retry is not None and not auth_retry(user_model):
+        if auth_retry is not None and not (await auth_retry(user_model)):
            raise_status(HTTPStatus.FORBIDDEN, "User is not authorized to retry step")
 
-    broadcast_invalidate_status_counts()
+    await broadcast_invalidate_status_counts_async()
     broadcast_func = api_broadcast_process_data(request)
 
-    resume_process(process, user=user, user_inputs=json_data, broadcast_func=broadcast_func)
+    await asyncio.to_thread(resume_process, process, user=user, user_inputs=json_data, broadcast_func=broadcast_func)
 
 
 @router.post(
@@ -0,0 +1,44 @@
+# Copyright 2019-2025 SURF.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from http import HTTPStatus
+
+import structlog
+from fastapi.routing import APIRouter
+
+from orchestrator.schedules.service import add_scheduled_task_to_queue
+from orchestrator.schemas.schedules import APSchedulerJobCreate, APSchedulerJobDelete, APSchedulerJobUpdate
+
+logger = structlog.get_logger(__name__)
+
+router: APIRouter = APIRouter()
+
+
+@router.post("/", status_code=HTTPStatus.CREATED)
+def create_scheduled_task(payload: APSchedulerJobCreate) -> dict[str, str]:
+    """Create a scheduled task."""
+    add_scheduled_task_to_queue(payload)
+    return {"message": "Added to Create Queue", "status": "CREATED"}
+
+
+@router.put("/", status_code=HTTPStatus.OK)
+async def update_scheduled_task(payload: APSchedulerJobUpdate) -> dict[str, str]:
+    """Update a scheduled task."""
+    add_scheduled_task_to_queue(payload)
+    return {"message": "Added to Update Queue", "status": "UPDATED"}
+
+
+@router.delete("/", status_code=HTTPStatus.OK)
+async def delete_scheduled_task(payload: APSchedulerJobDelete) -> dict[str, str]:
+    """Delete a scheduled task."""
+    add_scheduled_task_to_queue(payload)
+    return {"message": "Added to Delete Queue", "status": "DELETED"}
@@ -10,17 +10,26 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
 import time
+from typing import cast
 
 import typer
+from redis import Redis
 
 from orchestrator.schedules.scheduler import (
     get_all_scheduler_tasks,
     get_scheduler,
     get_scheduler_task,
 )
+from orchestrator.schedules.service import (
+    SCHEDULER_QUEUE,
+    add_scheduled_task_to_queue,
+    workflow_scheduler_queue,
+)
+from orchestrator.schemas.schedules import APSchedulerJobCreate
+from orchestrator.services.workflows import get_workflow_by_name
+from orchestrator.settings import app_settings
+from orchestrator.utils.redis_client import create_redis_client
 
 app: typer.Typer = typer.Typer()
 
@@ -28,10 +37,29 @@ app: typer.Typer = typer.Typer()
 @app.command()
 def run() -> None:
     """Start scheduler and loop eternally to keep thread alive."""
-    with get_scheduler():
-        while True:
+
+    def _get_scheduled_task_item_from_queue(redis_conn: Redis) -> tuple[str, bytes] | None:
+        """Get an item from the Redis queue for scheduler tasks."""
+        try:
+            return redis_conn.brpop(SCHEDULER_QUEUE, timeout=1)
+        except ConnectionError as e:
+            typer.echo(f"There was a connection error with Redis. Retrying in 3 seconds... {e}")
+            time.sleep(3)
+        except Exception as e:
+            typer.echo(f"There was an unexpected error with Redis. Retrying in 1 second... {e}")
             time.sleep(1)
 
+        return None
+
+    with get_scheduler() as scheduler_connection:
+        redis_connection = create_redis_client(app_settings.CACHE_URI)
+        while True:
+            item = _get_scheduled_task_item_from_queue(redis_connection)
+            if not item:
+                continue
+
+            workflow_scheduler_queue(item, scheduler_connection)
+
 
 @app.command()
 def show_schedule() -> None:
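`Redis.brpop` blocks for up to `timeout` seconds and returns a `(queue_name, payload)` tuple, or `None` when the timeout expires, which is why the consumer loop simply `continue`s on a falsy item. A small sketch of those semantics, assuming a Redis instance on localhost:

```python
import redis

r = redis.Redis()  # assumes localhost:6379
r.lpush("scheduler:queue:", b'{"scheduled_type": "create"}')

# BRPOP returns (queue_name, payload) as bytes when an item is available...
print(r.brpop("scheduler:queue:", timeout=1))
# (b'scheduler:queue:', b'{"scheduled_type": "create"}')

# ...or None once the queue is empty and the timeout expires.
print(r.brpop("scheduler:queue:", timeout=1))  # None
```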
@@ -59,3 +87,45 @@ def force(task_id: str) -> None:
     except Exception as e:
         typer.echo(f"Task execution failed: {e}")
         raise typer.Exit(code=1)
+
+
+@app.command()
+def load_initial_schedule() -> None:
+    """Load the initial schedule into the scheduler."""
+    initial_schedules = [
+        {
+            "name": "Task Resume Workflows",
+            "workflow_name": "task_resume_workflows",
+            "workflow_id": "",
+            "trigger": "interval",
+            "trigger_kwargs": {"hours": 1},
+        },
+        {
+            "name": "Task Clean Up Tasks",
+            "workflow_name": "task_clean_up_tasks",
+            "workflow_id": "",
+            "trigger": "interval",
+            "trigger_kwargs": {"hours": 6},
+        },
+        {
+            "name": "Task Validate Subscriptions",
+            "workflow_name": "task_validate_subscriptions",
+            "workflow_id": "",
+            "trigger": "cron",
+            "trigger_kwargs": {"hour": 0, "minute": 10},
+        },
+    ]
+
+    for schedule in initial_schedules:
+        # enrich with workflow id
+        workflow_name = cast(str, schedule.get("workflow_name"))
+        workflow = get_workflow_by_name(workflow_name)
+
+        if not workflow:
+            typer.echo(f"Workflow '{schedule['workflow_name']}' not found. Skipping schedule.")
+            continue
+
+        schedule["workflow_id"] = workflow.workflow_id
+
+        typer.echo(f"Initial Schedule: {schedule}")
+        add_scheduled_task_to_queue(APSchedulerJobCreate(**schedule))  # type: ignore
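This replaces the previously decorator-registered default jobs with seed data pushed through the same queue. One way to exercise the new command without a full deployment is Typer's test runner; a sketch that still needs a reachable database and Redis, since the command resolves workflows and pushes to the queue:

```python
from typer.testing import CliRunner

from orchestrator.cli.scheduler import app

runner = CliRunner()
# Typer exposes load_initial_schedule as "load-initial-schedule".
result = runner.invoke(app, ["load-initial-schedule"])
print(result.output)  # echoes each enriched schedule, or a "not found" skip message
```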
@@ -75,6 +75,9 @@ def alter_embedding_column_dimension(new_dimension: int) -> None:
         db.session.execute(text(f"ALTER TABLE search_queries ADD COLUMN query_embedding vector({new_dimension})"))
 
         db.session.commit()
+
+        db.session.close()
+
         logger.info(f"Altered embedding columns to dimension {new_dimension} in ai_search_index and search_queries")
 
     except SQLAlchemyError as e:
orchestrator/db/models.py CHANGED
@@ -29,9 +29,11 @@ from sqlalchemy import (
     CheckConstraint,
     Column,
     Enum,
+    Float,
     ForeignKey,
     Index,
     Integer,
+    LargeBinary,
     PrimaryKeyConstraint,
     Select,
     String,
@@ -796,3 +798,27 @@ class AiSearchIndex(BaseModel):
     content_hash = mapped_column(String(64), nullable=False, index=True)
 
     __table_args__ = (PrimaryKeyConstraint("entity_id", "path", name="pk_ai_search_index"),)
+
+
+class APSchedulerJobStoreModel(BaseModel):
+    __tablename__ = "apscheduler_jobs"
+
+    id = mapped_column(String(191), primary_key=True)
+    next_run_time = mapped_column(Float, nullable=True)
+    job_state = mapped_column(LargeBinary, nullable=False)
+
+
+class WorkflowApschedulerJob(BaseModel):
+    __tablename__ = "workflows_apscheduler_jobs"
+
+    workflow_id = mapped_column(
+        UUIDType, ForeignKey("workflows.workflow_id", ondelete="CASCADE"), primary_key=True, nullable=False
+    )
+
+    # Notice the VARCHAR(512) for schedule_id to accommodate longer IDs so
+    # that if APScheduler changes its ID format in the future, we are covered.
+    schedule_id = mapped_column(
+        String(512), ForeignKey("apscheduler_jobs.id", ondelete="CASCADE"), primary_key=True, nullable=False
+    )
+
+    __table_args__ = (UniqueConstraint("workflow_id", "schedule_id", name="uq_workflow_schedule"),)
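The linker table makes schedules queryable by workflow. A sketch of looking up the schedule ids attached to a workflow by name, assuming an initialized database session and that `WorkflowTable` is the existing workflows model in the same module:

```python
from sqlalchemy import select

from orchestrator.db import db
from orchestrator.db.models import WorkflowApschedulerJob, WorkflowTable

stmt = (
    select(WorkflowApschedulerJob.schedule_id)
    .join(WorkflowTable, WorkflowTable.workflow_id == WorkflowApschedulerJob.workflow_id)
    .where(WorkflowTable.name == "task_validate_subscriptions")
)
schedule_ids = db.session.scalars(stmt).all()
```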
@@ -38,4 +38,4 @@ class Workflow:
         workflow_table = get_original_model(self, WorkflowTable)
         workflow = get_workflow(workflow_table.name)
 
-        return workflow.authorize_callback(oidc_user)  # type: ignore
+        return await workflow.authorize_callback(oidc_user)  # type: ignore
@@ -0,0 +1,106 @@
+"""Create linker table workflow_apscheduler.
+
+Revision ID: 961eddbd4c13
+Revises: 850dccac3b02
+Create Date: 2025-11-18 10:38:57.211087
+
+"""
+
+from uuid import uuid4
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "961eddbd4c13"
+down_revision = "850dccac3b02"
+branch_labels = None
+depends_on = None
+
+
+workflows = [
+    {
+        "name": "task_validate_subscriptions",
+        "description": "Validate subscriptions",
+        "workflow_id": uuid4(),
+        "target": "SYSTEM",
+    },
+]
+
+
+def _create_workflows() -> None:
+    conn = op.get_bind()
+    for workflow in workflows:
+        conn.execute(
+            sa.text(
+                "INSERT INTO workflows VALUES (:workflow_id, :name, :target, :description, now()) ON CONFLICT DO NOTHING"
+            ),
+            workflow,
+        )
+
+
+def _downgrade_create_workflows() -> None:
+    conn = op.get_bind()
+    for workflow in workflows:
+        conn.execute(sa.text("DELETE FROM workflows WHERE name = :name"), {"name": workflow["name"]})
+
+
+def _create_apscheduler_jobs_table_if_not_exists() -> None:
+    # Check if the apscheduler_jobs table exists and create it if it does not.
+    conn = op.get_bind()
+    inspector = sa.inspect(conn)
+    if "apscheduler_jobs" not in inspector.get_table_names():
+        op.execute(
+            sa.text(
+                """
+                CREATE TABLE apscheduler_jobs
+                (
+                    id            VARCHAR(191) NOT NULL PRIMARY KEY,
+                    next_run_time DOUBLE PRECISION,
+                    job_state     bytea NOT NULL
+                );
+                """
+            )
+        )
+
+
+def _create_workflows_table_if_not_exists() -> None:
+    # Notice the VARCHAR(512) for schedule_id to accommodate longer IDs,
+    # so that if APScheduler changes its ID format in the future, we are covered.
+    op.execute(
+        sa.text(
+            """
+            CREATE TABLE workflows_apscheduler_jobs (
+                workflow_id UUID NOT NULL,
+                schedule_id VARCHAR(512) NOT NULL,
+                PRIMARY KEY (workflow_id, schedule_id),
+                CONSTRAINT fk_workflow
+                    FOREIGN KEY (workflow_id) REFERENCES public.workflows (workflow_id)
+                        ON DELETE CASCADE,
+                CONSTRAINT fk_schedule
+                    FOREIGN KEY (schedule_id) REFERENCES public.apscheduler_jobs (id)
+                        ON DELETE CASCADE,
+                CONSTRAINT uq_workflow_schedule UNIQUE (workflow_id, schedule_id)
+            );
+            """
+        )
+    )
+
+    op.create_index("ix_workflows_apscheduler_jobs_schedule_id", "workflows_apscheduler_jobs", ["schedule_id"])
+
+
+def upgrade() -> None:
+    _create_apscheduler_jobs_table_if_not_exists()
+    _create_workflows_table_if_not_exists()
+    _create_workflows()
+
+
+def downgrade() -> None:
+    op.execute(
+        sa.text(
+            """
+            DROP TABLE IF EXISTS workflows_apscheduler_jobs;
+            """
+        )
+    )
+    _downgrade_create_workflows()
@@ -10,16 +10,15 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import warnings
 
-
-from orchestrator.schedules.resume_workflows import run_resume_workflows
-from orchestrator.schedules.task_vacuum import vacuum_tasks
 from orchestrator.schedules.validate_products import validate_products
-from orchestrator.schedules.validate_subscriptions import validate_subscriptions
 
+warnings.warn(
+    "ALL_SCHEDULERS is deprecated; scheduling is now handled entirely through the scheduler API.",
+    DeprecationWarning,
+    stacklevel=2,
+)
 ALL_SCHEDULERS: list = [
-    run_resume_workflows,
-    vacuum_tasks,
-    validate_subscriptions,
     validate_products,
 ]
@@ -11,7 +11,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
 from contextlib import contextmanager
 from datetime import datetime
 from typing import Any, Generator
@@ -27,6 +26,7 @@ from orchestrator.db.filters import Filter
 from orchestrator.db.filters.filters import CallableErrorHandler
 from orchestrator.db.sorting import Sort
 from orchestrator.db.sorting.sorting import SortOrder
+from orchestrator.schedules.service import get_linker_entries_by_schedule_ids
 from orchestrator.utils.helpers import camel_to_snake, to_camel
 
 executors = {
@@ -75,6 +75,7 @@ def get_scheduler(paused: bool = False) -> Generator[BackgroundScheduler, Any, None]:
 
 class ScheduledTask(BaseModel):
     id: str
+    workflow_id: str | None = None
     name: str | None = None
     next_run_time: datetime | None = None
     trigger: str
@@ -161,6 +162,29 @@ def default_error_handler(message: str, **context) -> None:  # type: ignore
     raise ValueError(f"{message} {_format_context(context)}")
 
 
+def enrich_with_workflow_id(scheduled_tasks: list[ScheduledTask]) -> list[ScheduledTask]:
+    """Query the linker table for the workflow_id of each scheduled task.
+
+    Returns all the scheduled tasks with the workflow_id added.
+    """
+    schedule_ids = [task.id for task in scheduled_tasks]
+
+    entries = {
+        str(entry.schedule_id): str(entry.workflow_id) for entry in get_linker_entries_by_schedule_ids(schedule_ids)
+    }
+
+    return [
+        ScheduledTask(
+            id=task.id,
+            workflow_id=entries.get(task.id, None),
+            name=task.name,
+            next_run_time=task.next_run_time,
+            trigger=str(task.trigger),
+        )
+        for task in scheduled_tasks
+    ]
+
+
 def get_scheduler_tasks(
     first: int = 10,
     after: int = 0,
@@ -171,6 +195,7 @@ def get_scheduler_tasks(
     scheduled_tasks = get_all_scheduler_tasks()
     scheduled_tasks = filter_scheduled_tasks(scheduled_tasks, error_handler, filter_by)
     scheduled_tasks = sort_scheduled_tasks(scheduled_tasks, error_handler, sort_by)
+    scheduled_tasks = enrich_with_workflow_id(scheduled_tasks)
 
     total = len(scheduled_tasks)
     paginated_tasks = scheduled_tasks[after : after + first + 1]
@@ -178,6 +203,7 @@
     return [
         ScheduledTask(
             id=task.id,
+            workflow_id=task.workflow_id,
            name=task.name,
             next_run_time=task.next_run_time,
             trigger=str(task.trigger),
@@ -0,0 +1,253 @@
+# Copyright 2019-2025 SURF.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import logging
+from uuid import UUID, uuid4
+
+from apscheduler.schedulers.base import BaseScheduler
+from apscheduler.triggers.cron import CronTrigger
+from apscheduler.triggers.date import DateTrigger
+from apscheduler.triggers.interval import IntervalTrigger
+from sqlalchemy import delete
+
+from orchestrator import app_settings
+from orchestrator.db import db
+from orchestrator.db.models import WorkflowApschedulerJob
+from orchestrator.schemas.schedules import (
+    APSchedulerJobCreate,
+    APSchedulerJobDelete,
+    APSchedulerJobs,
+    APSchedulerJobUpdate,
+    APSJobAdapter,
+)
+from orchestrator.services.processes import start_process
+from orchestrator.services.workflows import get_workflow_by_workflow_id
+from orchestrator.utils.redis_client import create_redis_client
+
+redis_connection = create_redis_client(app_settings.CACHE_URI)
+
+SCHEDULER_QUEUE = "scheduler:queue:"
+
+
+logger = logging.getLogger(__name__)
+
+
+def serialize_payload(payload: APSchedulerJobs) -> bytes:
+    """Serialize the payload to bytes for Redis storage.
+
+    Args:
+        payload: APSchedulerJobs The scheduled task payload.
+    """
+    data = json.loads(payload.model_dump_json())
+    data["scheduled_type"] = payload.scheduled_type
+    return json.dumps(data).encode()
+
+
+def deserialize_payload(bytes_dump: bytes) -> APSchedulerJobs:
+    """Deserialize the payload from bytes for Redis retrieval.
+
+    Args:
+        bytes_dump: bytes The serialized payload.
+    """
+    json_dump = bytes_dump.decode()
+    return APSJobAdapter.validate_json(json_dump)
+
+
+def add_scheduled_task_to_queue(payload: APSchedulerJobs) -> None:
+    """Put a scheduled task operation on the scheduler queue.
+
+    The queue consumer creates the APScheduler job and puts the workflow and
+    schedule_id in the linker table workflows_apscheduler_jobs.
+
+    Args:
+        payload: APSchedulerJobs The scheduled task operation to enqueue.
+    """
+    bytes_dump = serialize_payload(payload)
+    redis_connection.lpush(SCHEDULER_QUEUE, bytes_dump)
+    logger.info("Added scheduled task to queue.")
+
+
+def get_linker_entries_by_schedule_ids(schedule_ids: list[str]) -> list[WorkflowApschedulerJob]:
+    """Get linker table entries for multiple schedule IDs in a single query.
+
+    Args:
+        schedule_ids: list[str] One or many schedule IDs.
+
+    Returns:
+        list[WorkflowApschedulerJob]: All linker table rows matching those IDs.
+    """
+    if not schedule_ids:
+        return []
+
+    return db.session.query(WorkflowApschedulerJob).filter(WorkflowApschedulerJob.schedule_id.in_(schedule_ids)).all()
+
+
+def _add_linker_entry(workflow_id: UUID, schedule_id: str) -> None:
+    """Add an entry to the linker table workflows_apscheduler_jobs.
+
+    Args:
+        workflow_id: UUID The workflow ID.
+        schedule_id: str The schedule ID.
+    """
+    workflows_apscheduler_job = WorkflowApschedulerJob(workflow_id=workflow_id, schedule_id=schedule_id)
+    db.session.add(workflows_apscheduler_job)
+    db.session.commit()
+
+
+def _delete_linker_entry(workflow_id: UUID, schedule_id: str) -> None:
+    """Delete an entry from the linker table workflows_apscheduler_jobs.
+
+    Args:
+        workflow_id: UUID The workflow ID.
+        schedule_id: str The schedule ID.
+    """
+    db.session.execute(
+        delete(WorkflowApschedulerJob).where(
+            WorkflowApschedulerJob.workflow_id == workflow_id, WorkflowApschedulerJob.schedule_id == schedule_id
+        )
+    )
+    db.session.commit()
+
+
+def run_start_workflow_scheduler_task(workflow_name: str) -> None:
+    """Start a workflow from the scheduler.
+
+    Args:
+        workflow_name: str The name of the workflow to start.
+    """
+    logger.info(f"Starting workflow: {workflow_name}")
+    start_process(workflow_name)
+
+
+def _add_scheduled_task(payload: APSchedulerJobCreate, scheduler_connection: BaseScheduler) -> None:
+    """Create a new scheduled task in the scheduler and also in the linker table.
+
+    Args:
+        payload: APSchedulerJobCreate The scheduled task to create.
+        scheduler_connection: BaseScheduler The scheduler connection.
+    """
+    logger.info(f"Adding scheduled task: {payload}")
+
+    workflow_description = None
+    # Check that the workflow exists - we cannot schedule a non-existing workflow
+    workflow = get_workflow_by_workflow_id(str(payload.workflow_id))
+    if not workflow:
+        raise ValueError(f"Workflow with id {payload.workflow_id} does not exist.")
+    workflow_description = workflow.description
+
+    # This function is always the same for scheduled tasks: it runs the workflow
+    func = run_start_workflow_scheduler_task
+
+    # Ensure the payload has the required data
+    if not payload.trigger or not payload.workflow_name or not payload.trigger_kwargs or not payload.workflow_id:
+        raise ValueError("Trigger, trigger_kwargs, workflow_name and workflow_id must be specified for scheduled tasks.")
+
+    schedule_id = str(uuid4())
+    scheduler_connection.add_job(
+        func=func,
+        trigger=payload.trigger,
+        id=schedule_id,
+        name=payload.name or workflow_description,
+        kwargs={"workflow_name": payload.workflow_name},
+        **(payload.trigger_kwargs or {}),
+    )
+
+    _add_linker_entry(workflow_id=payload.workflow_id, schedule_id=schedule_id)
+
+
+def _build_trigger_on_update(
+    trigger_name: str | None, trigger_kwargs: dict
+) -> IntervalTrigger | CronTrigger | DateTrigger | None:
+    if not trigger_name or not trigger_kwargs:
+        logger.info("Skipping building trigger as no trigger information is provided.")
+        return None
+
+    match trigger_name:
+        case "interval":
+            return IntervalTrigger(**trigger_kwargs)
+        case "cron":
+            return CronTrigger(**trigger_kwargs)
+        case "date":
+            return DateTrigger(**trigger_kwargs)
+        case _:
+            raise ValueError(f"Invalid trigger type: {trigger_name}")
+
+
+def _update_scheduled_task(payload: APSchedulerJobUpdate, scheduler_connection: BaseScheduler) -> None:
+    """Update an existing scheduled task in the scheduler.
+
+    Only the name and trigger may be updated.
+    The job id must be that of an existing job.
+    Do not insert in the linker table - the entry should already exist.
+
+    Args:
+        payload: APSchedulerJobUpdate The scheduled task to update.
+        scheduler_connection: BaseScheduler The scheduler connection.
+    """
+    logger.info(f"Updating scheduled task: {payload}")
+
+    schedule_id = str(payload.schedule_id)
+    job = scheduler_connection.get_job(job_id=schedule_id)
+    if not job:
+        raise ValueError(f"Schedule Job with id {schedule_id} does not exist.")
+
+    trigger = _build_trigger_on_update(payload.trigger, payload.trigger_kwargs or {})
+    modify_kwargs = {}
+
+    if trigger:
+        job = job.reschedule(trigger=trigger)
+
+    if payload.name:
+        modify_kwargs["name"] = payload.name
+
+    job.modify(**modify_kwargs)
+
+
+def _delete_scheduled_task(payload: APSchedulerJobDelete, scheduler_connection: BaseScheduler) -> None:
+    """Delete an existing scheduled task in the scheduler and also in the linker table.
+
+    Args:
+        payload: APSchedulerJobDelete The scheduled task to delete.
+        scheduler_connection: BaseScheduler The scheduler connection.
+    """
+    logger.info(f"Deleting scheduled task: {payload}")
+
+    schedule_id = str(payload.schedule_id)
+    scheduler_connection.remove_job(job_id=schedule_id)
+    _delete_linker_entry(workflow_id=payload.workflow_id, schedule_id=schedule_id)
+
+
+def workflow_scheduler_queue(queue_item: tuple[str, bytes], scheduler_connection: BaseScheduler) -> None:
+    """Process an item from the scheduler queue.
+
+    Args:
+        queue_item: tuple[str, bytes] The item from the scheduler queue.
+        scheduler_connection: BaseScheduler The scheduler connection.
+    """
+    try:
+        _, bytes_dump = queue_item
+        payload = deserialize_payload(bytes_dump)
+        match payload:
+            case APSchedulerJobCreate():
+                _add_scheduled_task(payload, scheduler_connection)
+
+            case APSchedulerJobUpdate():
+                _update_scheduled_task(payload, scheduler_connection)
+
+            case APSchedulerJobDelete():
+                _delete_scheduled_task(payload, scheduler_connection)
+
+            case _:
+                logger.warning(f"Unexpected schedule type: {payload}")  # type: ignore
+    except Exception:
+        logger.exception("Error processing scheduler queue item")
@@ -0,0 +1,71 @@
+# Copyright 2019-2025 SURF.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Annotated, Any, Literal, Union
+from uuid import UUID
+
+from pydantic import BaseModel, Field, TypeAdapter
+
+SCHEDULER_Q_CREATE = "create"
+SCHEDULER_Q_UPDATE = "update"
+SCHEDULER_Q_DELETE = "delete"
+
+
+class APSchedulerJob(BaseModel):
+    scheduled_type: Literal["create", "update", "delete"] = Field(..., description="Discriminator for job type")
+
+
+class APSchedulerJobCreate(APSchedulerJob):
+    name: str | None = Field(None, description="Human readable name e.g. 'My Process'")
+    workflow_name: str = Field(..., description="Name of the workflow to run e.g. 'my_workflow_name'")
+    workflow_id: UUID = Field(..., description="UUID of the workflow associated with this scheduled task")
+
+    trigger: Literal["interval", "cron", "date"] = Field(..., description="APScheduler trigger type")
+    trigger_kwargs: dict[str, Any] = Field(
+        default_factory=lambda: {},
+        description="Arguments passed to the trigger on job creation",
+        examples=[{"hours": 12}, {"minutes": 30}, {"days": 1, "hours": 2}],
+    )
+
+    scheduled_type: Literal["create"] = Field("create", frozen=True)
+
+
+class APSchedulerJobUpdate(APSchedulerJob):
+    name: str | None = Field(None, description="Human readable name e.g. 'My Process'")
+    schedule_id: UUID = Field(..., description="UUID of the scheduled task")
+
+    trigger: Literal["interval", "cron", "date"] | None = Field(None, description="APScheduler trigger type")
+    trigger_kwargs: dict[str, Any] | None = Field(
+        default=None,
+        description="Arguments passed to the trigger on job update",
+        examples=[{"hours": 12}, {"minutes": 30}, {"days": 1, "hours": 2}],
+    )
+
+    scheduled_type: Literal["update"] = Field("update", frozen=True)
+
+
+class APSchedulerJobDelete(APSchedulerJob):
+    workflow_id: UUID = Field(..., description="UUID of the workflow associated with this scheduled task")
+    schedule_id: UUID | None = Field(None, description="UUID of the scheduled task")
+
+    scheduled_type: Literal["delete"] = Field("delete", frozen=True)
+
+
+APSchedulerJobs = Annotated[
+    Union[
+        APSchedulerJobCreate,
+        APSchedulerJobUpdate,
+        APSchedulerJobDelete,
+    ],
+    Field(discriminator="scheduled_type"),
+]
+APSJobAdapter = TypeAdapter(APSchedulerJobs)  # type: ignore
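The `scheduled_type` field acts as a Pydantic discriminator: the adapter inspects it and instantiates the matching model, rejecting unknown values. A small sketch:

```python
from orchestrator.schemas.schedules import APSchedulerJobDelete, APSJobAdapter

job = APSJobAdapter.validate_python(
    {
        "scheduled_type": "delete",
        "workflow_id": "11111111-2222-3333-4444-555555555555",
        "schedule_id": "22222222-3333-4444-5555-666666666666",
    }
)
assert isinstance(job, APSchedulerJobDelete)  # picked by the discriminator
```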
@@ -421,10 +421,6 @@ def _run_process_async(process_id: UUID, f: Callable) -> UUID:
     return process_id
 
 
-def error_message_unauthorized(workflow_key: str) -> str:
-    return f"User is not authorized to execute '{workflow_key}' workflow"
-
-
 def create_process(
     workflow_key: str,
     user_inputs: list[State] | None = None,
@@ -442,9 +438,6 @@ def create_process(
     if not workflow:
         raise_status(HTTPStatus.NOT_FOUND, "Workflow does not exist")
 
-    if not workflow.authorize_callback(user_model):
-        raise_status(HTTPStatus.FORBIDDEN, error_message_unauthorized(workflow_key))
-
     initial_state = {
         "process_id": process_id,
         "reporter": user,
@@ -64,6 +64,10 @@ def get_workflow_by_name(workflow_name: str) -> WorkflowTable | None:
     return db.session.scalar(select(WorkflowTable).where(WorkflowTable.name == workflow_name))
 
 
+def get_workflow_by_workflow_id(workflow_id: str) -> WorkflowTable | None:
+    return db.session.scalar(select(WorkflowTable).where(WorkflowTable.workflow_id == workflow_id))
+
+
 def get_validation_product_workflows_for_subscription(
     subscription: SubscriptionTable,
 ) -> list:
@@ -1,4 +1,4 @@
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 from typing import TypeAlias, TypeVar
 
 from oauth2_lib.fastapi import OIDCUserModel
@@ -7,4 +7,4 @@ from oauth2_lib.fastapi import OIDCUserModel
 
 # Can instead use "type Authorizer = ..." in later Python versions.
 T = TypeVar("T", bound=OIDCUserModel)
-Authorizer: TypeAlias = Callable[[T | None], bool]
+Authorizer: TypeAlias = Callable[[T | None], Awaitable[bool]]
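With `Authorizer` now returning an awaitable, user-defined authorize callbacks must be coroutine functions, mirroring the new async `allow` default further down in this diff. A hedged sketch of a custom callback; the entitlement claim is hypothetical, and passing `authorize_callback` to `@workflow` follows the RBAC hooks this release awaits elsewhere:

```python
from oauth2_lib.fastapi import OIDCUserModel

from orchestrator.targets import Target
from orchestrator.workflow import StepList, done, init, workflow


async def only_admins(user: OIDCUserModel | None) -> bool:
    # Hypothetical claim check; adapt to the claims your OIDC provider issues.
    return bool(user and "admin" in (user.get("entitlements") or []))


@workflow("Restricted workflow", target=Target.SYSTEM, authorize_callback=only_admins)
def restricted_workflow() -> StepList:
    return init >> done
```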
@@ -105,6 +105,19 @@ async def invalidate_subscription_cache(subscription_id: UUID | UUIDstr, invalid
     await broadcast_invalidate_cache({"type": "subscriptions", "id": str(subscription_id)})
 
 
+async def broadcast_invalidate_status_counts_async() -> None:
+    """Broadcast message to invalidate the status counts of the connected websocket clients.
+
+    This breaks the pattern of `sync_` prefixes to maintain backwards compatibility with
+    `broadcast_invalidate_status_counts`, a sync function.
+    """
+    if not websocket_manager.enabled:
+        logger.debug("WebSocketManager is not enabled. Skip broadcasting through websocket.")
+        return
+
+    await broadcast_invalidate_cache({"type": "processStatusCounts"})
+
+
 def broadcast_invalidate_status_counts() -> None:
     """Broadcast message to invalidate the status counts of the connected websocket clients."""
     if not websocket_manager.enabled:
@@ -148,4 +161,5 @@ __all__ = [
     "broadcast_process_update_to_websocket_async",
     "WS_CHANNELS",
     "broadcast_invalidate_status_counts",
+    "broadcast_invalidate_status_counts_async",
 ]
orchestrator/workflow.py CHANGED
@@ -193,7 +193,7 @@ def _handle_simple_input_form_generator(f: StateInputStepFunc) -> StateInputFormGenerator:
     return form_generator
 
 
-def allow(_: OIDCUserModel | None) -> bool:
+async def allow(_: OIDCUserModel | None) -> bool:
     """Default function to return True in absence of user-defined authorize function."""
     return True
 
@@ -111,5 +111,6 @@ LazyWorkflowInstance(".tasks.cleanup_tasks_log", "task_clean_up_tasks")
 LazyWorkflowInstance(".tasks.resume_workflows", "task_resume_workflows")
 LazyWorkflowInstance(".tasks.validate_products", "task_validate_products")
 LazyWorkflowInstance(".tasks.validate_product_type", "task_validate_product_type")
+LazyWorkflowInstance(".tasks.validate_subscriptions", "task_validate_subscriptions")
 
 __doc__ = make_workflow_index_doc(ALL_WORKFLOWS)
@@ -16,7 +16,6 @@ from threading import BoundedSemaphore
 
 import structlog
 
-from orchestrator.schedules.scheduler import scheduler
 from orchestrator.services.subscriptions import (
     get_subscriptions_on_product_table,
     get_subscriptions_on_product_table_in_sync,
@@ -26,6 +25,8 @@ from orchestrator.services.workflows import (
     start_validation_workflow_for_workflows,
 )
 from orchestrator.settings import app_settings
+from orchestrator.targets import Target
+from orchestrator.workflow import StepList, init, step, workflow
 
 logger = structlog.get_logger(__name__)
 
@@ -33,7 +34,7 @@ logger = structlog.get_logger(__name__)
 task_semaphore = BoundedSemaphore(value=2)
 
 
-@scheduler.scheduled_job(id="subscriptions-validator", name="Subscriptions Validator", trigger="cron", hour=0, minute=10)  # type: ignore[misc]
+@step("Validate subscriptions")
 def validate_subscriptions() -> None:
     if app_settings.VALIDATE_OUT_OF_SYNC_SUBSCRIPTIONS:
         # Automatically re-validate out-of-sync subscriptions. This is not recommended for production.
@@ -53,3 +54,8 @@ def validate_subscriptions() -> None:
             break
 
         start_validation_workflow_for_workflows(subscription=subscription, workflows=validation_product_workflows)
+
+
+@workflow("Validate subscriptions", target=Target.SYSTEM)
+def task_validate_subscriptions() -> StepList:
+    return init >> validate_subscriptions
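Subscription validation is now an ordinary schedulable task rather than a hard-coded cron job. A sketch of recreating the old 00:10 nightly schedule through the queue service, mirroring what `load-initial-schedule` seeds:

```python
from orchestrator.schedules.service import add_scheduled_task_to_queue
from orchestrator.schemas.schedules import APSchedulerJobCreate
from orchestrator.services.workflows import get_workflow_by_name

workflow = get_workflow_by_name("task_validate_subscriptions")
if workflow:
    add_scheduled_task_to_queue(
        APSchedulerJobCreate(
            name="Task Validate Subscriptions",
            workflow_name=workflow.name,
            workflow_id=workflow.workflow_id,
            trigger="cron",
            trigger_kwargs={"hour": 0, "minute": 10},
        )
    )
```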
@@ -16,6 +16,7 @@
         "task_resume_workflows": "Resume all workflows that are stuck on tasks with the status 'waiting'",
         "task_validate_products": "Validate Products and Subscriptions",
         "task_validate_product_type": "Validate all subscriptions of Product Type",
-        "reset_subscription_description": "Reset description of a subscription to default"
+        "reset_subscription_description": "Reset description of a subscription to default",
+        "task_validate_subscriptions": "Validate subscriptions"
     }
 }
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: orchestrator-core
-Version: 4.6.5
+Version: 4.7.0rc1
 Summary: This is the orchestrator workflow engine.
 Author-email: SURF <automation-beheer@surf.nl>
 Requires-Python: >=3.11,<3.15
@@ -37,14 +37,14 @@ Requires-Dist: apscheduler>=3.11.0
 Requires-Dist: click==8.*
 Requires-Dist: deepmerge==2.0
 Requires-Dist: deprecated>=1.2.18
-Requires-Dist: fastapi~=0.121.1
+Requires-Dist: fastapi~=0.124.0
 Requires-Dist: fastapi-etag==0.4.0
 Requires-Dist: itsdangerous>=2.2.0
 Requires-Dist: jinja2==3.1.6
 Requires-Dist: more-itertools~=10.8.0
 Requires-Dist: nwa-stdlib~=1.11.0
 Requires-Dist: oauth2-lib>=2.5.0
-Requires-Dist: orjson==3.11.4
+Requires-Dist: orjson==3.11.5
 Requires-Dist: pgvector>=0.4.1
 Requires-Dist: prometheus-client==0.23.1
 Requires-Dist: psycopg2-binary==2.9.11
@@ -52,9 +52,9 @@ Requires-Dist: pydantic-forms>=1.4.0
 Requires-Dist: pydantic-settings~=2.12.0
 Requires-Dist: pydantic[email]~=2.12.4
 Requires-Dist: python-dateutil==2.9.0.post0
-Requires-Dist: python-rapidjson>=1.22,<1.23
+Requires-Dist: python-rapidjson>=1.23,<1.24
 Requires-Dist: pytz==2025.2
-Requires-Dist: redis==5.3.1
+Requires-Dist: redis==7.1.0
 Requires-Dist: semver==3.0.4
 Requires-Dist: sentry-sdk[fastapi]>=2.29.1
 Requires-Dist: sqlalchemy==2.0.44
@@ -64,11 +64,11 @@ Requires-Dist: structlog>=25.4.0
 Requires-Dist: tabulate==0.9.0
 Requires-Dist: typer==0.20.0
 Requires-Dist: uvicorn[standard]~=0.38.0
-Requires-Dist: pydantic-ai-slim >=1.9.0 ; extra == "agent"
-Requires-Dist: ag-ui-protocol>=0.1.8 ; extra == "agent"
+Requires-Dist: pydantic-ai-slim >=1.27.0 ; extra == "agent"
+Requires-Dist: ag-ui-protocol>=0.1.10 ; extra == "agent"
 Requires-Dist: litellm>=1.75.7 ; extra == "agent"
-Requires-Dist: celery~=5.5.1 ; extra == "celery"
-Requires-Dist: litellm>=1.75.7 ; extra == "search"
+Requires-Dist: celery~=5.6.0 ; extra == "celery"
+Requires-Dist: litellm>=1.80.0 ; extra == "search"
 Project-URL: Documentation, https://workfloworchestrator.org/orchestrator-core
 Project-URL: Homepage, https://workfloworchestrator.org/orchestrator-core
 Project-URL: Source, https://github.com/workfloworchestrator/orchestrator-core
@@ -1,4 +1,4 @@
-orchestrator/__init__.py,sha256=hehQZgPnU4S_qlMQAFlyyXe8CPEjkPHZYOOzKd5Ff2M,1454
+orchestrator/__init__.py,sha256=HWuhzwoxPlAnuAGVtn9GBpZS5X185yqB_j-WnUzXnVE,1457
 orchestrator/agentic_app.py,sha256=ouiyyZiS4uS6Lox2DtbGGRnb2njJBMSHpSAGe-T5rX0,3028
 orchestrator/app.py,sha256=w8ubXaaogwjmwLM0TXqZaLkAhmaOTWzVlwiYbi5mHeE,13203
 orchestrator/exception_handlers.py,sha256=UsW3dw8q0QQlNLcV359bIotah8DYjMsj2Ts1LfX4ClY,1268
@@ -10,20 +10,21 @@ orchestrator/settings.py,sha256=mvs1VhBYth6Zp55HsNroML4DU1jiq5SkVM47_BLgcIo,4662
 orchestrator/targets.py,sha256=d7Fyh_mWIWPivA_E7DTNFpZID3xFW_K0JlZ5nksVX7k,830
 orchestrator/types.py,sha256=qzs7xx5AYRmKbpYRyJJP3wuDb0W0bcAzefCN0RWLAco,15459
 orchestrator/version.py,sha256=b58e08lxs47wUNXv0jXFO_ykpksmytuzEXD4La4W-NQ,1366
-orchestrator/workflow.py,sha256=QvAbJ3ySuqUnKrQ_EQ6PQqgkK1TmKZ7COioCCmeABwc,46391
+orchestrator/workflow.py,sha256=JF81oC2JqdmQwM6VcrfavRMYLv-dQa_bNeH0dkAFe-w,46397
 orchestrator/api/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
 orchestrator/api/error_handling.py,sha256=YrPCxSa-DSa9KwqIMlXI-KGBGnbGIW5ukOPiikUH9E4,1502
 orchestrator/api/helpers.py,sha256=s0QRHYw8AvEmlkmRhuEzz9xixaZKUF3YuPzUVHkcoXk,6933
 orchestrator/api/models.py,sha256=z9BDBx7uI4KBHWbD_LVrLsqNQ0_w-Mg9Qiy7PR_rZhk,5996
 orchestrator/api/api_v1/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
-orchestrator/api/api_v1/api.py,sha256=1qQRsIxKXLW3kcmSV5u3_v1TZk5RcNWb4ZOyLguhTKY,3488
+orchestrator/api/api_v1/api.py,sha256=vS3UpUG0EKuVGTa1FlSqIN8RHhDrrHIjPQP7CqWlzJo,3637
 orchestrator/api/api_v1/endpoints/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
 orchestrator/api/api_v1/endpoints/agent.py,sha256=9_s3nchTr9ua_1Sxs0kJl2mH_20o-DlUaaQPmY4vRMk,1732
 orchestrator/api/api_v1/endpoints/health.py,sha256=iaxs1XX1_250_gKNsspuULCV2GEMBjbtjsmfQTOvMAI,1284
-orchestrator/api/api_v1/endpoints/processes.py,sha256=z_bantT415cCuAw0viVrsi7HquHKMSmdjnuLkk9OZKI,16324
+orchestrator/api/api_v1/endpoints/processes.py,sha256=2pA996SqE7WUuR1sYMNN1gl4l1hTGS-jlYu6u2dRQaQ,16890
 orchestrator/api/api_v1/endpoints/product_blocks.py,sha256=kZ6ywIOsS_S2qGq7RvZ4KzjvaS1LmwbGWR37AKRvWOw,2146
 orchestrator/api/api_v1/endpoints/products.py,sha256=BfFtwu9dZXEQbtKxYj9icc73GKGvAGMR5ytyf41nQlQ,3081
 orchestrator/api/api_v1/endpoints/resource_types.py,sha256=gGyuaDyOD0TAVoeFGaGmjDGnQ8eQQArOxKrrk4MaDzA,2145
+orchestrator/api/api_v1/endpoints/schedules.py,sha256=eTG_4CQkiIi2akJUN4xDGuU_OvF6Ml6uye5MmQ_WJbc,1731
 orchestrator/api/api_v1/endpoints/search.py,sha256=R_OzfJfquoaTeGBwXTbomh16aYy0ael6_Xn3WkKfyjg,8575
 orchestrator/api/api_v1/endpoints/settings.py,sha256=5s-k169podZjgGHUbVDmSQwpY_3Cs_Bbf2PPtZIkBcw,6184
 orchestrator/api/api_v1/endpoints/subscription_customer_descriptions.py,sha256=1_6LtgQleoq3M6z_W-Qz__Bj3OFUweoPrUqHMwSH6AM,3288
@@ -40,7 +41,7 @@ orchestrator/cli/migrate_domain_models.py,sha256=WRXy_1OnziQwpsCFZXvjB30nDJtjj0i
 orchestrator/cli/migrate_tasks.py,sha256=bju8XColjSZD0v3rS4kl-24dLr8En_H4-6enBmqd494,7255
 orchestrator/cli/migrate_workflows.py,sha256=nxUpx0vgEIc_8aJrjAyrw3E9Dt8JmaamTts8oiQ4vHY,8923
 orchestrator/cli/migration_helpers.py,sha256=C5tpkP5WEBr7G9S-1k1hgSI8ili6xd9Z5ygc9notaK0,4110
-orchestrator/cli/scheduler.py,sha256=4jWpgxx0j0UFoba4Kw0nOEM6slr5XffDYBkm6hzK_C0,1766
+orchestrator/cli/scheduler.py,sha256=U-YLa_SkwGVAf7jQ8EQugtwTuycw3DtXhmCxlMpiQqQ,4396
 orchestrator/cli/domain_gen_helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 orchestrator/cli/domain_gen_helpers/fixed_input_helpers.py,sha256=uzpwsaau81hHSxNMOS9-o7kF-9_78R0f_UE0AvWooZQ,6775
 orchestrator/cli/domain_gen_helpers/helpers.py,sha256=tIPxn8ezED_xYZxH7ZAtQLwkDc6RNmLZVxWAoJ3a9lw,4203
@@ -106,7 +107,7 @@ orchestrator/cli/helpers/print_helpers.py,sha256=b3ePg6HfBLKPYBBVr5XOA__JnFEMI5H
 orchestrator/cli/search/__init__.py,sha256=K15_iW9ogR7xtX7qHDal4H09tmwVGnOBZWyPBLWhuzc,1274
 orchestrator/cli/search/display.py,sha256=PKy9sOTpq0WUdSfY2khLrIQ1OdAfsyl95ogF1Z6Dae0,3629
 orchestrator/cli/search/index_llm.py,sha256=VQlLNGXro4ZbehWZIMKPHGpGE4oF1at2bkWZssimWc4,2707
-orchestrator/cli/search/resize_embedding.py,sha256=iJdM7L6Kyq4CzRjXHWLwpGRiMnKK7xZ9133C0THebBE,4847
+orchestrator/cli/search/resize_embedding.py,sha256=5p2SmFqR-fHH1euTF8mSI3MmoXwN7yiV-JOZ7rdoxw0,4876
 orchestrator/cli/search/search_explore.py,sha256=LNAn6w13Q69fpv8CHcicHAbabrctrjGvwTjjJyC0AZY,8447
 orchestrator/cli/search/speedtest.py,sha256=J_l-8WxgN3YnqmwnbRhDyVbeqtvk3d2SfIpRBOJuhvE,4840
 orchestrator/config/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
@@ -116,7 +117,7 @@ orchestrator/db/database.py,sha256=MU_w_e95ho2dVb2JDnt_KFYholx___XDkiQXbc8wCkI,1
 orchestrator/db/helpers.py,sha256=L8kEdnSSNGnUpZhdeGx2arCodakWN8vSpKdfjoLuHdY,831
 orchestrator/db/listeners.py,sha256=UBPYcH0FE3a7aZQu_D0O_JMXpXIRYXC0gjSAvlv5GZo,1142
 orchestrator/db/loaders.py,sha256=ez6JzQ3IKVkC_oLAkVlIIiI8Do7hXbdcPKCvUSLxRog,7962
-orchestrator/db/models.py,sha256=AeBHLm0PLtoSAHoXzxjy9JsDQA5JhPvS7Ro55ynu_tk,31275
+orchestrator/db/models.py,sha256=qsXOonEDaxn_UXfuXLQfdnm1PV6a-JZVpXa46vd--Es,32201
 orchestrator/db/filters/__init__.py,sha256=RUj6P0XxEBhYj0SN5wH5-Vf_Wt_ilZR_n9DSar5m9oM,371
 orchestrator/db/filters/filters.py,sha256=55RtpQwM2rhrk4A6CCSeSXoo-BT9GnQoNTryA8CtLEg,5020
 orchestrator/db/filters/process.py,sha256=xvGhyfo_MZ1xhLvFC6yULjcT4mJk0fKc1glJIYgsWLE,4018
@@ -204,7 +205,7 @@ orchestrator/graphql/schemas/settings.py,sha256=drhm5VcLmUbiYAk6WUSJcyJqjNM96E6G
 orchestrator/graphql/schemas/strawberry_pydantic_patch.py,sha256=CjNUhTKdYmLiaem-WY_mzw4HASIeaZitxGF8pPocqVw,1602
 orchestrator/graphql/schemas/subscription.py,sha256=hTA34C27kgLguH9V53173CxMKIWiQKh3vFzyJ2yBfE0,9918
 orchestrator/graphql/schemas/version.py,sha256=HSzVg_y4Sjd5_H5rRUtu3FJKOG_8ifhvBNt_qjOtC-E,92
-orchestrator/graphql/schemas/workflow.py,sha256=ewE5mRuqMq7rnx8Au2eTUm3YTY1pivOWATNacZQ-trY,1773
+orchestrator/graphql/schemas/workflow.py,sha256=AAHQ2QecTPiAARAcB9Y4Y-ccEmUbpWuAfTYpl6WsbwU,1779
 orchestrator/graphql/utils/__init__.py,sha256=1JvenzEVW1CBa1sGVI9I8IWnnoXIkb1hneDqph9EEZY,524
 orchestrator/graphql/utils/create_resolver_error_handler.py,sha256=XzCnL482M4wz3fg5fUdGUwCAuzSZQ9Ufu1mscLyeoWU,1227
 orchestrator/graphql/utils/get_query_loaders.py,sha256=abS_HJ7K9een78gMiGq3IhwGwxQXHvZygExe0h_t9ns,815
@@ -257,13 +258,12 @@ orchestrator/migrations/versions/schema/2025-05-08_161918133bec_add_is_task_to_w
 orchestrator/migrations/versions/schema/2025-07-01_93fc5834c7e5_changed_timestamping_fields_in_process_steps.py,sha256=Oezd8b2qaI1Kyq-sZFVFmdzd4d9NjXrf6HtJGk11fy0,1914
 orchestrator/migrations/versions/schema/2025-07-04_4b58e336d1bf_deprecating_workflow_target_in_.py,sha256=xnD6w-97R4ClS7rbmXQEXc36K3fdcXKhCy7ZZNy_FX4,742
 orchestrator/migrations/versions/schema/2025-07-28_850dccac3b02_update_description_of_resume_workflows_.py,sha256=R6Qoga83DJ1IL0WYPu0u5u2ZvAmqGlDmUMv_KtJyOhQ,812
-orchestrator/schedules/__init__.py,sha256=Zy0fTOBMGIRFoh5iVFDLF9_PRAFaONYDThGK9EsysWo,981
-orchestrator/schedules/resume_workflows.py,sha256=jRnVRWDy687pQu-gtk80ecwiLSdrvtL15tG3U2zWA6I,891
-orchestrator/schedules/scheduler.py,sha256=9d6n-J2_GB6crOoVSCK29IfaktfUyzQYcTZl7gRTZ5c,6250
+orchestrator/migrations/versions/schema/2025-11-18_961eddbd4c13_create_linker_table_workflow_apscheduler.py,sha256=wJ01G2fpph9gYq0haeYRoSD21cTLOdn41axCSMyWV8o,3109
+orchestrator/schedules/__init__.py,sha256=WNjy4D4QdOKJh6ZEHJ119tXL1xJ3L7kMPuJWYnayjtE,868
+orchestrator/schedules/scheduler.py,sha256=8o7DoVs9Q1Q231FVMpv3tXtKbaydeNkYQ1h6kl7U1X4,7198
 orchestrator/schedules/scheduling.py,sha256=_mbpHMhijey8Y56ebtJ4wVkrp_kPVRm8hoByzlQF4SE,2821
-orchestrator/schedules/task_vacuum.py,sha256=mxb7fsy1GphRwvUWi_lvwNaj51YAXUdIDlkOJd90AFI,874
+orchestrator/schedules/service.py,sha256=CYWb_gB5Dw57AIiQtSVcLW4sEE69zNoWGuSe2WEIj_8,8940
 orchestrator/schedules/validate_products.py,sha256=_ucUG9HecskG2eN3tcDSiMzJK9gN3kZB1dXjrtxcApY,1324
-orchestrator/schedules/validate_subscriptions.py,sha256=bUBV45aEuqVdtqYBAXh1lX4O5vuNTeTfds4J_zq35dI,2113
 orchestrator/schemas/__init__.py,sha256=YDyZ0YBvzB4ML9oDBCBPGnBvf680zFFgUzg7X0tYBRY,2326
 orchestrator/schemas/base.py,sha256=Vc444LetsINLRhG2SxW9Bq01hOzChPOhQWCImQTr-As,930
 orchestrator/schemas/engine_settings.py,sha256=LF8al7tJssiilb5A4emPtUYo0tVDSaT1Lvo_DN_ttrY,1296
@@ -273,6 +273,7 @@ orchestrator/schemas/process.py,sha256=UACBNt-4g4v9Y528u-gZ-Wk7YxwJHhnI4cEu5CtQm
 orchestrator/schemas/product.py,sha256=MhMCh058ZuS2RJq-wSmxIPUNlhQexxXIx3DSz2OmOh4,1570
 orchestrator/schemas/product_block.py,sha256=kCqvm6qadHpegMr9aWI_fYX-T7mS-5S-ldPxnGQZg7M,1519
 orchestrator/schemas/resource_type.py,sha256=VDju4XywcDDLxdpbWU62RTvR9QF8x_GRrpTlN_NE8uI,1064
+orchestrator/schemas/schedules.py,sha256=Gb427IGR5mPTjKN8STwUhAWCJMCywJkrS8OetiiHTKY,2844
 orchestrator/schemas/search.py,sha256=d_Vs1qU9Z5zuXN4pDk6jrVwiUXRKZ93U-tHW5Zfrw-w,1546
 orchestrator/schemas/search_requests.py,sha256=j2X98eLRTr_dYeGcIc78iPyKoYaAgvl1NQjYV4L1CGY,1925
 orchestrator/schemas/subscription.py,sha256=-jXyHZIed9Xlia18ksSDyenblNN6Q2yM2FlGELyJ458,3423
@@ -331,7 +332,7 @@ orchestrator/services/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8X
 orchestrator/services/fixed_inputs.py,sha256=kyz7s2HLzyDulvcq-ZqefTw1om86COvyvTjz0_5CmgI,876
 orchestrator/services/input_state.py,sha256=6BZOpb3cHpO18K-XG-3QUIV9pIM25_ufdODrp5CmXG4,2390
 orchestrator/services/process_broadcast_thread.py,sha256=D44YbjF8mRqGuznkRUV4SoRn1J0lfy_x1H508GnSVlU,4649
-orchestrator/services/processes.py,sha256=LpJbq13UJOrNUKorwYBTV4-MJj-XLXFv6LBk7iyQgl8,30622
+orchestrator/services/processes.py,sha256=vMk30ImSE_0NXM1ffiBvXvaenAeqEYgQbbu_m-4ruGk,30350
 orchestrator/services/products.py,sha256=BP4KyE8zO-8z7Trrs5T6zKBOw53S9BfBJnHWI3p6u5Y,1943
 orchestrator/services/resource_types.py,sha256=_QBy_JOW_X3aSTqH0CuLrq4zBJL0p7Q-UDJUcuK2_qc,884
 orchestrator/services/settings.py,sha256=HEWfFulgoEDwgfxGEO__QTr5fDiwNBEj1UhAeTAdbLQ,3159
@@ -340,12 +341,12 @@ orchestrator/services/subscription_relations.py,sha256=aIdyzwyyy58OFhwjRPCPgnQTU
 orchestrator/services/subscriptions.py,sha256=XhJ5ygAAyWUIZHULhKyi1uU5DwkKZhzdxxn9vdQZYiA,27281
 orchestrator/services/tasks.py,sha256=mR3Fj1VsudltpanJKI2PvrxersyhVQ1skp8H7r3XnYI,5288
 orchestrator/services/translations.py,sha256=GyP8soUFGej8AS8uulBsk10CCK6Kwfjv9AHMFm3ElQY,1713
-orchestrator/services/workflows.py,sha256=iEkt2OBuTwkDru4V6ZSKatnw0b96ZdPV-VQqeZ9EOgU,4015
+orchestrator/services/workflows.py,sha256=H_oAMDuDVmFtCjpuDA75LA4hn5V8ay-uQe_AR98G1V8,4192
 orchestrator/services/executors/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
 orchestrator/services/executors/celery.py,sha256=j5xJo7sZAdTtc0GmmJzoYVfzuYKiqAdAe5QbtPv0bPI,4937
 orchestrator/services/executors/threadpool.py,sha256=SA0Lns17fP7qp5Y0bLZB7YzZ-sYKrmHQdYTeqs9dnV0,4931
 orchestrator/utils/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
-orchestrator/utils/auth.py,sha256=CyjVbouzP5-eQ8Fe2kpBdMaBIJ7Ej-Cx8MLq0iaIHa8,344
+orchestrator/utils/auth.py,sha256=H2wqAqq7qPka9SW5gcPbW1ES01nqAfCh_3PnxnBG-0Y,366
 orchestrator/utils/crypt.py,sha256=18eNamYWMllPkxyRtWIde3FDr3rSF74R5SAL6WsCj9Y,5584
 orchestrator/utils/datetime.py,sha256=a1WQ_yvu7MA0TiaRpC5avwbOSFdrj4eMrV4a7I2sD5Q,1477
 orchestrator/utils/deprecation_logger.py,sha256=oqju7ecJcB_r7cMnldaOAA79QUZYS_h69IkDrFV9nAg,875
@@ -365,11 +366,11 @@ orchestrator/utils/search_query.py,sha256=ji5LHtrzohGz6b1IG41cnPdpWXzLEzz4SGWgHl
 orchestrator/utils/state.py,sha256=ELH08cxvpmpnJg_ae0sMi9m_QX6SqHxNzOFaJgyW9gM,14344
 orchestrator/utils/strings.py,sha256=N0gWjmQaMjE9_99VtRvRaU8IBLTKMgBKSXcTZ9TpWAg,1077
 orchestrator/utils/validate_data_version.py,sha256=3Eioy2wE2EWKSgkyMKcEKrkCAfUIAq-eb73iRcpgppw,184
-orchestrator/websocket/__init__.py,sha256=V79jskk1z3uPIYgu0Gt6JLzuqr7NGfNeAZ-hbBqoUv4,5745
+orchestrator/websocket/__init__.py,sha256=M8_QSXOZ9R2hxEu8HoJhQA2YAEAiRXmYJn47Vvz-Y7Q,6321
 orchestrator/websocket/websocket_manager.py,sha256=hwlG9FDXcNU42jDNNsPMQLIyrvEpGX5cm_vrONsLH8s,2763
 orchestrator/websocket/managers/broadcast_websocket_manager.py,sha256=fwoSgTjkHJ2GmsLTU9dqQpAA9i8b1McPu7gLNzxtfG4,5401
 orchestrator/websocket/managers/memory_websocket_manager.py,sha256=lF5EEx1iFMCGEkTbItTDr88NENMSaSeG1QrJ7teoPkY,3324
-orchestrator/workflows/__init__.py,sha256=NzIGGI-8SNAwCk2YqH6sHhEWbgAY457ntDwjO15N8v4,4131
+orchestrator/workflows/__init__.py,sha256=FbwcAYJh8oSi0QFjXXXomdl9c8whCa_qSt_vPXcwasE,4216
 orchestrator/workflows/modify_note.py,sha256=eXt5KQvrkOXf-3YEXCn2XbBLP9N-n1pUYRW2t8Odupo,2150
 orchestrator/workflows/removed_workflow.py,sha256=V0Da5TEdfLdZZKD38ig-MTp3_IuE7VGqzHHzvPYQmLI,909
 orchestrator/workflows/steps.py,sha256=VVLRK9_7KzrBlnK7L8eSmRMNVOO7VJBh5OSjHQHM9fU,7019
@@ -379,8 +380,9 @@ orchestrator/workflows/tasks/cleanup_tasks_log.py,sha256=BfWYbPXhnLAHUJ0mlODDnjZ
 orchestrator/workflows/tasks/resume_workflows.py,sha256=T3iobSJjVgiupe0rClD34kUZ7KF4pL5yK2AVeRLZog8,4313
 orchestrator/workflows/tasks/validate_product_type.py,sha256=lo2TX_MZOfcOmYFjLyD82FrJ5AAN3HOsE6BhDVFuy9Q,3210
 orchestrator/workflows/tasks/validate_products.py,sha256=GZJBoFF-WMphS7ghMs2-gqvV2iL1F0POhk0uSNt93n0,8510
-orchestrator/workflows/translations/en-GB.json,sha256=Gc5gy_RghZOeSNcJIntAsz_7DsCg8n_vzoHBPXxCn_U,908
-orchestrator_core-4.6.5.dist-info/licenses/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
-orchestrator_core-4.6.5.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-orchestrator_core-4.6.5.dist-info/METADATA,sha256=VmDSDmF04lRtR_MSn6yqZc80yZRaFnf2iVO1QeJJgBU,6416
-orchestrator_core-4.6.5.dist-info/RECORD,,
+orchestrator/workflows/tasks/validate_subscriptions.py,sha256=OZtqO6aJR4KwEFvBioV2gWAjmyLiWxe9Wlps3YmFh9w,2200
+orchestrator/workflows/translations/en-GB.json,sha256=ObBlH9XILJ9uNaGcJexi3IB0e6P8CKFKRgu29luIEM8,973
+orchestrator_core-4.7.0rc1.dist-info/licenses/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
+orchestrator_core-4.7.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+orchestrator_core-4.7.0rc1.dist-info/METADATA,sha256=usMBg-ktGcbAy1WVADeof9gd_oBI4OQg2fPJfOhv60A,6421
+orchestrator_core-4.7.0rc1.dist-info/RECORD,,
@@ -1,21 +0,0 @@
-# Copyright 2019-2020 SURF.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from orchestrator.schedules.scheduler import scheduler
-from orchestrator.services.processes import start_process
-
-
-@scheduler.scheduled_job(id="resume-workflows", name="Resume workflows", trigger="interval", hours=1)  # type: ignore[misc]
-def run_resume_workflows() -> None:
-    start_process("task_resume_workflows")
@@ -1,21 +0,0 @@
-# Copyright 2019-2020 SURF.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from orchestrator.schedules.scheduler import scheduler
-from orchestrator.services.processes import start_process
-
-
-@scheduler.scheduled_job(id="clean-tasks", name="Clean up tasks", trigger="interval", hours=6)  # type: ignore[misc]
-def vacuum_tasks() -> None:
-    start_process("task_clean_up_tasks")