orchestrator-core 4.3.1__py3-none-any.whl → 4.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
orchestrator/__init__.py CHANGED
@@ -13,7 +13,7 @@
 
  """This is the orchestrator workflow engine."""
 
- __version__ = "4.3.1"
+ __version__ = "4.4.0"
 
  from orchestrator.app import OrchestratorCore
  from orchestrator.settings import app_settings

orchestrator/api/api_v1/endpoints/subscriptions.py CHANGED
@@ -12,6 +12,7 @@
  # limitations under the License.
 
  """Module that implements subscription related API endpoints."""
+
  from http import HTTPStatus
  from typing import Any
  from uuid import UUID

orchestrator/cli/scheduler.py CHANGED
@@ -13,12 +13,13 @@
 
 
  import logging
- from time import sleep
+ import time
 
- import schedule
  import typer
 
- from orchestrator.schedules import ALL_SCHEDULERS
+ from orchestrator.schedules.scheduler import (
+     get_paused_scheduler,
+ )
 
  log = logging.getLogger(__name__)
 
@@ -27,36 +28,41 @@ app: typer.Typer = typer.Typer()
 
  @app.command()
  def run() -> None:
-     """Loop eternally and run schedulers at configured times."""
-     for s in ALL_SCHEDULERS:
-         job = getattr(schedule.every(s.period), s.time_unit)
-         if s.at:
-             job = job.at(s.at)
-         job.do(s).tag(s.name)
-     log.info("Starting Schedule")
-     for j in schedule.jobs:
-         log.info("%s: %s", ", ".join(j.tags), j)
-     while True:
-         schedule.run_pending()
-         idle = schedule.idle_seconds()
-         if idle < 0:
-             log.info("Next job in queue is scheduled in the past, run it now.")
-         else:
-             log.info("Sleeping for %d seconds", idle)
-             sleep(idle)
+     """Start scheduler and loop eternally to keep thread alive."""
+     with get_paused_scheduler() as scheduler:
+         scheduler.resume()
+
+         while True:
+             time.sleep(1)
 
 
  @app.command()
  def show_schedule() -> None:
-     """Show the currently configured schedule."""
-     for s in ALL_SCHEDULERS:
-         at_str = f"@ {s.at} " if s.at else ""
-         typer.echo(f"{s.name}: {s.__name__} {at_str}every {s.period} {s.time_unit}")
+     """Show the currently configured schedule.
+
+     in cli underscore is replaced by a dash `show-schedule`
+     """
+     with get_paused_scheduler() as scheduler:
+         jobs = scheduler.get_jobs()
+
+         for job in jobs:
+             typer.echo(f"[{job.id}] Next run: {job.next_run_time} | Trigger: {job.trigger}")
 
 
  @app.command()
- def force(keyword: str) -> None:
-     """Force the execution of (a) scheduler(s) based on a keyword."""
-     for s in ALL_SCHEDULERS:
-         if keyword in s.name or keyword in s.__name__:
-             s()
+ def force(job_id: str) -> None:
+     """Force the execution of (a) scheduler(s) based on a job_id."""
+     with get_paused_scheduler() as scheduler:
+         job = scheduler.get_job(job_id)
+
+         if not job:
+             typer.echo(f"Job '{job_id}' not found.")
+             raise typer.Exit(code=1)
+
+         typer.echo(f"Running job [{job.id}] now...")
+         try:
+             job.func(*job.args or (), **job.kwargs or {})
+             typer.echo("Job executed successfully.")
+         except Exception as e:
+             typer.echo(f"Job execution failed: {e}")
+             raise typer.Exit(code=1)
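
Note (editorial, not part of the diff): a minimal sketch of exercising the new CLI commands with Typer's test runner, assuming the `app` object above is importable from orchestrator.cli.scheduler and a reachable DATABASE_URI is configured for the SQLAlchemy jobstore; the job id "resume-workflows" comes from the resume_workflows schedule further down in this diff.

    # Hypothetical usage sketch, not shipped with the package.
    from typer.testing import CliRunner

    from orchestrator.cli.scheduler import app

    runner = CliRunner()

    # `show_schedule` is exposed on the command line as `show-schedule`.
    result = runner.invoke(app, ["show-schedule"])
    print(result.output)

    # Force a single job by its APScheduler job id; exits non-zero when the id is unknown.
    result = runner.invoke(app, ["force", "resume-workflows"])
    print(result.exit_code, result.output)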

orchestrator/graphql/resolvers/scheduled_tasks.py ADDED
@@ -0,0 +1,36 @@
+ import structlog
+
+ from orchestrator.db.filters import Filter
+ from orchestrator.db.sorting import Sort
+ from orchestrator.graphql.pagination import Connection
+ from orchestrator.graphql.schemas.scheduled_task import ScheduledTaskGraphql
+ from orchestrator.graphql.types import GraphqlFilter, GraphqlSort, OrchestratorInfo
+ from orchestrator.graphql.utils import create_resolver_error_handler, to_graphql_result_page
+ from orchestrator.graphql.utils.is_query_detailed import is_querying_page_data
+ from orchestrator.schedules.scheduler import get_scheduler_tasks, scheduled_task_filter_keys, scheduled_task_sort_keys
+
+ logger = structlog.get_logger(__name__)
+
+
+ async def resolve_scheduled_tasks(
+     info: OrchestratorInfo,
+     filter_by: list[GraphqlFilter] | None = None,
+     sort_by: list[GraphqlSort] | None = None,
+     first: int = 10,
+     after: int = 0,
+ ) -> Connection[ScheduledTaskGraphql]:
+     _error_handler = create_resolver_error_handler(info)
+
+     pydantic_filter_by: list[Filter] = [item.to_pydantic() for item in filter_by] if filter_by else []
+     pydantic_sort_by: list[Sort] = [item.to_pydantic() for item in sort_by] if sort_by else []
+     scheduled_tasks, total = get_scheduler_tasks(
+         first=first, after=after, filter_by=pydantic_filter_by, sort_by=pydantic_sort_by, error_handler=_error_handler
+     )
+
+     graphql_scheduled_tasks = []
+     if is_querying_page_data(info):
+         graphql_scheduled_tasks = [ScheduledTaskGraphql.from_pydantic(p) for p in scheduled_tasks]
+
+     return to_graphql_result_page(
+         graphql_scheduled_tasks, first, after, total, scheduled_task_filter_keys, scheduled_task_sort_keys
+     )

orchestrator/graphql/schema.py CHANGED
@@ -51,12 +51,14 @@ from orchestrator.graphql.resolvers (
      resolve_version,
      resolve_workflows,
  )
+ from orchestrator.graphql.resolvers.scheduled_tasks import resolve_scheduled_tasks
  from orchestrator.graphql.schemas import DEFAULT_GRAPHQL_MODELS
  from orchestrator.graphql.schemas.customer import CustomerType
  from orchestrator.graphql.schemas.process import ProcessType
  from orchestrator.graphql.schemas.product import ProductType
  from orchestrator.graphql.schemas.product_block import ProductBlock
  from orchestrator.graphql.schemas.resource_type import ResourceType
+ from orchestrator.graphql.schemas.scheduled_task import ScheduledTaskGraphql
  from orchestrator.graphql.schemas.settings import StatusType
  from orchestrator.graphql.schemas.subscription import SubscriptionInterface
  from orchestrator.graphql.schemas.version import VersionType
@@ -99,6 +101,9 @@ class OrchestratorQuery:
          description="Returns information about cache, workers, and global engine settings",
      )
      version: VersionType = authenticated_field(resolver=resolve_version, description="Returns version information")
+     scheduled_tasks: Connection[ScheduledTaskGraphql] = authenticated_field(
+         resolver=resolve_scheduled_tasks, description="Returns scheduled job information"
+     )
 
 
  @strawberry.federation.type(description="Orchestrator customer Query")

orchestrator/graphql/schemas/process.py CHANGED
@@ -82,8 +82,8 @@ class ProcessType:
      )
 
      @strawberry.field(description="Returns user permissions for operations on this process") # type: ignore
-     def user_permissions(self, info: OrchestratorInfo) -> FormUserPermissionsType:
-         oidc_user = info.context.get_current_user
+     async def user_permissions(self, info: OrchestratorInfo) -> FormUserPermissionsType:
+         oidc_user = await info.context.get_current_user
          workflow = get_workflow(self.workflow_name)
          process = load_process(db.session.get(ProcessTable, self.process_id)) # type: ignore[arg-type]
          auth_resume, auth_retry = get_auth_callbacks(get_steps_to_evaluate_for_rbac(process), workflow) # type: ignore[arg-type]

orchestrator/graphql/schemas/scheduled_task.py ADDED
@@ -0,0 +1,8 @@
+ import strawberry
+
+ from orchestrator.schedules.scheduler import ScheduledTask
+
+
+ @strawberry.experimental.pydantic.type(model=ScheduledTask, all_fields=True)
+ class ScheduledTaskGraphql:
+     pass

orchestrator/graphql/schemas/workflow.py CHANGED
@@ -33,8 +33,8 @@ class Workflow:
          return [Step(name=step.name, assignee=step.assignee) for step in get_workflow(self.name).steps] # type: ignore
 
      @strawberry.field(description="Return whether the currently logged-in used is allowed to start this workflow") # type: ignore
-     def is_allowed(self, info: OrchestratorInfo) -> bool:
-         oidc_user = info.context.get_current_user
+     async def is_allowed(self, info: OrchestratorInfo) -> bool:
+         oidc_user = await info.context.get_current_user
          workflow_table = get_original_model(self, WorkflowTable)
          workflow = get_workflow(workflow_table.name)
 

orchestrator/graphql/utils/create_resolver_error_handler.py CHANGED
@@ -25,6 +25,6 @@ def _format_context(context: dict) -> str:
 
  def create_resolver_error_handler(info: OrchestratorInfo) -> CallableErrorHandler:
      def handle_error(message: str, **context) -> None: # type: ignore
-         return register_error(" ".join([message, _format_context(context)]), info, error_type=ErrorType.BAD_REQUEST)
+         return register_error(f"{message} {_format_context(context)}", info, error_type=ErrorType.BAD_REQUEST)
 
      return handle_error

orchestrator/schedules/__init__.py CHANGED
@@ -13,12 +13,11 @@
 
 
  from orchestrator.schedules.resume_workflows import run_resume_workflows
- from orchestrator.schedules.scheduling import SchedulingFunction
  from orchestrator.schedules.task_vacuum import vacuum_tasks
  from orchestrator.schedules.validate_products import validate_products
  from orchestrator.schedules.validate_subscriptions import validate_subscriptions
 
- ALL_SCHEDULERS: list[SchedulingFunction] = [
+ ALL_SCHEDULERS: list = [
      run_resume_workflows,
      vacuum_tasks,
      validate_subscriptions,

orchestrator/schedules/resume_workflows.py CHANGED
@@ -12,10 +12,10 @@
  # limitations under the License.
 
 
- from orchestrator.schedules.scheduling import scheduler
+ from orchestrator.schedules.scheduler import scheduler
  from orchestrator.services.processes import start_process
 
 
- @scheduler(name="Resume workflows", time_unit="hour", period=1)
+ @scheduler.scheduled_job(id="resume-workflows", name="Resume workflows", trigger="interval", hours=1) # type: ignore[misc]
  def run_resume_workflows() -> None:
      start_process("task_resume_workflows")

orchestrator/schedules/scheduler.py ADDED
@@ -0,0 +1,170 @@
+ # Copyright 2019-2020 SURF.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+
+ from contextlib import contextmanager
+ from datetime import datetime
+ from typing import Any, Generator
+
+ from apscheduler.executors.pool import ThreadPoolExecutor
+ from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+ from apscheduler.schedulers.background import BackgroundScheduler
+ from more_itertools import partition
+ from pydantic import BaseModel
+
+ from orchestrator.db.filters import Filter
+ from orchestrator.db.filters.filters import CallableErrorHandler
+ from orchestrator.db.sorting import Sort
+ from orchestrator.db.sorting.sorting import SortOrder
+ from orchestrator.settings import app_settings
+ from orchestrator.utils.helpers import camel_to_snake, to_camel
+
+ jobstores = {"default": SQLAlchemyJobStore(url=str(app_settings.DATABASE_URI))}
+ executors = {
+     "default": ThreadPoolExecutor(1),
+ }
+ job_defaults = {
+     "coalesce": True,
+ }
+
+ scheduler = BackgroundScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults)
+
+
+ def scheduler_dispose_db_connections() -> None:
+     jobstores["default"].engine.dispose()
+
+
+ @contextmanager
+ def get_paused_scheduler() -> Generator[BackgroundScheduler, Any, None]:
+     scheduler.start(paused=True)
+
+     try:
+         yield scheduler
+     finally:
+         scheduler.shutdown()
+         scheduler_dispose_db_connections()
+
+
+ class ScheduledTask(BaseModel):
+     id: str
+     name: str | None = None
+     next_run_time: datetime | None = None
+     trigger: str
+
+
+ scheduled_task_keys = set(ScheduledTask.model_fields.keys())
+ scheduled_task_filter_keys = sorted(scheduled_task_keys | {to_camel(key) for key in scheduled_task_keys})
+ scheduled_task_sort_keys = scheduled_task_filter_keys
+
+
+ def scheduled_task_in_filter(job: ScheduledTask, filter_by: list[Filter]) -> bool:
+     return any(f.value.lower() in getattr(job, camel_to_snake(f.field), "").lower() for f in filter_by)
+
+
+ def filter_scheduled_tasks(
+     scheduled_tasks: list[ScheduledTask],
+     handle_filter_error: CallableErrorHandler,
+     filter_by: list[Filter] | None = None,
+ ) -> list[ScheduledTask]:
+     if not filter_by:
+         return scheduled_tasks
+
+     try:
+         invalid_filters, valid_filters = partition(lambda x: x.field in scheduled_task_filter_keys, filter_by)
+
+         if invalid_list := [item.field for item in invalid_filters]:
+             handle_filter_error(
+                 "Invalid filter arguments", invalid_filters=invalid_list, valid_filter_keys=scheduled_task_filter_keys
+             )
+
+         valid_filter_list = list(valid_filters)
+         return [task for task in scheduled_tasks if scheduled_task_in_filter(task, valid_filter_list)]
+     except Exception as e:
+         handle_filter_error(str(e))
+         return []
+
+
+ def _invert(value: Any) -> Any:
+     """Invert value for descending order."""
+     if isinstance(value, (int, float)):
+         return -value
+     if isinstance(value, str):
+         return tuple(-ord(c) for c in value)
+     if isinstance(value, datetime):
+         return -value.timestamp()
+     return value
+
+
+ def sort_key(sort_field: str, sort_order: SortOrder) -> Any:
+     def _sort_key(task: Any) -> Any:
+         value = getattr(task, camel_to_snake(sort_field), None)
+         if sort_field == "next_run_time" and value is None:
+             return float("inf") if sort_order == SortOrder.ASC else float("-inf")
+         return value if sort_order == SortOrder.ASC else _invert(value)
+
+     return _sort_key
+
+
+ def sort_scheduled_tasks(
+     scheduled_tasks: list[ScheduledTask], handle_sort_error: CallableErrorHandler, sort_by: list[Sort] | None = None
+ ) -> list[ScheduledTask]:
+     if not sort_by:
+         return scheduled_tasks
+
+     try:
+         invalid_sorting, valid_sorting = partition(lambda x: x.field in scheduled_task_sort_keys, sort_by)
+         if invalid_list := [item.field for item in invalid_sorting]:
+             handle_sort_error(
+                 "Invalid sort arguments", invalid_sorting=invalid_list, valid_sort_keys=scheduled_task_sort_keys
+             )
+
+         valid_sort_list = list(valid_sorting)
+         return sorted(
+             scheduled_tasks, key=lambda task: tuple(sort_key(sort.field, sort.order)(task) for sort in valid_sort_list)
+         )
+     except Exception as e:
+         handle_sort_error(str(e))
+         return []
+
+
+ def default_error_handler(message: str, **context) -> None: # type: ignore
+     from orchestrator.graphql.utils.create_resolver_error_handler import _format_context
+
+     raise ValueError(f"{message} {_format_context(context)}")
+
+
+ def get_scheduler_tasks(
+     first: int = 10,
+     after: int = 0,
+     filter_by: list[Filter] | None = None,
+     sort_by: list[Sort] | None = None,
+     error_handler: CallableErrorHandler = default_error_handler,
+ ) -> tuple[list[ScheduledTask], int]:
+     with get_paused_scheduler() as pauzed_scheduler:
+         scheduled_tasks = pauzed_scheduler.get_jobs()
+
+     scheduled_tasks = filter_scheduled_tasks(scheduled_tasks, error_handler, filter_by)
+     scheduled_tasks = sort_scheduled_tasks(scheduled_tasks, error_handler, sort_by)
+
+     total = len(scheduled_tasks)
+     paginated_tasks = scheduled_tasks[after : after + first + 1]
+
+     return [
+         ScheduledTask(
+             id=task.id,
+             name=task.name,
+             next_run_time=task.next_run_time,
+             trigger=str(task.trigger),
+         )
+         for task in paginated_tasks
+     ], total
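
Note (editorial, not part of the diff): a minimal sketch of the new module's public pieces, assuming a reachable DATABASE_URI for the SQLAlchemy jobstore; `my_task` and its id are placeholders, not shipped code.

    # Hypothetical usage sketch of orchestrator.schedules.scheduler.
    from orchestrator.schedules.scheduler import get_scheduler_tasks, scheduler


    # Register a job on the shared APScheduler instance, mirroring the built-in schedules.
    @scheduler.scheduled_job(id="my-task", name="My task", trigger="interval", minutes=30)
    def my_task() -> None:
        print("running my task")


    # List persisted jobs the same way the GraphQL resolver does.
    tasks, total = get_scheduler_tasks(first=10, after=0)
    for task in tasks:
        print(task.id, task.trigger, task.next_run_time)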

orchestrator/schedules/scheduling.py CHANGED
@@ -12,37 +12,77 @@
  # limitations under the License.
 
  from collections.abc import Callable
- from typing import Protocol, cast
+ from typing import TypeVar
 
- from schedule import CancelJob
+ from apscheduler.schedulers.base import BaseScheduler
+ from deprecated import deprecated
 
+ from orchestrator.schedules.scheduler import scheduler as default_scheduler # your global scheduler instance
 
- class SchedulingFunction(Protocol):
-     __name__: str
-     name: str
-     time_unit: str
-     period: int | None
-     at: str | None
-
-     def __call__(self) -> CancelJob | None: ...
+ F = TypeVar("F", bound=Callable[..., object])
 
 
+ @deprecated(
+     reason="We changed from scheduler to apscheduler which has its own decoractor, use `@scheduler.scheduled_job()` from `from orchestrator.scheduling.scheduler import scheduler`"
+ )
  def scheduler(
-     name: str, time_unit: str, period: int = 1, at: str | None = None
- ) -> Callable[[Callable[[], CancelJob | None]], SchedulingFunction]:
-     """Create schedule.
+     name: str,
+     time_unit: str,
+     period: int = 1,
+     at: str | None = None,
+     *,
+     id: str | None = None,
+     scheduler: BaseScheduler = default_scheduler,
+ ) -> Callable[[F], F]:
+     """APScheduler-compatible decorator to schedule a function.
+
+     id is necessary with apscheduler, if left empty it takes the function name.
 
-     Either specify the period or the at. Examples:
-     time_unit = "hours", period = 12 -> will run every 12 hours
-     time_unit = "day", at="01:00" -> will run every day at 1 o'clock
+     - `time_unit = "hours", period = 12` → every 12 hours
+     - `time_unit = "day", at = "01:00"` → every day at 1 AM
      """
 
-     def _scheduler(f: Callable[[], CancelJob | None]) -> SchedulingFunction:
-         schedule = cast(SchedulingFunction, f)
-         schedule.name = name
-         schedule.time_unit = time_unit
-         schedule.period = period
-         schedule.at = at
-         return schedule
+     def decorator(func: F) -> F:
+         job_id = id or func.__name__
+
+         trigger = "interval"
+         kwargs: dict[str, int] = {}
+         if time_unit == "day" and at:
+             trigger = "cron"
+             try:
+                 hour, minute = map(int, at.split(":"))
+             except ValueError:
+                 raise ValueError(f"Invalid time format for 'at': {at}, expected 'HH:MM'")
+
+             kwargs = {
+                 "hour": hour,
+                 "minute": minute,
+             }
+         else:
+             # Map string units to timedelta kwargs for IntervalTrigger
+             unit_map = {
+                 "seconds": "seconds",
+                 "second": "seconds",
+                 "minutes": "minutes",
+                 "minute": "minutes",
+                 "hours": "hours",
+                 "hour": "hours",
+                 "days": "days",
+                 "day": "days",
+             }
+
+             interval_arg = unit_map.get(time_unit.lower(), time_unit.lower())
+             kwargs = {interval_arg: period}
+
+         scheduler.add_job(
+             func,
+             trigger=trigger,
+             id=job_id,
+             name=name,
+             replace_existing=True,
+             **kwargs,
+         )
+
+         return func
 
-     return _scheduler
+     return decorator
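
Note (editorial, not part of the diff): a sketch of the migration this shim enables; the task names and ids below are placeholders. The deprecated decorator translates its old arguments into an APScheduler trigger, while new code registers directly on the shared scheduler instance.

    # Hypothetical migration sketch.
    from orchestrator.schedules.scheduler import scheduler
    from orchestrator.schedules.scheduling import scheduler as legacy_scheduler


    # Deprecated style: still accepted, internally converted to a cron/interval job.
    @legacy_scheduler(name="Nightly report", time_unit="day", at="01:00")
    def nightly_report() -> None:
        ...


    # Preferred style: register the job directly with APScheduler.
    @scheduler.scheduled_job(id="nightly-report", name="Nightly report", trigger="cron", hour=1, minute=0)
    def nightly_report_new() -> None:
        ...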

orchestrator/schedules/task_vacuum.py CHANGED
@@ -12,10 +12,10 @@
  # limitations under the License.
 
 
- from orchestrator.schedules.scheduling import scheduler
+ from orchestrator.schedules.scheduler import scheduler
  from orchestrator.services.processes import start_process
 
 
- @scheduler(name="Clean up tasks", time_unit="hours", period=6)
+ @scheduler.scheduled_job(id="clean-tasks", name="Clean up tasks", trigger="interval", hours=6) # type: ignore[misc]
  def vacuum_tasks() -> None:
      start_process("task_clean_up_tasks")

orchestrator/schedules/validate_products.py CHANGED
@@ -14,11 +14,17 @@ from sqlalchemy import func, select
 
  from orchestrator.db import db
  from orchestrator.db.models import ProcessTable
- from orchestrator.schedules.scheduling import scheduler
+ from orchestrator.schedules.scheduler import scheduler
  from orchestrator.services.processes import start_process
 
 
- @scheduler(name="Validate Products and inactive subscriptions", time_unit="day", at="02:30")
+ @scheduler.scheduled_job( # type: ignore[misc]
+     id="validate-products",
+     name="Validate Products and inactive subscriptions",
+     trigger="cron",
+     hour=2,
+     minute=30,
+ )
  def validate_products() -> None:
      uncompleted_products = db.session.scalar(
          select(func.count())

orchestrator/schedules/validate_subscriptions.py CHANGED
@@ -16,7 +16,7 @@ from threading import BoundedSemaphore
 
  import structlog
 
- from orchestrator.schedules.scheduling import scheduler
+ from orchestrator.schedules.scheduler import scheduler
  from orchestrator.services.subscriptions import (
      get_subscriptions_on_product_table,
      get_subscriptions_on_product_table_in_sync,
@@ -33,7 +33,7 @@ logger = structlog.get_logger(__name__)
 
  task_semaphore = BoundedSemaphore(value=2)
 
- @scheduler(name="Subscriptions Validator", time_unit="day", at="00:10")
+ @scheduler.scheduled_job(id="subscriptions-validator", name="Subscriptions Validator", trigger="cron", hour=0, minute=10) # type: ignore[misc]
  def validate_subscriptions() -> None:
      if app_settings.VALIDATE_OUT_OF_SYNC_SUBSCRIPTIONS:
          # Automatically re-validate out-of-sync subscriptions. This is not recommended for production.

orchestrator/services/subscriptions.py CHANGED
@@ -12,6 +12,7 @@
  # limitations under the License.
 
  """Module that provides service functions on subscriptions."""
+
  import pickle # noqa: S403
  from collections import defaultdict
  from collections.abc import Sequence
@@ -506,6 +507,7 @@ TARGET_DEFAULT_USABLE_MAP: dict[Target, list[str]] = {
      Target.TERMINATE: ["active", "provisioning"],
      Target.SYSTEM: ["active"],
      Target.VALIDATE: ["active"],
+     Target.RECONCILE: ["active"],
  }
 
  WF_USABLE_MAP: dict[str, list[str]] = {}
@@ -532,6 +534,7 @@ def subscription_workflows(subscription: SubscriptionTable) -> dict[str, Any]:
      ...     "terminate": [],
      ...     "system": [],
      ...     "validate": [],
+     ...     "reconcile: [],
      ... }
 
      """
@@ -552,6 +555,7 @@ def subscription_workflows(subscription: SubscriptionTable) -> dict[str, Any]:
          "terminate": [],
          "system": [],
          "validate": [],
+         "reconcile": [],
      }
      for workflow in subscription.product.workflows:
          if workflow.name in WF_USABLE_WHILE_OUT_OF_SYNC or workflow.is_task:
orchestrator/targets.py CHANGED
@@ -23,3 +23,4 @@ class Target(strEnum):
      TERMINATE = "TERMINATE"
      SYSTEM = "SYSTEM"
      VALIDATE = "VALIDATE"
+     RECONCILE = "RECONCILE"

orchestrator/workflows/utils.py CHANGED
@@ -154,7 +154,7 @@ def _generate_modify_form(workflow_target: str, workflow_name: str) -> InputForm
 
 
  def wrap_modify_initial_input_form(initial_input_form: InputStepFunc | None) -> StateInputStepFunc | None:
-     """Wrap initial input for modify and terminate workflows.
+     """Wrap initial input for modify, reconcile and terminate workflows.
 
      This is needed because the frontend expects all modify workflows to start with a page that only contains the
      subscription id. It also expects the second page to have some user visible inputs and the subscription id *again*.
@@ -355,6 +355,53 @@ def validate_workflow(description: str) -> Callable[[Callable[[], StepList]], Wo
      return _validate_workflow
 
 
+ def reconcile_workflow(
+     description: str,
+     additional_steps: StepList | None = None,
+     authorize_callback: Authorizer | None = None,
+     retry_auth_callback: Authorizer | None = None,
+ ) -> Callable[[Callable[[], StepList]], Workflow]:
+     """Similar to a modify_workflow but without required input user input to perform a sync with external systems based on the subscriptions existing configuration.
+
+     Use this for subscription reconcile workflows.
+
+     Example::
+
+         @reconcile_workflow("Reconcile l2vpn")
+         def reconcile_l2vpn() -> StepList:
+             return (
+                 begin
+                 >> update_l2vpn_in_external_systems
+             )
+     """
+
+     wrapped_reconcile_initial_input_form_generator = wrap_modify_initial_input_form(None)
+
+     def _reconcile_workflow(f: Callable[[], StepList]) -> Workflow:
+         steplist = (
+             init
+             >> store_process_subscription()
+             >> unsync
+             >> f()
+             >> (additional_steps or StepList())
+             >> resync
+             >> refresh_subscription_search_index
+             >> done
+         )
+
+         return make_workflow(
+             f,
+             description,
+             wrapped_reconcile_initial_input_form_generator,
+             Target.RECONCILE,
+             steplist,
+             authorize_callback=authorize_callback,
+             retry_auth_callback=retry_auth_callback,
+         )
+
+     return _reconcile_workflow
+
+
  def ensure_provisioning_status(modify_steps: Step | StepList) -> StepList:
      """Decorator to ensure subscription modifications are executed only during Provisioning status."""
      return (
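
Note (editorial, not part of the diff): a sketch of a product-specific workflow built with the new reconcile_workflow decorator; the step and product names are placeholders. Per the TARGET_DEFAULT_USABLE_MAP change above, Target.RECONCILE workflows are offered only for active subscriptions and need no initial user input.

    # Hypothetical usage sketch.
    from orchestrator.types import State
    from orchestrator.workflow import StepList, begin, step
    from orchestrator.workflows.utils import reconcile_workflow


    @step("Push subscription state to external systems")
    def sync_external_systems() -> State:
        return {}


    @reconcile_workflow("Reconcile example product")
    def reconcile_example_product() -> StepList:
        return begin >> sync_external_systems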

orchestrator_core-4.3.1.dist-info/METADATA → orchestrator_core-4.4.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: orchestrator-core
- Version: 4.3.1
+ Version: 4.4.0
  Summary: This is the orchestrator workflow engine.
  Author-email: SURF <automation-beheer@surf.nl>
  Requires-Python: >=3.11,<3.14
@@ -32,6 +32,7 @@ Classifier: Typing :: Typed
  License-File: LICENSE
  Requires-Dist: alembic==1.16.1
  Requires-Dist: anyio>=3.7.0
+ Requires-Dist: apscheduler>=3.11.0
  Requires-Dist: click==8.*
  Requires-Dist: deepmerge==2.0
  Requires-Dist: deprecated>=1.2.18
@@ -52,7 +53,6 @@ Requires-Dist: python-dateutil==2.8.2
  Requires-Dist: python-rapidjson>=1.18,<1.21
  Requires-Dist: pytz==2025.2
  Requires-Dist: redis==5.1.1
- Requires-Dist: schedule==1.1.0
  Requires-Dist: semver==3.0.4
  Requires-Dist: sentry-sdk[fastapi]~=2.29.1
  Requires-Dist: sqlalchemy==2.0.41

orchestrator_core-4.3.1.dist-info/RECORD → orchestrator_core-4.4.0.dist-info/RECORD CHANGED
@@ -1,11 +1,11 @@
- orchestrator/__init__.py,sha256=A485Z6oxvKfpqVhk24XIbDwJZrxQE0dCBgzWyjrzkOw,1063
+ orchestrator/__init__.py,sha256=mRsVPvC1u2rI_N-BlO3WFwcBGnHqkbyV8Ck2axmu3w0,1063
  orchestrator/app.py,sha256=7UrXKjBKNSEaSSXAd5ww_RdMFhFqE4yvfj8faS2MzAA,12089
  orchestrator/exception_handlers.py,sha256=UsW3dw8q0QQlNLcV359bIotah8DYjMsj2Ts1LfX4ClY,1268
  orchestrator/log_config.py,sha256=1tPRX5q65e57a6a_zEii_PFK8SzWT0mnA5w2sKg4hh8,1853
  orchestrator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  orchestrator/security.py,sha256=iXFxGxab54aav7oHEKLAVkTgrQMJGHy6IYLojEnD7gI,2422
  orchestrator/settings.py,sha256=2Kgc6m3qUCcSM3Z_IVUeehfgO0QphMFkLrS0RC3sU-U,4365
- orchestrator/targets.py,sha256=WizBgnp8hWX9YLFUIju7ewSubiwQqinCvyiYNcXHbHI,802
+ orchestrator/targets.py,sha256=d7Fyh_mWIWPivA_E7DTNFpZID3xFW_K0JlZ5nksVX7k,830
  orchestrator/types.py,sha256=qzs7xx5AYRmKbpYRyJJP3wuDb0W0bcAzefCN0RWLAco,15459
  orchestrator/version.py,sha256=b58e08lxs47wUNXv0jXFO_ykpksmytuzEXD4La4W-NQ,1366
  orchestrator/workflow.py,sha256=meDCPnyyX_n5PsMUaFy2wWb5EKNm1_ff7zRDBYrbcDg,45901
@@ -23,7 +23,7 @@ orchestrator/api/api_v1/endpoints/products.py,sha256=BfFtwu9dZXEQbtKxYj9icc73GKG
  orchestrator/api/api_v1/endpoints/resource_types.py,sha256=gGyuaDyOD0TAVoeFGaGmjDGnQ8eQQArOxKrrk4MaDzA,2145
  orchestrator/api/api_v1/endpoints/settings.py,sha256=5s-k169podZjgGHUbVDmSQwpY_3Cs_Bbf2PPtZIkBcw,6184
  orchestrator/api/api_v1/endpoints/subscription_customer_descriptions.py,sha256=1_6LtgQleoq3M6z_W-Qz__Bj3OFUweoPrUqHMwSH6AM,3288
- orchestrator/api/api_v1/endpoints/subscriptions.py,sha256=zn_LeVfmp2uw7CszK4BvQ5n37hZccy3K2htkoDgF1sI,9809
+ orchestrator/api/api_v1/endpoints/subscriptions.py,sha256=7KaodccUiMkcVnrFnK2azp_V_-hGudcIyhov5WwVGQY,9810
  orchestrator/api/api_v1/endpoints/translations.py,sha256=dIWh_fCnZZUxJoGiNeJ49DK_xpf75IpR_0EIMSvzIvY,963
  orchestrator/api/api_v1/endpoints/user.py,sha256=RyI32EXVu6I-IxWjz0XB5zQWzzLL60zKXLgLqLH02xU,1827
  orchestrator/api/api_v1/endpoints/workflows.py,sha256=_0vhGiQeu3-z16Zi0WmuDWBs8gmed6BzRNwYH_sF6AY,1977
@@ -36,7 +36,7 @@ orchestrator/cli/migrate_domain_models.py,sha256=WRXy_1OnziQwpsCFZXvjB30nDJtjj0i
  orchestrator/cli/migrate_tasks.py,sha256=bju8XColjSZD0v3rS4kl-24dLr8En_H4-6enBmqd494,7255
  orchestrator/cli/migrate_workflows.py,sha256=nxUpx0vgEIc_8aJrjAyrw3E9Dt8JmaamTts8oiQ4vHY,8923
  orchestrator/cli/migration_helpers.py,sha256=C5tpkP5WEBr7G9S-1k1hgSI8ili6xd9Z5ygc9notaK0,4110
- orchestrator/cli/scheduler.py,sha256=iCKBWYUwQIYTDqKQ9rMVvs2sNiAzE-J2SkV170TPP2g,1896
+ orchestrator/cli/scheduler.py,sha256=2q6xT_XVOodY3e_qzIV98MWNvKvrbFpOJajWesj1fcs,1911
  orchestrator/cli/domain_gen_helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  orchestrator/cli/domain_gen_helpers/fixed_input_helpers.py,sha256=uzpwsaau81hHSxNMOS9-o7kF-9_78R0f_UE0AvWooZQ,6775
  orchestrator/cli/domain_gen_helpers/helpers.py,sha256=tIPxn8ezED_xYZxH7ZAtQLwkDc6RNmLZVxWAoJ3a9lw,4203
@@ -158,7 +158,7 @@ orchestrator/forms/validators/product_id.py,sha256=u5mURLT0pOhbFLdwvYcy2_2fXMt35
  orchestrator/graphql/__init__.py,sha256=avq8Yg3Jr_9pJqh7ClyIAOX7YSg1eM_AWmt5C3FRYUY,1440
  orchestrator/graphql/autoregistration.py,sha256=pF2jbMKG26MvYoMSa6ZpqpHjVks7_NvSRFymHTgmfjs,6342
  orchestrator/graphql/pagination.py,sha256=iqVDn3GPZpiQhEydfwkBJLURY-X8wwUphS8Lkeg0BOc,2413
- orchestrator/graphql/schema.py,sha256=gwZ3nAgKL0zlpc-aK58hSUAGPVD11Tb3aRSSK9hC39I,9204
+ orchestrator/graphql/schema.py,sha256=dw4m4sM1ek2DscB8vINN6L8vVDE0h5GXclHGa8CiUJo,9537
  orchestrator/graphql/types.py,sha256=_kHKMusrRPuRtF4wm42NsBzoFZ4egbu3ibMmhd2D6Fs,5432
  orchestrator/graphql/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  orchestrator/graphql/extensions/model_cache.py,sha256=1uhMRjBs9eK7zJ1Y6P6BopX06822w2Yh9jliwYvG6yQ,1085
@@ -174,6 +174,7 @@ orchestrator/graphql/resolvers/process.py,sha256=Hqs1F7-gw0yO_ioHjh2eLAyxrK2WSuL
  orchestrator/graphql/resolvers/product.py,sha256=uPBmYwMdau-zUqNjoDl-LDn927u3aCFW5JQ4A_it8q0,2772
  orchestrator/graphql/resolvers/product_block.py,sha256=Ker1CpxGab5h2BZujOHHwRUj8W4uphRr3WSpQGk2PnI,2939
  orchestrator/graphql/resolvers/resource_type.py,sha256=SREZXjkLYpuo4nCM8DqVeImIrZcP3xDiWr_gq4wWaxQ,2956
+ orchestrator/graphql/resolvers/scheduled_tasks.py,sha256=QsnesRrj8ESuS9vPKG9DXYcG2Wfj9m5LWGeZgmc6hu8,1640
  orchestrator/graphql/resolvers/settings.py,sha256=xVYqxo-EWQ24F4hUHm9OZeN9vsqQXJzIJ1_HF4Ci9Cs,3777
  orchestrator/graphql/resolvers/subscription.py,sha256=57niFv-JCro_wm0peJ5Ne04F2WIPuJ-Lx2h8yd9qubA,6541
  orchestrator/graphql/resolvers/version.py,sha256=qgwe1msPOexeg3RHCscJ8s45vNfMhYh9ZKyCZ3MNw30,809
@@ -184,17 +185,18 @@ orchestrator/graphql/schemas/customer_description.py,sha256=fize71IMpkvk_rTzcqCY
  orchestrator/graphql/schemas/errors.py,sha256=VRl-Zd1FHMnscyozhfxzqeEUZ0ERAWum_Y8YwjGxwmA,203
  orchestrator/graphql/schemas/fixed_input.py,sha256=1yqYHADQRgHz8OIP7ObYsPFS-gmzfkCvEO0a-KKf7zI,513
  orchestrator/graphql/schemas/helpers.py,sha256=Kpj4kIbmoKKN35bdgUSwQvGUIbeg7VJAVMEq65YS_ik,346
- orchestrator/graphql/schemas/process.py,sha256=g3noYh_USfnaK59fnoX2DI5tAf1PhdLMJGI_lA2xX1M,4966
+ orchestrator/graphql/schemas/process.py,sha256=wN4pKDuPbPHyyfGYaqFXMXxKTDm_zIwmyCOhSu5H1Iw,4978
  orchestrator/graphql/schemas/product.py,sha256=vUCqcjrKBJj-VKSrMYPKzjmmxLMXL7alKTJ8UdUkhTg,4342
  orchestrator/graphql/schemas/product_block.py,sha256=Qk9cbA6vm7ZPrhdgPHatKRuy6TytBmxSr97McEOxAu8,2860
  orchestrator/graphql/schemas/resource_type.py,sha256=s5d_FwQXL2-Sc-IDUxTJun5qFQ4zOP4-XcHF9ql-t1g,898
+ orchestrator/graphql/schemas/scheduled_task.py,sha256=22Kb7r2pUoefz9AeiTYQ1t6YBYCmAs_zVp7FqNdN5HQ,194
  orchestrator/graphql/schemas/settings.py,sha256=drhm5VcLmUbiYAk6WUSJcyJqjNM96E6GvpxVdPAobnA,999
  orchestrator/graphql/schemas/strawberry_pydantic_patch.py,sha256=CjNUhTKdYmLiaem-WY_mzw4HASIeaZitxGF8pPocqVw,1602
  orchestrator/graphql/schemas/subscription.py,sha256=hTA34C27kgLguH9V53173CxMKIWiQKh3vFzyJ2yBfE0,9918
  orchestrator/graphql/schemas/version.py,sha256=HSzVg_y4Sjd5_H5rRUtu3FJKOG_8ifhvBNt_qjOtC-E,92
- orchestrator/graphql/schemas/workflow.py,sha256=WLbegRNxOfvXg4kPYrO5KPBwtHmUofAr2pvZT2JsW1c,1761
+ orchestrator/graphql/schemas/workflow.py,sha256=ewE5mRuqMq7rnx8Au2eTUm3YTY1pivOWATNacZQ-trY,1773
  orchestrator/graphql/utils/__init__.py,sha256=1JvenzEVW1CBa1sGVI9I8IWnnoXIkb1hneDqph9EEZY,524
- orchestrator/graphql/utils/create_resolver_error_handler.py,sha256=PpQMVwGrE9t0nZ12TwoxPxksXxEwQM7lSNPeh7qW3vk,1233
+ orchestrator/graphql/utils/create_resolver_error_handler.py,sha256=XzCnL482M4wz3fg5fUdGUwCAuzSZQ9Ufu1mscLyeoWU,1227
  orchestrator/graphql/utils/get_query_loaders.py,sha256=abS_HJ7K9een78gMiGq3IhwGwxQXHvZygExe0h_t9ns,815
  orchestrator/graphql/utils/get_selected_fields.py,sha256=0hBcQkU-7TNVO_KG-MmLItKm0O3gmbqoxXNkLHO-wHo,1002
  orchestrator/graphql/utils/get_selected_paths.py,sha256=H0btESeOr3_VB7zy5Cx25OS0uzBcg2Y1I-arAmSOnsQ,1382
@@ -245,12 +247,13 @@ orchestrator/migrations/versions/schema/2025-05-08_161918133bec_add_is_task_to_w
  orchestrator/migrations/versions/schema/2025-07-01_93fc5834c7e5_changed_timestamping_fields_in_process_steps.py,sha256=Oezd8b2qaI1Kyq-sZFVFmdzd4d9NjXrf6HtJGk11fy0,1914
  orchestrator/migrations/versions/schema/2025-07-04_4b58e336d1bf_deprecating_workflow_target_in_.py,sha256=xnD6w-97R4ClS7rbmXQEXc36K3fdcXKhCy7ZZNy_FX4,742
  orchestrator/migrations/versions/schema/2025-07-28_850dccac3b02_update_description_of_resume_workflows_.py,sha256=R6Qoga83DJ1IL0WYPu0u5u2ZvAmqGlDmUMv_KtJyOhQ,812
- orchestrator/schedules/__init__.py,sha256=JnnaglfK1qYUBKI6Dd9taV-tCZIPlAdAkHtnkJDMXxY,1066
- orchestrator/schedules/resume_workflows.py,sha256=kSotzTAXjX7p9fpSYiGOpuxuTQfv54eRFAe0YSG0DHc,832
- orchestrator/schedules/scheduling.py,sha256=ehtwgpbvMOk1jhn-hHgVzg_9wLJkI6l3mRY3DcO9ZVY,1526
- orchestrator/schedules/task_vacuum.py,sha256=eovnuKimU8SFRw1IF62MsAVFSdgeeV1u57kapUbz8As,821
- orchestrator/schedules/validate_products.py,sha256=YMr7ASSqdXM6pd6oZu0kr8mfmH8If16MzprrsHdN_ZU,1234
- orchestrator/schedules/validate_subscriptions.py,sha256=9SYvsn4BJ5yo_1nu555hWjl5XffTx7QMaRhH5oOjM9E,2042
+ orchestrator/schedules/__init__.py,sha256=Zy0fTOBMGIRFoh5iVFDLF9_PRAFaONYDThGK9EsysWo,981
+ orchestrator/schedules/resume_workflows.py,sha256=jRnVRWDy687pQu-gtk80ecwiLSdrvtL15tG3U2zWA6I,891
+ orchestrator/schedules/scheduler.py,sha256=_Y6TB-GKNJM0Nk7CRLuMnw0djFEBrDm999GOOcBuBeQ,5880
+ orchestrator/schedules/scheduling.py,sha256=_mbpHMhijey8Y56ebtJ4wVkrp_kPVRm8hoByzlQF4SE,2821
+ orchestrator/schedules/task_vacuum.py,sha256=mxb7fsy1GphRwvUWi_lvwNaj51YAXUdIDlkOJd90AFI,874
+ orchestrator/schedules/validate_products.py,sha256=zWFQeVn3F8LP3joExLiKdmHs008pZsO-RolcIXHjFyE,1322
+ orchestrator/schedules/validate_subscriptions.py,sha256=bUBV45aEuqVdtqYBAXh1lX4O5vuNTeTfds4J_zq35dI,2113
  orchestrator/schemas/__init__.py,sha256=YDyZ0YBvzB4ML9oDBCBPGnBvf680zFFgUzg7X0tYBRY,2326
  orchestrator/schemas/base.py,sha256=Vc444LetsINLRhG2SxW9Bq01hOzChPOhQWCImQTr-As,930
  orchestrator/schemas/engine_settings.py,sha256=LF8al7tJssiilb5A4emPtUYo0tVDSaT1Lvo_DN_ttrY,1296
@@ -273,7 +276,7 @@ orchestrator/services/resource_types.py,sha256=_QBy_JOW_X3aSTqH0CuLrq4zBJL0p7Q-U
  orchestrator/services/settings.py,sha256=HEWfFulgoEDwgfxGEO__QTr5fDiwNBEj1UhAeTAdbLQ,3159
  orchestrator/services/settings_env_variables.py,sha256=iPErQjqPQCxKs0sPhefB16d8SBBVUi6eiRnFBK5bgqA,2196
  orchestrator/services/subscription_relations.py,sha256=aIdyzwyyy58OFhwjRPCPgnQTUTmChu6SeSQRIleQoDE,13138
- orchestrator/services/subscriptions.py,sha256=nr2HI89nC0lYjzTh2j-lEQ5cPQK43LNZv3gvP6jbepw,27189
+ orchestrator/services/subscriptions.py,sha256=XhJ5ygAAyWUIZHULhKyi1uU5DwkKZhzdxxn9vdQZYiA,27281
  orchestrator/services/tasks.py,sha256=mR3Fj1VsudltpanJKI2PvrxersyhVQ1skp8H7r3XnYI,5288
  orchestrator/services/translations.py,sha256=GyP8soUFGej8AS8uulBsk10CCK6Kwfjv9AHMFm3ElQY,1713
  orchestrator/services/workflows.py,sha256=iEkt2OBuTwkDru4V6ZSKatnw0b96ZdPV-VQqeZ9EOgU,4015
@@ -309,14 +312,14 @@ orchestrator/workflows/__init__.py,sha256=NzIGGI-8SNAwCk2YqH6sHhEWbgAY457ntDwjO1
  orchestrator/workflows/modify_note.py,sha256=eXt5KQvrkOXf-3YEXCn2XbBLP9N-n1pUYRW2t8Odupo,2150
  orchestrator/workflows/removed_workflow.py,sha256=V0Da5TEdfLdZZKD38ig-MTp3_IuE7VGqzHHzvPYQmLI,909
  orchestrator/workflows/steps.py,sha256=CZxfzkG5ANJYwuYTkQ4da2RpQqIjXCtey_Uy1ezRAZ4,6479
- orchestrator/workflows/utils.py,sha256=bhX9vm3oc9k6RSaESl34v4Nrh40G4Ys91INoTjZ0XVM,13966
+ orchestrator/workflows/utils.py,sha256=VUCDoIl5XAKtIeAJpVpyW2pCIg3PoVWfwGn28BYlYhA,15424
  orchestrator/workflows/tasks/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
  orchestrator/workflows/tasks/cleanup_tasks_log.py,sha256=BfWYbPXhnLAHUJ0mlODDnjZnQQAvKCZJDVTwbwOWI04,1624
  orchestrator/workflows/tasks/resume_workflows.py,sha256=T3iobSJjVgiupe0rClD34kUZ7KF4pL5yK2AVeRLZog8,4313
  orchestrator/workflows/tasks/validate_product_type.py,sha256=paG-NAY1bdde3Adt8zItkcBKf5Pxw6f5ngGW6an6dYU,3192
  orchestrator/workflows/tasks/validate_products.py,sha256=GZJBoFF-WMphS7ghMs2-gqvV2iL1F0POhk0uSNt93n0,8510
  orchestrator/workflows/translations/en-GB.json,sha256=ST53HxkphFLTMjFHonykDBOZ7-P_KxksktZU3GbxLt0,846
- orchestrator_core-4.3.1.dist-info/licenses/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
- orchestrator_core-4.3.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- orchestrator_core-4.3.1.dist-info/METADATA,sha256=1lkABfKjfjXMQaj6Iw3CcmCkunXswkZ0RBLe61A1fAY,5960
- orchestrator_core-4.3.1.dist-info/RECORD,,
+ orchestrator_core-4.4.0.dist-info/licenses/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
+ orchestrator_core-4.4.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ orchestrator_core-4.4.0.dist-info/METADATA,sha256=097W21kPObcri-l74UPc7CdMTBaeHobEeCXn_uqJu1Y,5964
+ orchestrator_core-4.4.0.dist-info/RECORD,,