orchestrator-core 2.9.2rc2-py3-none-any.whl → 2.10.0rc1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
orchestrator/__init__.py CHANGED
@@ -13,7 +13,7 @@
 
 """This is the orchestrator workflow engine."""
 
-__version__ = "2.9.2rc2"
+__version__ = "2.10.0rc1"
 
 from orchestrator.app import OrchestratorCore
 from orchestrator.settings import app_settings
orchestrator/api/api_v1/endpoints/processes.py CHANGED
@@ -240,6 +240,7 @@ def abort_process_endpoint(process_id: UUID, request: Request, user: str = Depen
     broadcast_func = api_broadcast_process_data(request)
     try:
         abort_process(process, user, broadcast_func=broadcast_func)
+        broadcast_invalidate_status_counts()
         return
     except Exception as e:
         raise_status(HTTPStatus.INTERNAL_SERVER_ERROR, str(e))
orchestrator/cli/database.py CHANGED
@@ -25,6 +25,7 @@ import orchestrator.workflows
 from orchestrator.cli.domain_gen_helpers.types import ModelUpdates
 from orchestrator.cli.helpers.print_helpers import COLOR, str_fmt
 from orchestrator.cli.migrate_domain_models import create_domain_models_migration_sql
+from orchestrator.cli.migrate_tasks import create_tasks_migration_wizard
 from orchestrator.cli.migrate_workflows import create_workflows_migration_wizard
 from orchestrator.cli.migration_helpers import create_migration_file
 from orchestrator.db import init_database
@@ -397,3 +398,80 @@ def migrate_workflows(
 
     create_migration_file(alembic_cfg(), sql_upgrade_str, sql_downgrade_str, message, preamble=preamble)
     return None
+
+
+@app.command(help="Create migration file based on diff tasks in db")
+def migrate_tasks(
+    message: str = typer.Argument(..., help="Migration name"),
+    test: bool = typer.Option(False, help="Optional boolean if you don't want to generate a migration file"),
+) -> tuple[list[dict], list[dict]] | None:
+    """The `migrate-tasks` command creates a migration file based on the difference between tasks in the database and registered TaskInstances in your codebase.
+
+    !!! warning "BACKUP YOUR DATABASE BEFORE USING THE MIGRATION!"
+
+    You will be prompted with inputs for new models and resource type updates.
+    Resource type updates are only handled when it's renamed in all product blocks.
+
+    Args:
+        message: Message/description of the generated migration.
+        test: Optional boolean if you don't want to generate a migration file.
+
+    Returns None unless `--test` is used, in which case it returns:
+        - tuple:
+            - list of upgrade SQL statements in string format.
+            - list of downgrade SQL statements in string format.
+
+    CLI Arguments:
+        ```sh
+        Arguments:
+            MESSAGE  Migration name  [required]
+
+        Options:
+            --test / --no-test  Optional boolean if you don't want to generate a migration
+                                file  [default: no-test]
+        ```
+    """
+    if not app_settings.TESTING:
+        init_database(app_settings)
+
+    if test:
+        print(  # noqa: T001, T201
+            f"{str_fmt('NOTE:', flags=[COLOR.BOLD, COLOR.CYAN])} Running in test mode. No migration file will be generated.\n"
+        )
+
+    tasks_to_add, tasks_to_delete = create_tasks_migration_wizard()
+
+    # String 'template' arguments
+    import_str = "from orchestrator.migrations.helpers import create_task, delete_workflow\n"
+    tpl_preamble_lines = []
+    tpl_upgrade_lines = []
+    tpl_downgrade_lines = []
+
+    if tasks_to_add:
+        tpl_preamble_lines.append(f"new_tasks = {json.dumps(tasks_to_add, indent=4)}\n")
+        tpl_upgrade_lines.extend([(" " * 4) + "for task in new_tasks:", (" " * 8) + "create_task(conn, task)"])
+        tpl_downgrade_lines.extend(
+            [(" " * 4) + "for task in new_tasks:", (" " * 8) + 'delete_workflow(conn, task["name"])']
+        )
+
+    if tasks_to_delete:
+        tpl_preamble_lines.append(f"old_tasks = {json.dumps(tasks_to_delete, indent=4)}\n")
+        tpl_upgrade_lines.extend(
+            [(" " * 4) + "for task in old_tasks:", (" " * 8) + 'delete_workflow(conn, task["name"])']
+        )
+        tpl_downgrade_lines.extend([(" " * 4) + "for task in old_tasks:", (" " * 8) + "create_task(conn, task)"])
+
+    preamble = "\n".join(
+        [
+            import_str,
+            *tpl_preamble_lines,
+        ]
+    )
+    sql_upgrade_str = "\n".join(tpl_upgrade_lines)
+    sql_downgrade_str = "\n".join(tpl_downgrade_lines)
+
+    if test:
+        return tasks_to_add, tasks_to_delete
+
+    create_migration_file(alembic_cfg(), sql_upgrade_str, sql_downgrade_str, message, preamble=preamble)
+    return None
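
Taken together, the command writes an Alembic revision whose preamble embeds the selected tasks and whose `upgrade`/`downgrade` bodies loop over them with `create_task` and `delete_workflow`. A rough sketch of what a generated migration could look like (illustrative only; the revision identifiers, task name, and surrounding template come from your own project):

```python
"""Add example cleanup task."""
from alembic import op

from orchestrator.migrations.helpers import create_task, delete_workflow

# Alembic bookkeeping -- both values are placeholders here
revision = "abc123def456"
down_revision = "0123456789ab"

# Preamble produced by the migrate-tasks wizard from your answers
new_tasks = [
    {
        "name": "task_clean_example",  # hypothetical task name
        "description": "Example cleanup task",
    }
]


def upgrade() -> None:
    conn = op.get_bind()
    for task in new_tasks:
        create_task(conn, task)


def downgrade() -> None:
    conn = op.get_bind()
    for task in new_tasks:
        delete_workflow(conn, task["name"])
```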
orchestrator/cli/migrate_tasks.py ADDED
@@ -0,0 +1,188 @@
+import itertools
+import operator
+from collections.abc import Iterable
+from typing import TypeVar, cast
+
+import structlog
+from sqlalchemy import select
+from tabulate import tabulate
+
+import orchestrator.workflows
+from orchestrator.cli.helpers.input_helpers import _enumerate_menu_keys, _prompt_user_menu, get_user_input
+from orchestrator.cli.helpers.print_helpers import COLOR, noqa_print, print_fmt
+from orchestrator.db import WorkflowTable, db
+from orchestrator.targets import Target
+from orchestrator.workflow import Workflow
+from orchestrator.workflows import get_workflow
+
+logger = structlog.get_logger(__name__)
+
+T = TypeVar("T")
+
+
+def _print_tasks_table(tasks: list[WorkflowTable]) -> None:
+    items = [(task.name, task.description) for task in tasks]
+    print_fmt(
+        tabulate(
+            items,
+            headers=["#", "name", "description"],
+            showindex=itertools.count(1),
+        ),
+        end="\n\n",
+    )
+
+
+def _add_task(tasks: dict[str, Workflow], state: dict) -> dict:
+    print_fmt("\nAdd new task\n", flags=[COLOR.UNDERLINE])
+
+    if not tasks:
+        noqa_print("No registered tasks found to add to the database")
+        return state
+
+    noqa_print("\nWhich task should be added?")
+
+    already_used_tasks = {task["name"] for task in state["tasks_to_add"] + state["tasks_to_delete"]}
+    task_options = [(task, task) for task in tasks.keys() if task not in already_used_tasks]
+    task_name = _prompt_user_menu([*task_options, ("cancel", None)], keys=[*_enumerate_menu_keys(task_options), "q"])
+    if not task_name:
+        # Menu cancelled
+        return state
+
+    task_to_add = {"name": task_name, "description": tasks[task_name].description}
+    return {**state, "tasks_to_add": [*state["tasks_to_add"], task_to_add]}
+
+
+def _delete_task(tasks: Iterable[WorkflowTable], state: dict) -> dict:
+    print_fmt("\nDelete existing task\n", flags=[COLOR.UNDERLINE])
+    already_used_tasks = {task["name"] for task in state["tasks_to_add"] + state["tasks_to_delete"]}
+    items = [(task.name, task.description) for task in tasks if task not in already_used_tasks]
+    items = sorted(items, key=operator.itemgetter(1, 1))
+    keys = ["#", "name", "description"]
+    if not items:
+        noqa_print("No deletable tasks in database")
+        return state
+
+    print_fmt(
+        tabulate(
+            items,
+            headers=keys,
+            showindex=itertools.count(1),
+        ),
+        end="\n\n",
+    )
+    task_num = get_user_input("Which task do you want to delete? (q to cancel) ", "q")
+    if not task_num.isdigit():
+        return state
+    task_index = int(task_num) - 1
+    if 0 <= task_index < len(items):
+        item = dict(zip(keys[1:], items[task_index]))
+        return {**state, "tasks_to_delete": [*state["tasks_to_delete"], item]}
+    return state
+
+
+def _show_state(state: dict) -> dict:
+    print_fmt("\nTasks to add:", flags=[COLOR.GREEN])
+    print_fmt(
+        tabulate(
+            state["tasks_to_add"],
+            headers="keys",
+            showindex=itertools.count(1),
+        ),
+        end="\n\n",
+    )
+
+    print_fmt("Tasks to delete:", flags=[COLOR.RED])
+    print_fmt(
+        tabulate(
+            state["tasks_to_delete"],
+            headers="keys",
+            showindex=itertools.count(1),
+        ),
+        end="\n\n",
+    )
+    return state
+
+
+def delete_dangling_tasks(tasks: list[WorkflowTable], state: dict) -> dict:
+    if not tasks:
+        noqa_print("No dangling tasks found.")
+        return state
+
+    print_fmt(
+        "\nThe following tasks were found in the database that do not have a corresponding LazyWorkflowInstance:\n"
+    )
+    _print_tasks_table(tasks)
+    should_delete_dangling_tasks = (
+        get_user_input("Do you wish to delete all dangling tasks from the database? [y/n]: ", "n").lower() == "y"
+    )
+
+    if not should_delete_dangling_tasks:
+        noqa_print("Cancelling")
+        return state
+
+    already_used_tasks = {task["name"] for task in state["tasks_to_add"] + state["tasks_to_delete"]}
+    keys = ["name", "description"]
+    items = [
+        {"name": task.name, "description": task.description}
+        for k, task in zip(keys, tasks)
+        if task.name not in already_used_tasks
+    ]
+    return {**state, "tasks_to_delete": [*state["tasks_to_delete"], *items]}
+
+
+def create_tasks_migration_wizard() -> tuple[list[dict], list[dict]]:
+    """Create tuple with lists for tasks to add and delete.
+
+    Returns tuple:
+        - list of task items to add in the migration
+        - list of task items to delete in the migration
+    """
+    database_tasks = {
+        task.name: task for task in list(db.session.scalars(select(WorkflowTable))) if task.target == Target.SYSTEM
+    }
+    registered_wf_instances = {
+        task: cast(Workflow, get_workflow(task)) for task in orchestrator.workflows.ALL_WORKFLOWS.keys()
+    }
+
+    registered_tasks = dict(
+        filter(
+            lambda task: task[1].target == Target.SYSTEM and task[0] in database_tasks.keys(),
+            registered_wf_instances.items(),
+        )
+    )
+    available_tasks = dict(
+        filter(
+            lambda task: task[1].target == Target.SYSTEM and task[0] not in database_tasks.keys(),
+            registered_wf_instances.items(),
+        )
+    )
+    unregistered_tasks = [task for task in database_tasks.values() if task.name not in registered_tasks.keys()]
+
+    # Main menu loop
+    state = {"tasks_to_add": [], "tasks_to_delete": [], "done": False}
+    while not state["done"]:
+        print_fmt("\nWhat do you want to do?\n", flags=[COLOR.UNDERLINE, COLOR.BOLD])
+        choice_fn = _prompt_user_menu(
+            [
+                ("Add task to database", lambda s: _add_task(available_tasks, s)),
+                ("Delete task from database", lambda s: _delete_task(database_tasks.values(), s)),
+                (
+                    "Delete unregistered tasks from database",
+                    lambda s: delete_dangling_tasks(unregistered_tasks, s),
+                ),
+                ("Finish and create migration file", lambda s: {**s, "done": True, "abort": False}),
+                ("Show current diff", _show_state),
+                ("Quit menu without creating a migration file", lambda s: {**s, "done": True, "abort": True}),
+            ],
+            keys=["a", "x", "c", "y", "d", "q"],
+        )
+        if choice_fn:
+            state = choice_fn(state)  # type: ignore
+
+    if state.get("abort"):
+        return [], []
+
+    logger.info("Create tasks", create_tasks=state["tasks_to_add"])
+    logger.info("Delete tasks", delete_tasks=state["tasks_to_delete"])
+
+    return state["tasks_to_add"], state["tasks_to_delete"]  # type: ignore
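
Note the shape of the menu loop above: every entry maps to a function that takes the current `state` dict and returns a new one, and `_prompt_user_menu` hands back whichever function was selected. A minimal sketch of that reducer pattern in isolation (no CLI helpers or database involved; the task dict is made up):

```python
from collections.abc import Callable

State = dict


def add_example_task(state: State) -> State:
    # Reducers return a new state rather than mutating the old one
    task = {"name": "task_example", "description": "example task"}  # hypothetical task
    return {**state, "tasks_to_add": [*state["tasks_to_add"], task]}


def finish(state: State) -> State:
    return {**state, "done": True, "abort": False}


state: State = {"tasks_to_add": [], "tasks_to_delete": [], "done": False}
choices: list[Callable[[State], State]] = [add_example_task, finish]  # stand-in for interactive menu picks
for choice in choices:
    state = choice(state)

assert state["done"] is True
assert state["tasks_to_add"] == [{"name": "task_example", "description": "example task"}]
```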
orchestrator/db/filters/search_filters/inferred_filter.py CHANGED
@@ -21,6 +21,7 @@ from sqlalchemy import BinaryExpression, Cast, ColumnClause, ColumnElement, Stri
 from sqlalchemy.sql.functions import coalesce
 from sqlalchemy.sql.operators import eq
 
+from orchestrator.settings import app_settings
 from orchestrator.utils.search_query import Node, WhereCondGenerator
 
 
@@ -67,7 +68,7 @@ def _filter_string(field: ColumnElement) -> WhereCondGenerator:
         if node[0] == "ValueGroup":
             vals = [w[1] for w in node[1] if w[0] in ["Word", "PrefixWord"]]  # Only works for (Prefix)Words atm
             return field.in_(vals)
-        return field.ilike(f"%{node[1]}%")
+        return field.ilike(f"{node[1]}") if app_settings.FILTER_BY_MODE == "exact" else field.ilike(f"%{node[1]}%")
 
     return _clause_gen
 
orchestrator/domain/base.py CHANGED
@@ -458,7 +458,7 @@ class DomainModel(BaseModel):
 
 
 def get_depends_on_product_block_type_list(
-    product_block_types: dict[str, type["ProductBlockModel"] | tuple[type["ProductBlockModel"]]]
+    product_block_types: dict[str, type["ProductBlockModel"] | tuple[type["ProductBlockModel"]]],
 ) -> list[type["ProductBlockModel"]]:
     product_blocks_types_in_model = []
     for product_block_type in product_block_types.values():
orchestrator/graphql/resolvers/__init__.py CHANGED
@@ -5,6 +5,7 @@ from orchestrator.graphql.resolvers.product_block import resolve_product_blocks
 from orchestrator.graphql.resolvers.resource_type import resolve_resource_types
 from orchestrator.graphql.resolvers.settings import SettingsMutation, resolve_settings
 from orchestrator.graphql.resolvers.subscription import resolve_subscription, resolve_subscriptions
+from orchestrator.graphql.resolvers.version import resolve_version
 from orchestrator.graphql.resolvers.workflow import resolve_workflows
 
 __all__ = [
@@ -19,4 +20,5 @@ __all__ = [
     "resolve_customer",
     "resolve_resource_types",
     "resolve_workflows",
+    "resolve_version",
 ]
orchestrator/graphql/resolvers/version.py ADDED
@@ -0,0 +1,25 @@
+from structlog import get_logger
+
+from orchestrator import __version__
+from orchestrator.graphql.schemas.version import VersionType
+from orchestrator.graphql.types import OrchestratorInfo
+from orchestrator.graphql.utils import create_resolver_error_handler
+
+logger = get_logger(__name__)
+
+
+VERSIONS = [f"orchestrator-core: {__version__}"]
+
+
+def resolve_version(info: OrchestratorInfo) -> VersionType | None:
+    logger.debug("resolve_version() called")
+    _error_handler = create_resolver_error_handler(info)
+
+    ver = None
+    try:
+        ver = VersionType(application_versions=VERSIONS)
+    except Exception as e:
+        logger.error(f"Error getting version: {str(e)}")
+        _error_handler("Failed to retrieve orchestrator_core version", extensions={"code": "PACKAGE_VERSION_ERROR"})
+
+    return ver
orchestrator/graphql/schema.py CHANGED
@@ -47,6 +47,7 @@ from orchestrator.graphql.resolvers import (
     resolve_settings,
     resolve_subscription,
     resolve_subscriptions,
+    resolve_version,
     resolve_workflows,
 )
 from orchestrator.graphql.schemas import DEFAULT_GRAPHQL_MODELS
@@ -57,6 +58,7 @@ from orchestrator.graphql.schemas.product_block import ProductBlock
 from orchestrator.graphql.schemas.resource_type import ResourceType
 from orchestrator.graphql.schemas.settings import StatusType
 from orchestrator.graphql.schemas.subscription import SubscriptionInterface
+from orchestrator.graphql.schemas.version import VersionType
 from orchestrator.graphql.schemas.workflow import Workflow
 from orchestrator.graphql.types import SCALAR_OVERRIDES, OrchestratorContext, ScalarOverrideType, StrawberryModelType
 from orchestrator.services.process_broadcast_thread import ProcessDataBroadcastThread
@@ -95,6 +97,7 @@ class OrchestratorQuery:
         resolver=resolve_settings,
         description="Returns information about cache, workers, and global engine settings",
     )
+    version: VersionType = authenticated_field(resolver=resolve_version, description="Returns version information")
 
 
 @strawberry.federation.type(description="Orchestrator customer Query")
orchestrator/graphql/schemas/version.py ADDED
@@ -0,0 +1,6 @@
+import strawberry
+
+
+@strawberry.type
+class VersionType:
+    application_versions: list[str]
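
With the resolver, the new `version` field on the query type, and this `VersionType`, the core version becomes queryable over GraphQL. A minimal sketch of such a query, assuming strawberry's default camelCase field conversion, a locally running instance, and that the GraphQL router is reachable at `/api/graphql` (path, port, and authentication depend on your deployment):

```python
import requests  # assumes the requests package is installed; any HTTP client works

query = "query { version { applicationVersions } }"

resp = requests.post(
    "http://localhost:8080/api/graphql",  # assumed local endpoint
    json={"query": query},
    timeout=10,
)
print(resp.json())
# e.g. {"data": {"version": {"applicationVersions": ["orchestrator-core: 2.10.0rc1"]}}}
```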
orchestrator/migrations/helpers.py CHANGED
@@ -164,6 +164,36 @@ def create_workflow(conn: sa.engine.Connection, workflow: dict) -> None:
     )
 
 
+def create_task(conn: sa.engine.Connection, task: dict) -> None:
+    """Create a new task.
+
+    Args:
+        conn: DB connection as available in migration main file.
+        task: Dict with data for a new task.
+            name: Name of the task.
+            description: Description of the task.
+
+    Usage:
+        >>> task = {
+                "name": "task_name",
+                "description": "task description",
+            }
+        >>> create_task(conn, task)
+    """
+
+    conn.execute(
+        sa.text(
+            """
+            INSERT INTO workflows(name, target, description)
+            VALUES (:name, 'SYSTEM', :description)
+            ON CONFLICT DO NOTHING
+            RETURNING workflow_id
+            """
+        ),
+        task,
+    )
+
+
 def create_workflows(conn: sa.engine.Connection, new: dict) -> None:
     """Create new workflows.
 
@@ -894,7 +924,7 @@ def delete_product_block(conn: sa.engine.Connection, name: str) -> None:
 
 
 def delete_workflow(conn: sa.engine.Connection, name: str) -> None:
-    """Delete a workflow and it's occurrences in products.
+    """Delete a workflow and its occurrences in products.
 
     Note: the cascading delete rules in postgres will also ensure removal from `products_workflows`.
 
orchestrator/settings.py CHANGED
@@ -14,6 +14,7 @@
 import secrets
 import string
 from pathlib import Path
+from typing import Literal
 
 from pydantic import PostgresDsn, RedisDsn
 from pydantic_settings import BaseSettings
@@ -82,6 +83,7 @@ class AppSettings(BaseSettings):
 ENABLE_GRAPHQL_PROFILING_EXTENSION: bool = False
 ENABLE_GRAPHQL_STATS_EXTENSION: bool = False
 VALIDATE_OUT_OF_SYNC_SUBSCRIPTIONS: bool = False
+    FILTER_BY_MODE: Literal["partial", "exact"] = "exact"
 
 
 app_settings = AppSettings()
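
The new `FILTER_BY_MODE` setting drives the `_filter_string` change shown earlier in `inferred_filter.py`: with the default `"exact"` the term is passed to `ILIKE` without wildcards (case-insensitive equality), while `"partial"` keeps the previous substring match. A small sketch of the two predicates it would generate, assuming a filter on a `description` column:

```python
from sqlalchemy import column
from sqlalchemy.dialects import postgresql

description = column("description")

exact = description.ilike("core")      # FILTER_BY_MODE="exact": case-insensitive equality
partial = description.ilike("%core%")  # FILTER_BY_MODE="partial": substring match (previous behaviour)


def render(expr):
    # Render with literal values on the PostgreSQL dialect, for illustration only
    return expr.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})


print(render(exact))    # description ILIKE 'core'
print(render(partial))  # description ILIKE '%core%'
```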
orchestrator/utils/redis.py CHANGED
@@ -86,7 +86,7 @@ def default_get_subscription_id(data: Any) -> UUID:
 
 
 def delete_subscription_from_redis(
-    extract_fn: Callable[..., UUID] = default_get_subscription_id
+    extract_fn: Callable[..., UUID] = default_get_subscription_id,
 ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
     def _delete_subscription_from_redis(func: Callable[..., Any]) -> Callable[..., Any]:
         @functools.wraps(func)
orchestrator/utils/search_query.py CHANGED
@@ -5,12 +5,10 @@ from itertools import chain
 from typing import Any, Union, cast
 
 import structlog
-from sqlalchemy import BinaryExpression, ColumnElement, CompoundSelect, Select, column, false, func, not_, or_, select
+from sqlalchemy import BinaryExpression, ColumnElement, CompoundSelect, Select, false, not_, or_
 from sqlalchemy.orm import InstrumentedAttribute, MappedColumn
 
-from orchestrator.db import db
 from orchestrator.db.database import BaseModel
-from orchestrator.db.helpers import get_postgres_version
 from orchestrator.utils.helpers import camel_to_snake
 
 logger = structlog.getLogger(__name__)
@@ -257,28 +255,10 @@ class Parser:
 
 
 class TSQueryVisitor:
-
     _glue_chars = re.compile(r"[-_@#$%^&]")
 
     @staticmethod
     def _split_term(term: str) -> list[str]:
-        """Workaround for the way Postgres <14 parses text with to_tsquery.
-
-        For Postgres <14, we issue a database query to parse the text for us in a way that is compatible with to_tsvector. This is the same behavior as Postgres >=14.
-        """
-
-        # TODO: Remove this workaround when support for Postgres <14 is dropped
-        # https://github.com/workfloworchestrator/orchestrator-core/issues/621
-
-        if get_postgres_version() < 14:
-            # tokid 12 is the space separator token
-            stmt = (
-                select(func.array_agg(column("token")))
-                .select_from(func.ts_parse("default", func.replace(term, "-", "_")))
-                .where(column("tokid") != 12)
-            )
-            return db.session.scalar(stmt) or []
-
         return [part.strip() for part in TSQueryVisitor._glue_chars.split(term) if part and not part.isspace()]
 
     @staticmethod
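
After dropping the Postgres <14 fallback, `_split_term` is a pure-Python split on the "glue" characters. A quick illustration of the remaining behaviour, using the same regex as above:

```python
import re

# Same pattern as TSQueryVisitor._glue_chars in the diff above
_glue_chars = re.compile(r"[-_@#$%^&]")


def _split_term(term: str) -> list[str]:
    return [part.strip() for part in _glue_chars.split(term) if part and not part.isspace()]


print(_split_term("node-1_interface"))  # ['node', '1', 'interface']
print(_split_term("plain"))             # ['plain']
```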
orchestrator/utils/state.py CHANGED
@@ -183,11 +183,12 @@ def _build_arguments(func: StepFunc | InputStepFunc, state: State) -> list: # n
             raise KeyError(f"Could not find key '{name}' in state.")
         elif is_list_type(param.annotation, SubscriptionModel):
             subscription_ids = map(_get_sub_id, state.get(name, []))
-            subscriptions = [
-                # Actual type is first argument from list type
-                get_args(param.annotation)[0].from_subscription(subscription_id)
-                for subscription_id in subscription_ids
-            ]
+            # Actual type is first argument from list type
+            if (actual_type := get_args(param.annotation)[0]) == Any:
+                raise ValueError(
+                    f"Step function argument '{param.name}' cannot be serialized from database with type 'Any'"
+                )
+            subscriptions = [actual_type.from_subscription(subscription_id) for subscription_id in subscription_ids]
             arguments.append(subscriptions)
         elif is_optional_type(param.annotation, SubscriptionModel):
             subscription_id = _get_sub_id(state.get(name))
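
The net effect: when a step parameter is annotated as a list of subscription models, the element type is taken from the annotation and each id in the state is loaded with `from_subscription`; an element type of `Any` now raises a clear `ValueError` instead of failing on `Any.from_subscription`. An illustrative sketch, assuming the usual `@step` decorator import path (names and annotations here are only examples):

```python
from typing import Any

from orchestrator.domain.base import SubscriptionModel
from orchestrator.workflow import step  # assumed import path for the step decorator


@step("Load subscriptions")
def load_subscriptions(subscriptions: list[SubscriptionModel]) -> None:
    # OK: the element type comes from the annotation, so every id in
    # state["subscriptions"] is loaded via SubscriptionModel.from_subscription(...)
    ...


@step("Broken step")
def broken_step(subscriptions: list[Any]) -> None:
    # Now fails fast with ValueError: "Step function argument 'subscriptions'
    # cannot be serialized from database with type 'Any'"
    ...
```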
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: orchestrator-core
-Version: 2.9.2rc2
+Version: 2.10.0rc1
 Summary: This is the orchestrator workflow engine.
 Requires-Python: >=3.11,<3.14
 Classifier: Intended Audience :: Information Technology
@@ -27,36 +27,36 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
 Classifier: Topic :: Internet :: WWW/HTTP
-Requires-Dist: alembic==1.13.3
+Requires-Dist: alembic==1.14.1
 Requires-Dist: anyio>=3.7.0
 Requires-Dist: click==8.*
 Requires-Dist: deprecated
 Requires-Dist: deepmerge==1.1.1
 Requires-Dist: fastapi~=0.115.2
 Requires-Dist: fastapi-etag==0.4.0
-Requires-Dist: more-itertools~=10.5.0
+Requires-Dist: more-itertools~=10.6.0
 Requires-Dist: itsdangerous
 Requires-Dist: Jinja2==3.1.5
-Requires-Dist: orjson==3.10.9
+Requires-Dist: orjson==3.10.15
 Requires-Dist: psycopg2-binary==2.9.10
 Requires-Dist: pydantic[email]~=2.8.2
-Requires-Dist: pydantic-settings~=2.5.2
+Requires-Dist: pydantic-settings~=2.7.1
 Requires-Dist: python-dateutil==2.8.2
 Requires-Dist: python-rapidjson>=1.18,<1.21
 Requires-Dist: pytz==2024.1
 Requires-Dist: redis==5.0.3
 Requires-Dist: schedule==1.1.0
 Requires-Dist: sentry-sdk[fastapi]~=2.18.0
-Requires-Dist: SQLAlchemy==2.0.36
+Requires-Dist: SQLAlchemy==2.0.38
 Requires-Dist: SQLAlchemy-Utils==0.41.2
 Requires-Dist: structlog
-Requires-Dist: typer==0.12.5
+Requires-Dist: typer==0.15.1
 Requires-Dist: uvicorn[standard]~=0.32.0
 Requires-Dist: nwa-stdlib~=1.9.0
 Requires-Dist: oauth2-lib~=2.4.0
 Requires-Dist: tabulate==0.9.0
 Requires-Dist: strawberry-graphql>=0.246.2
-Requires-Dist: pydantic-forms~=1.1.3
+Requires-Dist: pydantic-forms~=1.2.1
 Requires-Dist: celery~=5.4.0 ; extra == "celery"
 Requires-Dist: toml ; extra == "dev"
 Requires-Dist: bumpversion ; extra == "dev"
@@ -1,10 +1,10 @@
-orchestrator/__init__.py,sha256=Si6jkyprNLow9W0WKRVqn6elJdlthYCP0gV7K5RQXvw,1058
+orchestrator/__init__.py,sha256=-KfRTZJfyQYW2nDBgJPwnkSzI_ZuAWc0QC414IwuWEs,1059
 orchestrator/app.py,sha256=_2e3JMYgH_egOScokFVpFuTlJWGGwH0KYgZajDdm--0,11563
 orchestrator/exception_handlers.py,sha256=UsW3dw8q0QQlNLcV359bIotah8DYjMsj2Ts1LfX4ClY,1268
 orchestrator/log_config.py,sha256=1tPRX5q65e57a6a_zEii_PFK8SzWT0mnA5w2sKg4hh8,1853
 orchestrator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 orchestrator/security.py,sha256=_W_wFkjmlwVwwHRsql69iMoqRvDCiaA63i5rvRHSrZ0,2414
-orchestrator/settings.py,sha256=55AIpcxDcJJP61Jh-y6-wyqOMrzuukx5SjBTam3sNHk,3532
+orchestrator/settings.py,sha256=SFF-jjkVkq_UILzuZbvhBUN3408PeG0y9AFZiCipBVs,3617
 orchestrator/targets.py,sha256=QphHzEUTRhue3pKTPYklxDdvCQF1ANxt0r_HjbaxVCc,766
 orchestrator/types.py,sha256=cXUWV9EtoO4Dx3h3YJHaGVxmw-5XLt6Mow-dALdhTWk,16230
 orchestrator/version.py,sha256=b58e08lxs47wUNXv0jXFO_ykpksmytuzEXD4La4W-NQ,1366
@@ -17,7 +17,7 @@ orchestrator/api/api_v1/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n
 orchestrator/api/api_v1/api.py,sha256=zGPSCX-nCebZXN2OT9QA_ChAtpsK53hpxZ7F2x_0gjI,2332
 orchestrator/api/api_v1/endpoints/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
 orchestrator/api/api_v1/endpoints/health.py,sha256=iaxs1XX1_250_gKNsspuULCV2GEMBjbtjsmfQTOvMAI,1284
-orchestrator/api/api_v1/endpoints/processes.py,sha256=bsS8CqpfE3q5uIYZeZYiKjlbwHYSliFJQu130ov8cL8,12716
+orchestrator/api/api_v1/endpoints/processes.py,sha256=21BG32TslSgG2ePs68jUYAy8rDuohTXgzWETOpuNzyI,12761
 orchestrator/api/api_v1/endpoints/products.py,sha256=Qyj9OzlfWfgsWe9Homd60LFco91VaJ1gkgXxn0AmP6Q,2143
 orchestrator/api/api_v1/endpoints/settings.py,sha256=QiSih8zOUombxXk5Hd7MACq5BC5Y9w-BrmgBdTPRIDg,6141
 orchestrator/api/api_v1/endpoints/subscription_customer_descriptions.py,sha256=Elu4DVJoNtUFq_b3pG1Ws8StrUIo_jTViff2TJqe6ZU,3398
@@ -26,10 +26,11 @@ orchestrator/api/api_v1/endpoints/translations.py,sha256=dIWh_fCnZZUxJoGiNeJ49DK
 orchestrator/api/api_v1/endpoints/user.py,sha256=RyI32EXVu6I-IxWjz0XB5zQWzzLL60zKXLgLqLH02xU,1827
 orchestrator/api/api_v1/endpoints/ws.py,sha256=1l7E0ag_sZ6UMfQPHlmew7ENwxjm6fflBwcMZAb7V-k,2786
 orchestrator/cli/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
-orchestrator/cli/database.py,sha256=qMpE9qXfJ55SioBBVymXu4hn7pkLR2G7SYPXb_snhFY,16184
+orchestrator/cli/database.py,sha256=6Q9TZHAHaBEgP4mQArVsXKcQ1V7D2KM3ClMZ4PgSzA0,19274
 orchestrator/cli/generate.py,sha256=SBaYfRijXPF9r3VxarPKTiDzDcB6GBMMQvecQIb_ZLQ,7377
 orchestrator/cli/main.py,sha256=GC_kxa9OZ-Y0ig_klfWc6ysOQuPVTUmTmDRj3m8cJHA,983
 orchestrator/cli/migrate_domain_models.py,sha256=OhjNuIheytgShpMYCZ18leNUzk17ExhtkCqx7Ww03R8,20371
+orchestrator/cli/migrate_tasks.py,sha256=PoNMRQadY3_VtDfR3OKjbjf01Tu-Q2VJd8YAVB_zrUc,6669
 orchestrator/cli/migrate_workflows.py,sha256=-_nsKUcVa14-Ug3aSppU9yk-oWlK411SX33WqzD1p4M,8979
 orchestrator/cli/migration_helpers.py,sha256=C5tpkP5WEBr7G9S-1k1hgSI8ili6xd9Z5ygc9notaK0,4110
 orchestrator/cli/scheduler.py,sha256=iCKBWYUwQIYTDqKQ9rMVvs2sNiAzE-J2SkV170TPP2g,1896
@@ -112,7 +113,7 @@ orchestrator/db/filters/resource_type.py,sha256=7aH4_n8vPpsySFnnN8SefN8h964glmEi
 orchestrator/db/filters/subscription.py,sha256=IV7ur7yyKFNUQRx0gZPelcMLHjuUPU0Rx4oZ6Shbn6A,1519
 orchestrator/db/filters/workflow.py,sha256=osyyEmOFuev6q5lizHeUvgxf1Nji3fZtlbf2_lzSNao,1276
 orchestrator/db/filters/search_filters/__init__.py,sha256=a7yfEAA-qpD_PHZH5LeqSjrLeGAvQrDsJp7mzVwDMwo,562
-orchestrator/db/filters/search_filters/inferred_filter.py,sha256=B3WuA6yi3AFhkgbr8yK0UnqiZNUZ1h1aNFQCtNqaP7I,5591
+orchestrator/db/filters/search_filters/inferred_filter.py,sha256=LTRrcLY7hTu8Dr18-xLVMdge9_MNCvtfcCH0CbaJc2Y,5711
 orchestrator/db/range/__init__.py,sha256=mAOCJrQLMMeks0zTK5resf5t-01GrlFN8nIT1gNbqEQ,162
 orchestrator/db/range/range.py,sha256=sOSzkRqvLXvoTYxOd_Q0OWQ9lSqrXTnwXh4DVLGGSK0,2175
 orchestrator/db/sorting/__init__.py,sha256=d33YUS6uAI75l35Mpb-GB9t9sd03jCxq0V-kLmijVOc,353
@@ -133,7 +134,7 @@ orchestrator/distlock/managers/__init__.py,sha256=ImIkNsrXcyE7-NgRWqEhUXUuUzda9K
 orchestrator/distlock/managers/memory_distlock_manager.py,sha256=HWQafcVKBF-Cka_wukZZ1GM69AWPVOpJPje3quIebQ8,3114
 orchestrator/distlock/managers/redis_distlock_manager.py,sha256=Lk0Krw7dQD58uleAz3Eancc4La-xSCFHxB8ymg3qWf0,3271
 orchestrator/domain/__init__.py,sha256=Rnt9XXHasAgieQiLT0JhUFRrysa9EIubvzcd5kk3Gvc,894
-orchestrator/domain/base.py,sha256=8iiz1IP6CSrr5pz_0oqRNj5MoHY4PR9E30hx8Zrlrq4,61928
+orchestrator/domain/base.py,sha256=2yehcuXSkq3H2wl8y19qeY98BQX_aL2i5QxxAU896JI,61929
 orchestrator/domain/customer_description.py,sha256=v7o6TTN4oc6bWHZU-jCT-fUYvkeYahbpXOwlKXofuI8,3360
 orchestrator/domain/helpers.py,sha256=2j2j_7J8qvniHxxpdoEQsoVpC-llkn0tbww2eCA0K1A,989
 orchestrator/domain/lifecycle.py,sha256=ROYJ5t6JFy5PwE9nmApS54NIEw0dwk-2iZC-OzW18-U,2882
@@ -147,7 +148,7 @@ orchestrator/forms/validators/product_id.py,sha256=u5mURLT0pOhbFLdwvYcy2_2fXMt35
 orchestrator/graphql/__init__.py,sha256=avq8Yg3Jr_9pJqh7ClyIAOX7YSg1eM_AWmt5C3FRYUY,1440
 orchestrator/graphql/autoregistration.py,sha256=pF2jbMKG26MvYoMSa6ZpqpHjVks7_NvSRFymHTgmfjs,6342
 orchestrator/graphql/pagination.py,sha256=iqVDn3GPZpiQhEydfwkBJLURY-X8wwUphS8Lkeg0BOc,2413
-orchestrator/graphql/schema.py,sha256=K1qTYOKyhAj0OK-vkctwkTKxZ_3P_dZ7mAgFRnrazWY,8892
+orchestrator/graphql/schema.py,sha256=JNAd_MNyFkKzFIutaA6bwtgUsahGDt9RhWZIpFAtVAg,9090
 orchestrator/graphql/types.py,sha256=0aaRM1cUaWUC2SaWRtgLmSe2_2C0KuwA4LMCav4SdPg,5037
 orchestrator/graphql/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 orchestrator/graphql/extensions/stats.py,sha256=pGhEBQg45XvqZhRobcrCSGwt5AGmR3gflsm1dYiIg5g,2018
@@ -155,7 +156,7 @@ orchestrator/graphql/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
 orchestrator/graphql/loaders/subscriptions.py,sha256=31zE2WC7z-tPIUmVpU1QWOJvNbLvF7sYgY7JAQ6OPJg,1856
 orchestrator/graphql/mutations/customer_description.py,sha256=37yX92fE1Sc51O9i-gP8JfD3HdsvpR3TtbgYqKtGC-w,3343
 orchestrator/graphql/mutations/start_process.py,sha256=8vLVvmBwL1ujbZJoI_8YE3VAgI-J2RTzgrTZJC8THZ4,1576
-orchestrator/graphql/resolvers/__init__.py,sha256=v6G9OboMuqEdZAB4RfCNjQZhJyXcvuZ_gC7RN9gTSrU,941
+orchestrator/graphql/resolvers/__init__.py,sha256=EEw9NO4LAryfrpkLlgsNQ9rytKd0usBDx95OURRV6sg,1031
 orchestrator/graphql/resolvers/customer.py,sha256=tq06MtMoaqFwqn3YQvSv0VmROW7QJZRJp1ykO4tUhck,934
 orchestrator/graphql/resolvers/helpers.py,sha256=8NeuiJD9CPus4BhRK9nDsSMhb2LhW1W7nrEpyj_J3M4,771
 orchestrator/graphql/resolvers/process.py,sha256=8UQaw3Mqjwubq_V7iW4O2KsFpqlPKLyXkPRVNS9eKec,4965
@@ -164,6 +165,7 @@ orchestrator/graphql/resolvers/product_block.py,sha256=BAYW66KT_1mozNidfBxBI7l3_
 orchestrator/graphql/resolvers/resource_type.py,sha256=NRDKPFqwktOvVLLm2QCLxVPwzXIYEMJKUN3DS_kS49o,2926
 orchestrator/graphql/resolvers/settings.py,sha256=M-WJGFN0NmINc7SP9plJn689Sn7DVZ5pMIIpoceMrsw,3700
 orchestrator/graphql/resolvers/subscription.py,sha256=57niFv-JCro_wm0peJ5Ne04F2WIPuJ-Lx2h8yd9qubA,6541
+orchestrator/graphql/resolvers/version.py,sha256=qgwe1msPOexeg3RHCscJ8s45vNfMhYh9ZKyCZ3MNw30,809
 orchestrator/graphql/resolvers/workflow.py,sha256=YUwPklwYesgmRS4d0eIQdgVmkyhgGbkQZ9uC1Oe8EyA,2776
 orchestrator/graphql/schemas/__init__.py,sha256=dWG4DNzWq5akQ3v5tSAvT03HLxPWXa09Gx8rTz_MHmk,940
 orchestrator/graphql/schemas/customer.py,sha256=ZptVFG0qauZaoy29KDrh6k5xAnacNCTavmQrZMH8czc,147
@@ -178,6 +180,7 @@ orchestrator/graphql/schemas/resource_type.py,sha256=s5d_FwQXL2-Sc-IDUxTJun5qFQ4
 orchestrator/graphql/schemas/settings.py,sha256=drhm5VcLmUbiYAk6WUSJcyJqjNM96E6GvpxVdPAobnA,999
 orchestrator/graphql/schemas/strawberry_pydantic_patch.py,sha256=CjNUhTKdYmLiaem-WY_mzw4HASIeaZitxGF8pPocqVw,1602
 orchestrator/graphql/schemas/subscription.py,sha256=_ra7MG9P2w7_WMiMx-zTOaAMinGlTKN4gwE9vej-5V8,9573
+orchestrator/graphql/schemas/version.py,sha256=HSzVg_y4Sjd5_H5rRUtu3FJKOG_8ifhvBNt_qjOtC-E,92
 orchestrator/graphql/schemas/workflow.py,sha256=0UWU0HGTiAC_5Wzh16clBd74JoYHrr38YIGV86q-si0,1276
 orchestrator/graphql/utils/__init__.py,sha256=1JvenzEVW1CBa1sGVI9I8IWnnoXIkb1hneDqph9EEZY,524
 orchestrator/graphql/utils/create_resolver_error_handler.py,sha256=PpQMVwGrE9t0nZ12TwoxPxksXxEwQM7lSNPeh7qW3vk,1233
@@ -191,7 +194,7 @@ orchestrator/graphql/utils/to_graphql_result_page.py,sha256=8ObkJP8reVf-TQOQVPKv
 orchestrator/migrations/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
 orchestrator/migrations/alembic.ini,sha256=kMoADqhGeubU8xanILNaqm4oixLy9m4ngYtdGpZcc7I,873
 orchestrator/migrations/env.py,sha256=AwlgBPYbV2hr5rHNwlOPJ5rs-vRyfmzcWyxae0btpZ4,3382
-orchestrator/migrations/helpers.py,sha256=8Ny9k0D-siSKPDf0DNcvhSVtlMU1ieRgLy0l8EtPtRc,42983
+orchestrator/migrations/helpers.py,sha256=5BVDKOFETKyW4kE0-DiTU068VFrRo52n3MKrvt5LfjU,43769
 orchestrator/migrations/script.py.mako,sha256=607Zrgp-Z-m9WGLt4wewN1QDOmHeifxcePUdADkSZyM,510
 orchestrator/migrations/templates/alembic.ini.j2,sha256=jA-QykVparwWSNt5XDP0Zk7epLOhK7D87Af-i2shJV4,905
 orchestrator/migrations/templates/env.py.j2,sha256=RfLAQItZ56Jlzwi6LJfBo92m1-th_bdfkFKD1mwTZIE,2821
@@ -260,9 +263,9 @@ orchestrator/utils/get_subscription_dict.py,sha256=fkgDM54hn5YGUP9_2MOcJApJK1Z6c
 orchestrator/utils/get_updated_properties.py,sha256=egVZ0R5LNJ4e51Z8SXlU8cmb4tXxG-xb1d7OKwh-7xI,1322
 orchestrator/utils/helpers.py,sha256=NjUF3IvWdnLulliP8-JQvGGGpHrh0vs0Vm092ynw-ss,3212
 orchestrator/utils/json.py,sha256=7386sdqkrKYyy4sbn5NscwctH_v1hLyw5172P__rU3g,8341
-orchestrator/utils/redis.py,sha256=WZiTjjQIO5TZIRllm-a6cQbndKE7hAxxj6mus_gToOs,7221
-orchestrator/utils/search_query.py,sha256=ncJlynwtW-qwL0RcNq4DuAUx9KUMI6llwGAEwLO2QCA,17097
-orchestrator/utils/state.py,sha256=gPYHOWDxPvoYZ83WwKPCpeBAsNWOTlkwZz5kAZcM9rw,13011
+orchestrator/utils/redis.py,sha256=fvALD_Yt4lZuIfgCLGJwwQSElgKOLHrxH_RdhSXkeZw,7222
+orchestrator/utils/search_query.py,sha256=ji5LHtrzohGz6b1IG41cnPdpWXzLEzz4SGWgHly_yfU,16205
+orchestrator/utils/state.py,sha256=aNR7XftX9dX-2TKHFu2I2rIRIqFENB7AnlpH6Zs80QA,13181
 orchestrator/utils/strings.py,sha256=N0gWjmQaMjE9_99VtRvRaU8IBLTKMgBKSXcTZ9TpWAg,1077
 orchestrator/utils/validate_data_version.py,sha256=3Eioy2wE2EWKSgkyMKcEKrkCAfUIAq-eb73iRcpgppw,184
 orchestrator/websocket/__init__.py,sha256=V79jskk1z3uPIYgu0Gt6JLzuqr7NGfNeAZ-hbBqoUv4,5745
@@ -280,7 +283,7 @@ orchestrator/workflows/tasks/resume_workflows.py,sha256=wZGNHHQYL7wociSTmoNdDdh5
 orchestrator/workflows/tasks/validate_product_type.py,sha256=kVuN94hGWcmBNphgpAlGTSiyO2dEhFwgIq87SYjArns,3174
 orchestrator/workflows/tasks/validate_products.py,sha256=j_aOyxcH8DymlGupSS6XRwQdWx2Ab-c8f8iUvAXBTes,8511
 orchestrator/workflows/translations/en-GB.json,sha256=ST53HxkphFLTMjFHonykDBOZ7-P_KxksktZU3GbxLt0,846
-orchestrator_core-2.9.2rc2.dist-info/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
-orchestrator_core-2.9.2rc2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-orchestrator_core-2.9.2rc2.dist-info/METADATA,sha256=s7QCKcWwbLKBl9Wbb_HTUXhEwVoN3ZaQyFyFKbwThgU,4924
-orchestrator_core-2.9.2rc2.dist-info/RECORD,,
+orchestrator_core-2.10.0rc1.dist-info/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
+orchestrator_core-2.10.0rc1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+orchestrator_core-2.10.0rc1.dist-info/METADATA,sha256=4c4eoSBdGAS9W_ug4mdpkVTLpJlaBZRcSnJX3wkGOs8,4926
+orchestrator_core-2.10.0rc1.dist-info/RECORD,,