orchestrator-core 2.9.2rc3-py3-none-any.whl → 2.10.0rc2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
orchestrator/__init__.py CHANGED
@@ -13,7 +13,7 @@

  """This is the orchestrator workflow engine."""

- __version__ = "2.9.2rc3"
+ __version__ = "2.10.0rc2"

  from orchestrator.app import OrchestratorCore
  from orchestrator.settings import app_settings
orchestrator/api/api_v1/api.py CHANGED
@@ -19,12 +19,15 @@ from fastapi.routing import APIRouter
  from orchestrator.api.api_v1.endpoints import (
      health,
      processes,
+     product_blocks,
      products,
+     resource_types,
      settings,
      subscription_customer_descriptions,
      subscriptions,
      translations,
      user,
+     workflows,
      ws,
  )
  from orchestrator.security import authorize
@@ -34,14 +37,32 @@ api_router = APIRouter()
  api_router.include_router(
      processes.router, prefix="/processes", tags=["Core", "Processes"], dependencies=[Depends(authorize)]
  )
+ api_router.include_router(
+     subscriptions.router,
+     prefix="/subscriptions",
+     tags=["Core", "Subscriptions"],
+     dependencies=[Depends(authorize)],
+ )
  api_router.include_router(processes.ws_router, prefix="/processes", tags=["Core", "Processes"])
  api_router.include_router(
      products.router, prefix="/products", tags=["Core", "Product"], dependencies=[Depends(authorize)]
  )
  api_router.include_router(
-     subscriptions.router,
-     prefix="/subscriptions",
-     tags=["Core", "Subscriptions"],
+     product_blocks.router,
+     prefix="/product_blocks",
+     tags=["Core", "Product Blocks"],
+     dependencies=[Depends(authorize)],
+ )
+ api_router.include_router(
+     resource_types.router,
+     prefix="/resource_types",
+     tags=["Core", "Resource Types"],
+     dependencies=[Depends(authorize)],
+ )
+ api_router.include_router(
+     workflows.router,
+     prefix="/workflows",
+     tags=["Core", "Workflows"],
      dependencies=[Depends(authorize)],
  )
  api_router.include_router(
orchestrator/api/api_v1/endpoints/processes.py CHANGED
@@ -240,6 +240,7 @@ def abort_process_endpoint(process_id: UUID, request: Request, user: str = Depen
      broadcast_func = api_broadcast_process_data(request)
      try:
          abort_process(process, user, broadcast_func=broadcast_func)
+         broadcast_invalidate_status_counts()
          return
      except Exception as e:
          raise_status(HTTPStatus.INTERNAL_SERVER_ERROR, str(e))
orchestrator/api/api_v1/endpoints/product_blocks.py ADDED
@@ -0,0 +1,56 @@
+ # Copyright 2019-2020 SURF.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from http import HTTPStatus
+ from uuid import UUID
+
+ from fastapi.param_functions import Body
+ from fastapi.routing import APIRouter
+
+ from orchestrator.api.error_handling import raise_status
+ from orchestrator.db import db
+ from orchestrator.db.models import ProductBlockTable
+ from orchestrator.schemas.product_block import ProductBlockPatchSchema, ProductBlockSchema
+
+ router = APIRouter()
+
+
+ @router.get("/{product_block_id}", response_model=ProductBlockSchema)
+ def get_product_block_description(product_block_id: UUID) -> str:
+     product_block = db.session.get(ProductBlockTable, product_block_id)
+     if product_block is None:
+         raise_status(HTTPStatus.NOT_FOUND)
+     return product_block
+
+
+ @router.patch("/{product_block_id}", status_code=HTTPStatus.CREATED, response_model=ProductBlockSchema)
+ async def patch_product_block_by_id(
+     product_block_id: UUID, data: ProductBlockPatchSchema = Body(...)
+ ) -> ProductBlockTable:
+     product_block = db.session.get(ProductBlockTable, product_block_id)
+     if not product_block:
+         raise_status(HTTPStatus.NOT_FOUND, f"Product_block id {product_block_id} not found")
+
+     return await _patch_product_block_description(data, product_block)
+
+
+ async def _patch_product_block_description(
+     data: ProductBlockPatchSchema,
+     product_block: ProductBlockTable,
+ ) -> ProductBlockTable:
+
+     updated_properties = data.model_dump(exclude_unset=True)
+     description = updated_properties.get("description", product_block.description)
+     product_block.description = description
+     db.session.commit()
+     return product_block
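The new product_blocks, resource_types, and workflows endpoint modules, together with the products change below, all expose the same minimal PATCH that only updates a description. A client-side sketch of the new endpoint, assuming the default `/api` mount, a locally running instance, and `httpx` as the HTTP client (none of which are provided by this package):

```python
# Hypothetical call against the new PATCH endpoint; host, port, path prefix and
# authentication depend entirely on your deployment.
import httpx

product_block_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
resp = httpx.patch(
    f"http://localhost:8080/api/product_blocks/{product_block_id}",
    json={"description": "Updated product block description"},
)
resp.raise_for_status()  # endpoint answers with HTTP 201 and the updated schema
print(resp.json()["description"])
```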
orchestrator/api/api_v1/endpoints/products.py CHANGED
@@ -14,6 +14,7 @@
  from http import HTTPStatus
  from uuid import UUID

+ from fastapi.param_functions import Body
  from fastapi.routing import APIRouter
  from sqlalchemy import select
  from sqlalchemy.orm import joinedload, selectinload
@@ -21,6 +22,7 @@ from sqlalchemy.orm import joinedload, selectinload
  from orchestrator.api.error_handling import raise_status
  from orchestrator.db import ProductBlockTable, ProductTable, db
  from orchestrator.schemas import ProductSchema
+ from orchestrator.schemas.product import ProductPatchSchema

  router = APIRouter()

@@ -48,6 +50,13 @@ def fetch(tag: str | None = None, product_type: str | None = None) -> list[Produ
      response_model=ProductSchema,
  )
  def product_by_id(product_id: UUID) -> ProductTable:
+     product = _product_by_id(product_id)
+     if not product:
+         raise_status(HTTPStatus.NOT_FOUND, f"Product id {product_id} not found")
+     return product
+
+
+ def _product_by_id(product_id: UUID) -> ProductTable | None:
      stmt = (
          select(ProductTable)
          .options(
@@ -57,7 +66,25 @@ def product_by_id(product_id: UUID) -> ProductTable:
          )
          .filter(ProductTable.product_id == product_id)
      )
-     product = db.session.scalars(stmt).unique().one_or_none()
+     return db.session.scalars(stmt).unique().one_or_none()
+
+
+ @router.patch("/{product_id}", status_code=HTTPStatus.CREATED, response_model=ProductSchema)
+ async def patch_product_by_id(product_id: UUID, data: ProductPatchSchema = Body(...)) -> ProductTable:
+     product = _product_by_id(product_id)
      if not product:
          raise_status(HTTPStatus.NOT_FOUND, f"Product id {product_id} not found")
+
+     return await _patch_product_description(data, product)
+
+
+ async def _patch_product_description(
+     data: ProductPatchSchema,
+     product: ProductTable,
+ ) -> ProductTable:
+
+     updated_properties = data.model_dump(exclude_unset=True)
+     description = updated_properties.get("description", product.description)
+     product.description = description
+     db.session.commit()
      return product
orchestrator/api/api_v1/endpoints/resource_types.py ADDED
@@ -0,0 +1,56 @@
+ # Copyright 2019-2020 SURF.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from http import HTTPStatus
+ from uuid import UUID
+
+ from fastapi.param_functions import Body
+ from fastapi.routing import APIRouter
+
+ from orchestrator.api.error_handling import raise_status
+ from orchestrator.db import db
+ from orchestrator.db.models import ResourceTypeTable
+ from orchestrator.schemas.resource_type import ResourceTypePatchSchema, ResourceTypeSchema
+
+ router = APIRouter()
+
+
+ @router.get("/{resource_type_id}", response_model=ResourceTypeSchema)
+ def get_resource_type_description(resource_type_id: UUID) -> str:
+     resource_type = db.session.get(ResourceTypeTable, resource_type_id)
+     if resource_type is None:
+         raise_status(HTTPStatus.NOT_FOUND)
+     return resource_type
+
+
+ @router.patch("/{resource_type_id}", status_code=HTTPStatus.CREATED, response_model=ResourceTypeSchema)
+ async def patch_resource_type_by_id(
+     resource_type_id: UUID, data: ResourceTypePatchSchema = Body(...)
+ ) -> ResourceTypeTable:
+     resource_type = db.session.get(ResourceTypeTable, resource_type_id)
+     if not resource_type:
+         raise_status(HTTPStatus.NOT_FOUND, f"ResourceType id {resource_type_id} not found")
+
+     return await _patch_resource_type_description(data, resource_type)
+
+
+ async def _patch_resource_type_description(
+     data: ResourceTypePatchSchema,
+     resource_type: ResourceTypeTable,
+ ) -> ResourceTypeTable:
+
+     updated_properties = data.model_dump(exclude_unset=True)
+     description = updated_properties.get("description", resource_type.description)
+     resource_type.description = description
+     db.session.commit()
+     return resource_type
orchestrator/api/api_v1/endpoints/workflows.py ADDED
@@ -0,0 +1,54 @@
+ # Copyright 2019-2020 SURF.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from http import HTTPStatus
+ from uuid import UUID
+
+ from fastapi.param_functions import Body
+ from fastapi.routing import APIRouter
+
+ from orchestrator.api.error_handling import raise_status
+ from orchestrator.db import db
+ from orchestrator.db.models import WorkflowTable
+ from orchestrator.schemas.workflow import WorkflowPatchSchema, WorkflowSchema
+
+ router = APIRouter()
+
+
+ @router.get("/{workflow_id}", response_model=WorkflowSchema)
+ def get_workflow_description(workflow_id: UUID) -> str:
+     workflow = db.session.get(WorkflowTable, workflow_id)
+     if workflow is None:
+         raise_status(HTTPStatus.NOT_FOUND)
+     return workflow
+
+
+ @router.patch("/{workflow_id}", status_code=HTTPStatus.CREATED, response_model=WorkflowSchema)
+ async def patch_workflow_by_id(workflow_id: UUID, data: WorkflowPatchSchema = Body(...)) -> WorkflowTable:
+     workflow = db.session.get(WorkflowTable, workflow_id)
+     if not workflow:
+         raise_status(HTTPStatus.NOT_FOUND, f"Workflow id {workflow_id} not found")
+
+     return await _patch_workflow_description(data, workflow)
+
+
+ async def _patch_workflow_description(
+     data: WorkflowPatchSchema,
+     workflow: WorkflowTable,
+ ) -> WorkflowTable:
+
+     updated_properties = data.model_dump(exclude_unset=True)
+     description = updated_properties.get("description", workflow.description)
+     workflow.description = description
+     db.session.commit()
+     return workflow
orchestrator/app.py CHANGED
@@ -199,8 +199,9 @@ class OrchestratorCore(FastAPI):

      def register_graphql(
          self: "OrchestratorCore",
-         query: Any = Query,
-         mutation: Any = Mutation,
+         # mypy 1.9 cannot properly inspect these, fixed in 1.15
+         query: Any = Query,  # type: ignore
+         mutation: Any = Mutation,  # type: ignore
          register_models: bool = True,
          subscription_interface: Any = SubscriptionInterface,
          graphql_models: StrawberryModelType | None = None,
orchestrator/cli/database.py CHANGED
@@ -25,6 +25,7 @@ import orchestrator.workflows
  from orchestrator.cli.domain_gen_helpers.types import ModelUpdates
  from orchestrator.cli.helpers.print_helpers import COLOR, str_fmt
  from orchestrator.cli.migrate_domain_models import create_domain_models_migration_sql
+ from orchestrator.cli.migrate_tasks import create_tasks_migration_wizard
  from orchestrator.cli.migrate_workflows import create_workflows_migration_wizard
  from orchestrator.cli.migration_helpers import create_migration_file
  from orchestrator.db import init_database
@@ -397,3 +398,80 @@ def migrate_workflows(

      create_migration_file(alembic_cfg(), sql_upgrade_str, sql_downgrade_str, message, preamble=preamble)
      return None
+
+
+ @app.command(help="Create migration file based on diff tasks in db")
+ def migrate_tasks(
+     message: str = typer.Argument(..., help="Migration name"),
+     test: bool = typer.Option(False, help="Optional boolean if you don't want to generate a migration file"),
+ ) -> tuple[list[dict], list[dict]] | None:
+     """The `migrate-tasks` command creates a migration file based on the difference between tasks in the database and registered TaskInstances in your codebase.
+
+     !!! warning "BACKUP YOUR DATABASE BEFORE USING THE MIGRATION!"
+
+     You will be prompted with inputs for new models and resource type updates.
+     Resource type updates are only handled when it's renamed in all product blocks.
+
+     Args:
+         message: Message/description of the generated migration.
+         test: Optional boolean if you don't want to generate a migration file.
+
+     Returns None unless `--test` is used, in which case it returns:
+         - tuple:
+             - list of upgrade SQL statements in string format.
+             - list of downgrade SQL statements in string format.
+
+     CLI Arguments:
+         ```sh
+         Arguments:
+           MESSAGE  Migration name  [required]
+
+         Options:
+           --test / --no-test  Optional boolean if you don't want to generate a migration
+                               file  [default: no-test]
+         ```
+     """
+     if not app_settings.TESTING:
+         init_database(app_settings)
+
+     if test:
+         print(  # noqa: T001, T201
+             f"{str_fmt('NOTE:', flags=[COLOR.BOLD, COLOR.CYAN])} Running in test mode. No migration file will be generated.\n"
+         )
+
+     tasks_to_add, tasks_to_delete = create_tasks_migration_wizard()
+
+     # String 'template' arguments
+     import_str = "from orchestrator.migrations.helpers import create_task, delete_workflow\n"
+     tpl_preamble_lines = []
+     tpl_upgrade_lines = []
+     tpl_downgrade_lines = []
+
+     if tasks_to_add:
+         tpl_preamble_lines.append(f"new_tasks = {json.dumps(tasks_to_add, indent=4)}\n")
+         tpl_upgrade_lines.extend([(" " * 4) + "for task in new_tasks:", (" " * 8) + "create_task(conn, task)"])
+         tpl_downgrade_lines.extend(
+             [(" " * 4) + "for task in new_tasks:", (" " * 8) + 'delete_workflow(conn, task["name"])']
+         )
+
+     if tasks_to_delete:
+         tpl_preamble_lines.append(f"old_tasks = {json.dumps(tasks_to_delete, indent=4)}\n")
+         tpl_upgrade_lines.extend(
+             [(" " * 4) + "for task in old_tasks:", (" " * 8) + 'delete_workflow(conn, task["name"])']
+         )
+         tpl_downgrade_lines.extend([(" " * 4) + "for task in old_tasks:", (" " * 8) + "create_task(conn, task)"])
+
+     preamble = "\n".join(
+         [
+             import_str,
+             *tpl_preamble_lines,
+         ]
+     )
+     sql_upgrade_str = "\n".join(tpl_upgrade_lines)
+     sql_downgrade_str = "\n".join(tpl_downgrade_lines)
+
+     if test:
+         return tasks_to_add, tasks_to_delete
+
+     create_migration_file(alembic_cfg(), sql_upgrade_str, sql_downgrade_str, message, preamble=preamble)
+     return None
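Because `migrate-tasks` is registered on the same Typer application as the existing migration commands, it can be exercised without writing a migration file by passing `--test`. A sketch, assuming the Typer app object in `orchestrator.cli.database` is named `app` (as the decorator above suggests), that a database is reachable, and that answering `q` simply quits the wizard menu:

```python
# Sketch: drive the interactive wizard through Typer's test runner.
# "Add new tasks" is only an example migration message.
from typer.testing import CliRunner

from orchestrator.cli.database import app

runner = CliRunner()
result = runner.invoke(app, ["migrate-tasks", "Add new tasks", "--test"], input="q\n")
print(result.output)  # wizard output only; no migration file is generated in test mode
```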
orchestrator/cli/generator/generator/product_block.py CHANGED
@@ -11,7 +11,6 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

- import re
  from collections import ChainMap
  from collections.abc import Mapping
  from pathlib import Path
@@ -56,15 +55,8 @@ def get_product_block_path(product_block: dict) -> Path:


  def enrich_product_block(product_block: dict) -> dict:
-     def to_block_name() -> str:
-         """Separate block name into words."""
-         type = product_block["type"]
-         name = re.sub("(.)([A-Z][a-z]+)", r"\1 \2", type)
-         return re.sub("([a-z0-9])([A-Z])", r"\1 \2", name)
-
      fields = get_all_fields(product_block)
-     block_name = product_block.get("block_name", to_block_name())
-
+     block_name = product_block.get("block_name", product_block.get("type"))
      return product_block | {
          "fields": fields,
          "block_name": block_name,
orchestrator/cli/migrate_tasks.py ADDED
@@ -0,0 +1,188 @@
+ import itertools
+ import operator
+ from collections.abc import Iterable
+ from typing import TypeVar, cast
+
+ import structlog
+ from sqlalchemy import select
+ from tabulate import tabulate
+
+ import orchestrator.workflows
+ from orchestrator.cli.helpers.input_helpers import _enumerate_menu_keys, _prompt_user_menu, get_user_input
+ from orchestrator.cli.helpers.print_helpers import COLOR, noqa_print, print_fmt
+ from orchestrator.db import WorkflowTable, db
+ from orchestrator.targets import Target
+ from orchestrator.workflow import Workflow
+ from orchestrator.workflows import get_workflow
+
+ logger = structlog.get_logger(__name__)
+
+ T = TypeVar("T")
+
+
+ def _print_tasks_table(tasks: list[WorkflowTable]) -> None:
+     items = [(task.name, task.description) for task in tasks]
+     print_fmt(
+         tabulate(
+             items,
+             headers=["#", "name", "description"],
+             showindex=itertools.count(1),
+         ),
+         end="\n\n",
+     )
+
+
+ def _add_task(tasks: dict[str, Workflow], state: dict) -> dict:
+     print_fmt("\nAdd new task\n", flags=[COLOR.UNDERLINE])
+
+     if not tasks:
+         noqa_print("No registered tasks found to add to the database")
+         return state
+
+     noqa_print("\nWhich task should be added?")
+
+     already_used_tasks = {task["name"] for task in state["tasks_to_add"] + state["tasks_to_delete"]}
+     task_options = [(task, task) for task in tasks.keys() if task not in already_used_tasks]
+     task_name = _prompt_user_menu([*task_options, ("cancel", None)], keys=[*_enumerate_menu_keys(task_options), "q"])
+     if not task_name:
+         # Menu cancelled
+         return state
+
+     task_to_add = {"name": task_name, "description": tasks[task_name].description}
+     return {**state, "tasks_to_add": [*state["tasks_to_add"], task_to_add]}
+
+
+ def _delete_task(tasks: Iterable[WorkflowTable], state: dict) -> dict:
+     print_fmt("\nDelete existing task\n", flags=[COLOR.UNDERLINE])
+     already_used_tasks = {task["name"] for task in state["tasks_to_add"] + state["tasks_to_delete"]}
+     items = [(task.name, task.description) for task in tasks if task not in already_used_tasks]
+     items = sorted(items, key=operator.itemgetter(1, 1))
+     keys = ["#", "name", "description"]
+     if not items:
+         noqa_print("No deletable tasks in database")
+         return state
+
+     print_fmt(
+         tabulate(
+             items,
+             headers=keys,
+             showindex=itertools.count(1),
+         ),
+         end="\n\n",
+     )
+     task_num = get_user_input("Which task do you want to delete? (q to cancel) ", "q")
+     if not task_num.isdigit():
+         return state
+     task_index = int(task_num) - 1
+     if 0 <= task_index < len(items):
+         item = dict(zip(keys[1:], items[task_index]))
+         return {**state, "tasks_to_delete": [*state["tasks_to_delete"], item]}
+     return state
+
+
+ def _show_state(state: dict) -> dict:
+     print_fmt("\nTasks to add:", flags=[COLOR.GREEN])
+     print_fmt(
+         tabulate(
+             state["tasks_to_add"],
+             headers="keys",
+             showindex=itertools.count(1),
+         ),
+         end="\n\n",
+     )
+
+     print_fmt("Tasks to delete:", flags=[COLOR.RED])
+     print_fmt(
+         tabulate(
+             state["tasks_to_delete"],
+             headers="keys",
+             showindex=itertools.count(1),
+         ),
+         end="\n\n",
+     )
+     return state
+
+
+ def delete_dangling_tasks(tasks: list[WorkflowTable], state: dict) -> dict:
+     if not tasks:
+         noqa_print("No dangling tasks found.")
+         return state
+
+     print_fmt(
+         "\nThe following tasks were found in the database that do not have a corresponding LazyWorkflowInstance:\n"
+     )
+     _print_tasks_table(tasks)
+     should_delete_dangling_tasks = (
+         get_user_input("Do you wish to delete all dangling tasks from the database? [y/n]: ", "n").lower() == "y"
+     )
+
+     if not should_delete_dangling_tasks:
+         noqa_print("Cancelling")
+         return state
+
+     already_used_tasks = {task["name"] for task in state["tasks_to_add"] + state["tasks_to_delete"]}
+     keys = ["name", "description"]
+     items = [
+         {"name": task.name, "description": task.description}
+         for k, task in zip(keys, tasks)
+         if task.name not in already_used_tasks
+     ]
+     return {**state, "tasks_to_delete": [*state["tasks_to_delete"], *items]}
+
+
+ def create_tasks_migration_wizard() -> tuple[list[dict], list[dict]]:
+     """Create tuple with lists for tasks to add and delete.
+
+     Returns tuple:
+         - list of task items to add in the migration
+         - list of task items to delete in the migration
+     """
+     database_tasks = {
+         task.name: task for task in list(db.session.scalars(select(WorkflowTable))) if task.target == Target.SYSTEM
+     }
+     registered_wf_instances = {
+         task: cast(Workflow, get_workflow(task)) for task in orchestrator.workflows.ALL_WORKFLOWS.keys()
+     }
+
+     registered_tasks = dict(
+         filter(
+             lambda task: task[1].target == Target.SYSTEM and task[0] in database_tasks.keys(),
+             registered_wf_instances.items(),
+         )
+     )
+     available_tasks = dict(
+         filter(
+             lambda task: task[1].target == Target.SYSTEM and task[0] not in database_tasks.keys(),
+             registered_wf_instances.items(),
+         )
+     )
+     unregistered_tasks = [task for task in database_tasks.values() if task.name not in registered_tasks.keys()]
+
+     # Main menu loop
+     state = {"tasks_to_add": [], "tasks_to_delete": [], "done": False}
+     while not state["done"]:
+         print_fmt("\nWhat do you want to do?\n", flags=[COLOR.UNDERLINE, COLOR.BOLD])
+         choice_fn = _prompt_user_menu(
+             [
+                 ("Add task to database", lambda s: _add_task(available_tasks, s)),
+                 ("Delete task from database", lambda s: _delete_task(database_tasks.values(), s)),
+                 (
+                     "Delete unregistered tasks from database",
+                     lambda s: delete_dangling_tasks(unregistered_tasks, s),
+                 ),
+                 ("Finish and create migration file", lambda s: {**s, "done": True, "abort": False}),
+                 ("Show current diff", _show_state),
+                 ("Quit menu without creating a migration file", lambda s: {**s, "done": True, "abort": True}),
+             ],
+             keys=["a", "x", "c", "y", "d", "q"],
+         )
+         if choice_fn:
+             state = choice_fn(state)  # type: ignore
+
+     if state.get("abort"):
+         return [], []
+
+     logger.info("Create tasks", create_tasks=state["tasks_to_add"])
+     logger.info("Delete tasks", delete_tasks=state["tasks_to_delete"])
+
+     return state["tasks_to_add"], state["tasks_to_delete"]  # type: ignore
orchestrator/db/filters/search_filters/inferred_filter.py CHANGED
@@ -21,6 +21,7 @@ from sqlalchemy import BinaryExpression, Cast, ColumnClause, ColumnElement, Stri
  from sqlalchemy.sql.functions import coalesce
  from sqlalchemy.sql.operators import eq

+ from orchestrator.settings import app_settings
  from orchestrator.utils.search_query import Node, WhereCondGenerator


@@ -67,7 +68,7 @@ def _filter_string(field: ColumnElement) -> WhereCondGenerator:
          if node[0] == "ValueGroup":
              vals = [w[1] for w in node[1] if w[0] in ["Word", "PrefixWord"]]  # Only works for (Prefix)Words atm
              return field.in_(vals)
-         return field.ilike(f"%{node[1]}%")
+         return field.ilike(f"{node[1]}") if app_settings.FILTER_BY_MODE == "exact" else field.ilike(f"%{node[1]}%")

      return _clause_gen

orchestrator/graphql/resolvers/__init__.py CHANGED
@@ -5,6 +5,7 @@ from orchestrator.graphql.resolvers.product_block import resolve_product_blocks
  from orchestrator.graphql.resolvers.resource_type import resolve_resource_types
  from orchestrator.graphql.resolvers.settings import SettingsMutation, resolve_settings
  from orchestrator.graphql.resolvers.subscription import resolve_subscription, resolve_subscriptions
+ from orchestrator.graphql.resolvers.version import resolve_version
  from orchestrator.graphql.resolvers.workflow import resolve_workflows

  __all__ = [
@@ -19,4 +20,5 @@ __all__ = [
      "resolve_customer",
      "resolve_resource_types",
      "resolve_workflows",
+     "resolve_version",
  ]
orchestrator/graphql/resolvers/version.py ADDED
@@ -0,0 +1,25 @@
+ from structlog import get_logger
+
+ from orchestrator import __version__
+ from orchestrator.graphql.schemas.version import VersionType
+ from orchestrator.graphql.types import OrchestratorInfo
+ from orchestrator.graphql.utils import create_resolver_error_handler
+
+ logger = get_logger(__name__)
+
+
+ VERSIONS = [f"orchestrator-core: {__version__}"]
+
+
+ def resolve_version(info: OrchestratorInfo) -> VersionType | None:
+     logger.debug("resolve_version() called")
+     _error_handler = create_resolver_error_handler(info)
+
+     ver = None
+     try:
+         ver = VersionType(application_versions=VERSIONS)
+     except Exception as e:
+         logger.error(f"Error getting version: {str(e)}")
+         _error_handler("Failed to retrieve orchestrator_core version", extensions={"code": "PACKAGE_VERSION_ERROR"})
+
+     return ver
orchestrator/graphql/schema.py CHANGED
@@ -47,6 +47,7 @@ from orchestrator.graphql.resolvers import (
      resolve_settings,
      resolve_subscription,
      resolve_subscriptions,
+     resolve_version,
      resolve_workflows,
  )
  from orchestrator.graphql.schemas import DEFAULT_GRAPHQL_MODELS
@@ -57,6 +58,7 @@ from orchestrator.graphql.schemas.product_block import ProductBlock
  from orchestrator.graphql.schemas.resource_type import ResourceType
  from orchestrator.graphql.schemas.settings import StatusType
  from orchestrator.graphql.schemas.subscription import SubscriptionInterface
+ from orchestrator.graphql.schemas.version import VersionType
  from orchestrator.graphql.schemas.workflow import Workflow
  from orchestrator.graphql.types import SCALAR_OVERRIDES, OrchestratorContext, ScalarOverrideType, StrawberryModelType
  from orchestrator.services.process_broadcast_thread import ProcessDataBroadcastThread
@@ -95,6 +97,7 @@ class OrchestratorQuery:
          resolver=resolve_settings,
          description="Returns information about cache, workers, and global engine settings",
      )
+     version: VersionType = authenticated_field(resolver=resolve_version, description="Returns version information")


  @strawberry.federation.type(description="Orchestrator customer Query")
orchestrator/graphql/schemas/version.py ADDED
@@ -0,0 +1,6 @@
+ import strawberry
+
+
+ @strawberry.type
+ class VersionType:
+     application_versions: list[str]
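Together, the resolver, the `VersionType` schema and the new `version` field on `OrchestratorQuery` expose the installed orchestrator-core version over GraphQL. A client-side sketch, assuming the GraphQL endpoint is mounted at `/api/graphql` in your deployment and using `httpx` (not a dependency of this package); Strawberry renders the `application_versions` field in camelCase:

```python
# Hypothetical GraphQL call; endpoint path and authentication depend on your deployment.
import httpx

query = """
query {
  version {
    applicationVersions
  }
}
"""

resp = httpx.post("http://localhost:8080/api/graphql", json={"query": query})
print(resp.json())
# e.g. {"data": {"version": {"applicationVersions": ["orchestrator-core: 2.10.0rc2"]}}}
```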
orchestrator/migrations/helpers.py CHANGED
@@ -164,6 +164,36 @@ def create_workflow(conn: sa.engine.Connection, workflow: dict) -> None:
      )


+ def create_task(conn: sa.engine.Connection, task: dict) -> None:
+     """Create a new task.
+
+     Args:
+         conn: DB connection as available in migration main file.
+         task: Dict with data for a new workflow.
+             name: Name of the task.
+             description: Description of the workflow.
+
+     Usage:
+         >>> task = {
+                 "name": "task_name",
+                 "description": "task description",
+             }
+         >>> create_workflow(conn, task)
+     """
+
+     conn.execute(
+         sa.text(
+             """
+             INSERT INTO workflows(name, target, description)
+             VALUES (:name, 'SYSTEM', :description)
+             ON CONFLICT DO NOTHING
+             RETURNING workflow_id
+             """
+         ),
+         task,
+     )
+
+
  def create_workflows(conn: sa.engine.Connection, new: dict) -> None:
      """Create new workflows.

@@ -894,7 +924,7 @@ def delete_product_block(conn: sa.engine.Connection, name: str) -> None:


  def delete_workflow(conn: sa.engine.Connection, name: str) -> None:
-     """Delete a workflow and it's occurrences in products.
+     """Delete a workflow and its occurrences in products.

      Note: the cascading delete rules in postgres will also ensure removal from `products_workflows`.

orchestrator/schemas/product.py CHANGED
@@ -41,3 +41,7 @@ class ProductSchema(ProductBaseSchema):
      product_blocks: list[ProductBlockSchema]
      fixed_inputs: list[FixedInputSchema]
      workflows: list[WorkflowSchema]
+
+
+ class ProductPatchSchema(OrchestratorBaseModel):
+     description: str | None = None
orchestrator/schemas/product_block.py CHANGED
@@ -37,3 +37,7 @@ class ProductBlockSchema(ProductBlockBaseSchema):
      end_date: datetime | None = None
      resource_types: list[ResourceTypeSchema] | None = None  # type: ignore
      model_config = ConfigDict(from_attributes=True)
+
+
+ class ProductBlockPatchSchema(OrchestratorBaseModel):
+     description: str | None = None
orchestrator/schemas/resource_type.py CHANGED
@@ -27,3 +27,7 @@ class ResourceTypeBaseSchema(OrchestratorBaseModel):
  class ResourceTypeSchema(ResourceTypeBaseSchema):
      resource_type_id: UUID
      model_config = ConfigDict(from_attributes=True)
+
+
+ class ResourceTypePatchSchema(OrchestratorBaseModel):
+     description: str | None = None
orchestrator/schemas/workflow.py CHANGED
@@ -59,3 +59,7 @@ class SubscriptionWorkflowListsSchema(OrchestratorBaseModel):
      modify: list[WorkflowListItemSchema]
      terminate: list[WorkflowListItemSchema]
      system: list[WorkflowListItemSchema]
+
+
+ class WorkflowPatchSchema(OrchestratorBaseModel):
+     description: str | None = None
orchestrator/settings.py CHANGED
@@ -14,6 +14,7 @@
  import secrets
  import string
  from pathlib import Path
+ from typing import Literal

  from pydantic import PostgresDsn, RedisDsn
  from pydantic_settings import BaseSettings
@@ -82,6 +83,7 @@ class AppSettings(BaseSettings):
      ENABLE_GRAPHQL_PROFILING_EXTENSION: bool = False
      ENABLE_GRAPHQL_STATS_EXTENSION: bool = False
      VALIDATE_OUT_OF_SYNC_SUBSCRIPTIONS: bool = False
+     FILTER_BY_MODE: Literal["partial", "exact"] = "exact"


  app_settings = AppSettings()
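The new `FILTER_BY_MODE` setting drives the `ilike` change in `inferred_filter.py` above: with the default `"exact"` the search term is matched as-is (case-insensitively), while `"partial"` restores the previous `%term%` substring behaviour. Because `AppSettings` is a pydantic-settings class, the value can be supplied through the environment; a small sketch, assuming no environment prefix is configured for the settings class:

```python
# Sketch: toggle the filter mode via the environment before settings are instantiated.
import os

os.environ["FILTER_BY_MODE"] = "partial"  # "exact" is the default in this release

from orchestrator.settings import AppSettings

assert AppSettings().FILTER_BY_MODE == "partial"
```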
orchestrator/utils/search_query.py CHANGED
@@ -5,12 +5,10 @@ from itertools import chain
  from typing import Any, Union, cast

  import structlog
- from sqlalchemy import BinaryExpression, ColumnElement, CompoundSelect, Select, column, false, func, not_, or_, select
+ from sqlalchemy import BinaryExpression, ColumnElement, CompoundSelect, Select, false, not_, or_
  from sqlalchemy.orm import InstrumentedAttribute, MappedColumn

- from orchestrator.db import db
  from orchestrator.db.database import BaseModel
- from orchestrator.db.helpers import get_postgres_version
  from orchestrator.utils.helpers import camel_to_snake

  logger = structlog.getLogger(__name__)
@@ -257,28 +255,10 @@ class Parser:


  class TSQueryVisitor:
-
      _glue_chars = re.compile(r"[-_@#$%^&]")

      @staticmethod
      def _split_term(term: str) -> list[str]:
-         """Workaround for the way Postgres <14 parses text with to_tsquery.
-
-         For Postgres <14, we issue a database query to parse the text for us in a way that is compatible with to_tsvector. This is the same behavior as Postgres >=14.
-         """
-
-         # TODO: Remove this workaround when support for Postgres <14 is dropped
-         # https://github.com/workfloworchestrator/orchestrator-core/issues/621
-
-         if get_postgres_version() < 14:
-             # tokid 12 is the space separator token
-             stmt = (
-                 select(func.array_agg(column("token")))
-                 .select_from(func.ts_parse("default", func.replace(term, "-", "_")))
-                 .where(column("tokid") != 12)
-             )
-             return db.session.scalar(stmt) or []
-
          return [part.strip() for part in TSQueryVisitor._glue_chars.split(term) if part and not part.isspace()]

      @staticmethod
orchestrator/utils/state.py CHANGED
@@ -183,11 +183,12 @@ def _build_arguments(func: StepFunc | InputStepFunc, state: State) -> list: # n
              raise KeyError(f"Could not find key '{name}' in state.")
          elif is_list_type(param.annotation, SubscriptionModel):
              subscription_ids = map(_get_sub_id, state.get(name, []))
-             subscriptions = [
-                 # Actual type is first argument from list type
-                 get_args(param.annotation)[0].from_subscription(subscription_id)
-                 for subscription_id in subscription_ids
-             ]
+             # Actual type is first argument from list type
+             if (actual_type := get_args(param.annotation)[0]) == Any:
+                 raise ValueError(
+                     f"Step function argument '{param.name}' cannot be serialized from database with type 'Any'"
+                 )
+             subscriptions = [actual_type.from_subscription(subscription_id) for subscription_id in subscription_ids]
              arguments.append(subscriptions)
          elif is_optional_type(param.annotation, SubscriptionModel):
              subscription_id = _get_sub_id(state.get(name))
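With this change, a step parameter annotated as `list[Any]` now fails fast instead of attempting `Any.from_subscription(...)`; only a concrete `SubscriptionModel` subclass can be rehydrated from the database. A sketch of the distinction, using a hypothetical `MySubscription` domain model (not part of this package):

```python
from typing import Any

from orchestrator.domain.base import SubscriptionModel
from orchestrator.workflow import step


class MySubscription(SubscriptionModel):  # hypothetical domain model
    ...


@step("Load subscriptions")
def load_subscriptions(subscriptions: list[MySubscription]) -> None:
    # OK: the concrete type is used to call MySubscription.from_subscription(...)
    ...


@step("Broken step")
def broken_step(subscriptions: list[Any]) -> None:
    # After this change, injecting this argument raises ValueError, because
    # 'Any' cannot be deserialized from the database.
    ...
```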
orchestrator_core-2.10.0rc2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: orchestrator-core
- Version: 2.9.2rc3
+ Version: 2.10.0rc2
  Summary: This is the orchestrator workflow engine.
  Requires-Python: >=3.11,<3.14
  Classifier: Intended Audience :: Information Technology
@@ -27,30 +27,30 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
  Classifier: Topic :: Internet :: WWW/HTTP
- Requires-Dist: alembic==1.13.3
+ Requires-Dist: alembic==1.14.1
  Requires-Dist: anyio>=3.7.0
  Requires-Dist: click==8.*
  Requires-Dist: deprecated
  Requires-Dist: deepmerge==1.1.1
  Requires-Dist: fastapi~=0.115.2
  Requires-Dist: fastapi-etag==0.4.0
- Requires-Dist: more-itertools~=10.5.0
+ Requires-Dist: more-itertools~=10.6.0
  Requires-Dist: itsdangerous
  Requires-Dist: Jinja2==3.1.5
- Requires-Dist: orjson==3.10.9
+ Requires-Dist: orjson==3.10.15
  Requires-Dist: psycopg2-binary==2.9.10
  Requires-Dist: pydantic[email]~=2.8.2
- Requires-Dist: pydantic-settings~=2.5.2
+ Requires-Dist: pydantic-settings~=2.7.1
  Requires-Dist: python-dateutil==2.8.2
  Requires-Dist: python-rapidjson>=1.18,<1.21
  Requires-Dist: pytz==2024.1
  Requires-Dist: redis==5.0.3
  Requires-Dist: schedule==1.1.0
  Requires-Dist: sentry-sdk[fastapi]~=2.18.0
- Requires-Dist: SQLAlchemy==2.0.36
+ Requires-Dist: SQLAlchemy==2.0.38
  Requires-Dist: SQLAlchemy-Utils==0.41.2
  Requires-Dist: structlog
- Requires-Dist: typer==0.12.5
+ Requires-Dist: typer==0.15.1
  Requires-Dist: uvicorn[standard]~=0.32.0
  Requires-Dist: nwa-stdlib~=1.9.0
  Requires-Dist: oauth2-lib~=2.4.0
orchestrator_core-2.10.0rc2.dist-info/RECORD CHANGED
@@ -1,10 +1,10 @@
- orchestrator/__init__.py,sha256=ZEfVsV-TQSD6cNrXVHcgemFAbqGTVKsyAnxfA9sraH8,1058
- orchestrator/app.py,sha256=_2e3JMYgH_egOScokFVpFuTlJWGGwH0KYgZajDdm--0,11563
+ orchestrator/__init__.py,sha256=p507KZH6dyfb9vX3cZ80I0_bDmngfQ255K-SjZGbjiw,1059
+ orchestrator/app.py,sha256=8GMzoHjdR0bkgRBCejiG8nIUjeo43f12I3WNNZ89pKE,11659
  orchestrator/exception_handlers.py,sha256=UsW3dw8q0QQlNLcV359bIotah8DYjMsj2Ts1LfX4ClY,1268
  orchestrator/log_config.py,sha256=1tPRX5q65e57a6a_zEii_PFK8SzWT0mnA5w2sKg4hh8,1853
  orchestrator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  orchestrator/security.py,sha256=_W_wFkjmlwVwwHRsql69iMoqRvDCiaA63i5rvRHSrZ0,2414
- orchestrator/settings.py,sha256=55AIpcxDcJJP61Jh-y6-wyqOMrzuukx5SjBTam3sNHk,3532
+ orchestrator/settings.py,sha256=SFF-jjkVkq_UILzuZbvhBUN3408PeG0y9AFZiCipBVs,3617
  orchestrator/targets.py,sha256=QphHzEUTRhue3pKTPYklxDdvCQF1ANxt0r_HjbaxVCc,766
  orchestrator/types.py,sha256=cXUWV9EtoO4Dx3h3YJHaGVxmw-5XLt6Mow-dALdhTWk,16230
  orchestrator/version.py,sha256=b58e08lxs47wUNXv0jXFO_ykpksmytuzEXD4La4W-NQ,1366
@@ -14,22 +14,26 @@ orchestrator/api/error_handling.py,sha256=YrPCxSa-DSa9KwqIMlXI-KGBGnbGIW5ukOPiik
  orchestrator/api/helpers.py,sha256=s0QRHYw8AvEmlkmRhuEzz9xixaZKUF3YuPzUVHkcoXk,6933
  orchestrator/api/models.py,sha256=z9BDBx7uI4KBHWbD_LVrLsqNQ0_w-Mg9Qiy7PR_rZhk,5996
  orchestrator/api/api_v1/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
- orchestrator/api/api_v1/api.py,sha256=zGPSCX-nCebZXN2OT9QA_ChAtpsK53hpxZ7F2x_0gjI,2332
+ orchestrator/api/api_v1/api.py,sha256=Q_Yh9w_SBoyx06jV_kf6leGFrAMXoGjF8UikIAie_us,2858
  orchestrator/api/api_v1/endpoints/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
  orchestrator/api/api_v1/endpoints/health.py,sha256=iaxs1XX1_250_gKNsspuULCV2GEMBjbtjsmfQTOvMAI,1284
- orchestrator/api/api_v1/endpoints/processes.py,sha256=bsS8CqpfE3q5uIYZeZYiKjlbwHYSliFJQu130ov8cL8,12716
- orchestrator/api/api_v1/endpoints/products.py,sha256=Qyj9OzlfWfgsWe9Homd60LFco91VaJ1gkgXxn0AmP6Q,2143
+ orchestrator/api/api_v1/endpoints/processes.py,sha256=21BG32TslSgG2ePs68jUYAy8rDuohTXgzWETOpuNzyI,12761
+ orchestrator/api/api_v1/endpoints/product_blocks.py,sha256=kZ6ywIOsS_S2qGq7RvZ4KzjvaS1LmwbGWR37AKRvWOw,2146
+ orchestrator/api/api_v1/endpoints/products.py,sha256=BfFtwu9dZXEQbtKxYj9icc73GKGvAGMR5ytyf41nQlQ,3081
+ orchestrator/api/api_v1/endpoints/resource_types.py,sha256=gGyuaDyOD0TAVoeFGaGmjDGnQ8eQQArOxKrrk4MaDzA,2145
  orchestrator/api/api_v1/endpoints/settings.py,sha256=QiSih8zOUombxXk5Hd7MACq5BC5Y9w-BrmgBdTPRIDg,6141
  orchestrator/api/api_v1/endpoints/subscription_customer_descriptions.py,sha256=Elu4DVJoNtUFq_b3pG1Ws8StrUIo_jTViff2TJqe6ZU,3398
  orchestrator/api/api_v1/endpoints/subscriptions.py,sha256=s0nzWY1n8J1Ep-f6LuhRj_LX3shfCq7PsMmHf0_Rzsw,8716
  orchestrator/api/api_v1/endpoints/translations.py,sha256=dIWh_fCnZZUxJoGiNeJ49DK_xpf75IpR_0EIMSvzIvY,963
  orchestrator/api/api_v1/endpoints/user.py,sha256=RyI32EXVu6I-IxWjz0XB5zQWzzLL60zKXLgLqLH02xU,1827
+ orchestrator/api/api_v1/endpoints/workflows.py,sha256=_0vhGiQeu3-z16Zi0WmuDWBs8gmed6BzRNwYH_sF6AY,1977
  orchestrator/api/api_v1/endpoints/ws.py,sha256=1l7E0ag_sZ6UMfQPHlmew7ENwxjm6fflBwcMZAb7V-k,2786
  orchestrator/cli/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
- orchestrator/cli/database.py,sha256=qMpE9qXfJ55SioBBVymXu4hn7pkLR2G7SYPXb_snhFY,16184
+ orchestrator/cli/database.py,sha256=6Q9TZHAHaBEgP4mQArVsXKcQ1V7D2KM3ClMZ4PgSzA0,19274
  orchestrator/cli/generate.py,sha256=SBaYfRijXPF9r3VxarPKTiDzDcB6GBMMQvecQIb_ZLQ,7377
  orchestrator/cli/main.py,sha256=GC_kxa9OZ-Y0ig_klfWc6ysOQuPVTUmTmDRj3m8cJHA,983
  orchestrator/cli/migrate_domain_models.py,sha256=OhjNuIheytgShpMYCZ18leNUzk17ExhtkCqx7Ww03R8,20371
+ orchestrator/cli/migrate_tasks.py,sha256=PoNMRQadY3_VtDfR3OKjbjf01Tu-Q2VJd8YAVB_zrUc,6669
  orchestrator/cli/migrate_workflows.py,sha256=-_nsKUcVa14-Ug3aSppU9yk-oWlK411SX33WqzD1p4M,8979
  orchestrator/cli/migration_helpers.py,sha256=C5tpkP5WEBr7G9S-1k1hgSI8ili6xd9Z5ygc9notaK0,4110
  orchestrator/cli/scheduler.py,sha256=iCKBWYUwQIYTDqKQ9rMVvs2sNiAzE-J2SkV170TPP2g,1896
@@ -56,7 +60,7 @@ orchestrator/cli/generator/generator/enums.py,sha256=ztGxHzpq7l4HDSZswH8UDJlf237
  orchestrator/cli/generator/generator/helpers.py,sha256=IoHXacEebef7MhUseTVkj05fRryyGMDH94Ai0nGq-nw,9838
  orchestrator/cli/generator/generator/migration.py,sha256=lDqosegGRKJRs1dN4QZV7lFwdWBKTEwe9DeNqP8OVkY,7045
  orchestrator/cli/generator/generator/product.py,sha256=W930c-9C8k0kW7I8_SC4mWf045neYcfFpkck5SwHeNQ,2079
- orchestrator/cli/generator/generator/product_block.py,sha256=5EthccKVeIzOvJU5sdSKPfEDj-1eEpkZt679vMkrNPE,5036
+ orchestrator/cli/generator/generator/product_block.py,sha256=h552YZTuehtaux6PKw5GKWAmBQ6cStOSY4TbaJ1Kcq8,4802
  orchestrator/cli/generator/generator/settings.py,sha256=_IhRnQ7bpGjqYtFo-OiLky25IKCibdghC6pkHmPIPoI,1379
  orchestrator/cli/generator/generator/translations.py,sha256=ip0RghW2bY7CoemEab9SxSgjizI44G35AoNHuBsgvrU,1878
  orchestrator/cli/generator/generator/unittest.py,sha256=cLbPRjBQyKFLDNABoTR3WQ21EisAodHs5Q3EGx4VQ6c,4541
@@ -112,7 +116,7 @@ orchestrator/db/filters/resource_type.py,sha256=7aH4_n8vPpsySFnnN8SefN8h964glmEi
  orchestrator/db/filters/subscription.py,sha256=IV7ur7yyKFNUQRx0gZPelcMLHjuUPU0Rx4oZ6Shbn6A,1519
  orchestrator/db/filters/workflow.py,sha256=osyyEmOFuev6q5lizHeUvgxf1Nji3fZtlbf2_lzSNao,1276
  orchestrator/db/filters/search_filters/__init__.py,sha256=a7yfEAA-qpD_PHZH5LeqSjrLeGAvQrDsJp7mzVwDMwo,562
- orchestrator/db/filters/search_filters/inferred_filter.py,sha256=B3WuA6yi3AFhkgbr8yK0UnqiZNUZ1h1aNFQCtNqaP7I,5591
+ orchestrator/db/filters/search_filters/inferred_filter.py,sha256=LTRrcLY7hTu8Dr18-xLVMdge9_MNCvtfcCH0CbaJc2Y,5711
  orchestrator/db/range/__init__.py,sha256=mAOCJrQLMMeks0zTK5resf5t-01GrlFN8nIT1gNbqEQ,162
  orchestrator/db/range/range.py,sha256=sOSzkRqvLXvoTYxOd_Q0OWQ9lSqrXTnwXh4DVLGGSK0,2175
  orchestrator/db/sorting/__init__.py,sha256=d33YUS6uAI75l35Mpb-GB9t9sd03jCxq0V-kLmijVOc,353
@@ -147,7 +151,7 @@ orchestrator/forms/validators/product_id.py,sha256=u5mURLT0pOhbFLdwvYcy2_2fXMt35
  orchestrator/graphql/__init__.py,sha256=avq8Yg3Jr_9pJqh7ClyIAOX7YSg1eM_AWmt5C3FRYUY,1440
  orchestrator/graphql/autoregistration.py,sha256=pF2jbMKG26MvYoMSa6ZpqpHjVks7_NvSRFymHTgmfjs,6342
  orchestrator/graphql/pagination.py,sha256=iqVDn3GPZpiQhEydfwkBJLURY-X8wwUphS8Lkeg0BOc,2413
- orchestrator/graphql/schema.py,sha256=K1qTYOKyhAj0OK-vkctwkTKxZ_3P_dZ7mAgFRnrazWY,8892
+ orchestrator/graphql/schema.py,sha256=JNAd_MNyFkKzFIutaA6bwtgUsahGDt9RhWZIpFAtVAg,9090
  orchestrator/graphql/types.py,sha256=0aaRM1cUaWUC2SaWRtgLmSe2_2C0KuwA4LMCav4SdPg,5037
  orchestrator/graphql/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  orchestrator/graphql/extensions/stats.py,sha256=pGhEBQg45XvqZhRobcrCSGwt5AGmR3gflsm1dYiIg5g,2018
@@ -155,7 +159,7 @@ orchestrator/graphql/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
  orchestrator/graphql/loaders/subscriptions.py,sha256=31zE2WC7z-tPIUmVpU1QWOJvNbLvF7sYgY7JAQ6OPJg,1856
  orchestrator/graphql/mutations/customer_description.py,sha256=37yX92fE1Sc51O9i-gP8JfD3HdsvpR3TtbgYqKtGC-w,3343
  orchestrator/graphql/mutations/start_process.py,sha256=8vLVvmBwL1ujbZJoI_8YE3VAgI-J2RTzgrTZJC8THZ4,1576
- orchestrator/graphql/resolvers/__init__.py,sha256=v6G9OboMuqEdZAB4RfCNjQZhJyXcvuZ_gC7RN9gTSrU,941
+ orchestrator/graphql/resolvers/__init__.py,sha256=EEw9NO4LAryfrpkLlgsNQ9rytKd0usBDx95OURRV6sg,1031
  orchestrator/graphql/resolvers/customer.py,sha256=tq06MtMoaqFwqn3YQvSv0VmROW7QJZRJp1ykO4tUhck,934
  orchestrator/graphql/resolvers/helpers.py,sha256=8NeuiJD9CPus4BhRK9nDsSMhb2LhW1W7nrEpyj_J3M4,771
  orchestrator/graphql/resolvers/process.py,sha256=8UQaw3Mqjwubq_V7iW4O2KsFpqlPKLyXkPRVNS9eKec,4965
@@ -164,6 +168,7 @@ orchestrator/graphql/resolvers/product_block.py,sha256=BAYW66KT_1mozNidfBxBI7l3_
  orchestrator/graphql/resolvers/resource_type.py,sha256=NRDKPFqwktOvVLLm2QCLxVPwzXIYEMJKUN3DS_kS49o,2926
  orchestrator/graphql/resolvers/settings.py,sha256=M-WJGFN0NmINc7SP9plJn689Sn7DVZ5pMIIpoceMrsw,3700
  orchestrator/graphql/resolvers/subscription.py,sha256=57niFv-JCro_wm0peJ5Ne04F2WIPuJ-Lx2h8yd9qubA,6541
+ orchestrator/graphql/resolvers/version.py,sha256=qgwe1msPOexeg3RHCscJ8s45vNfMhYh9ZKyCZ3MNw30,809
  orchestrator/graphql/resolvers/workflow.py,sha256=YUwPklwYesgmRS4d0eIQdgVmkyhgGbkQZ9uC1Oe8EyA,2776
  orchestrator/graphql/schemas/__init__.py,sha256=dWG4DNzWq5akQ3v5tSAvT03HLxPWXa09Gx8rTz_MHmk,940
  orchestrator/graphql/schemas/customer.py,sha256=ZptVFG0qauZaoy29KDrh6k5xAnacNCTavmQrZMH8czc,147
@@ -178,6 +183,7 @@ orchestrator/graphql/schemas/resource_type.py,sha256=s5d_FwQXL2-Sc-IDUxTJun5qFQ4
  orchestrator/graphql/schemas/settings.py,sha256=drhm5VcLmUbiYAk6WUSJcyJqjNM96E6GvpxVdPAobnA,999
  orchestrator/graphql/schemas/strawberry_pydantic_patch.py,sha256=CjNUhTKdYmLiaem-WY_mzw4HASIeaZitxGF8pPocqVw,1602
  orchestrator/graphql/schemas/subscription.py,sha256=_ra7MG9P2w7_WMiMx-zTOaAMinGlTKN4gwE9vej-5V8,9573
+ orchestrator/graphql/schemas/version.py,sha256=HSzVg_y4Sjd5_H5rRUtu3FJKOG_8ifhvBNt_qjOtC-E,92
  orchestrator/graphql/schemas/workflow.py,sha256=0UWU0HGTiAC_5Wzh16clBd74JoYHrr38YIGV86q-si0,1276
  orchestrator/graphql/utils/__init__.py,sha256=1JvenzEVW1CBa1sGVI9I8IWnnoXIkb1hneDqph9EEZY,524
  orchestrator/graphql/utils/create_resolver_error_handler.py,sha256=PpQMVwGrE9t0nZ12TwoxPxksXxEwQM7lSNPeh7qW3vk,1233
@@ -191,7 +197,7 @@ orchestrator/graphql/utils/to_graphql_result_page.py,sha256=8ObkJP8reVf-TQOQVPKv
  orchestrator/migrations/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
  orchestrator/migrations/alembic.ini,sha256=kMoADqhGeubU8xanILNaqm4oixLy9m4ngYtdGpZcc7I,873
  orchestrator/migrations/env.py,sha256=AwlgBPYbV2hr5rHNwlOPJ5rs-vRyfmzcWyxae0btpZ4,3382
- orchestrator/migrations/helpers.py,sha256=8Ny9k0D-siSKPDf0DNcvhSVtlMU1ieRgLy0l8EtPtRc,42983
+ orchestrator/migrations/helpers.py,sha256=5BVDKOFETKyW4kE0-DiTU068VFrRo52n3MKrvt5LfjU,43769
  orchestrator/migrations/script.py.mako,sha256=607Zrgp-Z-m9WGLt4wewN1QDOmHeifxcePUdADkSZyM,510
  orchestrator/migrations/templates/alembic.ini.j2,sha256=jA-QykVparwWSNt5XDP0Zk7epLOhK7D87Af-i2shJV4,905
  orchestrator/migrations/templates/env.py.j2,sha256=RfLAQItZ56Jlzwi6LJfBo92m1-th_bdfkFKD1mwTZIE,2821
@@ -228,12 +234,12 @@ orchestrator/schemas/engine_settings.py,sha256=BOyFNOn7AqHVdUxXyqmPk5aVdFY5A0cCO
  orchestrator/schemas/fixed_input.py,sha256=Rth3hT5K7zYuQr1bUY_NJRzb03xEZuT1p6EvYXVNE54,1214
  orchestrator/schemas/problem_detail.py,sha256=DxiUhWv6EVXLZgdKFv0EYVnCgtkDj7xteDCR0q2f5yw,802
  orchestrator/schemas/process.py,sha256=NgS1eBRtO2GUCRNsvbvYyjNkR2aBdH-kwcsR_y8DfNU,2354
- orchestrator/schemas/product.py,sha256=bIgeLGIsrRiQZ7J36S2Bym8CkV-xhPjn8QoHhZkEBa0,1484
- orchestrator/schemas/product_block.py,sha256=mKX9FwQ5TGo9SrrAtDJOhB_nji1LHJ3-mKBrEEoQ-No,1428
- orchestrator/schemas/resource_type.py,sha256=z1UQTaW79UlLDzVQtstNo0trXQVT8-GDisxieJPUeYo,973
+ orchestrator/schemas/product.py,sha256=MhMCh058ZuS2RJq-wSmxIPUNlhQexxXIx3DSz2OmOh4,1570
+ orchestrator/schemas/product_block.py,sha256=kCqvm6qadHpegMr9aWI_fYX-T7mS-5S-ldPxnGQZg7M,1519
+ orchestrator/schemas/resource_type.py,sha256=VDju4XywcDDLxdpbWU62RTvR9QF8x_GRrpTlN_NE8uI,1064
  orchestrator/schemas/subscription.py,sha256=zNy7bb-ww-MEN4QW9xIwxzcNSyFPEgjt5tt1T4Ah0hQ,3383
  orchestrator/schemas/subscription_descriptions.py,sha256=Ft_jw1U0bf9Z0U8O4OWfLlcl0mXCVT_qYVagBP3GbIQ,1262
- orchestrator/schemas/workflow.py,sha256=YvjidAaYz1MsqVsA7DynOlW4kChBO-47M-JCkpSOro4,1890
+ orchestrator/schemas/workflow.py,sha256=w-CaRPp9AAddhnd8o_0jPaey1Vnnh-s-A5s5kWlR2pI,1977
  orchestrator/services/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
  orchestrator/services/celery.py,sha256=uvXSKuq_XHcF4BgEpt2QgGUfUnpopApF74FsgAQdnFY,4634
  orchestrator/services/fixed_inputs.py,sha256=kyz7s2HLzyDulvcq-ZqefTw1om86COvyvTjz0_5CmgI,876
@@ -261,8 +267,8 @@ orchestrator/utils/get_updated_properties.py,sha256=egVZ0R5LNJ4e51Z8SXlU8cmb4tXx
  orchestrator/utils/helpers.py,sha256=NjUF3IvWdnLulliP8-JQvGGGpHrh0vs0Vm092ynw-ss,3212
  orchestrator/utils/json.py,sha256=7386sdqkrKYyy4sbn5NscwctH_v1hLyw5172P__rU3g,8341
  orchestrator/utils/redis.py,sha256=fvALD_Yt4lZuIfgCLGJwwQSElgKOLHrxH_RdhSXkeZw,7222
- orchestrator/utils/search_query.py,sha256=ncJlynwtW-qwL0RcNq4DuAUx9KUMI6llwGAEwLO2QCA,17097
- orchestrator/utils/state.py,sha256=gPYHOWDxPvoYZ83WwKPCpeBAsNWOTlkwZz5kAZcM9rw,13011
+ orchestrator/utils/search_query.py,sha256=ji5LHtrzohGz6b1IG41cnPdpWXzLEzz4SGWgHly_yfU,16205
+ orchestrator/utils/state.py,sha256=aNR7XftX9dX-2TKHFu2I2rIRIqFENB7AnlpH6Zs80QA,13181
  orchestrator/utils/strings.py,sha256=N0gWjmQaMjE9_99VtRvRaU8IBLTKMgBKSXcTZ9TpWAg,1077
  orchestrator/utils/validate_data_version.py,sha256=3Eioy2wE2EWKSgkyMKcEKrkCAfUIAq-eb73iRcpgppw,184
  orchestrator/websocket/__init__.py,sha256=V79jskk1z3uPIYgu0Gt6JLzuqr7NGfNeAZ-hbBqoUv4,5745
@@ -280,7 +286,7 @@ orchestrator/workflows/tasks/resume_workflows.py,sha256=wZGNHHQYL7wociSTmoNdDdh5
  orchestrator/workflows/tasks/validate_product_type.py,sha256=kVuN94hGWcmBNphgpAlGTSiyO2dEhFwgIq87SYjArns,3174
  orchestrator/workflows/tasks/validate_products.py,sha256=j_aOyxcH8DymlGupSS6XRwQdWx2Ab-c8f8iUvAXBTes,8511
  orchestrator/workflows/translations/en-GB.json,sha256=ST53HxkphFLTMjFHonykDBOZ7-P_KxksktZU3GbxLt0,846
- orchestrator_core-2.9.2rc3.dist-info/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
- orchestrator_core-2.9.2rc3.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
- orchestrator_core-2.9.2rc3.dist-info/METADATA,sha256=3rVMT6j1-fRT6J3eL_MCkSerRs1RURkFTwCs1ejjA74,4924
- orchestrator_core-2.9.2rc3.dist-info/RECORD,,
+ orchestrator_core-2.10.0rc2.dist-info/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
+ orchestrator_core-2.10.0rc2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+ orchestrator_core-2.10.0rc2.dist-info/METADATA,sha256=O8kU95N5X4y2BZNhjkkhn-Jkl4Gc3KWzh2owKvgJxek,4926
+ orchestrator_core-2.10.0rc2.dist-info/RECORD,,