orchestrator-core 4.1.0rc2__py3-none-any.whl → 4.2.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. orchestrator/__init__.py +1 -1
  2. orchestrator/api/api_v1/endpoints/processes.py +6 -2
  3. orchestrator/api/api_v1/endpoints/subscriptions.py +25 -2
  4. orchestrator/cli/database.py +8 -1
  5. orchestrator/cli/domain_gen_helpers/helpers.py +44 -2
  6. orchestrator/cli/domain_gen_helpers/product_block_helpers.py +35 -15
  7. orchestrator/cli/domain_gen_helpers/resource_type_helpers.py +5 -5
  8. orchestrator/cli/domain_gen_helpers/types.py +7 -1
  9. orchestrator/cli/generator/templates/create_product.j2 +1 -2
  10. orchestrator/cli/migrate_domain_models.py +16 -5
  11. orchestrator/db/models.py +6 -3
  12. orchestrator/graphql/schemas/process.py +21 -2
  13. orchestrator/graphql/schemas/product.py +8 -9
  14. orchestrator/graphql/schemas/workflow.py +9 -0
  15. orchestrator/graphql/types.py +7 -1
  16. orchestrator/migrations/versions/schema/2025-07-01_93fc5834c7e5_changed_timestamping_fields_in_process_steps.py +65 -0
  17. orchestrator/migrations/versions/schema/2025-07-04_4b58e336d1bf_deprecating_workflow_target_in_.py +30 -0
  18. orchestrator/schemas/process.py +5 -1
  19. orchestrator/services/processes.py +11 -2
  20. orchestrator/utils/enrich_process.py +4 -2
  21. orchestrator/utils/errors.py +2 -1
  22. orchestrator/workflow.py +7 -2
  23. orchestrator/workflows/modify_note.py +1 -1
  24. orchestrator/workflows/steps.py +14 -8
  25. orchestrator/workflows/utils.py +3 -3
  26. orchestrator_core-4.2.0rc2.dist-info/METADATA +167 -0
  27. {orchestrator_core-4.1.0rc2.dist-info → orchestrator_core-4.2.0rc2.dist-info}/RECORD +29 -27
  28. orchestrator_core-4.1.0rc2.dist-info/METADATA +0 -118
  29. {orchestrator_core-4.1.0rc2.dist-info → orchestrator_core-4.2.0rc2.dist-info}/WHEEL +0 -0
  30. {orchestrator_core-4.1.0rc2.dist-info → orchestrator_core-4.2.0rc2.dist-info}/licenses/LICENSE +0 -0
orchestrator/__init__.py CHANGED
@@ -13,7 +13,7 @@
13
13
 
14
14
  """This is the orchestrator workflow engine."""
15
15
 
16
- __version__ = "4.1.0rc2"
16
+ __version__ = "4.2.0rc2"
17
17
 
18
18
  from orchestrator.app import OrchestratorCore
19
19
  from orchestrator.settings import app_settings
@@ -123,6 +123,7 @@ def get_auth_callbacks(steps: StepList, workflow: Workflow) -> tuple[Authorizer
123
123
  def can_be_resumed(status: ProcessStatus) -> bool:
124
124
  return status in (
125
125
  ProcessStatus.SUSPENDED, # Can be resumed
126
+ ProcessStatus.WAITING, # Can be retried
126
127
  ProcessStatus.FAILED, # Can be retried
127
128
  ProcessStatus.API_UNAVAILABLE, # subtype of FAILED
128
129
  ProcessStatus.INCONSISTENT_DATA, # subtype of FAILED
@@ -158,6 +159,9 @@ def delete(process_id: UUID) -> None:
158
159
  if not process:
159
160
  raise_status(HTTPStatus.NOT_FOUND)
160
161
 
162
+ if not process.is_task:
163
+ raise_status(HTTPStatus.BAD_REQUEST)
164
+
161
165
  db.session.delete(db.session.get(ProcessTable, process_id))
162
166
  db.session.commit()
163
167
 
@@ -209,7 +213,7 @@ def resume_process_endpoint(
209
213
  if process.last_status == ProcessStatus.SUSPENDED:
210
214
  if auth_resume is not None and not auth_resume(user_model):
211
215
  raise_status(HTTPStatus.FORBIDDEN, "User is not authorized to resume step")
212
- elif process.last_status == ProcessStatus.FAILED:
216
+ elif process.last_status in (ProcessStatus.FAILED, ProcessStatus.WAITING):
213
217
  if auth_retry is not None and not auth_retry(user_model):
214
218
  raise_status(HTTPStatus.FORBIDDEN, "User is not authorized to retry step")
215
219
 
@@ -270,7 +274,7 @@ def update_progress_on_awaiting_process_endpoint(
270
274
  @router.put(
271
275
  "/resume-all", response_model=ProcessResumeAllSchema, dependencies=[Depends(check_global_lock, use_cache=False)]
272
276
  )
273
- async def resume_all_processess_endpoint(request: Request, user: str = Depends(user_name)) -> dict[str, int]:
277
+ async def resume_all_processes_endpoint(request: Request, user: str = Depends(user_name)) -> dict[str, int]:
274
278
  """Retry all task processes in status Failed, Waiting, API Unavailable or Inconsistent Data.
275
279
 
276
280
  The retry is started in the background, returning status 200 and number of processes in message.
@@ -47,10 +47,12 @@ from orchestrator.services.subscriptions import (
47
47
  subscription_workflows,
48
48
  )
49
49
  from orchestrator.settings import app_settings
50
+ from orchestrator.targets import Target
50
51
  from orchestrator.types import SubscriptionLifecycle
51
52
  from orchestrator.utils.deprecation_logger import deprecated_endpoint
52
53
  from orchestrator.utils.get_subscription_dict import get_subscription_dict
53
54
  from orchestrator.websocket import sync_invalidate_subscription_cache
55
+ from orchestrator.workflows import get_workflow
54
56
 
55
57
  router = APIRouter()
56
58
 
@@ -100,6 +102,25 @@ def _filter_statuses(filter_statuses: str | None = None) -> list[str]:
100
102
  return statuses
101
103
 
102
104
 
105
+ def _authorized_subscription_workflows(
106
+ subscription: SubscriptionTable, current_user: OIDCUserModel | None
107
+ ) -> dict[str, list[dict[str, list[Any] | str]]]:
108
+ subscription_workflows_dict = subscription_workflows(subscription)
109
+
110
+ for workflow_target in Target.values():
111
+ for workflow_dict in subscription_workflows_dict[workflow_target.lower()]:
112
+ workflow = get_workflow(workflow_dict["name"])
113
+ if not workflow:
114
+ continue
115
+ if (
116
+ not workflow.authorize_callback(current_user) # The current user isn't allowed to run this workflow
117
+ and "reason" not in workflow_dict # and there isn't already a reason why this workflow cannot run
118
+ ):
119
+ workflow_dict["reason"] = "subscription.insufficient_workflow_permissions"
120
+
121
+ return subscription_workflows_dict
122
+
123
+
103
124
  @router.get(
104
125
  "/domain-model/{subscription_id}",
105
126
  response_model=SubscriptionDomainModelSchema | None,
@@ -169,7 +190,9 @@ def subscriptions_search(
169
190
  description="This endpoint is deprecated and will be removed in a future release. Please use the GraphQL query",
170
191
  dependencies=[Depends(deprecated_endpoint)],
171
192
  )
172
- def subscription_workflows_by_id(subscription_id: UUID) -> dict[str, list[dict[str, list[Any] | str]]]:
193
+ def subscription_workflows_by_id(
194
+ subscription_id: UUID, current_user: OIDCUserModel | None = Depends(authenticate)
195
+ ) -> dict[str, list[dict[str, list[Any] | str]]]:
173
196
  subscription = db.session.get(
174
197
  SubscriptionTable,
175
198
  subscription_id,
@@ -181,7 +204,7 @@ def subscription_workflows_by_id(subscription_id: UUID) -> dict[str, list[dict[s
181
204
  if not subscription:
182
205
  raise_status(HTTPStatus.NOT_FOUND)
183
206
 
184
- return subscription_workflows(subscription)
207
+ return _authorized_subscription_workflows(subscription, current_user)
185
208
 
186
209
 
187
210
  @router.put("/{subscription_id}/set_in_sync", response_model=None, status_code=HTTPStatus.OK)
@@ -256,6 +256,9 @@ def migrate_domain_models(
256
256
  test: bool = typer.Option(False, help="Optional boolean if you don't want to generate a migration file"),
257
257
  inputs: str = typer.Option("{}", help="Stringified dict to prefill inputs"),
258
258
  updates: str = typer.Option("{}", help="Stringified dict to map updates instead of using inputs"),
259
+ confirm_warnings: bool = typer.Option(
260
+ False, help="Optional boolean if you want to accept all warning inputs, fully knowing things can go wrong"
261
+ ),
259
262
  ) -> tuple[list[str], list[str]] | None:
260
263
  """Create migration file based on SubscriptionModel.diff_product_in_database. BACKUP DATABASE BEFORE USING THE MIGRATION!.
261
264
 
@@ -282,6 +285,8 @@ def migrate_domain_models(
282
285
  - `updates = { "resource_types": { "old_resource_type_name": "new_resource_type_name" } }`
283
286
  - renaming a resource type to existing resource type: `updates = { "resource_types": { "old_resource_type_name": "new_resource_type_name" } }`
284
287
 
288
+ confirm_warnings: Optional boolean if you want to accept all warning inputs, fully knowing things can go wrong.
289
+
285
290
  Returns None unless `--test` is used, in which case it returns:
286
291
  - tuple:
287
292
  - list of upgrade SQL statements in string format.
@@ -304,7 +309,9 @@ def migrate_domain_models(
304
309
  resource_types=updates_dict.get("resource_types", {}),
305
310
  block_resource_types=updates_dict.get("block_resource_types", {}),
306
311
  )
307
- sql_upgrade_stmts, sql_downgrade_stmts = create_domain_models_migration_sql(inputs_dict, updates_class, bool(test))
312
+ sql_upgrade_stmts, sql_downgrade_stmts = create_domain_models_migration_sql(
313
+ inputs_dict, updates_class, test, confirm_warnings
314
+ )
308
315
 
309
316
  if test:
310
317
  return sql_upgrade_stmts, sql_downgrade_stmts
@@ -2,10 +2,15 @@ from collections.abc import Iterable
2
2
  from itertools import groupby
3
3
 
4
4
  import structlog
5
+ import typer
6
+ from more_itertools import first
5
7
  from sqlalchemy.dialects import postgresql
6
8
  from sqlalchemy.sql.dml import UpdateBase
7
9
 
8
- from orchestrator.domain.base import ProductBlockModel
10
+ from orchestrator.cli.domain_gen_helpers.types import BlockRelationDict
11
+ from orchestrator.cli.helpers.input_helpers import _prompt_user_menu
12
+ from orchestrator.cli.helpers.print_helpers import noqa_print
13
+ from orchestrator.domain.base import ProductBlockModel, SubscriptionModel
9
14
 
10
15
  logger = structlog.get_logger(__name__)
11
16
 
@@ -43,10 +48,47 @@ def map_delete_resource_type_relations(model_diffs: dict[str, dict[str, set[str]
43
48
  return generic_mapper("missing_resource_types_in_model", model_diffs)
44
49
 
45
50
 
46
- def map_create_product_block_relations(model_diffs: dict[str, dict[str, set[str]]]) -> dict[str, set[str]]:
51
+ def map_create_product_to_product_block_relations(model_diffs: dict[str, dict[str, set[str]]]) -> dict[str, set[str]]:
47
52
  return generic_mapper("missing_product_blocks_in_db", model_diffs)
48
53
 
49
54
 
55
+ def format_block_relation_to_dict(
56
+ model_name: str,
57
+ block_to_find_in_props: str,
58
+ models: dict[str, type[SubscriptionModel]] | dict[str, type[ProductBlockModel]],
59
+ confirm_warnings: bool,
60
+ ) -> BlockRelationDict:
61
+ model = models[model_name]
62
+ block_props = model._get_depends_on_product_block_types()
63
+ props = {k for k, v in block_props.items() if v.name == block_to_find_in_props} # type: ignore
64
+
65
+ if len(props) > 1 and not confirm_warnings:
66
+ noqa_print("WARNING: Relating a Product Block multiple times is not supported by this migrator!")
67
+ noqa_print(
68
+ "You will need to create your own migration to create a Product Block Instance for each attribute that is related"
69
+ )
70
+ noqa_print(f"Product Block '{block_to_find_in_props}' has been related multiple times to '{model_name}'")
71
+ noqa_print(f"Attributes the block ('{block_to_find_in_props}') has been related with: {', '.join(props)}")
72
+ noqa_print(f"The relation will only be added to the first attribute ('{first(props)}') want to continue?")
73
+
74
+ if _prompt_user_menu([("yes", "yes"), ("no", "no")]) == "no":
75
+ typer.echo("Aborted.")
76
+ raise typer.Exit(code=1)
77
+
78
+ return BlockRelationDict(name=model_name, attribute_name=first(props))
79
+
80
+
81
+ def map_create_product_block_relations(
82
+ model_diffs: dict[str, dict[str, set[str]]],
83
+ models: dict[str, type[SubscriptionModel]] | dict[str, type[ProductBlockModel]],
84
+ confirm_warnings: bool,
85
+ ) -> dict[str, list[BlockRelationDict]]:
86
+ data = generic_mapper("missing_product_blocks_in_db", model_diffs)
87
+ return {
88
+ k: [format_block_relation_to_dict(b, k, models, confirm_warnings) for b in blocks] for k, blocks in data.items()
89
+ }
90
+
91
+
50
92
  def map_delete_product_block_relations(model_diffs: dict[str, dict[str, set[str]]]) -> dict[str, set[str]]:
51
93
  return generic_mapper("missing_product_blocks_in_model", model_diffs)
52
94
 
@@ -7,8 +7,12 @@ from sqlalchemy import select
7
7
  from sqlalchemy.sql.expression import Delete, Insert
8
8
  from sqlalchemy.sql.selectable import ScalarSelect
9
9
 
10
- from orchestrator.cli.domain_gen_helpers.helpers import get_product_block_names, sql_compile
11
- from orchestrator.cli.domain_gen_helpers.types import DomainModelChanges
10
+ from orchestrator.cli.domain_gen_helpers.helpers import (
11
+ format_block_relation_to_dict,
12
+ get_product_block_names,
13
+ sql_compile,
14
+ )
15
+ from orchestrator.cli.domain_gen_helpers.types import BlockRelationDict, DomainModelChanges
12
16
  from orchestrator.cli.helpers.input_helpers import get_user_input
13
17
  from orchestrator.cli.helpers.print_helpers import COLOR, print_fmt, str_fmt
14
18
  from orchestrator.db import db
@@ -68,13 +72,17 @@ def map_delete_product_blocks(product_blocks: dict[str, type[ProductBlockModel]]
68
72
  return {name for name in existing_product_blocks if name not in product_blocks}
69
73
 
70
74
 
71
- def map_product_block_additional_relations(changes: DomainModelChanges) -> DomainModelChanges:
75
+ def map_product_block_additional_relations(
76
+ changes: DomainModelChanges, models: dict[str, type[ProductBlockModel]], confirm_warnings: bool
77
+ ) -> DomainModelChanges:
72
78
  """Map additional relations for created product blocks.
73
79
 
74
80
  Adds resource type and product block relations.
75
81
 
76
82
  Args:
77
83
  changes: DomainModelChanges class with all changes.
84
+ models: All product block models.
85
+ confirm_warnings: confirm warnings to continue, fully knowing that things can go wrong.
78
86
 
79
87
  Returns: Updated DomainModelChanges.
80
88
  """
@@ -86,7 +94,12 @@ def map_product_block_additional_relations(changes: DomainModelChanges) -> Domai
86
94
  product_blocks_in_model = block_class._get_depends_on_product_block_types()
87
95
  product_blocks_types_in_model = get_depends_on_product_block_type_list(product_blocks_in_model)
88
96
  for product_block_name in get_product_block_names(product_blocks_types_in_model):
89
- changes.create_product_block_relations.setdefault(product_block_name, set()).add(block_name)
97
+ relation_list = changes.create_product_block_relations.get(product_block_name, [])
98
+ new_relation = format_block_relation_to_dict(block_name, product_block_name, models, confirm_warnings)
99
+
100
+ if new_relation not in relation_list:
101
+ changes.create_product_block_relations[product_block_name] = relation_list + [new_relation]
102
+
90
103
  return changes
91
104
 
92
105
 
@@ -147,31 +160,35 @@ def generate_delete_product_blocks_sql(delete_product_blocks: set[str]) -> list[
147
160
  ]
148
161
 
149
162
 
150
- def generate_create_product_block_relations_sql(create_block_relations: dict[str, set[str]]) -> list[str]:
163
+ def generate_create_product_block_relations_sql(
164
+ create_block_relations: dict[str, list[BlockRelationDict]],
165
+ ) -> list[str]:
151
166
  """Generate SQL to create product block to product block relations.
152
167
 
153
168
  Args:
154
169
  create_block_relations: Dict with product blocks by product block
155
170
  - key: product block name.
156
- - value: Set of product block names to relate with.
171
+ - list: List of product blocks to relate with by prop names.
157
172
 
158
173
  Returns: List of SQL to create relation between product blocks.
159
174
  """
160
175
 
161
- def create_block_relation(depends_block_name: str, block_names: set[str]) -> str:
176
+ def create_block_relation(depends_block_name: str, block_relations: list[BlockRelationDict]) -> str:
162
177
  depends_block_id_sql = get_product_block_id(depends_block_name)
163
178
 
164
- def create_block_relation_dict(block_name: str) -> dict[str, ScalarSelect]:
165
- block_id_sql = get_product_block_id(block_name)
179
+ def create_block_relation_dict(block_relation: BlockRelationDict) -> dict[str, ScalarSelect]:
180
+ block_id_sql = get_product_block_id(block_relation["name"])
166
181
  return {"in_use_by_id": block_id_sql, "depends_on_id": depends_block_id_sql}
167
182
 
168
- product_product_block_relation_dicts = [create_block_relation_dict(block_name) for block_name in block_names]
183
+ product_product_block_relation_dicts = [create_block_relation_dict(block) for block in block_relations]
169
184
  return sql_compile(Insert(ProductBlockRelationTable).values(product_product_block_relation_dicts))
170
185
 
171
186
  return [create_block_relation(*item) for item in create_block_relations.items()]
172
187
 
173
188
 
174
- def generate_create_product_block_instance_relations_sql(product_block_relations: dict[str, set[str]]) -> list[str]:
189
+ def generate_create_product_block_instance_relations_sql(
190
+ product_block_relations: dict[str, list[BlockRelationDict]],
191
+ ) -> list[str]:
175
192
  """Generate SQL to create resource type instance values for existing instances.
176
193
 
177
194
  Args:
@@ -183,12 +200,14 @@ def generate_create_product_block_instance_relations_sql(product_block_relations
183
200
  """
184
201
 
185
202
  def create_subscription_instance_relations(
186
- depends_block_name: str, block_names: set[str]
203
+ depends_block_name: str, block_relations: list[BlockRelationDict]
187
204
  ) -> Generator[str, None, None]:
188
205
  depends_block_id_sql = get_product_block_id(depends_block_name)
189
206
 
190
- def map_subscription_instances(block_name: str) -> dict[str, list[dict[str, str | ScalarSelect]]]:
191
- in_use_by_id_sql = get_product_block_id(block_name)
207
+ def map_subscription_instances(
208
+ block_relation: BlockRelationDict,
209
+ ) -> dict[str, list[dict[str, str | ScalarSelect]]]:
210
+ in_use_by_id_sql = get_product_block_id(block_relation["name"])
192
211
  stmt = select(
193
212
  SubscriptionInstanceTable.subscription_instance_id, SubscriptionInstanceTable.subscription_id
194
213
  ).where(SubscriptionInstanceTable.product_block_id.in_(in_use_by_id_sql))
@@ -206,13 +225,14 @@ def generate_create_product_block_instance_relations_sql(product_block_relations
206
225
  "in_use_by_id": instance.subscription_instance_id,
207
226
  "depends_on_id": get_subscription_instance(instance.subscription_id, depends_block_id_sql),
208
227
  "order_id": 0,
228
+ "domain_model_attr": block_relation["attribute_name"],
209
229
  }
210
230
  for instance in subscription_instances
211
231
  ]
212
232
 
213
233
  return {"instance_list": instance_list, "instance_relation_list": instance_relation_list}
214
234
 
215
- create_instance_list = [map_subscription_instances(block_name) for block_name in block_names]
235
+ create_instance_list = [map_subscription_instances(block_relation) for block_relation in block_relations]
216
236
 
217
237
  subscription_instance_dicts = list(flatten(item["instance_list"] for item in create_instance_list))
218
238
  subscription_relation_dicts = list(flatten(item["instance_relation_list"] for item in create_instance_list))
@@ -9,7 +9,7 @@ from sqlalchemy.sql.selectable import ScalarSelect
9
9
 
10
10
  from orchestrator.cli.domain_gen_helpers.helpers import sql_compile
11
11
  from orchestrator.cli.domain_gen_helpers.product_block_helpers import get_product_block_id, get_product_block_ids
12
- from orchestrator.cli.domain_gen_helpers.types import DomainModelChanges
12
+ from orchestrator.cli.domain_gen_helpers.types import BlockRelationDict, DomainModelChanges
13
13
  from orchestrator.cli.helpers.input_helpers import _enumerate_menu_keys, _prompt_user_menu, get_user_input
14
14
  from orchestrator.cli.helpers.print_helpers import COLOR, noqa_print, print_fmt, str_fmt
15
15
  from orchestrator.db import db
@@ -262,8 +262,8 @@ def _has_product_existing_instances(product_name: str) -> bool:
262
262
  return bool(product and get_product_instance_count(product.product_id))
263
263
 
264
264
 
265
- def _find_new_relations(block_name: str, relations: dict[str, set[str]]) -> set[str]:
266
- return set(flatten((list(v) for k, v in relations.items() if block_name in k)))
265
+ def _find_new_block_relations(block_name: str, relations: dict[str, list[BlockRelationDict]]) -> set[str]:
266
+ return {r["name"] for r in relations.get(block_name, [])}
267
267
 
268
268
 
269
269
  def map_create_resource_type_instances(changes: DomainModelChanges) -> dict[str, set[str]]:
@@ -285,11 +285,11 @@ def map_create_resource_type_instances(changes: DomainModelChanges) -> dict[str,
285
285
  if block and get_block_instance_count(block.product_block_id):
286
286
  return True
287
287
 
288
- related_block_names = _find_new_relations(block_name, changes.create_product_block_relations)
288
+ related_block_names = _find_new_block_relations(block_name, changes.create_product_block_relations)
289
289
  if related_block_names:
290
290
  return any(_has_existing_instances(name) for name in related_block_names)
291
291
 
292
- related_product_names = _find_new_relations(block_name, changes.create_product_to_block_relations)
292
+ related_product_names = changes.create_product_to_block_relations.get(block_name, set())
293
293
  return any(_has_product_existing_instances(name) for name in related_product_names)
294
294
 
295
295
  return {
@@ -1,8 +1,14 @@
1
1
  from pydantic import BaseModel
2
+ from typing_extensions import TypedDict
2
3
 
3
4
  from orchestrator.domain.base import ProductBlockModel, SubscriptionModel
4
5
 
5
6
 
7
+ class BlockRelationDict(TypedDict):
8
+ name: str
9
+ attribute_name: str
10
+
11
+
6
12
  class DomainModelChanges(BaseModel):
7
13
  create_products: dict[str, type[SubscriptionModel]] = {}
8
14
  delete_products: set[str] = set()
@@ -10,7 +16,7 @@ class DomainModelChanges(BaseModel):
10
16
  delete_product_to_block_relations: dict[str, set[str]] = {}
11
17
  create_product_blocks: dict[str, type[ProductBlockModel]] = {}
12
18
  delete_product_blocks: set[str] = set()
13
- create_product_block_relations: dict[str, set[str]] = {}
19
+ create_product_block_relations: dict[str, list[BlockRelationDict]] = {}
14
20
  delete_product_block_relations: dict[str, set[str]] = {}
15
21
  create_product_fixed_inputs: dict[str, set[str]] = {}
16
22
  update_product_fixed_inputs: dict[str, dict[str, str]] = {}
@@ -11,7 +11,6 @@ from pydantic_forms.types import FormGenerator, State, UUIDstr
11
11
 
12
12
  from orchestrator.forms import FormPage
13
13
  from orchestrator.forms.validators import Divider, Label, CustomerId, MigrationSummary
14
- from orchestrator.targets import Target
15
14
  from orchestrator.types import SubscriptionLifecycle
16
15
  from orchestrator.workflow import StepList, begin, step
17
16
  from orchestrator.workflows.steps import store_process_subscription
@@ -119,6 +118,6 @@ def create_{{ product.variable }}() -> StepList:
119
118
  return (
120
119
  begin
121
120
  >> construct_{{ product.variable }}_model
122
- >> store_process_subscription(Target.CREATE)
121
+ >> store_process_subscription()
123
122
  # TODO add provision step(s)
124
123
  )
@@ -21,6 +21,7 @@ from orchestrator.cli.domain_gen_helpers.fixed_input_helpers import (
21
21
  from orchestrator.cli.domain_gen_helpers.helpers import (
22
22
  map_create_fixed_inputs,
23
23
  map_create_product_block_relations,
24
+ map_create_product_to_product_block_relations,
24
25
  map_create_resource_type_relations,
25
26
  map_delete_fixed_inputs,
26
27
  map_delete_product_block_relations,
@@ -191,6 +192,7 @@ def map_changes(
191
192
  db_product_names: list[str],
192
193
  inputs: dict[str, dict[str, str]],
193
194
  updates: ModelUpdates | None,
195
+ confirm_warnings: bool,
194
196
  ) -> DomainModelChanges:
195
197
  """Map changes that need to be made to fix differences between models and database.
196
198
 
@@ -203,6 +205,7 @@ def map_changes(
203
205
  db_product_names: Product names out of the database.
204
206
  inputs: Optional Dict with prefilled values.
205
207
  updates: Optional Dict.
208
+ confirm_warnings: confirm warnings to continue, fully knowing that things can go wrong.
206
209
 
207
210
  Returns: Mapped changes.
208
211
  """
@@ -231,7 +234,7 @@ def map_changes(
231
234
  create_product_fixed_inputs=map_create_fixed_inputs(model_diffs["products"]),
232
235
  update_product_fixed_inputs=updates.fixed_inputs,
233
236
  delete_product_fixed_inputs=map_delete_fixed_inputs(model_diffs["products"]),
234
- create_product_to_block_relations=map_create_product_block_relations(model_diffs["products"]),
237
+ create_product_to_block_relations=map_create_product_to_product_block_relations(model_diffs["products"]),
235
238
  delete_product_to_block_relations=map_delete_product_block_relations(model_diffs["products"]),
236
239
  rename_resource_types=updates.resource_types,
237
240
  update_block_resource_types=updates.block_resource_types,
@@ -242,12 +245,14 @@ def map_changes(
242
245
  delete_resource_type_relations=delete_resource_type_relations,
243
246
  create_product_blocks=map_create_product_blocks(product_blocks),
244
247
  delete_product_blocks=map_delete_product_blocks(product_blocks),
245
- create_product_block_relations=map_create_product_block_relations(model_diffs["blocks"]),
248
+ create_product_block_relations=map_create_product_block_relations(
249
+ model_diffs["blocks"], product_blocks, confirm_warnings
250
+ ),
246
251
  delete_product_block_relations=map_delete_product_block_relations(model_diffs["blocks"]),
247
252
  )
248
253
 
249
254
  changes = map_product_additional_relations(changes)
250
- changes = map_product_block_additional_relations(changes)
255
+ changes = map_product_block_additional_relations(changes, product_blocks, confirm_warnings)
251
256
  temp = {key for v in changes.update_block_resource_types.values() for key in v.values()}
252
257
  related_resource_type_names = set(changes.create_resource_type_relations.keys()) | temp
253
258
  existing_renamed_rts = set(changes.rename_resource_types.values())
@@ -334,8 +339,12 @@ def generate_downgrade_sql(changes: DomainModelChanges) -> list[str]:
334
339
  sql_revert_create_product_product_block_relations = generate_delete_product_relations_sql(
335
340
  changes.create_product_to_block_relations,
336
341
  )
342
+
343
+ downgrade_block_relations = {
344
+ k: {b["name"] for b in blocks} for k, blocks in changes.create_product_block_relations.items()
345
+ }
337
346
  sql_revert_create_product_block_depends_blocks = generate_delete_product_block_relations_sql(
338
- changes.create_product_block_relations
347
+ downgrade_block_relations
339
348
  )
340
349
 
341
350
  sql_revert_create_product_blocks = generate_delete_product_blocks_sql(set(changes.create_product_blocks.keys()))
@@ -361,6 +370,7 @@ def create_domain_models_migration_sql(
361
370
  inputs: dict[str, dict[str, str]],
362
371
  updates: ModelUpdates | None,
363
372
  is_test: bool = False,
373
+ confirm_warnings: bool = False,
364
374
  ) -> tuple[list[str], list[str]]:
365
375
  """Create tuple with list for upgrade and downgrade SQL statements based on SubscriptionModel.diff_product_in_database.
366
376
 
@@ -370,6 +380,7 @@ def create_domain_models_migration_sql(
370
380
  inputs: dict with pre-defined input values
371
381
  updates: The model
372
382
  is_test: the bool for if it is test
383
+ confirm_warnings: confirm warnings to continue, fully knowing that things can go wrong.
373
384
 
374
385
  Returns tuple:
375
386
  list of upgrade SQL statements in string format.
@@ -384,7 +395,7 @@ def create_domain_models_migration_sql(
384
395
  product_blocks = map_product_blocks(list(SUBSCRIPTION_MODEL_REGISTRY.values()))
385
396
  model_diffs = map_differences_unique(products, existing_products)
386
397
 
387
- changes = map_changes(model_diffs, products, product_blocks, db_product_names, inputs, updates)
398
+ changes = map_changes(model_diffs, products, product_blocks, db_product_names, inputs, updates, confirm_warnings)
388
399
 
389
400
  logger.info("create_products", create_products=changes.create_products)
390
401
  logger.info("delete_products", delete_products=changes.delete_products)
orchestrator/db/models.py CHANGED
@@ -117,7 +117,7 @@ class ProcessTable(BaseModel):
117
117
  is_task = mapped_column(Boolean, nullable=False, server_default=text("false"), index=True)
118
118
 
119
119
  steps = relationship(
120
- "ProcessStepTable", cascade="delete", passive_deletes=True, order_by="asc(ProcessStepTable.executed_at)"
120
+ "ProcessStepTable", cascade="delete", passive_deletes=True, order_by="asc(ProcessStepTable.completed_at)"
121
121
  )
122
122
  input_states = relationship("InputStateTable", cascade="delete", order_by="desc(InputStateTable.input_time)")
123
123
  process_subscriptions = relationship("ProcessSubscriptionTable", back_populates="process", passive_deletes=True)
@@ -141,7 +141,8 @@ class ProcessStepTable(BaseModel):
141
141
  status = mapped_column(String(50), nullable=False)
142
142
  state = mapped_column(pg.JSONB(), nullable=False)
143
143
  created_by = mapped_column(String(255), nullable=True)
144
- executed_at = mapped_column(UtcTimestamp, server_default=text("statement_timestamp()"), nullable=False)
144
+ completed_at = mapped_column(UtcTimestamp, server_default=text("statement_timestamp()"), nullable=False)
145
+ started_at = mapped_column(UtcTimestamp, server_default=text("statement_timestamp()"), nullable=False)
145
146
  commit_hash = mapped_column(String(40), nullable=True, default=GIT_COMMIT_HASH)
146
147
 
147
148
 
@@ -154,7 +155,9 @@ class ProcessSubscriptionTable(BaseModel):
154
155
  )
155
156
  subscription_id = mapped_column(UUIDType, ForeignKey("subscriptions.subscription_id"), nullable=False, index=True)
156
157
  created_at = mapped_column(UtcTimestamp, server_default=text("current_timestamp()"), nullable=False)
157
- workflow_target = mapped_column(String(255), nullable=False, server_default=Target.CREATE)
158
+
159
+ # FIXME: workflow_target is already stored in the workflow table, this column should get removed in a later release.
160
+ workflow_target = mapped_column(String(255), nullable=True)
158
161
 
159
162
  process = relationship("ProcessTable", back_populates="process_subscriptions")
160
163
  subscription = relationship("SubscriptionTable", back_populates="processes")
@@ -6,14 +6,17 @@ from strawberry.federation.schema_directives import Key
6
6
  from strawberry.scalars import JSON
7
7
 
8
8
  from oauth2_lib.strawberry import authenticated_field
9
+ from orchestrator.api.api_v1.endpoints.processes import get_auth_callbacks, get_current_steps
9
10
  from orchestrator.db import ProcessTable, ProductTable, db
10
11
  from orchestrator.graphql.pagination import EMPTY_PAGE, Connection
11
12
  from orchestrator.graphql.schemas.customer import CustomerType
12
13
  from orchestrator.graphql.schemas.helpers import get_original_model
13
14
  from orchestrator.graphql.schemas.product import ProductType
14
- from orchestrator.graphql.types import GraphqlFilter, GraphqlSort, OrchestratorInfo
15
+ from orchestrator.graphql.types import FormUserPermissionsType, GraphqlFilter, GraphqlSort, OrchestratorInfo
15
16
  from orchestrator.schemas.process import ProcessSchema, ProcessStepSchema
17
+ from orchestrator.services.processes import load_process
16
18
  from orchestrator.settings import app_settings
19
+ from orchestrator.workflows import get_workflow
17
20
 
18
21
  if TYPE_CHECKING:
19
22
  from orchestrator.graphql.schemas.subscription import SubscriptionInterface
@@ -29,7 +32,11 @@ class ProcessStepType:
29
32
  name: strawberry.auto
30
33
  status: strawberry.auto
31
34
  created_by: strawberry.auto
32
- executed: strawberry.auto
35
+ executed: strawberry.auto = strawberry.field(
36
+ deprecation_reason="Deprecated, use 'started' and 'completed' for step start and completion times"
37
+ )
38
+ started: strawberry.auto
39
+ completed: strawberry.auto
33
40
  commit_hash: strawberry.auto
34
41
  state: JSON | None
35
42
  state_delta: JSON | None
@@ -74,6 +81,18 @@ class ProcessType:
74
81
  shortcode=app_settings.DEFAULT_CUSTOMER_SHORTCODE,
75
82
  )
76
83
 
84
+ @strawberry.field(description="Returns user permissions for operations on this process") # type: ignore
85
+ def user_permissions(self, info: OrchestratorInfo) -> FormUserPermissionsType:
86
+ oidc_user = info.context.get_current_user
87
+ workflow = get_workflow(self.workflow_name)
88
+ process = load_process(db.session.get(ProcessTable, self.process_id)) # type: ignore[arg-type]
89
+ auth_resume, auth_retry = get_auth_callbacks(get_current_steps(process), workflow) # type: ignore[arg-type]
90
+
91
+ return FormUserPermissionsType(
92
+ retryAllowed=auth_retry and auth_retry(oidc_user), # type: ignore[arg-type]
93
+ resumeAllowed=auth_resume and auth_resume(oidc_user), # type: ignore[arg-type]
94
+ )
95
+
77
96
  @authenticated_field(description="Returns list of subscriptions of the process") # type: ignore
78
97
  async def subscriptions(
79
98
  self,
@@ -52,21 +52,20 @@ class ProductType:
52
52
  return await resolve_subscriptions(info, filter_by_with_related_subscriptions, sort_by, first, after)
53
53
 
54
54
  @strawberry.field(description="Returns list of all nested productblock names") # type: ignore
55
- async def all_pb_names(self) -> list[str]:
56
-
55
+ async def all_product_block_names(self) -> list[str]:
57
56
  model = get_original_model(self, ProductTable)
58
57
 
59
- def get_all_pb_names(product_blocks: list[ProductBlockTable]) -> Iterable[str]:
58
+ def get_names(product_blocks: list[ProductBlockTable], visited: set) -> Iterable[str]:
60
59
  for product_block in product_blocks:
60
+ if product_block.product_block_id in visited:
61
+ continue
62
+ visited.add(product_block.product_block_id)
61
63
  yield product_block.name
62
-
63
64
  if product_block.depends_on:
64
- yield from get_all_pb_names(product_block.depends_on)
65
-
66
- names: list[str] = list(get_all_pb_names(model.product_blocks))
67
- names.sort()
65
+ yield from get_names(product_block.depends_on, visited)
68
66
 
69
- return names
67
+ names = set(get_names(model.product_blocks, set()))
68
+ return sorted(names)
70
69
 
71
70
  @strawberry.field(description="Return product blocks") # type: ignore
72
71
  async def product_blocks(self) -> list[Annotated["ProductBlock", strawberry.lazy(".product_block")]]:
@@ -5,6 +5,7 @@ import strawberry
5
5
  from orchestrator.config.assignee import Assignee
6
6
  from orchestrator.db import WorkflowTable
7
7
  from orchestrator.graphql.schemas.helpers import get_original_model
8
+ from orchestrator.graphql.types import OrchestratorInfo
8
9
  from orchestrator.schemas import StepSchema, WorkflowSchema
9
10
  from orchestrator.workflows import get_workflow
10
11
 
@@ -30,3 +31,11 @@ class Workflow:
30
31
  @strawberry.field(description="Return all steps for this workflow") # type: ignore
31
32
  def steps(self) -> list[Step]:
32
33
  return [Step(name=step.name, assignee=step.assignee) for step in get_workflow(self.name).steps] # type: ignore
34
+
35
+ @strawberry.field(description="Return whether the currently logged-in user is allowed to start this workflow") # type: ignore
36
+ def is_allowed(self, info: OrchestratorInfo) -> bool:
37
+ oidc_user = info.context.get_current_user
38
+ workflow_table = get_original_model(self, WorkflowTable)
39
+ workflow = get_workflow(workflow_table.name)
40
+
41
+ return workflow.authorize_callback(oidc_user) # type: ignore
@@ -1,4 +1,4 @@
1
- # Copyright 2022-2023 SURF, GÉANT.
1
+ # Copyright 2022-2025 SURF, GÉANT.
2
2
  # Licensed under the Apache License, Version 2.0 (the "License");
3
3
  # you may not use this file except in compliance with the License.
4
4
  # You may obtain a copy of the License at
@@ -132,6 +132,12 @@ SCALAR_OVERRIDES: ScalarOverrideType = {
132
132
  }
133
133
 
134
134
 
135
+ @strawberry.type(description="User permissions on a specific process")
136
+ class FormUserPermissionsType:
137
+ retryAllowed: bool
138
+ resumeAllowed: bool
139
+
140
+
135
141
  @strawberry.type(description="Generic class to capture errors")
136
142
  class MutationError:
137
143
  message: str = strawberry.field(description="Error message")