orchestrator-core 4.1.0rc1__py3-none-any.whl → 4.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. orchestrator/__init__.py +1 -1
  2. orchestrator/api/api_v1/endpoints/processes.py +65 -11
  3. orchestrator/api/api_v1/endpoints/subscriptions.py +25 -2
  4. orchestrator/cli/database.py +8 -1
  5. orchestrator/cli/domain_gen_helpers/helpers.py +44 -2
  6. orchestrator/cli/domain_gen_helpers/product_block_helpers.py +35 -15
  7. orchestrator/cli/domain_gen_helpers/resource_type_helpers.py +5 -5
  8. orchestrator/cli/domain_gen_helpers/types.py +7 -1
  9. orchestrator/cli/generator/templates/create_product.j2 +1 -2
  10. orchestrator/cli/migrate_domain_models.py +16 -5
  11. orchestrator/db/models.py +6 -3
  12. orchestrator/graphql/schemas/process.py +21 -2
  13. orchestrator/graphql/schemas/product.py +8 -9
  14. orchestrator/graphql/schemas/workflow.py +9 -0
  15. orchestrator/graphql/types.py +7 -1
  16. orchestrator/migrations/versions/schema/2025-07-01_93fc5834c7e5_changed_timestamping_fields_in_process_steps.py +65 -0
  17. orchestrator/migrations/versions/schema/2025-07-04_4b58e336d1bf_deprecating_workflow_target_in_.py +30 -0
  18. orchestrator/schemas/process.py +5 -1
  19. orchestrator/services/celery.py +7 -2
  20. orchestrator/services/processes.py +12 -12
  21. orchestrator/services/settings_env_variables.py +3 -15
  22. orchestrator/settings.py +1 -1
  23. orchestrator/utils/auth.py +9 -0
  24. orchestrator/utils/enrich_process.py +4 -2
  25. orchestrator/utils/errors.py +2 -1
  26. orchestrator/workflow.py +52 -9
  27. orchestrator/workflows/modify_note.py +1 -1
  28. orchestrator/workflows/steps.py +14 -8
  29. orchestrator/workflows/utils.py +13 -7
  30. orchestrator_core-4.2.0.dist-info/METADATA +167 -0
  31. {orchestrator_core-4.1.0rc1.dist-info → orchestrator_core-4.2.0.dist-info}/RECORD +33 -30
  32. orchestrator_core-4.1.0rc1.dist-info/METADATA +0 -118
  33. {orchestrator_core-4.1.0rc1.dist-info → orchestrator_core-4.2.0.dist-info}/WHEEL +0 -0
  34. {orchestrator_core-4.1.0rc1.dist-info → orchestrator_core-4.2.0.dist-info}/licenses/LICENSE +0 -0
orchestrator/graphql/schemas/product.py CHANGED
@@ -52,21 +52,20 @@ class ProductType:
  return await resolve_subscriptions(info, filter_by_with_related_subscriptions, sort_by, first, after)

  @strawberry.field(description="Returns list of all nested productblock names") # type: ignore
- async def all_pb_names(self) -> list[str]:
-
+ async def all_product_block_names(self) -> list[str]:
  model = get_original_model(self, ProductTable)

- def get_all_pb_names(product_blocks: list[ProductBlockTable]) -> Iterable[str]:
+ def get_names(product_blocks: list[ProductBlockTable], visited: set) -> Iterable[str]:
  for product_block in product_blocks:
+ if product_block.product_block_id in visited:
+ continue
+ visited.add(product_block.product_block_id)
  yield product_block.name
-
  if product_block.depends_on:
- yield from get_all_pb_names(product_block.depends_on)
-
- names: list[str] = list(get_all_pb_names(model.product_blocks))
- names.sort()
+ yield from get_names(product_block.depends_on, visited)

- return names
+ names = set(get_names(model.product_blocks, set()))
+ return sorted(names)

  @strawberry.field(description="Return product blocks") # type: ignore
  async def product_blocks(self) -> list[Annotated["ProductBlock", strawberry.lazy(".product_block")]]:
orchestrator/graphql/schemas/workflow.py CHANGED
@@ -5,6 +5,7 @@ import strawberry
  from orchestrator.config.assignee import Assignee
  from orchestrator.db import WorkflowTable
  from orchestrator.graphql.schemas.helpers import get_original_model
+ from orchestrator.graphql.types import OrchestratorInfo
  from orchestrator.schemas import StepSchema, WorkflowSchema
  from orchestrator.workflows import get_workflow

@@ -30,3 +31,11 @@ class Workflow:
  @strawberry.field(description="Return all steps for this workflow") # type: ignore
  def steps(self) -> list[Step]:
  return [Step(name=step.name, assignee=step.assignee) for step in get_workflow(self.name).steps] # type: ignore
+
+ @strawberry.field(description="Return whether the currently logged-in user is allowed to start this workflow") # type: ignore
+ def is_allowed(self, info: OrchestratorInfo) -> bool:
+ oidc_user = info.context.get_current_user
+ workflow_table = get_original_model(self, WorkflowTable)
+ workflow = get_workflow(workflow_table.name)
+
+ return workflow.authorize_callback(oidc_user) # type: ignore
orchestrator/graphql/types.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2022-2023 SURF, GÉANT.
+ # Copyright 2022-2025 SURF, GÉANT.
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
  # You may obtain a copy of the License at
@@ -132,6 +132,12 @@ SCALAR_OVERRIDES: ScalarOverrideType = {
  }


+ @strawberry.type(description="User permissions on a specific process")
+ class FormUserPermissionsType:
+ retryAllowed: bool
+ resumeAllowed: bool
+
+
  @strawberry.type(description="Generic class to capture errors")
  class MutationError:
  message: str = strawberry.field(description="Error message")
orchestrator/migrations/versions/schema/2025-07-01_93fc5834c7e5_changed_timestamping_fields_in_process_steps.py ADDED
@@ -0,0 +1,65 @@
+ """Changed timestamping fields in process_steps.
+
+ Revision ID: 93fc5834c7e5
+ Revises: 4b58e336d1bf
+ Create Date: 2025-07-01 14:20:44.755694
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ from orchestrator import db
+
+ # revision identifiers, used by Alembic.
+ revision = "93fc5834c7e5"
+ down_revision = "4b58e336d1bf"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "process_steps",
+ sa.Column(
+ "started_at",
+ db.UtcTimestamp(timezone=True),
+ server_default=sa.text("statement_timestamp()"),
+ nullable=False,
+ ),
+ )
+ op.alter_column("process_steps", "executed_at", new_column_name="completed_at")
+ # conn = op.get_bind()
+ # sa.select
+ # ### end Alembic commands ###
+ # Backfill started_at field correctly using proper aliasing
+ op.execute(
+ """
+ WITH backfill_started_at AS (
+ SELECT
+ ps1.stepid,
+ COALESCE(prev.completed_at, p.started_at) AS new_started_at
+ FROM process_steps ps1
+ JOIN processes p ON ps1.pid = p.pid
+ LEFT JOIN LATERAL (
+ SELECT ps2.completed_at
+ FROM process_steps ps2
+ WHERE ps2.pid = ps1.pid AND ps2.completed_at < ps1.completed_at
+ ORDER BY ps2.completed_at DESC
+ LIMIT 1
+ ) prev ON true
+ )
+ UPDATE process_steps
+ SET started_at = b.new_started_at
+ FROM backfill_started_at b
+ WHERE process_steps.stepid = b.stepid;
+ """
+ )
+
+
+ def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column("process_steps", "started_at")
+ op.alter_column("process_steps", "completed_at", new_column_name="executed_at")
+ # ### end Alembic commands ###
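
The backfill query gives every pre-existing step a started_at equal to the completed_at of the step that finished immediately before it in the same process, falling back to the process's own started_at for the first step. A rough Python illustration of that rule, not part of the migration (the flat list of completion times is an assumed simplification):

from datetime import datetime


def backfilled_started_at(process_started_at: datetime, completed_times: list[datetime]) -> list[datetime]:
    # Order steps by completion: step N starts when step N-1 completed;
    # the first step falls back to the process start time.
    ordered = sorted(completed_times)
    return [process_started_at if i == 0 else ordered[i - 1] for i in range(len(ordered))]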
orchestrator/migrations/versions/schema/2025-07-04_4b58e336d1bf_deprecating_workflow_target_in_.py ADDED
@@ -0,0 +1,30 @@
+ """Deprecating workflow target in ProcessSubscriptionTable.
+
+ Revision ID: 4b58e336d1bf
+ Revises: 161918133bec
+ Create Date: 2025-07-04 15:27:23.814954
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "4b58e336d1bf"
+ down_revision = "161918133bec"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+ op.alter_column("processes_subscriptions", "workflow_target", existing_type=sa.VARCHAR(length=255), nullable=True)
+
+
+ def downgrade() -> None:
+ op.alter_column(
+ "processes_subscriptions",
+ "workflow_target",
+ existing_type=sa.VARCHAR(length=255),
+ nullable=False,
+ existing_server_default=sa.text("'CREATE'::character varying"),
+ )
orchestrator/schemas/process.py CHANGED
@@ -49,7 +49,11 @@ class ProcessStepSchema(OrchestratorBaseModel):
  name: str
  status: str
  created_by: str | None = None
- executed: datetime | None = None
+ executed: datetime | None = Field(
+ None, deprecated="Deprecated, use 'started' and 'completed' for step start and completion times"
+ )
+ started: datetime | None = None
+ completed: datetime | None = None
  commit_hash: str | None = None
  state: dict[str, Any] | None = None
  state_delta: dict[str, Any] | None = None
orchestrator/services/celery.py CHANGED
@@ -19,6 +19,7 @@ import structlog
  from celery.result import AsyncResult
  from kombu.exceptions import ConnectionError, OperationalError

+ from oauth2_lib.fastapi import OIDCUserModel
  from orchestrator import app_settings
  from orchestrator.api.error_handling import raise_status
  from orchestrator.db import ProcessTable, db
@@ -42,7 +43,11 @@ def _block_when_testing(task_result: AsyncResult) -> None:


  def _celery_start_process(
- workflow_key: str, user_inputs: list[State] | None, user: str = SYSTEM_USER, **kwargs: Any
+ workflow_key: str,
+ user_inputs: list[State] | None,
+ user: str = SYSTEM_USER,
+ user_model: OIDCUserModel | None = None,
+ **kwargs: Any,
  ) -> UUID:
  """Client side call of Celery."""
  from orchestrator.services.tasks import NEW_TASK, NEW_WORKFLOW, get_celery_task
@@ -57,7 +62,7 @@ def _celery_start_process(

  task_name = NEW_TASK if wf_table.is_task else NEW_WORKFLOW
  trigger_task = get_celery_task(task_name)
- pstat = create_process(workflow_key, user_inputs, user)
+ pstat = create_process(workflow_key, user_inputs=user_inputs, user=user, user_model=user_model)
  try:
  result = trigger_task.delay(pstat.process_id, workflow_key, user)
  _block_when_testing(result)
orchestrator/services/processes.py CHANGED
@@ -12,6 +12,7 @@
  # limitations under the License.
  from collections.abc import Callable, Sequence
  from concurrent.futures.thread import ThreadPoolExecutor
+ from datetime import datetime
  from functools import partial
  from http import HTTPStatus
  from typing import Any
@@ -19,6 +20,7 @@ from uuid import UUID, uuid4

  import structlog
  from deepmerge.merger import Merger
+ from pytz import utc
  from sqlalchemy import delete, select
  from sqlalchemy.exc import SQLAlchemyError
  from sqlalchemy.orm import joinedload
@@ -206,6 +208,10 @@ def _get_current_step_to_update(
  finally:
  step_state.pop("__remove_keys", None)

+ # We don't have __last_step_started in __remove_keys because the way __remove_keys is populated appears like it would overwrite
+ # what's put there in the step decorator in certain cases (step groups and callback steps)
+ step_start_time = step_state.pop("__last_step_started_at", None)
+
  if process_state.isfailed() or process_state.iswaiting():
  if (
  last_db_step is not None
@@ -216,7 +222,7 @@
  ):
  state_ex_info = {
  "retries": last_db_step.state.get("retries", 0) + 1,
- "executed_at": last_db_step.state.get("executed_at", []) + [str(last_db_step.executed_at)],
+ "completed_at": last_db_step.state.get("completed_at", []) + [str(last_db_step.completed_at)],
  }

  # write new state info and execution date
@@ -236,10 +242,13 @@
  state=step_state,
  created_by=stat.current_user,
  )
+ # Since the Start step does not have a __last_step_started_at in its state, we effectively assume it is instantaneous.
+ now = nowtz()
+ current_step.started_at = datetime.fromtimestamp(step_start_time or now.timestamp(), tz=utc)

  # Always explicitly set this instead of leaving it to the database to prevent failing tests
  # Test will fail if multiple steps have the same timestamp
- current_step.executed_at = nowtz()
+ current_step.completed_at = now
  return current_step


@@ -467,9 +476,7 @@ def thread_start_process(
  user_model: OIDCUserModel | None = None,
  broadcast_func: BroadcastFunc | None = None,
  ) -> UUID:
- pstat = create_process(workflow_key, user_inputs=user_inputs, user=user)
- if not pstat.workflow.authorize_callback(user_model):
- raise_status(HTTPStatus.FORBIDDEN, error_message_unauthorized(workflow_key))
+ pstat = create_process(workflow_key, user_inputs=user_inputs, user=user, user_model=user_model)

  _safe_logstep_with_func = partial(safe_logstep, broadcast_func=broadcast_func)
  return _run_process_async(pstat.process_id, lambda: runwf(pstat, _safe_logstep_with_func))
@@ -506,7 +513,6 @@ def thread_resume_process(
  *,
  user_inputs: list[State] | None = None,
  user: str | None = None,
- user_model: OIDCUserModel | None = None,
  broadcast_func: BroadcastFunc | None = None,
  ) -> UUID:
  # ATTENTION!! When modifying this function make sure you make similar changes to `resume_workflow` in the test code
@@ -515,8 +521,6 @@
  user_inputs = [{}]

  pstat = load_process(process)
- if not pstat.workflow.authorize_callback(user_model):
- raise_status(HTTPStatus.FORBIDDEN, error_message_unauthorized(str(process.workflow_name)))

  if pstat.workflow == removed_workflow:
  raise ValueError("This workflow cannot be resumed")
@@ -556,7 +560,6 @@ def resume_process(
  *,
  user_inputs: list[State] | None = None,
  user: str | None = None,
- user_model: OIDCUserModel | None = None,
  broadcast_func: BroadcastFunc | None = None,
  ) -> UUID:
  """Resume a failed or suspended process.
@@ -565,7 +568,6 @@
  process: Process from database
  user_inputs: Optional user input from forms
  user: user who resumed this process
- user_model: OIDCUserModel of user who resumed this process
  broadcast_func: Optional function to broadcast process data

  Returns:
@@ -573,8 +575,6 @@

  """
  pstat = load_process(process)
- if not pstat.workflow.authorize_callback(user_model):
- raise_status(HTTPStatus.FORBIDDEN, error_message_unauthorized(str(process.workflow_name)))

  try:
  post_form(pstat.log[0].form, pstat.state.unwrap(), user_inputs=user_inputs or [])
orchestrator/services/settings_env_variables.py CHANGED
@@ -14,7 +14,7 @@
  from typing import Any, Dict, Type

  from pydantic import SecretStr as PydanticSecretStr
- from pydantic_core import MultiHostUrl
+ from pydantic_core import MultiHostUrl, Url
  from pydantic_settings import BaseSettings

  from orchestrator.utils.expose_settings import SecretStr as OrchSecretStr
@@ -32,21 +32,9 @@ def expose_settings(settings_name: str, base_settings: Type[BaseSettings]) -> Ty

  def mask_value(key: str, value: Any) -> Any:
  key_lower = key.lower()
+ is_sensitive_key = "secret" in key_lower or "password" in key_lower

- if "secret" in key_lower or "password" in key_lower:
- # Mask sensitive information
- return MASK
-
- if isinstance(value, PydanticSecretStr):
- # Need to convert SecretStr to str for serialization
- return str(value)
-
- if isinstance(value, OrchSecretStr):
- return MASK
-
- # PostgresDsn is just MultiHostUrl with extra metadata (annotations)
- if isinstance(value, MultiHostUrl):
- # Convert PostgresDsn to str for serialization
+ if is_sensitive_key or isinstance(value, (OrchSecretStr, PydanticSecretStr, MultiHostUrl, Url)):
  return MASK

  return value
orchestrator/settings.py CHANGED
@@ -72,7 +72,7 @@ class AppSettings(BaseSettings):
  TRACING_ENABLED: bool = False
  TRACE_HOST: str = "http://localhost:4317"
  TRANSLATIONS_DIR: Path | None = None
- WEBSOCKET_BROADCASTER_URL: str = "memory://"
+ WEBSOCKET_BROADCASTER_URL: OrchSecretStr = "memory://" # type: ignore
  ENABLE_WEBSOCKETS: bool = True
  DISABLE_INSYNC_CHECK: bool = False
  DEFAULT_PRODUCT_WORKFLOWS: list[str] = ["modify_note"]
orchestrator/utils/auth.py ADDED
@@ -0,0 +1,9 @@
+ from collections.abc import Callable
+ from typing import TypeAlias
+
+ from oauth2_lib.fastapi import OIDCUserModel
+
+ # This file is broken out separately to avoid circular imports.
+
+ # Can instead use "type Authorizer = ..." in later Python versions.
+ Authorizer: TypeAlias = Callable[[OIDCUserModel | None], bool]
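
An Authorizer is any callable that receives the (possibly absent) OIDC user and returns whether the action is allowed. A minimal sketch of a custom policy, not part of this release (the "groups" claim and the decision to allow user-less background calls are assumptions):

from oauth2_lib.fastapi import OIDCUserModel

from orchestrator.utils.auth import Authorizer


def allow_admins(user: OIDCUserModel | None) -> bool:
    # Assumption: scheduled/system calls pass no user and are allowed here.
    if user is None:
        return True
    # "groups" is a hypothetical claim on the decoded user model.
    return "admin" in getattr(user, "groups", [])


admin_only: Authorizer = allow_admins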
orchestrator/utils/enrich_process.py CHANGED
@@ -57,7 +57,9 @@ def enrich_step_details(step: ProcessStepTable, previous_step: ProcessStepTable

  return {
  "name": step.name,
- "executed": step.executed_at.timestamp(),
+ "executed": step.completed_at.timestamp(),
+ "started": step.started_at.timestamp(),
+ "completed": step.completed_at.timestamp(),
  "status": step.status,
  "state": step.state,
  "created_by": step.created_by,
@@ -103,7 +105,7 @@ def enrich_process(process: ProcessTable, p_stat: ProcessStat | None = None) ->
  "is_task": process.is_task,
  "workflow_id": process.workflow_id,
  "workflow_name": process.workflow.name,
- "workflow_target": process.process_subscriptions[0].workflow_target if process.process_subscriptions else None,
+ "workflow_target": process.workflow.target,
  "failed_reason": process.failed_reason,
  "created_by": process.created_by,
  "started_at": process.started_at,
orchestrator/utils/errors.py CHANGED
@@ -128,12 +128,13 @@ def _(err: Exception) -> ErrorDict:
  # We can't dispatch on ApiException, see is_api_exception docstring
  if is_api_exception(err):
  err = cast(ApiException, err)
+ headers = err.headers or {}
  return {
  "class": type(err).__name__,
  "error": err.reason,
  "status_code": err.status,
  "body": err.body,
- "headers": "\n".join(f"{k}: {v}" for k, v in err.headers.items()),
+ "headers": "\n".join(f"{k}: {v}" for k, v in headers.items()),
  "traceback": show_ex(err),
  }

orchestrator/workflow.py CHANGED
@@ -45,6 +45,8 @@ from orchestrator.db import db, transactional
  from orchestrator.services.settings import get_engine_settings
  from orchestrator.targets import Target
  from orchestrator.types import ErrorDict, StepFunc
+ from orchestrator.utils.auth import Authorizer
+ from orchestrator.utils.datetime import nowtz
  from orchestrator.utils.docs import make_workflow_doc
  from orchestrator.utils.errors import error_state_to_dict
  from orchestrator.utils.state import form_inject_args, inject_args
@@ -80,6 +82,8 @@ class Step(Protocol):
  name: str
  form: InputFormGenerator | None
  assignee: Assignee | None
+ resume_auth_callback: Authorizer | None = None
+ retry_auth_callback: Authorizer | None = None

  def __call__(self, state: State) -> Process: ...

@@ -90,7 +94,8 @@ class Workflow(Protocol):
  __qualname__: str
  name: str
  description: str
- authorize_callback: Callable[[OIDCUserModel | None], bool]
+ authorize_callback: Authorizer
+ retry_auth_callback: Authorizer
  initial_input_form: InputFormGenerator | None = None
  target: Target
  steps: StepList
@@ -99,13 +104,20 @@


  def make_step_function(
- f: Callable, name: str, form: InputFormGenerator | None = None, assignee: Assignee | None = Assignee.SYSTEM
+ f: Callable,
+ name: str,
+ form: InputFormGenerator | None = None,
+ assignee: Assignee | None = Assignee.SYSTEM,
+ resume_auth_callback: Authorizer | None = None,
+ retry_auth_callback: Authorizer | None = None,
  ) -> Step:
  step_func = cast(Step, f)

  step_func.name = name
  step_func.form = form
  step_func.assignee = assignee
+ step_func.resume_auth_callback = resume_auth_callback
+ step_func.retry_auth_callback = retry_auth_callback
  return step_func


@@ -167,6 +179,7 @@ class StepList(list[Step]):


  def _handle_simple_input_form_generator(f: StateInputStepFunc) -> StateInputFormGenerator:
+ """Processes f into a form generator and injects a pre-hook for user authorization."""
  if inspect.isgeneratorfunction(f):
  return cast(StateInputFormGenerator, f)
  if inspect.isgenerator(f):
@@ -191,7 +204,8 @@ def make_workflow(
  initial_input_form: InputStepFunc | None,
  target: Target,
  steps: StepList,
- authorize_callback: Callable[[OIDCUserModel | None], bool] | None = None,
+ authorize_callback: Authorizer | None = None,
+ retry_auth_callback: Authorizer | None = None,
  ) -> Workflow:
  @functools.wraps(f)
  def wrapping_function() -> NoReturn:
@@ -202,6 +216,10 @@
  wrapping_function.name = f.__name__ # default, will be changed by LazyWorkflowInstance
  wrapping_function.description = description
  wrapping_function.authorize_callback = allow if authorize_callback is None else authorize_callback
+ # If no retry auth policy is given, defer to policy for process creation.
+ wrapping_function.retry_auth_callback = (
+ wrapping_function.authorize_callback if retry_auth_callback is None else retry_auth_callback
+ )

  if initial_input_form is None:
  # We always need a form to prevent starting a workflow when no input is needed.
@@ -270,9 +288,16 @@ def retrystep(name: str) -> Callable[[StepFunc], Step]:
  return decorator


- def inputstep(name: str, assignee: Assignee) -> Callable[[InputStepFunc], Step]:
+ def inputstep(
+ name: str,
+ assignee: Assignee,
+ resume_auth_callback: Authorizer | None = None,
+ retry_auth_callback: Authorizer | None = None,
+ ) -> Callable[[InputStepFunc], Step]:
  """Add user input step to workflow.

+ Any authorization callbacks will be attached to the resulting Step.
+
  IMPORTANT: In contrast to other workflow steps, the `@inputstep` wrapped function will not run in the
  workflow engine! This means that it must be free of side effects!

@@ -299,7 +324,14 @@ def inputstep(name: str, assignee: Assignee) -> Callable[[InputStepFunc], Step]:
  def suspend(state: State) -> Process:
  return Suspend(state)

- return make_step_function(suspend, name, wrapper, assignee)
+ return make_step_function(
+ suspend,
+ name,
+ wrapper,
+ assignee,
+ resume_auth_callback=resume_auth_callback,
+ retry_auth_callback=retry_auth_callback,
+ )

  return decorator

@@ -350,11 +382,13 @@ def step_group(name: str, steps: StepList, extract_form: bool = True) -> Step:
  p = p.map(lambda s: s | {"__replace_last_state": True})
  return step_log_fn(step_, p)

+ step_group_start_time = nowtz().timestamp()
  process: Process = Success(initial_state)
  process = _exec_steps(step_list, process, dblogstep)
-
  # Add instruction to replace state of last sub step before returning process _exec_steps higher in the call tree
- return process.map(lambda s: s | {"__replace_last_state": True})
+ return process.map(
+ lambda s: s | {"__replace_last_state": True, "__last_step_started_at": step_group_start_time}
+ )

  # Make sure we return a form is a sub step has a form
  form = next((sub_step.form for sub_step in steps if sub_step.form), None) if extract_form else None
@@ -479,7 +513,8 @@ def workflow(
  description: str,
  initial_input_form: InputStepFunc | None = None,
  target: Target = Target.SYSTEM,
- authorize_callback: Callable[[OIDCUserModel | None], bool] | None = None,
+ authorize_callback: Authorizer | None = None,
+ retry_auth_callback: Authorizer | None = None,
  ) -> Callable[[Callable[[], StepList]], Workflow]:
  """Transform an initial_input_form and a step list into a workflow.

@@ -500,7 +535,13 @@

  def _workflow(f: Callable[[], StepList]) -> Workflow:
  return make_workflow(
- f, description, initial_input_form_in_form_inject_args, target, f(), authorize_callback=authorize_callback
+ f,
+ description,
+ initial_input_form_in_form_inject_args,
+ target,
+ f(),
+ authorize_callback=authorize_callback,
+ retry_auth_callback=retry_auth_callback,
  )

  return _workflow
@@ -1416,6 +1457,8 @@ def _exec_steps(steps: StepList, starting_process: Process, dblogstep: StepLogFu
  "Not executing Step as the workflow engine is Paused. Process will remain in state 'running'"
  )
  return process
+
+ process = process.map(lambda s: s | {"__last_step_started_at": nowtz().timestamp()})
  step_result_process = process.execute_step(step)
  except Exception as e:
  consolelogger.error("An exception occurred while executing the workflow step.")
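
Taken together, the new hooks let a workflow author control who may start a workflow, retry a failed step, and resume a suspended input step. A minimal sketch of how they could be wired up; the policy function and step names are hypothetical and the import locations of init/done follow common orchestrator-core usage rather than this diff:

from oauth2_lib.fastapi import OIDCUserModel
from orchestrator.config.assignee import Assignee
from orchestrator.workflow import StepList, done, init, inputstep, workflow
from pydantic_forms.core import FormPage


def is_operator(user: OIDCUserModel | None) -> bool:
    # Hypothetical policy: only authenticated users may act.
    return user is not None


@inputstep("Confirm change", assignee=Assignee.SYSTEM, resume_auth_callback=is_operator)
def confirm_change():
    class ConfirmForm(FormPage):
        confirmed: bool = False

    user_input = yield ConfirmForm
    return user_input.model_dump()


@workflow(
    "Example workflow",
    authorize_callback=is_operator,  # who may start the workflow
    retry_auth_callback=is_operator,  # who may retry a failed step
)
def example_workflow() -> StepList:
    return init >> confirm_change >> done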
orchestrator/workflows/modify_note.py CHANGED
@@ -53,4 +53,4 @@ def store_subscription_note(subscription_id: UUIDstr, note: str) -> State:

  @workflow("Modify Note", initial_input_form=wrap_modify_initial_input_form(initial_input_form), target=Target.MODIFY)
  def modify_note() -> StepList:
- return init >> store_process_subscription(Target.MODIFY) >> store_subscription_note >> done
+ return init >> store_process_subscription() >> store_subscription_note >> done
orchestrator/workflows/steps.py CHANGED
@@ -23,6 +23,7 @@ from orchestrator.services.subscriptions import get_subscription
  from orchestrator.targets import Target
  from orchestrator.types import SubscriptionLifecycle
  from orchestrator.utils.json import to_serializable
+ from orchestrator.websocket import sync_invalidate_subscription_cache
  from orchestrator.workflow import Step, step
  from pydantic_forms.types import State, UUIDstr

@@ -33,6 +34,7 @@ logger = structlog.get_logger(__name__)
  def resync(subscription: SubscriptionModel) -> State:
  """Transition a subscription to in sync."""
  subscription.insync = True
+ sync_invalidate_subscription_cache(subscription.subscription_id)
  return {"subscription": subscription}


@@ -93,6 +95,7 @@ def unsync(subscription_id: UUIDstr, __old_subscriptions__: dict | None = None)
  if not subscription.insync:
  raise ValueError("Subscription is already out of sync, cannot continue!")
  subscription.insync = False
+ sync_invalidate_subscription_cache(subscription.subscription_id)

  return {"subscription": subscription, "__old_subscriptions__": subscription_backup}

@@ -105,20 +108,23 @@ def unsync_unchecked(subscription_id: UUIDstr) -> State:
  return {"subscription": subscription}


- def store_process_subscription_relationship(
- process_id: UUIDstr, subscription_id: UUIDstr, workflow_target: str
- ) -> ProcessSubscriptionTable:
- process_subscription = ProcessSubscriptionTable(
- process_id=process_id, subscription_id=subscription_id, workflow_target=workflow_target
- )
+ def store_process_subscription_relationship(process_id: UUIDstr, subscription_id: UUIDstr) -> ProcessSubscriptionTable:
+ process_subscription = ProcessSubscriptionTable(process_id=process_id, subscription_id=subscription_id)
  db.session.add(process_subscription)
  return process_subscription


- def store_process_subscription(workflow_target: Target) -> Step:
+ def store_process_subscription(workflow_target: Target | None = None) -> Step:
+ if workflow_target:
+ deprecation_warning = (
+ "Providing a workflow target to function store_process_subscription() is deprecated. "
+ "This information is already stored in the workflow table."
+ )
+ logger.warning(deprecation_warning)
+
  @step("Create Process Subscription relation")
  def _store_process_subscription(process_id: UUIDstr, subscription_id: UUIDstr) -> None:
- store_process_subscription_relationship(process_id, subscription_id, workflow_target)
+ store_process_subscription_relationship(process_id, subscription_id)

  return _store_process_subscription
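
For workflow authors, the practical effect is that the target argument can simply be dropped, as the modify_note change above shows; the old call still works but logs a deprecation warning. A small sketch, assuming only the imports below:

from orchestrator.targets import Target
from orchestrator.workflows.steps import store_process_subscription

# Before 4.2.0: the target was passed in and stored on the process/subscription relation.
legacy_step = store_process_subscription(Target.MODIFY)  # now logs a deprecation warning

# From 4.2.0: no argument; the target is read from the workflow table instead.
current_step = store_process_subscription()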