orchestrator-core 4.7.0__py3-none-any.whl → 4.7.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. orchestrator/__init__.py +1 -1
  2. orchestrator/app.py +1 -34
  3. orchestrator/cli/scheduler.py +8 -53
  4. orchestrator/graphql/schemas/process.py +2 -2
  5. orchestrator/llm_settings.py +1 -0
  6. orchestrator/migrations/versions/schema/2020-10-19_a76b9185b334_add_generic_workflows_to_core.py +0 -1
  7. orchestrator/migrations/versions/schema/2021-04-06_3c8b9185c221_add_validate_products_task.py +0 -1
  8. orchestrator/migrations/versions/schema/2025-11-18_961eddbd4c13_create_linker_table_workflow_apscheduler.py +1 -1
  9. orchestrator/schedules/__init__.py +1 -3
  10. orchestrator/schedules/scheduling.py +1 -5
  11. orchestrator/search/agent/prompts.py +6 -10
  12. orchestrator/search/agent/tools.py +15 -55
  13. orchestrator/search/aggregations/base.py +2 -6
  14. orchestrator/search/query/builder.py +3 -75
  15. orchestrator/search/query/mixins.py +2 -57
  16. orchestrator/search/query/queries.py +1 -15
  17. orchestrator/search/query/validation.py +0 -43
  18. orchestrator/settings.py +0 -48
  19. orchestrator/workflows/modify_note.py +1 -10
  20. orchestrator/workflows/removed_workflow.py +1 -8
  21. orchestrator/workflows/tasks/cleanup_tasks_log.py +2 -9
  22. orchestrator/workflows/tasks/resume_workflows.py +0 -4
  23. orchestrator/workflows/tasks/validate_product_type.py +1 -7
  24. orchestrator/workflows/tasks/validate_products.py +1 -9
  25. orchestrator/workflows/tasks/validate_subscriptions.py +2 -9
  26. {orchestrator_core-4.7.0.dist-info → orchestrator_core-4.7.0rc1.dist-info}/METADATA +3 -3
  27. {orchestrator_core-4.7.0.dist-info → orchestrator_core-4.7.0rc1.dist-info}/RECORD +29 -30
  28. orchestrator/migrations/versions/schema/2025-12-10_9736496e3eba_set_is_task_true_on_certain_tasks.py +0 -40
  29. {orchestrator_core-4.7.0.dist-info → orchestrator_core-4.7.0rc1.dist-info}/WHEEL +0 -0
  30. {orchestrator_core-4.7.0.dist-info → orchestrator_core-4.7.0rc1.dist-info}/licenses/LICENSE +0 -0
orchestrator/__init__.py CHANGED
@@ -13,7 +13,7 @@
 
  """This is the orchestrator workflow engine."""
 
- __version__ = "4.7.0"
+ __version__ = "4.7.0rc1"
 
 
  from structlog import get_logger
orchestrator/app.py CHANGED
@@ -57,8 +57,7 @@ from orchestrator.graphql.types import ScalarOverrideType, StrawberryModelType
  from orchestrator.log_config import LOGGER_OVERRIDES
  from orchestrator.metrics import ORCHESTRATOR_METRICS_REGISTRY, initialize_default_metrics
  from orchestrator.services.process_broadcast_thread import ProcessDataBroadcastThread
- from orchestrator.settings import AppSettings, ExecutorType, app_settings, get_authorizers
- from orchestrator.utils.auth import Authorizer
+ from orchestrator.settings import AppSettings, ExecutorType, app_settings
  from orchestrator.version import GIT_COMMIT_HASH
  from orchestrator.websocket import init_websocket_manager
  from pydantic_forms.exception_handlers.fastapi import form_error_handler
@@ -312,38 +311,6 @@ class OrchestratorCore(FastAPI):
      """
      self.auth_manager.graphql_authorization = graphql_authorization_instance
 
-     def register_internal_authorize_callback(self, callback: Authorizer) -> None:
-         """Registers the authorize_callback for WFO's internal workflows and tasks.
- 
-         Since RBAC policies are applied to workflows via decorator, this enables registration of callbacks
-         for workflows defined in orchestrator-core itself.
-         However, this assignment MUST be made before any workflows are run.
- 
-         Args:
-             callback (Authorizer): The async Authorizer to run for the `authorize_callback` argument of internal workflows.
- 
-         Returns:
-             None
-         """
-         authorizers = get_authorizers()
-         authorizers.internal_authorize_callback = callback
- 
-     def register_internal_retry_auth_callback(self, callback: Authorizer) -> None:
-         """Registers the retry_auth_callback for WFO's internal workflows and tasks.
- 
-         Since RBAC policies are applied to workflows via decorator, this enables registration of callbacks
-         for workflows defined in orchestrator-core itself.
-         However, this assignment MUST be made before any workflows are run.
- 
-         Args:
-             callback (Authorizer): The async Authorizer to run for the `retry_auth_callback` argument of internal workflows.
- 
-         Returns:
-             None
-         """
-         authorizers = get_authorizers()
-         authorizers.internal_retry_auth_callback = callback
- 
 
  main_typer_app = typer.Typer()
  main_typer_app.add_typer(cli_app, name="orchestrator", help="The orchestrator CLI commands")
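The two methods removed above were the 4.7.0 hooks for wiring RBAC onto the workflows that orchestrator-core ships itself. A minimal sketch of how an application used them, assuming a dict-style group claim on the OIDC user (the claim name is hypothetical):

```python
# Runnable against 4.7.0 only; both register_* hooks are gone in 4.7.0rc1.
from oauth2_lib.fastapi import OIDCUserModel
from orchestrator import OrchestratorCore
from orchestrator.settings import AppSettings

async def only_admins(user: OIDCUserModel | None) -> bool:
    # Matches the removed Authorizer signature: async, returns bool.
    # "groups" is a hypothetical claim; substitute your IdP's group claim.
    return bool(user and "admin" in user.get("groups", []))

app = OrchestratorCore(base_settings=AppSettings())
# Had to run before any workflows were started, per the removed docstrings.
app.register_internal_authorize_callback(only_admins)
app.register_internal_retry_auth_callback(only_admins)
```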
orchestrator/cli/scheduler.py CHANGED
@@ -36,13 +36,7 @@ app: typer.Typer = typer.Typer()
 
  @app.command()
  def run() -> None:
-     """Starts the scheduler in the foreground.
- 
-     While running, this process will:
- 
-     * Periodically wake up when the next schedule is due for execution, and run it
-     * Process schedule changes made through the schedule API
-     """
+     """Start the scheduler and loop eternally to keep the thread alive."""
 
      def _get_scheduled_task_item_from_queue(redis_conn: Redis) -> tuple[str, bytes] | None:
          """Get an item from the Redis Queue for scheduler tasks."""
@@ -69,45 +63,17 @@ def run() -> None:
 
  @app.command()
  def show_schedule() -> None:
-     """The `show-schedule` command shows an overview of the scheduled jobs."""
-     from rich.console import Console
-     from rich.table import Table
- 
-     from orchestrator.schedules.service import get_linker_entries_by_schedule_ids
- 
-     console = Console()
- 
-     table = Table(title="Scheduled Tasks")
-     table.add_column("id", no_wrap=True)
-     table.add_column("name")
-     table.add_column("source")
-     table.add_column("next run time")
-     table.add_column("trigger")
- 
-     scheduled_tasks = get_all_scheduler_tasks()
-     _schedule_ids = [task.id for task in scheduled_tasks]
-     api_managed = {str(i.schedule_id) for i in get_linker_entries_by_schedule_ids(_schedule_ids)}
- 
-     for task in scheduled_tasks:
-         source = "API" if task.id in api_managed else "decorator"
-         run_time = str(task.next_run_time.replace(microsecond=0))
-         table.add_row(task.id, task.name, source, str(run_time), str(task.trigger))
+     """Show the currently configured schedule.
 
-     console.print(table)
+     In the CLI the underscore is replaced by a dash: `show-schedule`.
+     """
+     for task in get_all_scheduler_tasks():
+         typer.echo(f"[{task.id}] Next run: {task.next_run_time} | Trigger: {task.trigger}")
 
 
  @app.command()
  def force(task_id: str) -> None:
-     """Force the execution of (a) scheduler(s) based on a schedule ID.
- 
-     Use the `show-schedule` command to determine the ID of the schedule to execute.
- 
-     CLI Arguments:
-     ```sh
-     Arguments:
-       SCHEDULE_ID  ID of the schedule to execute
-     ```
-     """
+     """Force the execution of a scheduled task based on its task_id."""
      task = get_scheduler_task(task_id)
 
      if not task:
@@ -125,18 +91,7 @@ def force(task_id: str) -> None:
 
  @app.command()
  def load_initial_schedule() -> None:
-     """The `load-initial-schedule` command loads the initial schedule using the scheduler API.
- 
-     The initial schedules are:
-     - Task Resume Workflows
-     - Task Clean Up Tasks
-     - Task Validate Subscriptions
- 
-     !!! Warning
-         This command is not idempotent.
- 
-     Please run `show-schedule` first to determine if the schedules already exist.
-     """
+     """Load the initial schedule into the scheduler."""
      initial_schedules = [
          {
              "name": "Task Resume Workflows",
orchestrator/graphql/schemas/process.py CHANGED
@@ -89,8 +89,8 @@ class ProcessType:
          auth_resume, auth_retry = get_auth_callbacks(get_steps_to_evaluate_for_rbac(process), workflow)
 
          return FormUserPermissionsType(
-             retryAllowed=bool(auth_retry and await auth_retry(oidc_user)),
-             resumeAllowed=bool(auth_resume and await auth_resume(oidc_user)),
+             retryAllowed=bool(auth_retry and auth_retry(oidc_user)),
+             resumeAllowed=bool(auth_resume and auth_resume(oidc_user)),
          )
 
      @authenticated_field(description="Returns list of subscriptions of the process")  # type: ignore
orchestrator/llm_settings.py CHANGED
@@ -34,6 +34,7 @@ EMBEDDING_DIMENSION_FIELD = Annotated[
 
 
  class LLMSettings(BaseSettings):
+ 
      # Feature flags for LLM functionality
      SEARCH_ENABLED: bool = False  # Enable search/indexing with embeddings
      AGENT_ENABLED: bool = False  # Enable agentic functionality
orchestrator/migrations/versions/schema/2020-10-19_a76b9185b334_add_generic_workflows_to_core.py CHANGED
@@ -17,7 +17,6 @@ down_revision = "c112305b07d3"
  branch_labels = None
  depends_on = None
 
- # NOTE: this migration forgot to insert these workflows with is_task=true. Make sure to correct that if you copy this.
  workflows = [
      {"name": "modify_note", "description": "Modify Note", "workflow_id": uuid4(), "target": "MODIFY"},
      {"name": "task_clean_up_tasks", "description": "Clean up old tasks", "workflow_id": uuid4(), "target": "SYSTEM"},
orchestrator/migrations/versions/schema/2021-04-06_3c8b9185c221_add_validate_products_task.py CHANGED
@@ -17,7 +17,6 @@ down_revision = "3323bcb934e7"
  branch_labels = None
  depends_on = None
 
- # NOTE: this migration forgot to insert these workflows with is_task=true. Make sure to correct that if you copy this.
  workflows = [
      {"name": "task_validate_products", "description": "Validate products", "workflow_id": uuid4(), "target": "SYSTEM"},
  ]
orchestrator/migrations/versions/schema/2025-11-18_961eddbd4c13_create_linker_table_workflow_apscheduler.py CHANGED
@@ -17,7 +17,7 @@ down_revision = "850dccac3b02"
  branch_labels = None
  depends_on = None
 
- # NOTE: this migration forgot to insert these workflows with is_task=true. Make sure to correct that if you copy this.
+ 
  workflows = [
      {
          "name": "task_validate_subscriptions",
orchestrator/schedules/__init__.py CHANGED
@@ -15,9 +15,7 @@ import warnings
  from orchestrator.schedules.validate_products import validate_products
 
  warnings.warn(
-     "ALL_SCHEDULERS is deprecated and will be removed in 5.0.0. "
-     "Scheduling tasks can now be handled entirely through the API. "
-     "For more details, please consult https://workfloworchestrator.org/orchestrator-core/guides/upgrading/4.7/",
+     "ALL_SCHEDULERS is deprecated; scheduling is now handled entirely through the scheduler API.",
      DeprecationWarning,
      stacklevel=2,
  )
orchestrator/schedules/scheduling.py CHANGED
@@ -23,11 +23,7 @@ F = TypeVar("F", bound=Callable[..., object])
 
 
  @deprecated(
-     reason=(
-         "Scheduling tasks with a decorator is deprecated in favor of using the API. "
-         "This decorator will be removed in 5.0.0. "
-         "For more details, please consult https://workfloworchestrator.org/orchestrator-core/guides/upgrading/4.7/"
-     )
+     reason="We changed from scheduler to apscheduler, which has its own decorator; use `@scheduler.scheduled_job()` from `orchestrator.schedules.scheduler`."
  )
  def scheduler(
      name: str,
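A minimal sketch of the replacement named in the deprecation reason, assuming `scheduler` in orchestrator/schedules/scheduler.py (the module listed in this wheel's RECORD) is an APScheduler instance exposing the standard `scheduled_job` decorator:

```python
from orchestrator.schedules.scheduler import scheduler  # assumed APScheduler instance

# APScheduler-style decorator registration; the trigger arguments are illustrative.
@scheduler.scheduled_job("interval", hours=1, id="nightly_cleanup", name="Nightly cleanup")
def nightly_cleanup() -> None:
    ...  # runs every hour once the scheduler is started
```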
orchestrator/search/agent/prompts.py CHANGED
@@ -26,7 +26,7 @@ logger = structlog.get_logger(__name__)
 
  async def get_base_instructions() -> str:
      return dedent(
-         f"""
+         """
          You are an expert assistant designed to find relevant information by building and running database queries.
 
          ---
@@ -50,21 +50,17 @@ async def get_base_instructions() -> str:
 
          Follow these steps:
 
-         1. **Set Context**: Call `start_new_search` with appropriate entity_type and action:
-            - `action={ActionType.SELECT.value}` for finding/searching entities
-            - `action={ActionType.COUNT.value}` for counting (e.g., "how many", "count by status", "monthly growth")
-            - `action={ActionType.AGGREGATE.value}` for numeric operations (SUM, AVG, MIN, MAX of specific fields)
+         1. **Set Context**: Call `start_new_search` with appropriate entity_type and action
          2. **Set Filters** (if needed): Discover paths, build FilterTree, call `set_filter_tree`
            - IMPORTANT: Temporal constraints like "in 2025", "in January", "between X and Y" require filters on datetime fields
            - Filters restrict WHICH records to include; grouping controls HOW to aggregate them
-         3. **Set Grouping/Aggregations** (for {ActionType.COUNT.value}/{ActionType.AGGREGATE.value}):
+         3. **Set Grouping/Aggregations** (for COUNT/AGGREGATE):
            - For temporal grouping (per month, per year, per day, etc.): Use `set_temporal_grouping`
            - For regular grouping (by status, by name, etc.): Use `set_grouping`
-           - For {ActionType.AGGREGATE.value} action ONLY: Use `set_aggregations` to specify what to compute (SUM, AVG, etc.)
-           - For {ActionType.COUNT.value} action: Do NOT call `set_aggregations` (counting is automatic)
+           - For aggregations: Use `set_aggregations`
          4. **Execute**:
-           - For {ActionType.SELECT.value} action: Call `run_search()`
-           - For {ActionType.COUNT.value}/{ActionType.AGGREGATE.value} actions: Call `run_aggregation()`
+           - For SELECT action: Call `run_search()`
+           - For COUNT/AGGREGATE actions: Call `run_aggregation()`
 
          After search execution, follow the dynamic instructions based on the current state.
 
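The numbered steps above imply a fixed tool-call order. A hypothetical transcript for a question like "how many subscriptions were created per month in 2025" (tool names come from the prompt; the argument payloads are assumptions):

```python
# Printable stand-in for the agent's tool calls; not the agent runtime itself.
calls = [
    ("start_new_search", {"entity_type": "SUBSCRIPTION", "action": "count"}),
    ("set_filter_tree", {"filters": "<datetime filter restricting to 2025>"}),
    ("set_temporal_grouping", {"temporal_groups": "<created_at by month>"}),
    ("run_aggregation", {}),  # COUNT/AGGREGATE end here, not run_search()
]
for name, args in calls:
    print(f"{name}({args})")
```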
orchestrator/search/agent/tools.py CHANGED
@@ -16,7 +16,6 @@ from typing import Any, cast
 
  import structlog
  from ag_ui.core import EventType, StateSnapshotEvent
- from pydantic import ValidationError
  from pydantic_ai import RunContext
  from pydantic_ai.ag_ui import StateDeps
  from pydantic_ai.exceptions import ModelRetry
@@ -40,15 +39,13 @@ from orchestrator.search.filters import FilterTree
  from orchestrator.search.query import engine
  from orchestrator.search.query.exceptions import PathNotFoundError, QueryValidationError
  from orchestrator.search.query.export import fetch_export_data
- from orchestrator.search.query.mixins import OrderBy
  from orchestrator.search.query.queries import AggregateQuery, CountQuery, Query, SelectQuery
  from orchestrator.search.query.results import AggregationResponse, AggregationResult, ExportData, VisualizationType
  from orchestrator.search.query.state import QueryState
  from orchestrator.search.query.validation import (
      validate_aggregation_field,
+     validate_filter_path,
      validate_filter_tree,
-     validate_grouping_fields,
-     validate_order_by_fields,
      validate_temporal_grouping_field,
  )
  from orchestrator.settings import app_settings
@@ -407,30 +404,20 @@ async def prepare_export(
  async def set_grouping(
      ctx: RunContext[StateDeps[SearchState]],
      group_by_paths: list[str],
-     order_by: list[OrderBy] | None = None,
  ) -> StateSnapshotEvent:
      """Set which field paths to group results by for aggregation.
 
      Only used with COUNT or AGGREGATE actions. Paths must exist in the schema; use discover_filter_paths to verify.
-     Optionally specify ordering for the grouped results.
- 
-     For order_by: You can order by grouping field paths OR aggregation aliases (e.g., 'count').
-     Grouping field paths will be validated; aggregation aliases cannot be validated until execution.
      """
-     try:
-         validate_grouping_fields(group_by_paths)
-         validate_order_by_fields(order_by)
-     except PathNotFoundError as e:
-         raise ModelRetry(f"{str(e)} Use discover_filter_paths to find valid paths.")
+     for path in group_by_paths:
+         field_type = validate_filter_path(path)
+         if field_type is None:
+             raise ModelRetry(
+                 f"Path '{path}' not found in database schema. "
+                 f"Use discover_filter_paths(['{path.split('.')[-1]}']) to find valid paths."
+             )
 
-     update_dict: dict[str, Any] = {"group_by": group_by_paths}
-     if order_by is not None:
-         update_dict["order_by"] = order_by
- 
-     try:
-         ctx.deps.state.query = cast(Query, ctx.deps.state.query).model_copy(update=update_dict)
-     except ValidationError as e:
-         raise ModelRetry(str(e))
+     ctx.deps.state.query = cast(Query, ctx.deps.state.query).model_copy(update={"group_by": group_by_paths})
 
      return StateSnapshotEvent(
          type=EventType.STATE_SNAPSHOT,
@@ -447,26 +434,16 @@ async def set_aggregations(
      """Define what aggregations to compute over the matching records.
 
      Only used with AGGREGATE action. See Aggregation model (CountAggregation, FieldAggregation) for structure and field requirements.
- 
      """
      # Validate field paths for FieldAggregations
      try:
          for agg in aggregations:
              if isinstance(agg, FieldAggregation):
                  validate_aggregation_field(agg.type, agg.field)
-     except PathNotFoundError as e:
-         raise ModelRetry(
-             f"{str(e)} "
-             f"You MUST call discover_filter_paths first to find valid fields. "
-             f"If the field truly doesn't exist, inform the user that this data is not available."
-         )
-     except QueryValidationError as e:
-         raise ModelRetry(f"{str(e)}")
+     except ValueError as e:
+         raise ModelRetry(f"{str(e)} Use discover_filter_paths to find valid paths.")
 
-     try:
-         ctx.deps.state.query = cast(Query, ctx.deps.state.query).model_copy(update={"aggregations": aggregations})
-     except ValidationError as e:
-         raise ModelRetry(str(e))
+     ctx.deps.state.query = cast(Query, ctx.deps.state.query).model_copy(update={"aggregations": aggregations})
 
      return StateSnapshotEvent(
          type=EventType.STATE_SNAPSHOT,
@@ -479,36 +456,19 @@ async def set_aggregations(
  async def set_temporal_grouping(
      ctx: RunContext[StateDeps[SearchState]],
      temporal_groups: list[TemporalGrouping],
-     cumulative: bool = False,
-     order_by: list[OrderBy] | None = None,
  ) -> StateSnapshotEvent:
      """Set temporal grouping to group datetime fields by time periods.
 
      Only used with COUNT or AGGREGATE actions. See TemporalGrouping model for structure, periods, and examples.
-     Optionally enable cumulative aggregations (running totals) and specify ordering.
- 
-     For order_by: You can order by temporal field paths OR aggregation aliases (e.g., 'count').
-     Temporal field paths will be validated; aggregation aliases cannot be validated until execution.
      """
+     # Validate that fields exist and are datetime types
      try:
          for tg in temporal_groups:
              validate_temporal_grouping_field(tg.field)
-         validate_order_by_fields(order_by)
-     except PathNotFoundError as e:
-         raise ModelRetry(f"{str(e)} Use discover_filter_paths to find valid paths.")
-     except QueryValidationError as e:
+     except ValueError as e:
          raise ModelRetry(f"{str(e)} Use discover_filter_paths to find datetime fields.")
 
-     update_dict: dict[str, Any] = {"temporal_group_by": temporal_groups}
-     if cumulative:
-         update_dict["cumulative"] = cumulative
-     if order_by is not None:
-         update_dict["order_by"] = order_by
- 
-     try:
-         ctx.deps.state.query = cast(Query, ctx.deps.state.query).model_copy(update=update_dict)
-     except ValidationError as e:
-         raise ModelRetry(str(e))
+     ctx.deps.state.query = cast(Query, ctx.deps.state.query).model_copy(update={"temporal_group_by": temporal_groups})
 
      return StateSnapshotEvent(
          type=EventType.STATE_SNAPSHOT,
orchestrator/search/aggregations/base.py CHANGED
@@ -61,11 +61,6 @@ class TemporalGrouping(BaseModel):
          },
      )
 
-     @property
-     def alias(self) -> str:
-         """Return the SQL-friendly alias for this temporal grouping."""
-         return f"{BaseAggregation.field_to_alias(self.field)}_{self.period.value}"
- 
      def get_pivot_fields(self) -> list[str]:
          """Return fields that need to be pivoted for this temporal grouping."""
          return [self.field]
@@ -88,7 +83,8 @@ class TemporalGrouping(BaseModel):
          col = getattr(pivot_cte_columns, field_alias)
          truncated_col = func.date_trunc(self.period.value, cast(col, TIMESTAMP(timezone=True)))
 
-         col_name = self.alias
+         # Column name without prefix
+         col_name = f"{field_alias}_{self.period.value}"
          select_col = truncated_col.label(col_name)
          return select_col, truncated_col, col_name
 
orchestrator/search/query/builder.py CHANGED
@@ -25,7 +25,6 @@ from orchestrator.db.models import AiSearchIndex
  from orchestrator.search.aggregations import AggregationType, BaseAggregation, CountAggregation
  from orchestrator.search.core.types import EntityType, FieldType, FilterOp, UIType
  from orchestrator.search.filters import LtreeFilter
- from orchestrator.search.query.mixins import OrderDirection
  from orchestrator.search.query.queries import AggregateQuery, CountQuery, Query
 
 
@@ -182,8 +181,7 @@ def _build_pivot_cte(base_query: Select, pivot_fields: list[str]) -> CTE:
 
 
  def _build_grouping_columns(
-     query: CountQuery | AggregateQuery,
-     pivot_cte: CTE,
+     query: CountQuery | AggregateQuery, pivot_cte: CTE
  ) -> tuple[list[Any], list[Any], list[str]]:
      """Build GROUP BY columns and their SELECT columns.
 
@@ -246,76 +244,6 @@ def _build_aggregation_columns(query: CountQuery | AggregateQuery, pivot_cte: CT
      return [count_agg.to_expression(pivot_cte.c.entity_id)]
 
 
- def _apply_cumulative_aggregations(
-     stmt: Select,
-     query: CountQuery | AggregateQuery,
-     group_column_names: list[str],
-     aggregation_columns: list[Label],
- ) -> Select:
-     """Add cumulative aggregation columns."""
- 
-     # At this point, cumulative validation has already happened at query build time
-     # in GroupingMixin.validate_grouping_constraints, so we know:
-     # temporal_group_by exists and has exactly 1 element when cumulative=True
-     if not query.cumulative or not aggregation_columns or not query.temporal_group_by:
-         return stmt
- 
-     temporal_alias = query.temporal_group_by[0].alias
- 
-     base_subquery = stmt.subquery()
-     partition_cols = [base_subquery.c[name] for name in group_column_names if name != temporal_alias]
-     order_col = base_subquery.c[temporal_alias]
- 
-     base_columns = [base_subquery.c[col] for col in base_subquery.c.keys()]
- 
-     cumulative_columns = []
-     for agg_col in aggregation_columns:
-         cumulative_alias = f"{agg_col.key}_cumulative"
-         over_kwargs: dict[str, Any] = {"order_by": order_col}
-         if partition_cols:
-             over_kwargs["partition_by"] = partition_cols
-         cumulative_expr = func.sum(base_subquery.c[agg_col.key]).over(**over_kwargs).label(cumulative_alias)
-         cumulative_columns.append(cumulative_expr)
- 
-     return select(*(base_columns + cumulative_columns)).select_from(base_subquery)
- 
- 
- def _apply_ordering(
-     stmt: Select,
-     query: CountQuery | AggregateQuery,
-     group_column_names: list[str],
- ) -> Select:
-     """Apply ordering instructions to the SELECT statement."""
-     columns_by_key = {col.key: col for col in stmt.selected_columns}
- 
-     if query.order_by:
-         order_expressions = []
-         for instruction in query.order_by:
-             # 1) exact match
-             col = columns_by_key.get(instruction.field)
-             if col is None:
-                 # 2) temporal alias
-                 for tg in query.temporal_group_by or []:
-                     if instruction.field == tg.field or instruction.field == tg.alias:
-                         col = columns_by_key.get(tg.alias)
-                         if col is not None:
-                             break
-             if col is None:
-                 # 3) normalized field path
-                 col = columns_by_key.get(BaseAggregation.field_to_alias(instruction.field))
-             if col is None:
-                 raise ValueError(f"Cannot order by '{instruction.field}'; column not found.")
-             order_expressions.append(col.desc() if instruction.direction == OrderDirection.DESC else col.asc())
-         return stmt.order_by(*order_expressions)
- 
-     if query.temporal_group_by:
-         # Default ordering by all grouping columns (ascending)
-         order_expressions = [columns_by_key[col_name].asc() for col_name in group_column_names]
-         return stmt.order_by(*order_expressions)
- 
-     return stmt
- 
- 
  def build_simple_count_query(base_query: Select) -> Select:
      """Build a simple count query without grouping.
 
@@ -354,7 +282,7 @@ def build_aggregation_query(query: CountQuery | AggregateQuery, base_query: Sele
      if group_cols:
          stmt = stmt.group_by(*group_cols)
 
-     stmt = _apply_cumulative_aggregations(stmt, query, group_col_names, agg_cols)
-     stmt = _apply_ordering(stmt, query, group_col_names)
+     if query.temporal_group_by:
+         stmt = stmt.order_by(*group_cols)
 
      return stmt, group_col_names
orchestrator/search/query/mixins.py CHANGED
@@ -1,8 +1,6 @@
  import uuid
- from enum import Enum
- from typing import Self
 
- from pydantic import BaseModel, Field, model_validator
+ from pydantic import BaseModel, Field
 
  from orchestrator.search.aggregations import Aggregation, TemporalGrouping
 
@@ -10,28 +8,9 @@ __all__ = [
      "SearchMixin",
      "GroupingMixin",
      "AggregationMixin",
-     "OrderBy",
-     "OrderDirection",
  ]
 
 
- class OrderDirection(str, Enum):
-     """Sorting direction for aggregation results."""
- 
-     ASC = "asc"
-     DESC = "desc"
- 
- 
- class OrderBy(BaseModel):
-     """Ordering descriptor for aggregation responses."""
- 
-     field: str = Field(description="Grouping or aggregation field/alias to order by.")
-     direction: OrderDirection = Field(
-         default=OrderDirection.ASC,
-         description="Sorting direction (asc or desc).",
-     )
- 
- 
  class SearchMixin(BaseModel):
      """Mixin providing text search capability.
 
@@ -80,37 +59,6 @@ class GroupingMixin(BaseModel):
          default=None,
          description="Temporal grouping specifications (group by month, year, etc.)",
      )
-     cumulative: bool = Field(
-         default=False,
-         description="Enable cumulative aggregations when temporal grouping is present.",
-     )
-     order_by: list[OrderBy] | None = Field(
-         default=None,
-         description="Ordering instructions for grouped aggregation results.",
-     )
- 
-     @model_validator(mode="after")
-     def validate_grouping_constraints(self) -> Self:
-         """Validate cross-field constraints for grouping features."""
-         if self.order_by and not self.group_by and not self.temporal_group_by:
-             raise ValueError(
-                 "order_by requires at least one grouping field (group_by or temporal_group_by). "
-                 "Ordering only applies to grouped aggregation results."
-             )
- 
-         if self.cumulative:
-             if not self.temporal_group_by:
-                 raise ValueError(
-                     "cumulative requires at least one temporal grouping (temporal_group_by). "
-                     "Cumulative aggregations compute running totals over time."
-                 )
-             if len(self.temporal_group_by) > 1:
-                 raise ValueError(
-                     "cumulative currently supports only a single temporal grouping. "
-                     "Multiple temporal dimensions with running totals are not yet supported."
-                 )
- 
-         return self
 
      def get_pivot_fields(self) -> list[str]:
          """Get all fields needed for EAV pivot from grouping.
@@ -134,10 +82,7 @@ class AggregationMixin(BaseModel):
      Used by AGGREGATE queries to define what statistics to compute.
      """
 
-     aggregations: list[Aggregation] = Field(
-         description="Aggregations to compute (SUM, AVG, MIN, MAX, COUNT)",
-         min_length=1,
-     )
+     aggregations: list[Aggregation] = Field(description="Aggregations to compute (SUM, AVG, MIN, MAX, COUNT)")
 
      def get_aggregation_pivot_fields(self) -> list[str]:
          """Get fields needed for EAV pivot from aggregations.
orchestrator/search/query/queries.py CHANGED
@@ -13,7 +13,7 @@
 
  from typing import Annotated, Any, ClassVar, Literal, Self, Union
 
- from pydantic import BaseModel, ConfigDict, Discriminator, Field, model_validator
+ from pydantic import BaseModel, ConfigDict, Discriminator, Field
 
  from orchestrator.search.core.types import ActionType, EntityType
  from orchestrator.search.filters import FilterTree
@@ -112,20 +112,6 @@ class AggregateQuery(BaseQuery, GroupingMixin, AggregationMixin):
      query_type: Literal["aggregate"] = "aggregate"
      _action: ClassVar[ActionType] = ActionType.AGGREGATE
 
-     @model_validator(mode="after")
-     def validate_cumulative_aggregation_types(self) -> Self:
-         """Validate that cumulative is only used with COUNT and SUM aggregations."""
-         if self.cumulative:
-             from orchestrator.search.aggregations import AggregationType
- 
-             for agg in self.aggregations:
-                 if agg.type in (AggregationType.AVG, AggregationType.MIN, AggregationType.MAX):
-                     raise ValueError(
-                         f"Cumulative aggregations are not supported for {agg.type.value.upper()} aggregations. "
-                         f"Cumulative only works with COUNT and SUM."
-                     )
-         return self
- 
      def get_pivot_fields(self) -> list[str]:
          """Get all fields needed for EAV pivot including aggregation fields."""
          # Get grouping fields from GroupingMixin
orchestrator/search/query/validation.py CHANGED
@@ -31,7 +31,6 @@ from orchestrator.search.query.exceptions import (
      InvalidLtreePatternError,
      PathNotFoundError,
  )
- from orchestrator.search.query.mixins import OrderBy
 
 
  def is_filter_compatible_with_field_type(filter_condition: FilterCondition, field_type: FieldType) -> bool:
@@ -208,45 +207,3 @@ def validate_temporal_grouping_field(field_path: str) -> None:
      # Validate field type is datetime
      if field_type_str != FieldType.DATETIME.value:
          raise IncompatibleTemporalGroupingTypeError(field_path, field_type_str)
- 
- 
- def validate_grouping_fields(group_by_paths: list[str]) -> None:
-     """Validate that all grouping field paths exist in the database.
- 
-     Args:
-         group_by_paths: List of field paths to group by
- 
-     Raises:
-         PathNotFoundError: If any path doesn't exist in the database
-     """
-     for path in group_by_paths:
-         field_type = validate_filter_path(path)
-         if field_type is None:
-             raise PathNotFoundError(path)
- 
- 
- def validate_order_by_fields(order_by: list[OrderBy] | None) -> None:
-     """Validate that order_by field paths exist in the database.
- 
-     Args:
-         order_by: List of ordering instructions, or None
- 
-     Raises:
-         PathNotFoundError: If a field path doesn't exist in the database
- 
-     Note:
-         Only validates fields that appear to be paths (contain dots).
-         Aggregation aliases (no dots, like 'count') are skipped as they
-         cannot be validated until query execution time.
-     """
-     if order_by is None:
-         return
- 
-     for order_instr in order_by:
-         # Skip aggregation aliases (no dots, e.g., 'count', 'revenue')
-         if "." not in order_instr.field:
-             continue
- 
-         field_type = validate_filter_path(order_instr.field)
-         if field_type is None:
-             raise PathNotFoundError(order_instr.field)
orchestrator/settings.py CHANGED
@@ -17,13 +17,10 @@ from pathlib import Path
  from typing import Literal
 
  from pydantic import Field, NonNegativeInt, PostgresDsn, RedisDsn
- from pydantic.main import BaseModel
  from pydantic_settings import BaseSettings
 
- from oauth2_lib.fastapi import OIDCUserModel
  from oauth2_lib.settings import oauth2lib_settings
  from orchestrator.services.settings_env_variables import expose_settings
- from orchestrator.utils.auth import Authorizer
  from orchestrator.utils.expose_settings import SecretStr as OrchSecretStr
  from pydantic_forms.types import strEnum
 
@@ -114,48 +111,3 @@ if app_settings.EXPOSE_SETTINGS:
      expose_settings("app_settings", app_settings)  # type: ignore
  if app_settings.EXPOSE_OAUTH_SETTINGS:
      expose_settings("oauth2lib_settings", oauth2lib_settings)  # type: ignore
- 
- 
- class Authorizers(BaseModel):
-     # Callbacks specifically for orchestrator-core callbacks.
-     # Separate from defaults for user-defined workflows and steps.
-     internal_authorize_callback: Authorizer | None = None
-     internal_retry_auth_callback: Authorizer | None = None
- 
-     async def authorize_callback(self, user: OIDCUserModel | None) -> bool:
-         """This is the authorize_callback to be registered for workflows defined within orchestrator-core.
- 
-         If Authorizers.internal_authorize_callback is None, this function will return True.
-         i.e. any user will be authorized to start internal workflows.
-         """
-         if self.internal_authorize_callback is None:
-             return True
-         return await self.internal_authorize_callback(user)
- 
-     async def retry_auth_callback(self, user: OIDCUserModel | None) -> bool:
-         """This is the retry_auth_callback to be registered for workflows defined within orchestrator-core.
- 
-         If Authorizers.internal_retry_auth_callback is None, this function will return True.
-         i.e. any user will be authorized to retry internal workflows on failure.
-         """
-         if self.internal_retry_auth_callback is None:
-             return True
-         return await self.internal_retry_auth_callback(user)
- 
- 
- _authorizers = Authorizers()
- 
- 
- def get_authorizers() -> Authorizers:
-     """Acquire singleton of app authorizers to assign these callbacks at app setup.
- 
-     Ensures downstream users can acquire singleton without being tempted to do
-         from orchestrator.settings import authorizers
-         authorizers = my_authorizers
-     or
-         from orchestrator import settings
-         settings.authorizers = my_authorizers
- 
-     ...each of which goes wrong in its own way.
-     """
-     return _authorizers
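The removed Authorizers defaulted to allowing everyone, per its docstrings; a sketch runnable against 4.7.0, where the singleton still exists:

```python
import asyncio

from orchestrator.settings import get_authorizers  # removed in 4.7.0rc1

authorizers = get_authorizers()
assert authorizers.internal_authorize_callback is None  # nothing registered
# With no internal callback set, both checks return True for any user.
assert asyncio.run(authorizers.authorize_callback(None))
assert asyncio.run(authorizers.retry_auth_callback(None))
```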
orchestrator/workflows/modify_note.py CHANGED
@@ -13,7 +13,6 @@
  from orchestrator.db import db
  from orchestrator.forms import SubmitFormPage
  from orchestrator.services import subscriptions
- from orchestrator.settings import get_authorizers
  from orchestrator.targets import Target
  from orchestrator.utils.json import to_serializable
  from orchestrator.workflow import StepList, done, init, step, workflow
@@ -22,8 +21,6 @@ from orchestrator.workflows.utils import wrap_modify_initial_input_form
  from pydantic_forms.types import FormGenerator, State, UUIDstr
  from pydantic_forms.validators import LongText
 
- authorizers = get_authorizers()
- 
 
  def initial_input_form(subscription_id: UUIDstr) -> FormGenerator:
      subscription = subscriptions.get_subscription(subscription_id)
@@ -54,12 +51,6 @@ def store_subscription_note(subscription_id: UUIDstr, note: str) -> State:
      }
 
 
- @workflow(
-     "Modify Note",
-     initial_input_form=wrap_modify_initial_input_form(initial_input_form),
-     target=Target.MODIFY,
-     authorize_callback=authorizers.authorize_callback,
-     retry_auth_callback=authorizers.retry_auth_callback,
- )
+ @workflow("Modify Note", initial_input_form=wrap_modify_initial_input_form(initial_input_form), target=Target.MODIFY)
  def modify_note() -> StepList:
      return init >> store_process_subscription() >> store_subscription_note >> done
orchestrator/workflows/removed_workflow.py CHANGED
@@ -12,18 +12,11 @@
  # limitations under the License.
 
 
- from orchestrator.settings import get_authorizers
  from orchestrator.workflow import StepList, workflow
 
- authorizers = get_authorizers()
- 
 
  # This workflow has been made to create the initial import process for a SN7 subscription
  # it does not do anything but is needed for the correct showing in the GUI.
- @workflow(
-     "Dummy workflow to replace removed workflows",
-     authorize_callback=authorizers.authorize_callback,
-     retry_auth_callback=authorizers.retry_auth_callback,
- )
+ @workflow("Dummy workflow to replace removed workflows")
  def removed_workflow() -> StepList:
      return StepList()
orchestrator/workflows/tasks/cleanup_tasks_log.py CHANGED
@@ -17,14 +17,12 @@ from datetime import timedelta
  from sqlalchemy import select
 
  from orchestrator.db import ProcessTable, db
- from orchestrator.settings import app_settings, get_authorizers
+ from orchestrator.settings import app_settings
  from orchestrator.targets import Target
  from orchestrator.utils.datetime import nowtz
  from orchestrator.workflow import ProcessStatus, StepList, done, init, step, workflow
  from pydantic_forms.types import State
 
- authorizers = get_authorizers()
- 
 
  @step("Clean up completed tasks older than TASK_LOG_RETENTION_DAYS")
  def remove_tasks() -> State:
@@ -43,11 +41,6 @@ def remove_tasks() -> State:
      return {"tasks_removed": count}
 
 
- @workflow(
-     "Clean up old tasks",
-     target=Target.SYSTEM,
-     authorize_callback=authorizers.authorize_callback,
-     retry_auth_callback=authorizers.retry_auth_callback,
- )
+ @workflow("Clean up old tasks", target=Target.SYSTEM)
  def task_clean_up_tasks() -> StepList:
      return init >> remove_tasks >> done
orchestrator/workflows/tasks/resume_workflows.py CHANGED
@@ -17,12 +17,10 @@ from sqlalchemy import select
 
  from orchestrator.db import ProcessTable, db
  from orchestrator.services import processes
- from orchestrator.settings import get_authorizers
  from orchestrator.targets import Target
  from orchestrator.workflow import ProcessStatus, StepList, done, init, step, workflow
  from pydantic_forms.types import State, UUIDstr
 
- authorizers = get_authorizers()
  logger = structlog.get_logger(__name__)
 
 
@@ -112,8 +110,6 @@ def restart_created_workflows(created_state_process_ids: list[UUIDstr]) -> State
  @workflow(
      "Resume all workflows that are stuck on tasks with the status 'waiting', 'created' or 'resumed'",
      target=Target.SYSTEM,
-     authorize_callback=authorizers.authorize_callback,
-     retry_auth_callback=authorizers.retry_auth_callback,
  )
  def task_resume_workflows() -> StepList:
      return init >> find_waiting_workflows >> resume_found_workflows >> restart_created_workflows >> done
orchestrator/workflows/tasks/validate_product_type.py CHANGED
@@ -25,12 +25,10 @@ from orchestrator.services.workflows import (
      get_validation_product_workflows_for_subscription,
      start_validation_workflow_for_workflows,
  )
- from orchestrator.settings import get_authorizers
  from orchestrator.targets import Target
  from orchestrator.workflow import StepList, done, init, step, workflow
  from pydantic_forms.types import FormGenerator, State
 
- authorizers = get_authorizers()
  logger = structlog.get_logger(__name__)
 
 
@@ -88,11 +86,7 @@ def validate_product_type(product_type: str) -> State:
 
 
  @workflow(
-     "Validate all subscriptions of Product Type",
-     target=Target.SYSTEM,
-     initial_input_form=initial_input_form_generator,
-     authorize_callback=authorizers.authorize_callback,
-     retry_auth_callback=authorizers.retry_auth_callback,
+     "Validate all subscriptions of Product Type", target=Target.SYSTEM, initial_input_form=initial_input_form_generator
  )
  def task_validate_product_type() -> StepList:
      return init >> validate_product_type >> done
orchestrator/workflows/tasks/validate_products.py CHANGED
@@ -26,15 +26,12 @@ from orchestrator.services import products
  from orchestrator.services.products import get_products
  from orchestrator.services.translations import generate_translations
  from orchestrator.services.workflows import get_workflow_by_name, get_workflows
- from orchestrator.settings import get_authorizers
  from orchestrator.targets import Target
  from orchestrator.utils.errors import ProcessFailureError
  from orchestrator.utils.fixed_inputs import fixed_input_configuration as fi_configuration
  from orchestrator.workflow import StepList, done, init, step, workflow
  from pydantic_forms.types import State
 
- authorizers = get_authorizers()
- 
  # Since these errors are probably programming failures we should not throw AssertionErrors
 
 
@@ -190,12 +187,7 @@ def check_subscription_models() -> State:
      return {"check_subscription_models": True}
 
 
- @workflow(
-     "Validate products",
-     target=Target.SYSTEM,
-     authorize_callback=authorizers.authorize_callback,
-     retry_auth_callback=authorizers.retry_auth_callback,
- )
+ @workflow("Validate products", target=Target.SYSTEM)
  def task_validate_products() -> StepList:
      return (
          init
orchestrator/workflows/tasks/validate_subscriptions.py CHANGED
@@ -24,7 +24,7 @@ from orchestrator.services.workflows import (
      get_validation_product_workflows_for_subscription,
      start_validation_workflow_for_workflows,
  )
- from orchestrator.settings import app_settings, get_authorizers
+ from orchestrator.settings import app_settings
  from orchestrator.targets import Target
  from orchestrator.workflow import StepList, init, step, workflow
 
@@ -33,8 +33,6 @@ logger = structlog.get_logger(__name__)
 
  task_semaphore = BoundedSemaphore(value=2)
 
- authorizers = get_authorizers()
- 
 
  @step("Validate subscriptions")
  def validate_subscriptions() -> None:
@@ -58,11 +56,6 @@ def validate_subscriptions() -> None:
          start_validation_workflow_for_workflows(subscription=subscription, workflows=validation_product_workflows)
 
 
- @workflow(
-     "Validate subscriptions",
-     target=Target.SYSTEM,
-     authorize_callback=authorizers.authorize_callback,
-     retry_auth_callback=authorizers.retry_auth_callback,
- )
+ @workflow("Validate subscriptions", target=Target.SYSTEM)
  def task_validate_subscriptions() -> StepList:
      return init >> validate_subscriptions
{orchestrator_core-4.7.0.dist-info → orchestrator_core-4.7.0rc1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: orchestrator-core
- Version: 4.7.0
+ Version: 4.7.0rc1
  Summary: This is the orchestrator workflow engine.
  Author-email: SURF <automation-beheer@surf.nl>
  Requires-Python: >=3.11,<3.15
@@ -57,8 +57,8 @@ Requires-Dist: pytz==2025.2
  Requires-Dist: redis==7.1.0
  Requires-Dist: semver==3.0.4
  Requires-Dist: sentry-sdk[fastapi]>=2.29.1
- Requires-Dist: sqlalchemy==2.0.45
- Requires-Dist: sqlalchemy-utils==0.42.1
+ Requires-Dist: sqlalchemy==2.0.44
+ Requires-Dist: sqlalchemy-utils==0.42.0
  Requires-Dist: strawberry-graphql>=0.281.0,<0.285.0
  Requires-Dist: structlog>=25.4.0
  Requires-Dist: tabulate==0.9.0
{orchestrator_core-4.7.0.dist-info → orchestrator_core-4.7.0rc1.dist-info}/RECORD CHANGED
@@ -1,12 +1,12 @@
- orchestrator/__init__.py,sha256=ucWvTHgGGtzNzR8Vx18GsxXa-Z3pkHTpGl8ZSVeCwHg,1454
+ orchestrator/__init__.py,sha256=HWuhzwoxPlAnuAGVtn9GBpZS5X185yqB_j-WnUzXnVE,1457
  orchestrator/agentic_app.py,sha256=ouiyyZiS4uS6Lox2DtbGGRnb2njJBMSHpSAGe-T5rX0,3028
- orchestrator/app.py,sha256=5ITGSN_KeRi2qTvfwBXhjOGNyWNy-rdtzfOLEk76ZtY,14661
+ orchestrator/app.py,sha256=w8ubXaaogwjmwLM0TXqZaLkAhmaOTWzVlwiYbi5mHeE,13203
  orchestrator/exception_handlers.py,sha256=UsW3dw8q0QQlNLcV359bIotah8DYjMsj2Ts1LfX4ClY,1268
- orchestrator/llm_settings.py,sha256=Rk5yBTmMAhyTcBVucQwzV2qBGTs-zZss76687cHy8tA,2815
+ orchestrator/llm_settings.py,sha256=giqxZmwO8sXyNF5Zc7-O9JYd7PLN065pKQYpJYOpXcE,2816
  orchestrator/log_config.py,sha256=aCbIzKzDnVwF0zVoQ9TlvuHxn0uhJWRjNL72Hc5pcss,2031
  orchestrator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  orchestrator/security.py,sha256=iXFxGxab54aav7oHEKLAVkTgrQMJGHy6IYLojEnD7gI,2422
- orchestrator/settings.py,sha256=Xn9SC8omh3rHhp-1bIyYHhwbu-rpvbqauBFPTLGhYic,6562
+ orchestrator/settings.py,sha256=mvs1VhBYth6Zp55HsNroML4DU1jiq5SkVM47_BLgcIo,4662
  orchestrator/targets.py,sha256=d7Fyh_mWIWPivA_E7DTNFpZID3xFW_K0JlZ5nksVX7k,830
  orchestrator/types.py,sha256=qzs7xx5AYRmKbpYRyJJP3wuDb0W0bcAzefCN0RWLAco,15459
  orchestrator/version.py,sha256=b58e08lxs47wUNXv0jXFO_ykpksmytuzEXD4La4W-NQ,1366
@@ -41,7 +41,7 @@ orchestrator/cli/migrate_domain_models.py,sha256=WRXy_1OnziQwpsCFZXvjB30nDJtjj0i
  orchestrator/cli/migrate_tasks.py,sha256=bju8XColjSZD0v3rS4kl-24dLr8En_H4-6enBmqd494,7255
  orchestrator/cli/migrate_workflows.py,sha256=nxUpx0vgEIc_8aJrjAyrw3E9Dt8JmaamTts8oiQ4vHY,8923
  orchestrator/cli/migration_helpers.py,sha256=C5tpkP5WEBr7G9S-1k1hgSI8ili6xd9Z5ygc9notaK0,4110
- orchestrator/cli/scheduler.py,sha256=kCHKAXnVWDgmzgIaIBYHop0vE2Isi22KFH3W7d8Yyi4,5805
+ orchestrator/cli/scheduler.py,sha256=U-YLa_SkwGVAf7jQ8EQugtwTuycw3DtXhmCxlMpiQqQ,4396
  orchestrator/cli/domain_gen_helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  orchestrator/cli/domain_gen_helpers/fixed_input_helpers.py,sha256=uzpwsaau81hHSxNMOS9-o7kF-9_78R0f_UE0AvWooZQ,6775
  orchestrator/cli/domain_gen_helpers/helpers.py,sha256=tIPxn8ezED_xYZxH7ZAtQLwkDc6RNmLZVxWAoJ3a9lw,4203
@@ -196,7 +196,7 @@ orchestrator/graphql/schemas/customer_description.py,sha256=fize71IMpkvk_rTzcqCY
  orchestrator/graphql/schemas/errors.py,sha256=VRl-Zd1FHMnscyozhfxzqeEUZ0ERAWum_Y8YwjGxwmA,203
  orchestrator/graphql/schemas/fixed_input.py,sha256=1yqYHADQRgHz8OIP7ObYsPFS-gmzfkCvEO0a-KKf7zI,513
  orchestrator/graphql/schemas/helpers.py,sha256=Kpj4kIbmoKKN35bdgUSwQvGUIbeg7VJAVMEq65YS_ik,346
- orchestrator/graphql/schemas/process.py,sha256=2TJq5MTrHW3NtlImuTiIJZm8GTTzS8msWgB_k_-rPhc,4924
+ orchestrator/graphql/schemas/process.py,sha256=APARniusSdqHkXann2oHp72PBE-nslQcLTQQaFFLiG4,4912
  orchestrator/graphql/schemas/product.py,sha256=vUCqcjrKBJj-VKSrMYPKzjmmxLMXL7alKTJ8UdUkhTg,4342
  orchestrator/graphql/schemas/product_block.py,sha256=Qk9cbA6vm7ZPrhdgPHatKRuy6TytBmxSr97McEOxAu8,2860
  orchestrator/graphql/schemas/resource_type.py,sha256=s5d_FwQXL2-Sc-IDUxTJun5qFQ4zOP4-XcHF9ql-t1g,898
@@ -230,9 +230,9 @@ orchestrator/migrations/templates/alembic.ini.j2,sha256=8v7UbKvOiWEbEKQa-Au3uONK
  orchestrator/migrations/templates/env.py.j2,sha256=LIt0ildZTZvNEx3imhy4GNzfFi_rPZg-8H7rGgrBOP8,2717
  orchestrator/migrations/templates/helpers.py.j2,sha256=3MWNMICGrcQFObyBQefL-FPjoVKUgP0QIlbk4TdMZds,98
  orchestrator/migrations/versions/schema/2020-10-19_3323bcb934e7_fix_tsv_triggers.py,sha256=ufe6OFUELNpx6N2663bvdwgB4lP-v71fuMuJtx9CmJc,2698
- orchestrator/migrations/versions/schema/2020-10-19_a76b9185b334_add_generic_workflows_to_core.py,sha256=3MP7cY_znFbC-dTdf8us7Dn8Fn7g1D_ROCrxrlRzfx0,1384
+ orchestrator/migrations/versions/schema/2020-10-19_a76b9185b334_add_generic_workflows_to_core.py,sha256=EHj87IXucruyB8KuxEWcc7JK1NIizZ5Jzmj-bzY0t1Y,1265
  orchestrator/migrations/versions/schema/2020-10-19_c112305b07d3_initial_schema_migration.py,sha256=-_dEwEXbl1E2HQpdcigMsSsq6H98eRcmaE8g5NR36iE,39291
- orchestrator/migrations/versions/schema/2021-04-06_3c8b9185c221_add_validate_products_task.py,sha256=BVPkhxz8E_eEM4T1toujSdVWhmR_klZEqN3DUpQDaus,1069
+ orchestrator/migrations/versions/schema/2021-04-06_3c8b9185c221_add_validate_products_task.py,sha256=rLuEl8WuQHcwmOBZdADbgCBqzJ5EZ4KWXrTVwXrRyx4,950
  orchestrator/migrations/versions/schema/2021-07-01_6896a54e9483_add_product_block_relations.py,sha256=xw01x5YTNDDxZiMUCeBPuzp0LKsKeMMR4YWF2aWI9ZI,1214
  orchestrator/migrations/versions/schema/2021-11-17_19cdd3ab86f6_fix_parse_websearch.py,sha256=FUWxAPpi32SgowU_WdZiC903BbLUA5zktBICOi4ecpQ,1603
  orchestrator/migrations/versions/schema/2022-02-16_bed6bc0b197a_rename_parent_and_child_block_relations.py,sha256=2hiV8aFwlcgRQ7EFVvGhV13j2j-p7cMLadyUfXezIF8,5106
@@ -258,11 +258,10 @@ orchestrator/migrations/versions/schema/2025-05-08_161918133bec_add_is_task_to_w
  orchestrator/migrations/versions/schema/2025-07-01_93fc5834c7e5_changed_timestamping_fields_in_process_steps.py,sha256=Oezd8b2qaI1Kyq-sZFVFmdzd4d9NjXrf6HtJGk11fy0,1914
  orchestrator/migrations/versions/schema/2025-07-04_4b58e336d1bf_deprecating_workflow_target_in_.py,sha256=xnD6w-97R4ClS7rbmXQEXc36K3fdcXKhCy7ZZNy_FX4,742
  orchestrator/migrations/versions/schema/2025-07-28_850dccac3b02_update_description_of_resume_workflows_.py,sha256=R6Qoga83DJ1IL0WYPu0u5u2ZvAmqGlDmUMv_KtJyOhQ,812
- orchestrator/migrations/versions/schema/2025-11-18_961eddbd4c13_create_linker_table_workflow_apscheduler.py,sha256=Vy2qA8wb_lQWExhF0PX_IFwCr_vafe9uaT1pXvCwbGI,3227
- orchestrator/migrations/versions/schema/2025-12-10_9736496e3eba_set_is_task_true_on_certain_tasks.py,sha256=2DOERJ7QF83o-goxJPtz0FUC3xZAt5ms8miadFGVFcw,1007
- orchestrator/schedules/__init__.py,sha256=i8sT88A3v_5KIfwbKZxe3rS2rMakOuqfAis0DRmBleU,1017
+ orchestrator/migrations/versions/schema/2025-11-18_961eddbd4c13_create_linker_table_workflow_apscheduler.py,sha256=wJ01G2fpph9gYq0haeYRoSD21cTLOdn41axCSMyWV8o,3109
+ orchestrator/schedules/__init__.py,sha256=WNjy4D4QdOKJh6ZEHJ119tXL1xJ3L7kMPuJWYnayjtE,868
  orchestrator/schedules/scheduler.py,sha256=8o7DoVs9Q1Q231FVMpv3tXtKbaydeNkYQ1h6kl7U1X4,7198
- orchestrator/schedules/scheduling.py,sha256=1lSeAhKRGhZNOtFiB-FPMeo3bEIDpt9OdJKBkk7QknI,2914
+ orchestrator/schedules/scheduling.py,sha256=_mbpHMhijey8Y56ebtJ4wVkrp_kPVRm8hoByzlQF4SE,2821
  orchestrator/schedules/service.py,sha256=CYWb_gB5Dw57AIiQtSVcLW4sEE69zNoWGuSe2WEIj_8,8940
  orchestrator/schedules/validate_products.py,sha256=_ucUG9HecskG2eN3tcDSiMzJK9gN3kZB1dXjrtxcApY,1324
  orchestrator/schemas/__init__.py,sha256=YDyZ0YBvzB4ML9oDBCBPGnBvf680zFFgUzg7X0tYBRY,2326
@@ -286,12 +285,12 @@ orchestrator/search/agent/__init__.py,sha256=_O4DN0MSTUtr4olhyE0-2hsb7x3f_KURMCY
  orchestrator/search/agent/agent.py,sha256=iWa4_achqh5zRIfcJvjmY3hmDuFVGpV_PxjgttLdokU,2075
  orchestrator/search/agent/handlers.py,sha256=z1KqeUUn7cYNBW39femwYnZOqRiYm1885Cx4TWVNyCY,4476
  orchestrator/search/agent/json_patch.py,sha256=_Z5ULhLyeuOuy-Gr_DJR4eA-wo9F78qySKUt5F_SQvQ,1892
- orchestrator/search/agent/prompts.py,sha256=3Bo1Pc2iasGvifwpmvmd3S-kE07fTO4uZ6wp7VmoLM8,7562
+ orchestrator/search/agent/prompts.py,sha256=aNxdJz7M1RPg_Py5v3Wvc2Kzo_tshkfHqNlF6S7YQv4,6975
  orchestrator/search/agent/state.py,sha256=YBrpHb8ok2RPwoLwBUWHYaGms_uj6nz51le2vbDjJwA,1113
- orchestrator/search/agent/tools.py,sha256=WwJVvl_P4BM5Bf0niS1H0lvlojdSoSb1AbIWBbtBZRY,18727
+ orchestrator/search/agent/tools.py,sha256=hMovAKJ03EKsmGbdS363kbx1-BX4YJB6MsQUzZkEA38,17144
  orchestrator/search/agent/validation.py,sha256=-U7g9Tgw6wH_tli0X0wrVKmiP9Id08sISq3kRrJjFNQ,2926
  orchestrator/search/aggregations/__init__.py,sha256=Re5XDxvbRCT5DUFTQbHeDCyync1RL7yHh-D3Bm28CXg,921
- orchestrator/search/aggregations/base.py,sha256=jC06rzecurlzIxh7RCctFgsaPxU1sGCamgM7u6pzhJw,7051
+ orchestrator/search/aggregations/base.py,sha256=8zIdZx9cLxgFg6PePHzUNdMhHejPpb-hKL6lirDJlyQ,6916
  orchestrator/search/core/__init__.py,sha256=q5G0z3nKjIHKFs1PkEG3nvTUy3Wp4kCyBtCbqUITj3A,579
  orchestrator/search/core/embedding.py,sha256=n16H5fZRlfn91wI8PfZPa1R39HwQd8T1nwlDOzcOUBU,2823
  orchestrator/search/core/exceptions.py,sha256=S_ZMEhrqsQBVqJ559FQ5J6tZU6BYLiU65AGWgSvgv_k,1159
@@ -311,15 +310,15 @@ orchestrator/search/indexing/registry.py,sha256=V6Q4aRXHON1gSE6wsavEIfwHwCPicSzF
  orchestrator/search/indexing/tasks.py,sha256=0p68RNwJnHSGZQjfdpyFsS2Ma5Gr2PpZROZgal_R1wI,3064
  orchestrator/search/indexing/traverse.py,sha256=JLut9t4LoPCWhJ_63VgYhRKfjwyxRv-mTbQLC8mA_mU,15158
  orchestrator/search/query/__init__.py,sha256=nCjvK_n2WQdV_ACrncFXEfnvLcHtuI__J7KLlFIaQvo,2437
- orchestrator/search/query/builder.py,sha256=EfDSSOQKUBNtUESDBsKaPY6hZ_iDXAwc3qcNR4AGAEg,13261
+ orchestrator/search/query/builder.py,sha256=ob92J0jQ_bn86Dhl_3k0_cOL7C5PmcXdmpOKeqw4Pz4,10228
  orchestrator/search/query/engine.py,sha256=TFdV_sSoSXCSDSpyhVA2S6YaJysDSW2WtPj7duAyomk,5745
  orchestrator/search/query/exceptions.py,sha256=DrkNzXVbQAOi28FTHKimf_eTrXmhYwXrH986QhfQLPU,4941
  orchestrator/search/query/export.py,sha256=_0ncVpTqN6AoQfW3WX0fWnDQX3hBz6ZGC31Beu4PVwQ,6678
- orchestrator/search/query/mixins.py,sha256=IUtN8QEowTNntk_MiiaaRxJS_QNxOXYmfiiOwzWuYZA,4939
- orchestrator/search/query/queries.py,sha256=0jF97cU2Z98-oWm1Iyqf3xIgrmc7FcWAPTb51tUG4MA,4506
+ orchestrator/search/query/mixins.py,sha256=BdVDzCOFDXT6N9LI_WrbVzGrk61UNplX-UZPvD0rEV0,3019
+ orchestrator/search/query/queries.py,sha256=j1uKSQgF_ifVaDJaxjs4h2z48KqGVEIKCXOoJ7Ur9Mk,3805
  orchestrator/search/query/results.py,sha256=5OgAs39oncDIBdpB3NJltPr-UvLvLlxTWw9sn-lyfQA,10989
  orchestrator/search/query/state.py,sha256=fMSBJs39kZTkpDE2T4h4x0x-51GqUvzAuePg2YUbO6I,3220
- orchestrator/search/query/validation.py,sha256=Pprv40yvpynL1-MCFE1YuouguYW6lfh1PZKsVei2i6w,9622
+ orchestrator/search/query/validation.py,sha256=m0xJ71A0Qa5hm8b71zKRjSVpPrnkG7LbqPu4lv_GboI,8260
  orchestrator/search/retrieval/__init__.py,sha256=q5G0z3nKjIHKFs1PkEG3nvTUy3Wp4kCyBtCbqUITj3A,579
  orchestrator/search/retrieval/pagination.py,sha256=kcUzq1QQk4GrZq02M4hsKwAelUo1qDeCqsXImLUK6DA,3006
  orchestrator/search/retrieval/retrievers/__init__.py,sha256=dJlN6a0oHSquzjE5POYxrMGOXMx4Bx2khbJI-rA_qwg,971
@@ -372,18 +371,18 @@ orchestrator/websocket/websocket_manager.py,sha256=hwlG9FDXcNU42jDNNsPMQLIyrvEpG
  orchestrator/websocket/managers/broadcast_websocket_manager.py,sha256=fwoSgTjkHJ2GmsLTU9dqQpAA9i8b1McPu7gLNzxtfG4,5401
  orchestrator/websocket/managers/memory_websocket_manager.py,sha256=lF5EEx1iFMCGEkTbItTDr88NENMSaSeG1QrJ7teoPkY,3324
  orchestrator/workflows/__init__.py,sha256=FbwcAYJh8oSi0QFjXXXomdl9c8whCa_qSt_vPXcwasE,4216
- orchestrator/workflows/modify_note.py,sha256=WFK3rA3Cmpk2_kOUP3xDfe9OI5LuQGv09tRAoTVKaR4,2360
- orchestrator/workflows/removed_workflow.py,sha256=fwi1-aC1KQvb08hq8St-_lWOLM_tjTcQMLJ_Fjdn2M8,1111
+ orchestrator/workflows/modify_note.py,sha256=eXt5KQvrkOXf-3YEXCn2XbBLP9N-n1pUYRW2t8Odupo,2150
+ orchestrator/workflows/removed_workflow.py,sha256=V0Da5TEdfLdZZKD38ig-MTp3_IuE7VGqzHHzvPYQmLI,909
  orchestrator/workflows/steps.py,sha256=VVLRK9_7KzrBlnK7L8eSmRMNVOO7VJBh5OSjHQHM9fU,7019
  orchestrator/workflows/utils.py,sha256=VUCDoIl5XAKtIeAJpVpyW2pCIg3PoVWfwGn28BYlYhA,15424
  orchestrator/workflows/tasks/__init__.py,sha256=GyHNfEFCGKQwRiN6rQmvSRH2iYX7npjMZn97n8XzmLU,571
- orchestrator/workflows/tasks/cleanup_tasks_log.py,sha256=FOcYfyH-dsNAilmW8ynEpz5zaKzlHNYJfDtIUqfrztQ,1797
- orchestrator/workflows/tasks/resume_workflows.py,sha256=OvYFY-Nun885ZAzcFp6hphvYtRy3VkdUgiGIU5TapZ0,4507
- orchestrator/workflows/tasks/validate_product_type.py,sha256=KoDMqROGVQ0ZPu69jMFO7cto8j0xi11h0_IeBOcV94A,3413
- orchestrator/workflows/tasks/validate_products.py,sha256=lCAXmCVhohgrdgJn7-d7fIxPj4MVOX0J8KezcvwIK3k,8716
- orchestrator/workflows/tasks/validate_subscriptions.py,sha256=lMAwaEVhZ1FD91Sw2eIjTc4hmGxQX3DgbJPYdJ1_dmw,2373
+ orchestrator/workflows/tasks/cleanup_tasks_log.py,sha256=BfWYbPXhnLAHUJ0mlODDnjZnQQAvKCZJDVTwbwOWI04,1624
+ orchestrator/workflows/tasks/resume_workflows.py,sha256=T3iobSJjVgiupe0rClD34kUZ7KF4pL5yK2AVeRLZog8,4313
+ orchestrator/workflows/tasks/validate_product_type.py,sha256=lo2TX_MZOfcOmYFjLyD82FrJ5AAN3HOsE6BhDVFuy9Q,3210
+ orchestrator/workflows/tasks/validate_products.py,sha256=GZJBoFF-WMphS7ghMs2-gqvV2iL1F0POhk0uSNt93n0,8510
+ orchestrator/workflows/tasks/validate_subscriptions.py,sha256=OZtqO6aJR4KwEFvBioV2gWAjmyLiWxe9Wlps3YmFh9w,2200
  orchestrator/workflows/translations/en-GB.json,sha256=ObBlH9XILJ9uNaGcJexi3IB0e6P8CKFKRgu29luIEM8,973
- orchestrator_core-4.7.0.dist-info/licenses/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
- orchestrator_core-4.7.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- orchestrator_core-4.7.0.dist-info/METADATA,sha256=eUDFx3trsADoJizIQc38c4DTqLMMWbLnE8z6AwNyaK8,6418
- orchestrator_core-4.7.0.dist-info/RECORD,,
+ orchestrator_core-4.7.0rc1.dist-info/licenses/LICENSE,sha256=b-aA5OZQuuBATmLKo_mln8CQrDPPhg3ghLzjPjLn4Tg,11409
+ orchestrator_core-4.7.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ orchestrator_core-4.7.0rc1.dist-info/METADATA,sha256=usMBg-ktGcbAy1WVADeof9gd_oBI4OQg2fPJfOhv60A,6421
+ orchestrator_core-4.7.0rc1.dist-info/RECORD,,
orchestrator/migrations/versions/schema/2025-12-10_9736496e3eba_set_is_task_true_on_certain_tasks.py DELETED
@@ -1,40 +0,0 @@
- """Set is_task=true on certain tasks.
- 
- This is required to make them appear in the completed tasks in the UI, and for the cleanup task to be able to
- remove them.
- 
- Revision ID: 9736496e3eba
- Revises: 961eddbd4c13
- Create Date: 2025-12-10 16:42:29.060382
- 
- """
- 
- import sqlalchemy as sa
- from alembic import op
- 
- # revision identifiers, used by Alembic.
- revision = "9736496e3eba"
- down_revision = "961eddbd4c13"
- branch_labels = None
- depends_on = None
- 
- task_names = [
-     # Added in a76b9185b334
-     "task_clean_up_tasks",
-     "task_resume_workflows",
-     # Added in 3c8b9185c221
-     "task_validate_products",
-     # Added in 961eddbd4c13
-     "task_validate_subscriptions",
- ]
- 
- 
- def upgrade() -> None:
-     conn = op.get_bind()
-     query = sa.text("UPDATE workflows SET is_task=true WHERE name = :task_name and is_task=false")
-     for task_name in task_names:
-         conn.execute(query, parameters={"task_name": task_name})
- 
- 
- def downgrade() -> None:
-     pass  # Does not make sense to downgrade back to a 'bad' state.