horsies 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. horsies/__init__.py +115 -0
  2. horsies/core/__init__.py +0 -0
  3. horsies/core/app.py +552 -0
  4. horsies/core/banner.py +144 -0
  5. horsies/core/brokers/__init__.py +5 -0
  6. horsies/core/brokers/listener.py +444 -0
  7. horsies/core/brokers/postgres.py +864 -0
  8. horsies/core/cli.py +624 -0
  9. horsies/core/codec/serde.py +575 -0
  10. horsies/core/errors.py +535 -0
  11. horsies/core/logging.py +90 -0
  12. horsies/core/models/__init__.py +0 -0
  13. horsies/core/models/app.py +268 -0
  14. horsies/core/models/broker.py +79 -0
  15. horsies/core/models/queues.py +23 -0
  16. horsies/core/models/recovery.py +101 -0
  17. horsies/core/models/schedule.py +229 -0
  18. horsies/core/models/task_pg.py +307 -0
  19. horsies/core/models/tasks.py +332 -0
  20. horsies/core/models/workflow.py +1988 -0
  21. horsies/core/models/workflow_pg.py +245 -0
  22. horsies/core/registry/tasks.py +101 -0
  23. horsies/core/scheduler/__init__.py +26 -0
  24. horsies/core/scheduler/calculator.py +267 -0
  25. horsies/core/scheduler/service.py +569 -0
  26. horsies/core/scheduler/state.py +260 -0
  27. horsies/core/task_decorator.py +615 -0
  28. horsies/core/types/status.py +38 -0
  29. horsies/core/utils/imports.py +203 -0
  30. horsies/core/utils/loop_runner.py +44 -0
  31. horsies/core/worker/current.py +17 -0
  32. horsies/core/worker/worker.py +1967 -0
  33. horsies/core/workflows/__init__.py +23 -0
  34. horsies/core/workflows/engine.py +2344 -0
  35. horsies/core/workflows/recovery.py +501 -0
  36. horsies/core/workflows/registry.py +97 -0
  37. horsies/py.typed +0 -0
  38. horsies-0.1.0a1.dist-info/METADATA +31 -0
  39. horsies-0.1.0a1.dist-info/RECORD +42 -0
  40. horsies-0.1.0a1.dist-info/WHEEL +5 -0
  41. horsies-0.1.0a1.dist-info/entry_points.txt +2 -0
  42. horsies-0.1.0a1.dist-info/top_level.txt +1 -0
horsies/__init__.py ADDED
"""Task Library - A Python library for distributed task execution"""

# Install Rust-style error handler on import.
# This runs before any other horsies import so failures during the imports
# below are already rendered by the custom handler.
from .core.errors import install_error_handler as _install_error_handler

_install_error_handler()

# Core application and configuration types
from .core.app import Horsies
from .core.models.app import AppConfig
from .core.models.broker import PostgresConfig
from .core.models.tasks import (
    TaskResult,
    TaskError,
    LibraryErrorCode,
    SubWorkflowError,
    RetryPolicy,
)
from .core.models.queues import QueueMode, CustomQueueConfig
# Workflow authoring and introspection types
from .core.models.workflow import (
    WorkflowSpec,
    TaskNode,
    SubWorkflowNode,
    AnyNode,
    WorkflowHandle,
    WorkflowStatus,
    WorkflowTaskStatus,
    WorkflowContext,
    WorkflowMeta,
    OnError,
    WorkflowValidationError,
    WorkflowDefinition,
    slugify,
    SubWorkflowRetryMode,
    SubWorkflowSummary,
    SuccessCase,
    SuccessPolicy,
    NodeKey,
    WorkflowTaskInfo,
    WorkflowContextMissingIdError,
    WorkflowHandleMissingIdError,
    WORKFLOW_TERMINAL_STATES,
    WORKFLOW_TASK_TERMINAL_STATES,
)
from .core.workflows.engine import start_workflow, start_workflow_async
# Scheduling types
from .core.models.schedule import (
    Weekday,
    IntervalSchedule,
    HourlySchedule,
    DailySchedule,
    WeeklySchedule,
    MonthlySchedule,
    SchedulePattern,
    TaskSchedule,
    ScheduleConfig,
)
from .core.models.recovery import RecoveryConfig
from .core.types.status import TaskStatus, TASK_TERMINAL_STATES
from .core.errors import ErrorCode, ValidationReport, MultipleValidationErrors

# Public API of the package; mirrors the imports above.
__all__ = [
    # Core
    'Horsies',
    'AppConfig',
    'PostgresConfig',
    'TaskResult',
    'TaskError',
    'LibraryErrorCode',
    'SubWorkflowError',
    'RetryPolicy',
    'QueueMode',
    'CustomQueueConfig',
    'TaskStatus',
    'TASK_TERMINAL_STATES',
    'ErrorCode',
    'ValidationReport',
    'MultipleValidationErrors',
    # Workflow
    'WorkflowSpec',
    'TaskNode',
    'SubWorkflowNode',
    'AnyNode',
    'WorkflowHandle',
    'WorkflowStatus',
    'WorkflowTaskStatus',
    'WorkflowContext',
    'WorkflowMeta',
    'OnError',
    'WorkflowValidationError',
    'WorkflowDefinition',
    'slugify',
    'SubWorkflowRetryMode',
    'SubWorkflowSummary',
    'SuccessCase',
    'SuccessPolicy',
    'NodeKey',
    'WorkflowTaskInfo',
    'WorkflowContextMissingIdError',
    'WorkflowHandleMissingIdError',
    'WORKFLOW_TERMINAL_STATES',
    'WORKFLOW_TASK_TERMINAL_STATES',
    'start_workflow',
    'start_workflow_async',
    # Scheduling
    'Weekday',
    'IntervalSchedule',
    'HourlySchedule',
    'DailySchedule',
    'WeeklySchedule',
    'MonthlySchedule',
    'SchedulePattern',
    'TaskSchedule',
    'ScheduleConfig',
    # Recovery
    'RecoveryConfig',
]
File without changes
horsies/core/app.py ADDED
@@ -0,0 +1,552 @@
1
+ # app/core/app.py
2
+ from typing import (
3
+ Optional,
4
+ Callable,
5
+ TypeVar,
6
+ overload,
7
+ TYPE_CHECKING,
8
+ ParamSpec,
9
+ Any,
10
+ Union,
11
+ )
12
+ from horsies.core.models.app import AppConfig
13
+ from horsies.core.models.queues import QueueMode
14
+ from horsies.core.models.tasks import TaskError, TaskOptions, RetryPolicy
15
+ from horsies.core.task_decorator import create_task_wrapper, effective_priority
16
+ from horsies.core.models.workflow import (
17
+ TaskNode,
18
+ SubWorkflowNode,
19
+ WorkflowSpec,
20
+ OnError,
21
+ SuccessPolicy,
22
+ )
23
+ from horsies.core.brokers.postgres import PostgresBroker
24
+ from horsies.core.logging import get_logger
25
+ from horsies.core.registry.tasks import TaskRegistry
26
+ from horsies.core.errors import (
27
+ ConfigurationError,
28
+ HorsiesError,
29
+ MultipleValidationErrors,
30
+ TaskDefinitionError,
31
+ ErrorCode,
32
+ SourceLocation,
33
+ ValidationReport,
34
+ raise_collected,
35
+ )
36
+ import os
37
+ import importlib
38
+ import glob
39
+ from fnmatch import fnmatch
40
+ from horsies.core.utils.imports import import_by_path
41
+
42
+ if TYPE_CHECKING:
43
+ from horsies.core.task_decorator import TaskFunction
44
+ from horsies.core.models.tasks import TaskResult
45
+
46
# Type variables used throughout task typing:
# P captures a task function's parameters; T its TaskResult payload type.
P = ParamSpec('P')
T = TypeVar('T')

# Any horsies error subtype; lets _no_location return the same type it received.
_E = TypeVar('_E', bound=HorsiesError)


def _no_location(error: _E) -> _E:
    """Strip the auto-detected source location from a programmatic error.

    Used for errors created inside horsies internals where the auto-detected
    frame (e.g., CLI entry point) is misleading. The error message itself
    contains the relevant context (e.g., module path).

    Note: mutates ``error`` in place and returns the same instance.
    """
    error.location = None
    return error
61
+
62
+
63
+ class Horsies:
64
+ """
65
+ Configuration-driven task management app.
66
+ Requires an AppConfig instance for proper validation and type safety.
67
+ """
68
+
69
    def __init__(self, config: AppConfig):
        """Bind the app to a validated AppConfig and set up empty registries."""
        self.config = config
        # Broker is created lazily on first get_broker() call
        self._broker: Optional['PostgresBroker'] = None
        # Registry of task functions decorated via @app.task
        self.tasks: TaskRegistry[Callable[..., Any]] = TaskRegistry()
        self.logger = get_logger('app')
        # Module paths recorded by discover_tasks(); imported later on demand
        self._discovered_task_modules: list[str] = []
        # When True, task sends/schedules are suppressed (used during import/discovery)
        self._suppress_sends: bool = False
        # Role indicates context: 'producer', 'worker', or 'scheduler'
        self._role: str = 'producer'

        # Child worker processes log their pid to distinguish them from the parent
        if os.getenv('HORSIES_CHILD_PROCESS') == '1':
            self.logger.info(
                f'horsies subprocess initialized with {config.queue_mode.name} mode (pid={os.getpid()})'
            )
        else:
            self.logger.info(f'horsies initialized as {self._role} with {config.queue_mode.name} mode')
86
+
87
+ def set_role(self, role: str) -> None:
88
+ """Set the role and log it. Called by CLI after discovery."""
89
+ self._role = role
90
+ self.logger.info(f'horsies running as {role}')
91
+
92
+ def get_valid_queue_names(self) -> list[str]:
93
+ """Get list of valid queue names based on configuration"""
94
+ if self.config.queue_mode == QueueMode.DEFAULT:
95
+ return ['default']
96
+ else: # CUSTOM mode
97
+ return [queue.name for queue in (self.config.custom_queues or [])]
98
+
99
+ def validate_queue_name(self, queue_name: Optional[str]) -> str:
100
+ """Validate queue name against app configuration"""
101
+ if self.config.queue_mode == QueueMode.DEFAULT:
102
+ if queue_name is not None:
103
+ raise ConfigurationError(
104
+ message='cannot specify queue_name in DEFAULT mode',
105
+ code=ErrorCode.CONFIG_INVALID_QUEUE_MODE,
106
+ notes=[
107
+ f"queue_name='{queue_name}' was specified",
108
+ 'but app is configured with QueueMode.DEFAULT',
109
+ ],
110
+ help_text='either remove queue_name or switch to QueueMode.CUSTOM',
111
+ )
112
+ return 'default'
113
+ else: # CUSTOM mode
114
+ if queue_name is None:
115
+ raise ConfigurationError(
116
+ message='queue_name is required in CUSTOM mode',
117
+ code=ErrorCode.CONFIG_INVALID_QUEUE_MODE,
118
+ notes=['app is configured with QueueMode.CUSTOM'],
119
+ help_text='specify queue_name from configured queues',
120
+ )
121
+ valid_queues = self.get_valid_queue_names()
122
+ if queue_name not in valid_queues:
123
+ raise ConfigurationError(
124
+ message=f"invalid queue_name '{queue_name}'",
125
+ code=ErrorCode.TASK_INVALID_QUEUE,
126
+ notes=[f'valid queues: {valid_queues}'],
127
+ help_text='use one of the configured queue names',
128
+ )
129
+ return queue_name
130
+
131
+ @overload
132
+ def task(
133
+ self, task_name: str, func: Callable[P, 'TaskResult[T, TaskError]']
134
+ ) -> 'TaskFunction[P, T]': ...
135
+
136
+ @overload
137
+ def task(
138
+ self,
139
+ task_name: str,
140
+ *,
141
+ queue_name: Optional[str] = None,
142
+ good_until: Any = None,
143
+ auto_retry_for: Optional[list[str]] = None,
144
+ retry_policy: Optional['RetryPolicy'] = None,
145
+ ) -> Callable[
146
+ [Callable[P, 'TaskResult[T, TaskError]']],
147
+ 'TaskFunction[P, T]',
148
+ ]: ...
149
+
150
+ def task(
151
+ self,
152
+ task_name: str,
153
+ func: Optional[Callable[P, 'TaskResult[T, TaskError]']] = None,
154
+ **task_options_kwargs: Any,
155
+ ) -> Union[
156
+ 'TaskFunction[P, T]',
157
+ Callable[
158
+ [Callable[P, 'TaskResult[T, TaskError]']],
159
+ 'TaskFunction[P, T]',
160
+ ],
161
+ ]:
162
+ """
163
+ Decorator to register a task with this app.
164
+ Task options are validated against TaskOptions model and app configuration.
165
+ """
166
+
167
+ def decorator(fn: Callable[P, 'TaskResult[T, TaskError]']):
168
+ fn_location = SourceLocation.from_function(fn)
169
+
170
+ # Validate and create TaskOptions - this enforces pydantic validation
171
+ try:
172
+ task_options = TaskOptions(task_name=task_name, **task_options_kwargs)
173
+ except Exception as e:
174
+ raise TaskDefinitionError(
175
+ message=f'invalid task options',
176
+ code=ErrorCode.TASK_INVALID_OPTIONS,
177
+ location=fn_location,
178
+ notes=[f"task '{fn.__name__}'", str(e)],
179
+ help_text='check task decorator arguments',
180
+ )
181
+
182
+ # VALIDATION AT DEFINITION TIME
183
+ # Validate queue_name against app configuration
184
+ try:
185
+ self.validate_queue_name(task_options.queue_name)
186
+ except ConfigurationError:
187
+ raise # Re-raise with original formatting
188
+
189
+ # Create wrapper that uses this app's configuration
190
+ task_function = create_task_wrapper(fn, self, task_name, task_options)
191
+
192
+ # Register task with this app, passing source for duplicate detection
193
+ # Normalize path with realpath to handle symlinks and relative paths
194
+ source_str = (
195
+ f"{os.path.realpath(fn_location.file)}:{fn_location.line}"
196
+ if fn_location
197
+ else None
198
+ )
199
+ self.tasks.register(task_function, name=task_name, source=source_str)
200
+
201
+ return task_function
202
+
203
+ if func is None:
204
+ # Called with arguments: @app.task(queue_name="custom")
205
+ return decorator
206
+ else:
207
+ # Called without arguments: @app.task
208
+ return decorator(func)
209
+
210
+ def check(self, *, live: bool = False) -> list[HorsiesError]:
211
+ """Orchestrate phased validation and return all errors found.
212
+
213
+ Phase 1: Config — already validated at construction (implicit pass).
214
+ Phase 2: Task module imports — import each module, collect errors.
215
+ Phase 3: Workflow validation — happens during imports (WorkflowSpec construction).
216
+ Phase 4 (if live): Broker connectivity — async SELECT 1.
217
+
218
+ Args:
219
+ live: If True, also check broker connectivity (Phase 4).
220
+
221
+ Returns:
222
+ List of all HorsiesError instances found across phases.
223
+ Empty list means all validations passed.
224
+ """
225
+ all_errors: list[HorsiesError] = []
226
+
227
+ # Phase 2: task module imports (also triggers Phase 3 workflow validation)
228
+ all_errors.extend(self._check_task_imports())
229
+ if all_errors:
230
+ return all_errors
231
+
232
+ # Phase 4 (optional): broker connectivity
233
+ if live:
234
+ all_errors.extend(self._check_broker_connectivity())
235
+
236
+ return all_errors
237
+
238
    def _check_task_imports(self) -> list[HorsiesError]:
        """Import each discovered task module, collecting (not raising) errors.

        Sends are suppressed for the duration so top-level `.send()` calls in
        user modules do not enqueue tasks as an import side effect.
        """
        errors: list[HorsiesError] = []
        modules = self._discovered_task_modules
        # Remember the caller's suppression state so nested use restores it
        prev_suppress = self._suppress_sends
        self.suppress_sends(True)
        try:
            for module_path in modules:
                try:
                    # File path (ends in .py or contains a separator) vs dotted module
                    if module_path.endswith('.py') or os.path.sep in module_path:
                        abs_path = os.path.realpath(module_path)
                        if not os.path.exists(abs_path):
                            errors.append(_no_location(ConfigurationError(
                                message=f'task module not found: {module_path}',
                                code=ErrorCode.CLI_INVALID_ARGS,
                                notes=[f'resolved path: {abs_path}'],
                                help_text=(
                                    'remove it from app.discover_tasks([...]) or fix the path; \n'
                                    'if using globs, run app.expand_module_globs([...]) first'
                                ),
                            )))
                            continue
                        import_by_path(abs_path)
                    else:
                        importlib.import_module(module_path)
                except MultipleValidationErrors as exc:
                    # Workflow validation aggregates errors; flatten them here
                    errors.extend(exc.report.errors)
                except HorsiesError as exc:
                    errors.append(exc)
                except Exception as exc:
                    # Non-horsies import failure (syntax error, missing dep, ...)
                    errors.append(_no_location(ConfigurationError(
                        message=f'failed to import module: {module_path}',
                        code=ErrorCode.CLI_INVALID_ARGS,
                        notes=[str(exc)],
                        help_text=(
                            'ensure the module is importable; '
                            'for file paths include .py and a valid path, '
                            'for dotted paths verify PYTHONPATH or run from the project root'
                        ),
                    )))
        finally:
            # Restore rather than unconditionally disable suppression
            self.suppress_sends(prev_suppress)
        return errors
281
+
282
    def _check_broker_connectivity(self) -> list[HorsiesError]:
        """Check broker connectivity via SELECT 1 (Phase 4 of check())."""
        # Local import keeps asyncio out of the hot import path
        import asyncio

        errors: list[HorsiesError] = []
        try:
            broker = self.get_broker()

            async def _test_connection() -> None:
                from sqlalchemy import text

                # Cheapest possible round-trip to prove connectivity
                async with broker.session_factory() as session:
                    await session.execute(text('SELECT 1'))

            asyncio.run(_test_connection())
        except HorsiesError as exc:
            errors.append(exc)
        except Exception as exc:
            # Wrap driver/network failures in a horsies error with guidance
            errors.append(_no_location(ConfigurationError(
                message='broker connectivity check failed',
                code=ErrorCode.BROKER_INVALID_URL,
                notes=[str(exc)],
                help_text='check database_url in PostgresConfig',
            )))
        return errors
307
+
308
    def list_tasks(self) -> list[str]:
        """List tasks registered with this app."""
        # Fresh list copy, safe for the caller to mutate
        return list(self.tasks.keys_list())
311
+
312
+ def get_broker(self) -> 'PostgresBroker':
313
+ """Get the configured PostgreSQL broker for this app"""
314
+ try:
315
+ if self._broker is None:
316
+ self._broker = PostgresBroker(self.config.broker)
317
+ self._broker.app = self # Store app reference for subworkflow support
318
+ return self._broker
319
+ except Exception as e:
320
+ raise ValueError(f'Failed to get broker: {e}')
321
+
322
+ def discover_tasks(
323
+ self,
324
+ modules: list[str],
325
+ ) -> None:
326
+ """
327
+ Register task modules for later import.
328
+
329
+ This method only records module paths — no file I/O happens here.
330
+ Actual imports occur when import_task_modules() is called (typically by worker).
331
+
332
+ Args:
333
+ modules: List of dotted module paths (e.g., ['myapp.tasks', 'myapp.jobs.tasks'])
334
+ or file paths (e.g., ['tasks.py', 'src/worker_tasks.py'])
335
+
336
+ Examples:
337
+ app.discover_tasks(['myapp.tasks']) # dotted module path
338
+ app.discover_tasks(['tasks.py']) # file path
339
+
340
+ # For glob patterns, use expand_module_globs() first:
341
+ paths = app.expand_module_globs(['src/**/*_tasks.py'])
342
+ app.discover_tasks(paths)
343
+ """
344
+ self._discovered_task_modules = list(modules)
345
+
346
+ is_child_process = os.getenv('HORSIES_CHILD_PROCESS') == '1'
347
+ child_logs_enabled = os.getenv('HORSIES_CHILD_DISCOVERY_LOGS') == '1'
348
+ should_log = not is_child_process or child_logs_enabled
349
+
350
+ if should_log and len(modules) > 0:
351
+ self.logger.info(f'Registered {len(modules)} task module(s) for discovery')
352
+
353
+ def expand_module_globs(
354
+ self,
355
+ patterns: list[str],
356
+ exclude: list[str] | None = None,
357
+ ) -> list[str]:
358
+ """
359
+ Expand glob patterns to file paths.
360
+
361
+ Use this explicitly when you need glob-based discovery.
362
+ This is separated from discover_tasks() to make the I/O cost explicit.
363
+
364
+ Args:
365
+ patterns: Glob patterns like ['src/**/*_tasks.py'] or file paths
366
+ exclude: Glob patterns to exclude (default: test files)
367
+
368
+ Returns:
369
+ List of absolute file paths
370
+
371
+ Examples:
372
+ paths = app.expand_module_globs(['src/**/*_tasks.py'])
373
+ app.discover_tasks(paths)
374
+ """
375
+ exclude_patterns = exclude or ['*_test.py', 'test_*.py', 'conftest.py']
376
+ results: list[str] = []
377
+
378
+ def _is_excluded(path: str) -> bool:
379
+ basename = os.path.basename(path)
380
+ for pattern in exclude_patterns:
381
+ if fnmatch(path, pattern) or fnmatch(basename, pattern):
382
+ return True
383
+ return False
384
+
385
+ for pattern in patterns:
386
+ has_glob = any(ch in pattern for ch in ['*', '?', '[', ']'])
387
+ if has_glob:
388
+ for match in glob.glob(pattern, recursive=True):
389
+ abs_path = os.path.realpath(match)
390
+ if not os.path.exists(abs_path):
391
+ continue
392
+ if os.path.isdir(abs_path):
393
+ continue
394
+ if not abs_path.endswith('.py'):
395
+ continue
396
+ if _is_excluded(abs_path):
397
+ continue
398
+ if abs_path not in results:
399
+ results.append(abs_path)
400
+ elif pattern.endswith('.py') or os.path.sep in pattern:
401
+ # Direct file path
402
+ abs_path = os.path.realpath(pattern)
403
+ if os.path.exists(abs_path) and abs_path.endswith('.py'):
404
+ if not _is_excluded(abs_path) and abs_path not in results:
405
+ results.append(abs_path)
406
+ else:
407
+ # Dotted module path - pass through as-is
408
+ if pattern not in results:
409
+ results.append(pattern)
410
+
411
+ return results
412
+
413
+ def get_discovered_task_modules(self) -> list[str]:
414
+ """Get the list of discovered task modules"""
415
+ return self._discovered_task_modules.copy()
416
+
417
+ def import_task_modules(
418
+ self,
419
+ modules: Optional[list[str]] = None,
420
+ ) -> list[str]:
421
+ """Import task modules to eagerly register tasks.
422
+
423
+ If modules is None, imports the modules discovered by discover_tasks().
424
+ Returns the list of module identifiers that were imported.
425
+ """
426
+ modules_to_import = (
427
+ self._discovered_task_modules if modules is None else modules
428
+ )
429
+ imported: list[str] = []
430
+ for module in modules_to_import:
431
+ if module.endswith('.py') or os.path.sep in module:
432
+ abs_path = os.path.realpath(module)
433
+ if not os.path.exists(abs_path):
434
+ self.logger.warning(f'Task module not found: {module}')
435
+ continue
436
+ import_by_path(abs_path)
437
+ imported.append(abs_path)
438
+ else:
439
+ importlib.import_module(module)
440
+ imported.append(module)
441
+ return imported
442
+
443
    # -------- side-effect control (import/discovery) --------
    def suppress_sends(self, value: bool = True) -> None:
        """Enable/disable suppression of task sends/schedules.

        Library-internal use: the worker sets this True while importing user
        modules for task discovery so any top-level `.send()` calls in those
        modules do not enqueue tasks as an import side effect.
        """
        # Read back via are_sends_suppressed() before any enqueue happens
        self._suppress_sends = value
452
+
453
+ def are_sends_suppressed(self) -> bool:
454
+ """Return True if sends/schedules should be no-ops.
455
+
456
+ Environment override: if TASKLIB_SUPPRESS_SENDS=1, suppression is also
457
+ considered active (useful for ad-hoc scripting).
458
+ """
459
+ env_flag = os.getenv('TASKLIB_SUPPRESS_SENDS', '').strip()
460
+ return self._suppress_sends or env_flag == '1'
461
+
462
    # -------- workflow factory --------
    def workflow(
        self,
        name: str,
        tasks: list[TaskNode[Any] | SubWorkflowNode[Any]],
        on_error: OnError = OnError.FAIL,
        output: TaskNode[Any] | SubWorkflowNode[Any] | None = None,
        success_policy: SuccessPolicy | None = None,
    ) -> WorkflowSpec:
        """
        Create a validated WorkflowSpec with proper queue and priority resolution.

        Validates queues against app config and resolves priorities using
        effective_priority() to match non-workflow task behavior.

        NOTE(review): TaskNodes are mutated in place (queue/priority filled
        in), so the caller's node objects change after this call.

        Args:
            name: Human-readable workflow name
            tasks: List of TaskNode/SubWorkflowNode instances
            on_error: Error handling policy (FAIL or PAUSE)
            output: Explicit output task (optional)
            success_policy: Custom success policy for workflow completion (optional)

        Returns:
            WorkflowSpec ready to start

        Raises:
            ValueError: If any TaskNode.queue is not in app config
        """
        report = ValidationReport('workflow')
        for node in tasks:
            # SubWorkflowNode doesn't have queue/priority - handled at execution time
            if isinstance(node, SubWorkflowNode):
                continue

            # TaskNode: resolve queue and priority
            task = node

            # Resolve queue: explicit override > task decorator > "default"
            resolved_queue = (
                task.queue or getattr(task.fn, 'task_queue_name', None) or 'default'
            )

            # Validate queue against app config
            # In DEFAULT mode, queue must be "default" (or None which resolves to "default")
            # In CUSTOM mode, queue must be in custom_queues list
            queue_valid = True
            if self.config.queue_mode == QueueMode.CUSTOM:
                valid_queues = self.get_valid_queue_names()
                if resolved_queue not in valid_queues:
                    report.add(ConfigurationError(
                        message='TaskNode queue not in app config',
                        code=ErrorCode.TASK_INVALID_QUEUE,
                        notes=[
                            f"TaskNode '{task.name}' has queue '{resolved_queue}'",
                            f'valid queues: {valid_queues}',
                        ],
                        help_text='use one of the configured queue names or add this queue to app config',
                    ))
                    queue_valid = False
            elif resolved_queue != 'default':
                report.add(ConfigurationError(
                    message='TaskNode has non-default queue in DEFAULT mode',
                    code=ErrorCode.CONFIG_INVALID_QUEUE_MODE,
                    notes=[
                        f"TaskNode '{task.name}' has queue '{resolved_queue}'",
                        "app is in DEFAULT mode (only 'default' queue allowed)",
                    ],
                    help_text='either remove queue override or switch to QueueMode.CUSTOM',
                ))
                queue_valid = False

            if queue_valid:
                # Store resolved queue for later use
                task.queue = resolved_queue

                # Resolve priority if not explicitly set
                if task.priority is None:
                    task.priority = effective_priority(self, resolved_queue)

        # Presumably raises when the report is non-empty — TODO confirm
        # against raise_collected() in horsies.core.errors
        raise_collected(report)

        # Create spec with validated tasks and broker
        spec = WorkflowSpec(
            name=name,
            tasks=tasks,
            on_error=on_error,
            output=output,
            success_policy=success_policy,
            broker=self.get_broker(),
        )
        return spec