horsies 0.1.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. horsies/__init__.py +117 -0
  2. horsies/core/__init__.py +0 -0
  3. horsies/core/app.py +552 -0
  4. horsies/core/banner.py +144 -0
  5. horsies/core/brokers/__init__.py +5 -0
  6. horsies/core/brokers/listener.py +444 -0
  7. horsies/core/brokers/postgres.py +993 -0
  8. horsies/core/cli.py +624 -0
  9. horsies/core/codec/serde.py +596 -0
  10. horsies/core/errors.py +535 -0
  11. horsies/core/logging.py +90 -0
  12. horsies/core/models/__init__.py +0 -0
  13. horsies/core/models/app.py +268 -0
  14. horsies/core/models/broker.py +79 -0
  15. horsies/core/models/queues.py +23 -0
  16. horsies/core/models/recovery.py +101 -0
  17. horsies/core/models/schedule.py +229 -0
  18. horsies/core/models/task_pg.py +307 -0
  19. horsies/core/models/tasks.py +358 -0
  20. horsies/core/models/workflow.py +1990 -0
  21. horsies/core/models/workflow_pg.py +245 -0
  22. horsies/core/registry/tasks.py +101 -0
  23. horsies/core/scheduler/__init__.py +26 -0
  24. horsies/core/scheduler/calculator.py +267 -0
  25. horsies/core/scheduler/service.py +569 -0
  26. horsies/core/scheduler/state.py +260 -0
  27. horsies/core/task_decorator.py +656 -0
  28. horsies/core/types/status.py +38 -0
  29. horsies/core/utils/imports.py +203 -0
  30. horsies/core/utils/loop_runner.py +44 -0
  31. horsies/core/worker/current.py +17 -0
  32. horsies/core/worker/worker.py +1967 -0
  33. horsies/core/workflows/__init__.py +23 -0
  34. horsies/core/workflows/engine.py +2344 -0
  35. horsies/core/workflows/recovery.py +501 -0
  36. horsies/core/workflows/registry.py +97 -0
  37. horsies/py.typed +0 -0
  38. horsies-0.1.0a4.dist-info/METADATA +35 -0
  39. horsies-0.1.0a4.dist-info/RECORD +42 -0
  40. horsies-0.1.0a4.dist-info/WHEEL +5 -0
  41. horsies-0.1.0a4.dist-info/entry_points.txt +2 -0
  42. horsies-0.1.0a4.dist-info/top_level.txt +1 -0
@@ -0,0 +1,596 @@
1
+ # app/core/codec/serde.py
2
+ from __future__ import annotations
3
+ from typing import (
4
+ Any,
5
+ Dict,
6
+ List,
7
+ Optional,
8
+ Type,
9
+ Union,
10
+ Mapping,
11
+ Sequence,
12
+ TypeGuard,
13
+ cast,
14
+ )
15
+ import datetime as dt
16
+ import json
17
+ import traceback as tb
18
+ from pydantic import BaseModel
19
+ import dataclasses
20
+ from horsies.core.models.tasks import (
21
+ TaskOptions,
22
+ TaskResult,
23
+ TaskError,
24
+ LibraryErrorCode,
25
+ )
26
+ from importlib import import_module
27
+ from horsies.core.logging import get_logger
28
+
29
+ logger = get_logger('serde')
30
+
31
+
32
Json = Union[None, bool, int, float, str, List['Json'], Dict[str, 'Json']]
"""
Recursive union type for JSON-serializable values.

Lists and dicts may nest arbitrarily, but dict keys must be `str`
(stricter than what `json.loads` technically accepts for input).
"""
36
+
37
+
38
class SerializationError(Exception):
    """Raised when a value cannot be serialized to, or rehydrated from, JSON."""
44
+
45
+
46
+ def _is_json_native(x: object) -> TypeGuard[Json]:
47
+ """
48
+ Check if a value is a JSON-native type (by our stricter definition: dict keys must be str).
49
+ """
50
+ if x is None or isinstance(x, (bool, int, float, str)):
51
+ return True
52
+
53
+ if isinstance(x, list):
54
+ items = cast(List[object], x)
55
+ return all(_is_json_native(item) for item in items)
56
+
57
+ if isinstance(x, dict):
58
+ _dict = cast(Dict[object, object], x)
59
+ for key, value in _dict.items():
60
+ if not isinstance(key, str) or not _is_json_native(value):
61
+ return False
62
+ return True
63
+
64
+ return False
65
+
66
+
67
+ def _exception_to_json(ex: BaseException) -> Dict[str, Json]:
68
+ """
69
+ Convert a BaseException to a JSON-serializable dictionary.
70
+
71
+ Returns:
72
+ A dict with following key-value pairs:
73
+ - "type": str
74
+ - "message": str
75
+ - "traceback": str
76
+ """
77
+ return {
78
+ 'type': type(ex).__name__,
79
+ 'message': str(ex),
80
+ 'traceback': ''.join(tb.format_exception(type(ex), ex, ex.__traceback__)),
81
+ }
82
+
83
+
84
def _task_error_to_json(err: TaskError) -> Dict[str, Json]:
    """
    Convert a `TaskError` model to a JSON-serializable dictionary.

    The `exception` field is handled manually because pydantic cannot
    serialize a live exception object:
    - a `BaseException` is flattened via `_exception_to_json`;
    - a dict (already JSON-like) or `None` passes through untouched;
    - any other type is coerced to a minimal {"type", "message"} shape.

    Args:
        err: The `TaskError` model to convert.

    Returns:
        A dict with "__task_error__": True plus the dumped model fields
        ("error_code", "message", "data", and optionally "exception").
    """
    # Dump everything except the exception; pydantic handles the rest.
    ex = err.exception
    payload = err.model_dump(mode='json', exclude={'exception'})

    ex_json: Optional[Dict[str, Json]]
    if isinstance(ex, BaseException):
        ex_json = _exception_to_json(ex)
    elif ex is None or isinstance(ex, dict):
        ex_json = ex  # already JSON-like or absent (e.g. None)
    else:
        # Unknown type: coerce to a simple shape of string
        ex_json = {'type': type(ex).__name__, 'message': str(ex)}

    if ex_json is not None:
        payload['exception'] = ex_json

    return {'__task_error__': True, **payload}
123
+
124
+
125
def _is_task_result(value: Any) -> TypeGuard[TaskResult[Any, TaskError]]:
    """Type guard to properly narrow TaskResult types.

    Runtime check is a plain `isinstance`; the `TypeGuard` return type lets
    static checkers narrow `value` to `TaskResult[Any, TaskError]` at call sites.
    """
    return isinstance(value, TaskResult)
128
+
129
+
130
# Cache of resolved Pydantic model classes, keyed by 'module:qualname'
# (see `rehydrate_value`), so each class is dynamically imported only once.
_CLASS_CACHE: Dict[str, Type[BaseModel]] = {}

# Cache of resolved dataclass types, keyed the same way.
_DATACLASS_CACHE: Dict[str, type] = {}
137
+
138
+
139
+ def _qualified_class_path(cls: type) -> tuple[str, str]:
140
+ """
141
+ Get the module and qualname for a class, with validation for importability.
142
+
143
+ Raises SerializationError if the class is not importable by workers:
144
+ - Defined in __main__ (entrypoint script)
145
+ - Defined inside a function (local class with <locals> in qualname)
146
+ """
147
+ module_name = cls.__module__
148
+ qualname = cls.__qualname__
149
+
150
+ # STRICT CHECK: Refuse to serialize classes defined in the entrypoint script
151
+ if module_name in ('__main__', '__mp_main__'):
152
+ raise SerializationError(
153
+ f"Cannot serialize '{qualname}' because it is defined in '__main__'. "
154
+ 'Please move this class to a separate module (file) so it can be imported by the worker.'
155
+ )
156
+
157
+ # STRICT CHECK: Refuse to serialize local classes (defined inside functions)
158
+ if '<locals>' in qualname:
159
+ raise SerializationError(
160
+ f"Cannot serialize '{qualname}' because it is a local class defined inside a function. "
161
+ 'Please move this class to module level so it can be imported by the worker.'
162
+ )
163
+
164
+ return (module_name, qualname)
165
+
166
+
167
def _qualified_model_path(model: BaseModel) -> tuple[str, str]:
    """Resolve the importable (module, qualname) path of a Pydantic model instance."""
    cls = type(model)
    return _qualified_class_path(cls)
170
+
171
+
172
def _qualified_dataclass_path(instance: Any) -> tuple[str, str]:
    """Resolve the importable (module, qualname) path of a dataclass instance."""
    cls = type(instance)
    return _qualified_class_path(cls)
175
+
176
+
177
def to_jsonable(value: Any) -> Json:
    """
    Convert value to JSON with special handling for Pydantic models, TaskError, TaskResult.

    Dispatch order matters: datetime before date (subclass relationship),
    TaskResult and TaskError before the generic BaseModel case, and the
    marker-dict forms before the generic Mapping/Sequence fallbacks.

    Args:
        value: The value to convert to JSON.

    Returns:
        A JSON-serializable value. For more information, see `Json` Union type.

    Raises:
        SerializationError: If the value (or a nested element) has no
            JSON representation.
    """
    # Fast path: already JSON-native (scalars, str-keyed dicts, lists thereof).
    if _is_json_native(value):
        return value

    # datetime.datetime is a subclass of datetime.date — check datetime first.
    if isinstance(value, dt.datetime):
        return {'__datetime__': True, 'value': value.isoformat()}

    if isinstance(value, dt.date):
        return {'__date__': True, 'value': value.isoformat()}

    if isinstance(value, dt.time):
        return {'__time__': True, 'value': value.isoformat()}

    # Is value a `TaskResult`?
    if _is_task_result(value):
        # Represent discriminated union explicitly
        ok_json = to_jsonable(value.ok) if value.ok is not None else None
        err_json: Optional[Dict[str, Json]] = None
        if value.err is not None:
            if isinstance(value.err, TaskError):
                err_json = _task_error_to_json(value.err)
            elif isinstance(value.err, BaseModel):
                err_json = value.err.model_dump()  # if someone used a model for error
            else:
                # last resort: stringify
                err_json = {'message': str(value.err)}
        return {'__task_result__': True, 'ok': ok_json, 'err': err_json}

    # Is value a `TaskError`?
    if isinstance(value, TaskError):
        return _task_error_to_json(value)

    # Is value a `BaseModel`?
    if isinstance(value, BaseModel):
        # Include type metadata so we can rehydrate on the other side
        module, qualname = _qualified_model_path(value)
        return {
            '__pydantic_model__': True,
            'module': module,
            'qualname': qualname,
            # Use mode="json" to ensure JSON-compatible field values
            'data': value.model_dump(mode='json'),
        }

    # Dataclass support - serialize with metadata for round-trip reconstruction.
    # Field-by-field conversion (not asdict()) preserves nested type metadata;
    # the `not isinstance(value, type)` check excludes dataclass *classes*.
    if dataclasses.is_dataclass(value) and not isinstance(value, type):
        module, qualname = _qualified_dataclass_path(value)
        # Convert each field via to_jsonable to preserve nested Pydantic/dataclass metadata
        field_data: Dict[str, Json] = {}
        for field in dataclasses.fields(value):
            field_value = getattr(value, field.name)
            field_data[field.name] = to_jsonable(field_value)
        return {
            '__dataclass__': True,
            'module': module,
            'qualname': qualname,
            'data': field_data,
        }

    # Generic dictionary-like objects (OrderedDict, defaultdict, ...):
    # coerce keys to str and recurse into values.
    if isinstance(value, Mapping):
        mapping = cast(Mapping[object, object], value)
        return {str(key): to_jsonable(item) for key, item in mapping.items()}

    # Generic list-like objects (list, tuple, ...), excluding str/bytes/bytearray
    # which are treated as primitives rather than sequences of characters.
    # NOTE(review): `set` is NOT a `Sequence`, so sets fall through to the
    # SerializationError below — convert sets to lists before passing them in.
    if isinstance(value, Sequence) and not isinstance(value, (str, bytes, bytearray)):
        seq = cast(Sequence[object], value)
        return [to_jsonable(item) for item in seq]

    raise SerializationError(f'Cannot serialize value of type {type(value).__name__}')
266
+
267
+
268
def dumps_json(value: Any) -> str:
    """
    Serialize a value to a compact JSON string.

    The value is first normalized via `to_jsonable`; output is compact
    (no spaces after separators) and keeps non-ASCII characters as-is.

    Args:
        value: The value to serialize.

    Returns:
        A JSON string.
    """
    payload = to_jsonable(value)
    # allow_nan=False: NaN/Infinity are not valid JSON and must fail loudly.
    return json.dumps(
        payload,
        ensure_ascii=False,
        separators=(',', ':'),
        allow_nan=False,
    )
284
+
285
+
286
def loads_json(s: Optional[str]) -> Json:
    """
    Deserialize a JSON string to a JSON value.

    Args:
        s: The JSON string to deserialize; None or empty yields None.

    Returns:
        A JSON value. For more information, see `Json` Union type.
    """
    if not s:
        return None
    return json.loads(s)
297
+
298
+
299
def args_to_json(args: tuple[Any, ...]) -> str:
    """
    Serialize a tuple of positional arguments to a JSON string.

    Args:
        args: The tuple of arguments to serialize.

    Returns:
        A JSON string encoding the arguments as a list (JSON has no tuples).
    """
    return dumps_json([*args])
310
+
311
+
312
def kwargs_to_json(kwargs: dict[str, Any]) -> str:
    """
    Serialize a dictionary of keyword arguments to a JSON string.

    Args:
        kwargs: The keyword-argument mapping to serialize.

    Returns:
        A JSON string encoding the mapping as a JSON object.
    """
    return dumps_json(dict(kwargs))
323
+
324
+
325
def rehydrate_value(value: Json) -> Any:
    """
    Recursively rehydrate a JSON value, restoring rich types from their
    serialized form.

    Handles, in order: Pydantic models ("__pydantic_model__"), dataclasses
    ("__dataclass__"), datetime/date/time markers, nested TaskResult
    ("__task_result__"), then plain dicts and lists recursively; primitive
    values are returned as-is.

    Args:
        value: The JSON value to rehydrate.

    Returns:
        The rehydrated value with rich types restored.

    Raises:
        SerializationError: If a Pydantic model or dataclass cannot be
            rehydrated (unimportable module, wrong type, validation failure).
    """
    # Handle Pydantic model rehydration
    if isinstance(value, dict) and value.get('__pydantic_model__'):
        module_name = cast(str, value.get('module'))
        qualname = cast(str, value.get('qualname'))
        data = value.get('data')

        cache_key = f'{module_name}:{qualname}'

        try:
            # 1. Check Cache
            if cache_key in _CLASS_CACHE:
                cls = _CLASS_CACHE[cache_key]
            else:
                # 2. Dynamic Import
                try:
                    module = import_module(module_name)
                except ImportError as e:
                    raise SerializationError(
                        f"Could not import module '{module_name}'. "
                        f'Did you move the file without leaving a re-export shim? Error: {e}'
                    )

                # 3. Resolve Class
                cls = module
                # Handle nested classes (e.g. ClassA.ClassB)
                for part in qualname.split('.'):
                    cls = getattr(cls, part)

                if not (isinstance(cls, type) and issubclass(cls, BaseModel)):
                    raise SerializationError(f'{cache_key} is not a BaseModel')

                # 4. Save to Cache
                _CLASS_CACHE[cache_key] = cls

            # 5. Validate/Hydrate
            return cls.model_validate(data)

        except SerializationError:
            # Already a descriptive library error — propagate unchanged
            # (keeps this branch consistent with the dataclass branch below,
            # and avoids double-wrapping/mis-logging our own errors).
            raise
        except Exception as e:
            # Catch Pydantic ValidationErrors or AttributeErrors here
            logger.error(
                f'Failed to rehydrate Pydantic model {cache_key}: {type(e).__name__}: {e}'
            )
            raise SerializationError(f'Failed to rehydrate {cache_key}: {str(e)}')

    # Handle dataclass rehydration
    if isinstance(value, dict) and value.get('__dataclass__'):
        module_name = cast(str, value.get('module'))
        qualname = cast(str, value.get('qualname'))
        data = value.get('data')

        cache_key = f'{module_name}:{qualname}'

        try:
            # 1. Check Cache
            if cache_key in _DATACLASS_CACHE:
                dc_cls = _DATACLASS_CACHE[cache_key]
            else:
                # 2. Dynamic Import
                try:
                    module = import_module(module_name)
                except ImportError as e:
                    raise SerializationError(
                        f"Could not import module '{module_name}'. "
                        f'Did you move the file without leaving a re-export shim? Error: {e}'
                    )

                # 3. Resolve Class
                resolved: Any = module
                # Handle nested classes (e.g. ClassA.ClassB)
                for part in qualname.split('.'):
                    resolved = getattr(resolved, part)

                if not isinstance(resolved, type) or not dataclasses.is_dataclass(
                    resolved
                ):
                    raise SerializationError(f'{cache_key} is not a dataclass')

                dc_cls = resolved
                # 4. Save to Cache
                _DATACLASS_CACHE[cache_key] = dc_cls

            # 5. Instantiate dataclass with rehydrated field values
            if not isinstance(data, dict):
                raise SerializationError(
                    f'Dataclass data must be a dict, got {type(data)}'
                )

            # Rehydrate each field to restore nested Pydantic/dataclass types
            rehydrated_data = {k: rehydrate_value(v) for k, v in data.items()}

            # Separate init=True fields from init=False fields
            dc_fields = {f.name: f for f in dataclasses.fields(dc_cls)}
            init_kwargs: Dict[str, Any] = {}
            non_init_fields: Dict[str, Any] = {}
            for field_name, field_value in rehydrated_data.items():
                field_def = dc_fields.get(field_name)
                if field_def is None:
                    # Field not in dataclass definition - skip (could be removed field)
                    continue
                if field_def.init:
                    init_kwargs[field_name] = field_value
                else:
                    non_init_fields[field_name] = field_value

            # Construct with init fields only
            instance = dc_cls(**init_kwargs)

            # Set non-init fields directly on the instance
            # (object.__setattr__ also works for frozen dataclasses)
            for fname, fvalue in non_init_fields.items():
                object.__setattr__(instance, fname, fvalue)

            return instance

        except SerializationError:
            raise
        except Exception as e:
            logger.error(
                f'Failed to rehydrate dataclass {cache_key}: {type(e).__name__}: {e}'
            )
            raise SerializationError(
                f'Failed to rehydrate dataclass {cache_key}: {str(e)}'
            )

    # Handle datetime rehydration (datetime before date — subclass ordering)
    if isinstance(value, dict) and value.get('__datetime__'):
        return dt.datetime.fromisoformat(cast(str, value['value']))

    if isinstance(value, dict) and value.get('__date__'):
        return dt.date.fromisoformat(cast(str, value['value']))

    if isinstance(value, dict) and value.get('__time__'):
        return dt.time.fromisoformat(cast(str, value['value']))

    # Handle nested TaskResult rehydration
    if isinstance(value, dict) and value.get('__task_result__'):
        return task_result_from_json(value)

    # Recursively rehydrate nested dicts
    if isinstance(value, dict):
        return {k: rehydrate_value(v) for k, v in value.items()}

    # Recursively rehydrate nested lists
    if isinstance(value, list):
        return [rehydrate_value(item) for item in value]

    # Return primitive values as-is
    return value
485
+
486
+
487
def json_to_args(j: Json) -> List[Any]:
    """
    Deserialize a JSON value to a list of positional arguments,
    rehydrating any embedded Pydantic models.

    Args:
        j: The JSON value to deserialize (a list, or None for no args).

    Returns:
        A list of arguments with Pydantic models rehydrated.

    Raises:
        SerializationError: If the JSON value is neither None nor a list.
    """
    if j is None:
        return []
    if not isinstance(j, list):
        raise SerializationError('Args payload is not a list JSON.')
    return [rehydrate_value(entry) for entry in j]
505
+
506
+
507
def json_to_kwargs(j: Json) -> Dict[str, Any]:
    """
    Deserialize a JSON value to a dictionary of keyword arguments,
    rehydrating any embedded Pydantic models.

    Args:
        j: The JSON value to deserialize (a dict, or None for no kwargs).

    Returns:
        A dictionary of keyword arguments with Pydantic models rehydrated.

    Raises:
        SerializationError: If the JSON value is neither None nor a dict.
    """
    if j is None:
        return {}
    if not isinstance(j, dict):
        raise SerializationError('Kwargs payload is not a dict JSON.')
    return {key: rehydrate_value(val) for key, val in j.items()}
525
+
526
+
527
def task_result_from_json(j: Json) -> TaskResult[Any, TaskError]:
    """
    Rehydrate `TaskResult` from JSON.
    NOTES:
    - We don't recreate `Exception` objects;
    - We keep the flattened structure inside `TaskError.exception` (as dict) or `None`.

    Args:
        j: The JSON value to deserialize (a dict with the "__task_result__"
            marker, or the legacy bare {"ok"/"err"} shape).

    Returns:
        A `TaskResult`. Rehydration failures on the ok-path are converted
        into an err-result with `LibraryErrorCode.PYDANTIC_HYDRATION_ERROR`
        rather than raised.

    Raises:
        SerializationError: If `j` is not a recognizable TaskResult payload.
    """
    if not isinstance(j, dict) or '__task_result__' not in j:
        # Accept legacy "ok"/"err" shape if present
        if isinstance(j, dict) and ('ok' in j or 'err' in j):
            payload = j
        else:
            raise SerializationError('Not a TaskResult JSON')
    else:
        payload = j

    ok = payload.get('ok', None)
    err = payload.get('err', None)

    # meaning task itself returned an error
    if err is not None:
        # Build TaskError from dict, letting pydantic validate;
        # strip the internal "__task_error__" marker key first.
        if isinstance(err, dict) and err.get('__task_error__'):
            err = {k: v for k, v in err.items() if k != '__task_error__'}
        task_err = TaskError.model_validate(err)
        return TaskResult(err=task_err)
    else:
        # Try to rehydrate pydantic BaseModel if we have metadata (using reusable function)
        try:
            ok_value = rehydrate_value(ok)
            return TaskResult(ok=ok_value)
        except SerializationError as e:
            # Any failure during rehydration becomes a library error
            logger.warning(f'PYDANTIC_HYDRATION_ERROR: {e}')
            return TaskResult(
                err=TaskError(
                    error_code=LibraryErrorCode.PYDANTIC_HYDRATION_ERROR,
                    message=str(e),
                    data={},
                )
            )
574
+
575
+
576
def serialize_task_options(task_options: TaskOptions) -> str:
    """
    Serialize a `TaskOptions` model to a JSON string.

    `auto_retry_for` entries are normalized to plain strings so both raw
    strings and `LibraryErrorCode` enum members serialize uniformly;
    `retry_policy` is dumped via pydantic and `good_until` as ISO-8601.

    Args:
        task_options: The options to serialize.

    Returns:
        A JSON string with keys "auto_retry_for", "retry_policy", "good_until".
    """
    # Normalize auto_retry_for entries to plain strings for JSON (support enums)
    auto_retry: Optional[list[str]] = None
    if task_options.auto_retry_for is not None:
        auto_retry = [
            entry.value if isinstance(entry, LibraryErrorCode) else str(entry)
            for entry in task_options.auto_retry_for
        ]

    retry_policy = (
        task_options.retry_policy.model_dump() if task_options.retry_policy else None
    )
    good_until = (
        task_options.good_until.isoformat() if task_options.good_until else None
    )

    return dumps_json(
        {
            'auto_retry_for': auto_retry,
            'retry_policy': retry_policy,
            'good_until': good_until,
        }
    )