hatchet-sdk 1.14.1__py3-none-any.whl → 1.14.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release has been flagged as potentially problematic.

This version of hatchet-sdk might be problematic; consult the registry's advisory page for hatchet-sdk for details.

hatchet_sdk/__init__.py CHANGED
@@ -11,9 +11,9 @@ from hatchet_sdk.clients.listeners.run_event_listener import (
11
11
  StepRunEventType,
12
12
  WorkflowRunEventType,
13
13
  )
14
- from hatchet_sdk.clients.rest.models.accept_invite_request import AcceptInviteRequest
15
14
 
16
15
  # import models into sdk package
16
+ from hatchet_sdk.clients.rest.models.accept_invite_request import AcceptInviteRequest
17
17
  from hatchet_sdk.clients.rest.models.api_error import APIError
18
18
  from hatchet_sdk.clients.rest.models.api_errors import APIErrors
19
19
  from hatchet_sdk.clients.rest.models.api_meta import APIMeta
@@ -166,6 +166,7 @@ from hatchet_sdk.runnables.types import (
166
166
  )
167
167
  from hatchet_sdk.runnables.workflow import TaskRunRef
168
168
  from hatchet_sdk.utils.opentelemetry import OTelAttribute
169
+ from hatchet_sdk.utils.serde import remove_null_unicode_character
169
170
  from hatchet_sdk.worker.worker import Worker, WorkerStartOptions, WorkerStatus
170
171
  from hatchet_sdk.workflow_run import WorkflowRunRef
171
172
 
@@ -290,5 +291,6 @@ __all__ = [
290
291
  "WorkflowVersionDefinition",
291
292
  "WorkflowVersionMeta",
292
293
  "or_",
294
+ "remove_null_unicode_character",
293
295
  "workflow",
294
296
  ]
@@ -1,8 +1,13 @@
1
+ from collections.abc import Callable
1
2
  from typing import ParamSpec, TypeVar
2
3
 
4
+ import tenacity
5
+
3
6
  from hatchet_sdk.clients.rest.api_client import ApiClient
4
7
  from hatchet_sdk.clients.rest.configuration import Configuration
8
+ from hatchet_sdk.clients.rest.exceptions import ServiceException
5
9
  from hatchet_sdk.config import ClientConfig
10
+ from hatchet_sdk.logger import logger
6
11
  from hatchet_sdk.utils.typing import JSONSerializableMapping
7
12
 
8
13
  ## Type variables to use with coroutines.
@@ -20,7 +25,7 @@ P = ParamSpec("P")
20
25
 
21
26
 
22
27
  def maybe_additional_metadata_to_kv(
23
- additional_metadata: dict[str, str] | JSONSerializableMapping | None
28
+ additional_metadata: dict[str, str] | JSONSerializableMapping | None,
24
29
  ) -> list[str] | None:
25
30
  if not additional_metadata:
26
31
  return None
@@ -42,3 +47,24 @@ class BaseRestClient:
42
47
 
43
48
  def client(self) -> ApiClient:
44
49
  return ApiClient(self.api_config)
50
+
51
+
52
def retry(func: Callable[P, R]) -> Callable[P, R]:
    """
    Decorator that retries transient Hatchet REST API failures.

    Wraps ``func`` with tenacity: up to 5 attempts, exponential backoff with
    jitter between attempts, and retries only when the raised exception is a
    ``ServiceException`` (a server-side REST error). Once attempts are
    exhausted the original exception is re-raised (``reraise=True``) rather
    than tenacity's ``RetryError``.

    :param func: The callable to wrap; its signature is preserved.
    :return: The wrapped callable with retry behavior applied.
    """
    return tenacity.retry(
        reraise=True,
        wait=tenacity.wait_exponential_jitter(),
        stop=tenacity.stop_after_attempt(5),
        before_sleep=_alert_on_retry,
        retry=tenacity.retry_if_exception(_should_retry),
    )(func)


def _alert_on_retry(retry_state: tenacity.RetryCallState) -> None:
    # Invoked by tenacity just before sleeping between attempts; logged at
    # debug level so routine transient retries don't spam callers' logs.
    logger.debug(
        f"Retrying {retry_state.fn}: attempt "
        f"{retry_state.attempt_number} ended with: {retry_state.outcome}",
    )


def _should_retry(ex: BaseException) -> bool:
    # Only server-side REST errors are treated as transient; client-side
    # errors (validation, auth, not-found) fail immediately.
    return isinstance(ex, ServiceException)
@@ -19,6 +19,7 @@ from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
19
19
  from hatchet_sdk.clients.v1.api_client import (
20
20
  BaseRestClient,
21
21
  maybe_additional_metadata_to_kv,
22
+ retry,
22
23
  )
23
24
  from hatchet_sdk.utils.typing import JSONSerializableMapping
24
25
 
@@ -201,6 +202,7 @@ class CronClient(BaseRestClient):
201
202
  cron_name=cron_name,
202
203
  )
203
204
 
205
+ @retry
204
206
  def list(
205
207
  self,
206
208
  offset: int | None = None,
@@ -241,6 +243,7 @@ class CronClient(BaseRestClient):
241
243
  cron_name=cron_name,
242
244
  )
243
245
 
246
+ @retry
244
247
  def get(self, cron_id: str) -> CronWorkflows:
245
248
  """
246
249
  Retrieve a specific workflow cron trigger by ID.
@@ -10,7 +10,7 @@ from hatchet_sdk.clients.rest.models.v1_filter_list import V1FilterList
10
10
  from hatchet_sdk.clients.rest.models.v1_update_filter_request import (
11
11
  V1UpdateFilterRequest,
12
12
  )
13
- from hatchet_sdk.clients.v1.api_client import BaseRestClient
13
+ from hatchet_sdk.clients.v1.api_client import BaseRestClient, retry
14
14
  from hatchet_sdk.utils.typing import JSONSerializableMapping
15
15
 
16
16
 
@@ -41,6 +41,7 @@ class FiltersClient(BaseRestClient):
41
41
  """
42
42
  return await asyncio.to_thread(self.list, limit, offset, workflow_ids, scopes)
43
43
 
44
+ @retry
44
45
  def list(
45
46
  self,
46
47
  limit: int | None = None,
@@ -67,6 +68,7 @@ class FiltersClient(BaseRestClient):
67
68
  scopes=scopes,
68
69
  )
69
70
 
71
+ @retry
70
72
  def get(
71
73
  self,
72
74
  filter_id: str,
@@ -3,7 +3,7 @@ import asyncio
3
3
  from hatchet_sdk.clients.rest.api.log_api import LogApi
4
4
  from hatchet_sdk.clients.rest.api_client import ApiClient
5
5
  from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList
6
- from hatchet_sdk.clients.v1.api_client import BaseRestClient
6
+ from hatchet_sdk.clients.v1.api_client import BaseRestClient, retry
7
7
 
8
8
 
9
9
  class LogsClient(BaseRestClient):
@@ -14,6 +14,7 @@ class LogsClient(BaseRestClient):
14
14
  def _la(self, client: ApiClient) -> LogApi:
15
15
  return LogApi(client)
16
16
 
17
+ @retry
17
18
  def list(self, task_run_id: str) -> V1LogLineList:
18
19
  """
19
20
  List log lines for a given task run.
@@ -12,6 +12,7 @@ from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatu
12
12
  from hatchet_sdk.clients.v1.api_client import (
13
13
  BaseRestClient,
14
14
  maybe_additional_metadata_to_kv,
15
+ retry,
15
16
  )
16
17
  from hatchet_sdk.utils.typing import JSONSerializableMapping
17
18
 
@@ -27,6 +28,7 @@ class MetricsClient(BaseRestClient):
27
28
  def _ta(self, client: ApiClient) -> TenantApi:
28
29
  return TenantApi(client)
29
30
 
31
+ @retry
30
32
  def get_workflow_metrics(
31
33
  self,
32
34
  workflow_id: str,
@@ -66,6 +68,7 @@ class MetricsClient(BaseRestClient):
66
68
  self.get_workflow_metrics, workflow_id, status, group_key
67
69
  )
68
70
 
71
+ @retry
69
72
  def get_queue_metrics(
70
73
  self,
71
74
  workflow_ids: list[str] | None = None,
@@ -105,6 +108,7 @@ class MetricsClient(BaseRestClient):
105
108
  self.get_queue_metrics, workflow_ids, additional_metadata
106
109
  )
107
110
 
111
+ @retry
108
112
  def get_task_metrics(self) -> TenantStepRunQueueMetrics:
109
113
  """
110
114
  Retrieve queue metrics
@@ -1,10 +1,15 @@
1
1
  import asyncio
2
+ from collections.abc import AsyncIterator
2
3
  from datetime import datetime, timedelta, timezone
3
4
  from typing import TYPE_CHECKING, Literal, overload
5
+ from warnings import warn
4
6
 
5
7
  from pydantic import BaseModel, model_validator
6
8
 
7
- from hatchet_sdk.clients.listeners.run_event_listener import RunEventListenerClient
9
+ from hatchet_sdk.clients.listeners.run_event_listener import (
10
+ RunEventListenerClient,
11
+ StepRunEventType,
12
+ )
8
13
  from hatchet_sdk.clients.listeners.workflow_listener import PooledWorkflowRunListener
9
14
  from hatchet_sdk.clients.rest.api.task_api import TaskApi
10
15
  from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi
@@ -13,6 +18,7 @@ from hatchet_sdk.clients.rest.models.v1_cancel_task_request import V1CancelTaskR
13
18
  from hatchet_sdk.clients.rest.models.v1_replay_task_request import V1ReplayTaskRequest
14
19
  from hatchet_sdk.clients.rest.models.v1_task_filter import V1TaskFilter
15
20
  from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus
21
+ from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary
16
22
  from hatchet_sdk.clients.rest.models.v1_task_summary_list import V1TaskSummaryList
17
23
  from hatchet_sdk.clients.rest.models.v1_trigger_workflow_run_request import (
18
24
  V1TriggerWorkflowRunRequest,
@@ -21,8 +27,11 @@ from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRu
21
27
  from hatchet_sdk.clients.v1.api_client import (
22
28
  BaseRestClient,
23
29
  maybe_additional_metadata_to_kv,
30
+ retry,
24
31
  )
25
32
  from hatchet_sdk.config import ClientConfig
33
+ from hatchet_sdk.utils.aio import gather_max_concurrency
34
+ from hatchet_sdk.utils.datetimes import partition_date_range
26
35
  from hatchet_sdk.utils.typing import JSONSerializableMapping
27
36
 
28
37
  if TYPE_CHECKING:
@@ -110,6 +119,7 @@ class RunsClient(BaseRestClient):
110
119
  def _ta(self, client: ApiClient) -> TaskApi:
111
120
  return TaskApi(client)
112
121
 
122
+ @retry
113
123
  def get(self, workflow_run_id: str) -> V1WorkflowRunDetails:
114
124
  """
115
125
  Get workflow run details for a given workflow run ID.
@@ -129,6 +139,7 @@ class RunsClient(BaseRestClient):
129
139
  """
130
140
  return await asyncio.to_thread(self.get, workflow_run_id)
131
141
 
142
+ @retry
132
143
  def get_status(self, workflow_run_id: str) -> V1TaskStatus:
133
144
  """
134
145
  Get workflow run status for a given workflow run ID.
@@ -148,6 +159,152 @@ class RunsClient(BaseRestClient):
148
159
  """
149
160
  return await asyncio.to_thread(self.get_status, workflow_run_id)
150
161
 
162
    @retry
    def list_with_pagination(
        self,
        since: datetime | None = None,
        only_tasks: bool = False,
        offset: int | None = None,
        limit: int | None = None,
        statuses: list[V1TaskStatus] | None = None,
        until: datetime | None = None,
        additional_metadata: dict[str, str] | None = None,
        workflow_ids: list[str] | None = None,
        worker_id: str | None = None,
        parent_task_external_id: str | None = None,
        triggering_event_external_id: str | None = None,
    ) -> list[V1TaskSummary]:
        """
        List task runs according to a set of filters, paginating through days.

        The requested window (defaulting to the last 24 hours) is split into
        per-UTC-day sub-ranges via ``partition_date_range``, one list request
        is issued per sub-range, results are de-duplicated by run ID, and the
        merged list is returned sorted by creation time, newest first.

        :param since: The start time for filtering task runs.
        :param only_tasks: Whether to only list task runs.
        :param offset: The offset for pagination.
        :param limit: The maximum number of task runs to return.
        :param statuses: The statuses to filter task runs by.
        :param until: The end time for filtering task runs.
        :param additional_metadata: Additional metadata to filter task runs by.
        :param workflow_ids: The workflow IDs to filter task runs by.
        :param worker_id: The worker ID to filter task runs by.
        :param parent_task_external_id: The parent task external ID to filter task runs by.
        :param triggering_event_external_id: The event id that triggered the task run.

        :return: A list of task runs matching the specified filters.
        """

        date_ranges = partition_date_range(
            since=since or datetime.now(tz=timezone.utc) - timedelta(days=1),
            until=until or datetime.now(tz=timezone.utc),
        )

        # NOTE(review): `offset` and `limit` are forwarded to every per-day
        # request, so the combined result may contain more than `limit` runs
        # — confirm this is the intended semantics.
        with self.client() as client:
            responses = [
                self._wra(client).v1_workflow_run_list(
                    tenant=self.client_config.tenant_id,
                    since=s,
                    until=u,
                    only_tasks=only_tasks,
                    offset=offset,
                    limit=limit,
                    statuses=statuses,
                    additional_metadata=maybe_additional_metadata_to_kv(
                        additional_metadata
                    ),
                    workflow_ids=workflow_ids,
                    worker_id=worker_id,
                    parent_task_external_id=parent_task_external_id,
                    triggering_event_external_id=triggering_event_external_id,
                )
                for s, u in date_ranges
            ]

        ## Hack for uniqueness: the same run can surface in more than one
        ## per-day response, so keying by run ID drops duplicates.
        run_id_to_run = {
            run.metadata.id: run for record in responses for run in record.rows
        }

        return sorted(
            run_id_to_run.values(),
            key=lambda x: x.created_at,
            reverse=True,
        )
231
+
232
    @retry
    async def aio_list_with_pagination(
        self,
        since: datetime | None = None,
        only_tasks: bool = False,
        offset: int | None = None,
        limit: int | None = None,
        statuses: list[V1TaskStatus] | None = None,
        until: datetime | None = None,
        additional_metadata: dict[str, str] | None = None,
        workflow_ids: list[str] | None = None,
        worker_id: str | None = None,
        parent_task_external_id: str | None = None,
        triggering_event_external_id: str | None = None,
    ) -> list[V1TaskSummary]:
        """
        List task runs according to a set of filters, paginating through days.

        Async counterpart of ``list_with_pagination``: the window (defaulting
        to the last 24 hours) is split into per-UTC-day sub-ranges, each list
        request runs in a worker thread with at most 3 in flight at once,
        results are de-duplicated by run ID and sorted newest-first.

        NOTE(review): ``@retry`` is tenacity-based and decorates a coroutine
        function here — recent tenacity versions detect and support this, but
        confirm against the pinned tenacity version.

        :param since: The start time for filtering task runs.
        :param only_tasks: Whether to only list task runs.
        :param offset: The offset for pagination.
        :param limit: The maximum number of task runs to return.
        :param statuses: The statuses to filter task runs by.
        :param until: The end time for filtering task runs.
        :param additional_metadata: Additional metadata to filter task runs by.
        :param workflow_ids: The workflow IDs to filter task runs by.
        :param worker_id: The worker ID to filter task runs by.
        :param parent_task_external_id: The parent task external ID to filter task runs by.
        :param triggering_event_external_id: The event id that triggered the task run.

        :return: A list of task runs matching the specified filters.
        """

        date_ranges = partition_date_range(
            since=since or datetime.now(tz=timezone.utc) - timedelta(days=1),
            until=until or datetime.now(tz=timezone.utc),
        )

        with self.client() as client:
            # One blocking REST call per day partition, each offloaded to a
            # thread; gather_max_concurrency caps in-flight requests at 3.
            coros = [
                asyncio.to_thread(
                    self._wra(client).v1_workflow_run_list,
                    tenant=self.client_config.tenant_id,
                    since=s,
                    until=u,
                    only_tasks=only_tasks,
                    offset=offset,
                    limit=limit,
                    statuses=statuses,
                    additional_metadata=maybe_additional_metadata_to_kv(
                        additional_metadata
                    ),
                    workflow_ids=workflow_ids,
                    worker_id=worker_id,
                    parent_task_external_id=parent_task_external_id,
                    triggering_event_external_id=triggering_event_external_id,
                )
                for s, u in date_ranges
            ]

            responses = await gather_max_concurrency(
                *coros,
                max_concurrency=3,
            )

        ## Hack for uniqueness: the same run can surface in more than one
        ## per-day response, so keying by run ID drops duplicates.
        run_id_to_run = {
            run.metadata.id: run for record in responses for run in record.rows
        }

        return sorted(
            run_id_to_run.values(),
            key=lambda x: x.created_at,
            reverse=True,
        )
307
+
151
308
  async def aio_list(
152
309
  self,
153
310
  since: datetime | None = None,
@@ -181,7 +338,7 @@ class RunsClient(BaseRestClient):
181
338
  """
182
339
  return await asyncio.to_thread(
183
340
  self.list,
184
- since=since or datetime.now(tz=timezone.utc) - timedelta(days=1),
341
+ since=since,
185
342
  only_tasks=only_tasks,
186
343
  offset=offset,
187
344
  limit=limit,
@@ -194,6 +351,7 @@ class RunsClient(BaseRestClient):
194
351
  triggering_event_external_id=triggering_event_external_id,
195
352
  )
196
353
 
354
+ @retry
197
355
  def list(
198
356
  self,
199
357
  since: datetime | None = None,
@@ -225,10 +383,22 @@ class RunsClient(BaseRestClient):
225
383
 
226
384
  :return: A list of task runs matching the specified filters.
227
385
  """
386
+
387
+ since = since or datetime.now(tz=timezone.utc) - timedelta(days=1)
388
+ until = until or datetime.now(tz=timezone.utc)
389
+
390
+ if (until - since).days >= 7:
391
+ warn(
392
+ "Listing runs with a date range longer than 7 days may result in performance issues. "
393
+ "Consider using `list_with_pagination` or `aio_list_with_pagination` instead.",
394
+ RuntimeWarning,
395
+ stacklevel=2,
396
+ )
397
+
228
398
  with self.client() as client:
229
399
  return self._wra(client).v1_workflow_run_list(
230
400
  tenant=self.client_config.tenant_id,
231
- since=since or datetime.now(tz=timezone.utc) - timedelta(days=1),
401
+ since=since,
232
402
  only_tasks=only_tasks,
233
403
  offset=offset,
234
404
  limit=limit,
@@ -376,6 +546,7 @@ class RunsClient(BaseRestClient):
376
546
  """
377
547
  return await asyncio.to_thread(self.bulk_cancel, opts)
378
548
 
549
+ @retry
379
550
  def get_result(self, run_id: str) -> JSONSerializableMapping:
380
551
  """
381
552
  Get the result of a workflow run by its external ID.
@@ -413,3 +584,13 @@ class RunsClient(BaseRestClient):
413
584
  workflow_run_listener=self.workflow_run_listener,
414
585
  runs_client=self,
415
586
  )
587
+
588
    async def subscribe_to_stream(
        self,
        workflow_run_id: str,
    ) -> AsyncIterator[str]:
        """
        Subscribe to a workflow run's event stream, yielding stream payloads.

        :param workflow_run_id: The external ID of the workflow run to stream.
        :return: An async iterator over the payloads of STREAM-type step-run
            events; all other event types are filtered out.
        """
        ref = self.get_run_ref(workflow_run_id=workflow_run_id)

        # Filter the run's full event stream down to explicit stream events;
        # lifecycle events (started/completed/etc.) are dropped.
        async for chunk in ref.stream():
            if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
                yield chunk.payload
@@ -21,6 +21,7 @@ from hatchet_sdk.clients.rest.models.workflow_run_order_by_direction import (
21
21
  from hatchet_sdk.clients.v1.api_client import (
22
22
  BaseRestClient,
23
23
  maybe_additional_metadata_to_kv,
24
+ retry,
24
25
  )
25
26
  from hatchet_sdk.utils.typing import JSONSerializableMapping
26
27
 
@@ -153,6 +154,7 @@ class ScheduledClient(BaseRestClient):
153
154
  statuses=statuses,
154
155
  )
155
156
 
157
+ @retry
156
158
  def list(
157
159
  self,
158
160
  offset: int | None = None,
@@ -193,6 +195,7 @@ class ScheduledClient(BaseRestClient):
193
195
  statuses=statuses,
194
196
  )
195
197
 
198
+ @retry
196
199
  def get(self, scheduled_id: str) -> ScheduledWorkflows:
197
200
  """
198
201
  Retrieves a specific scheduled workflow by scheduled run trigger ID.
@@ -3,7 +3,7 @@ import asyncio
3
3
  from hatchet_sdk.clients.rest.api.tenant_api import TenantApi
4
4
  from hatchet_sdk.clients.rest.api_client import ApiClient
5
5
  from hatchet_sdk.clients.rest.models.tenant import Tenant
6
- from hatchet_sdk.clients.v1.api_client import BaseRestClient
6
+ from hatchet_sdk.clients.v1.api_client import BaseRestClient, retry
7
7
 
8
8
 
9
9
  class TenantClient(BaseRestClient):
@@ -14,6 +14,7 @@ class TenantClient(BaseRestClient):
14
14
  def _ta(self, client: ApiClient) -> TenantApi:
15
15
  return TenantApi(client)
16
16
 
17
+ @retry
17
18
  def get(self) -> Tenant:
18
19
  """
19
20
  Get the current tenant.
@@ -5,7 +5,7 @@ from hatchet_sdk.clients.rest.api_client import ApiClient
5
5
  from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
6
6
  from hatchet_sdk.clients.rest.models.worker import Worker
7
7
  from hatchet_sdk.clients.rest.models.worker_list import WorkerList
8
- from hatchet_sdk.clients.v1.api_client import BaseRestClient
8
+ from hatchet_sdk.clients.v1.api_client import BaseRestClient, retry
9
9
 
10
10
 
11
11
  class WorkersClient(BaseRestClient):
@@ -16,6 +16,7 @@ class WorkersClient(BaseRestClient):
16
16
  def _wa(self, client: ApiClient) -> WorkerApi:
17
17
  return WorkerApi(client)
18
18
 
19
+ @retry
19
20
  def get(self, worker_id: str) -> Worker:
20
21
  """
21
22
  Get a worker by its ID.
@@ -35,6 +36,7 @@ class WorkersClient(BaseRestClient):
35
36
  """
36
37
  return await asyncio.to_thread(self.get, worker_id)
37
38
 
39
+ @retry
38
40
  def list(
39
41
  self,
40
42
  ) -> WorkerList:
@@ -6,7 +6,7 @@ from hatchet_sdk.clients.rest.api_client import ApiClient
6
6
  from hatchet_sdk.clients.rest.models.workflow import Workflow
7
7
  from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
8
8
  from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
9
- from hatchet_sdk.clients.v1.api_client import BaseRestClient
9
+ from hatchet_sdk.clients.v1.api_client import BaseRestClient, retry
10
10
 
11
11
 
12
12
  class WorkflowsClient(BaseRestClient):
@@ -31,6 +31,7 @@ class WorkflowsClient(BaseRestClient):
31
31
  """
32
32
  return await asyncio.to_thread(self.get, workflow_id)
33
33
 
34
+ @retry
34
35
  def get(self, workflow_id: str) -> Workflow:
35
36
  """
36
37
  Get a workflow by its ID.
@@ -41,6 +42,7 @@ class WorkflowsClient(BaseRestClient):
41
42
  with self.client() as client:
42
43
  return self._wa(client).workflow_get(workflow_id)
43
44
 
45
+ @retry
44
46
  def list(
45
47
  self,
46
48
  workflow_name: str | None = None,
@@ -81,6 +83,7 @@ class WorkflowsClient(BaseRestClient):
81
83
  """
82
84
  return await asyncio.to_thread(self.list, workflow_name, limit, offset)
83
85
 
86
+ @retry
84
87
  def get_version(
85
88
  self, workflow_id: str, version: str | None = None
86
89
  ) -> WorkflowVersion:
@@ -1,6 +1,6 @@
1
1
  import asyncio
2
2
  from collections.abc import Callable
3
- from datetime import datetime, timedelta, timezone
3
+ from datetime import datetime, timedelta
4
4
  from functools import cached_property
5
5
  from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast, get_type_hints
6
6
 
@@ -310,9 +310,9 @@ class BaseWorkflow(Generic[TWorkflowInput]):
310
310
 
311
311
  :returns: A list of `V1TaskSummary` objects representing the runs of the workflow.
312
312
  """
313
- response = self.client.runs.list(
313
+ return self.client.runs.list_with_pagination(
314
314
  workflow_ids=[self.id],
315
- since=since or datetime.now(tz=timezone.utc) - timedelta(days=1),
315
+ since=since,
316
316
  only_tasks=only_tasks,
317
317
  offset=offset,
318
318
  limit=limit,
@@ -324,8 +324,6 @@ class BaseWorkflow(Generic[TWorkflowInput]):
324
324
  triggering_event_external_id=triggering_event_external_id,
325
325
  )
326
326
 
327
- return response.rows
328
-
329
327
  async def aio_list_runs(
330
328
  self,
331
329
  since: datetime | None = None,
@@ -355,9 +353,9 @@ class BaseWorkflow(Generic[TWorkflowInput]):
355
353
 
356
354
  :returns: A list of `V1TaskSummary` objects representing the runs of the workflow.
357
355
  """
358
- return await asyncio.to_thread(
359
- self.list_runs,
360
- since=since or datetime.now(tz=timezone.utc) - timedelta(days=1),
356
+ return await self.client.runs.aio_list_with_pagination(
357
+ workflow_ids=[self.id],
358
+ since=since,
361
359
  only_tasks=only_tasks,
362
360
  offset=offset,
363
361
  limit=limit,
@@ -0,0 +1,21 @@
1
import asyncio
from collections.abc import Coroutine
from typing import TypeVar

T = TypeVar("T")


async def gather_max_concurrency(
    *tasks: Coroutine[None, None, T],
    max_concurrency: int,
) -> list[T]:
    """
    Await all of *tasks* concurrently, allowing at most ``max_concurrency``
    of them to run at any one time.

    :param tasks: The coroutines to execute.
    :param max_concurrency: Upper bound on simultaneously running coroutines.
    :return: The coroutine results, in the same order as *tasks*. Exceptions
        propagate immediately (``return_exceptions=False``).
    """
    semaphore = asyncio.Semaphore(max_concurrency)

    async def run_limited(coro: Coroutine[None, None, T]) -> T:
        # Each coroutine waits its turn on the shared semaphore before running.
        async with semaphore:
            return await coro

    limited = (run_limited(coro) for coro in tasks)
    return await asyncio.gather(*limited, return_exceptions=False)
@@ -0,0 +1,35 @@
1
from datetime import datetime, timedelta, timezone
from typing import TypeVar

T = TypeVar("T")
R = TypeVar("R")


def _to_utc(dt: datetime) -> datetime:
    """Normalize *dt* to UTC; a naive datetime is assumed to already be UTC."""
    if dt.tzinfo:
        return dt.astimezone(timezone.utc)

    return dt.replace(tzinfo=timezone.utc)


def partition_date_range(
    since: datetime, until: datetime
) -> list[tuple[datetime, datetime]]:
    """
    Split the interval from *since* to *until* into consecutive sub-ranges
    that break at UTC midnight.

    Both endpoints are normalized to UTC first. Each sub-range ends at the
    next day boundary (or at *until*, whichever comes first), and consecutive
    sub-ranges share their boundary instant. An empty list is returned when
    *since* is not strictly before *until*.

    :param since: Start of the window (inclusive).
    :param until: End of the window (exclusive for partitioning purposes).
    :return: Ordered ``(start, end)`` pairs covering the window.
    """
    cursor = _to_utc(since)
    end = _to_utc(until)

    chunks: list[tuple[datetime, datetime]] = []

    while cursor < end:
        # Midnight (UTC) of the day after the current cursor position.
        midnight = (cursor + timedelta(days=1)).replace(
            hour=0, minute=0, second=0, microsecond=0
        )

        chunks.append((cursor, min(midnight, end)))
        cursor = midnight

    return chunks
@@ -0,0 +1,52 @@
1
+ from typing import Any, TypeVar, cast, overload
2
+
3
+ T = TypeVar("T")
4
+ K = TypeVar("K")
5
+
6
+
7
+ @overload
8
+ def remove_null_unicode_character(data: str) -> str: ...
9
+
10
+
11
+ @overload
12
+ def remove_null_unicode_character(data: dict[K, T]) -> dict[K, T]: ...
13
+
14
+
15
+ @overload
16
+ def remove_null_unicode_character(data: list[T]) -> list[T]: ...
17
+
18
+
19
+ @overload
20
+ def remove_null_unicode_character(data: tuple[T, ...]) -> tuple[T, ...]: ...
21
+
22
+
23
+ def remove_null_unicode_character(
24
+ data: str | dict[K, T] | list[T] | tuple[T, ...],
25
+ ) -> str | dict[K, T] | list[T] | tuple[T, ...]:
26
+ """
27
+ Recursively traverse a dictionary (a task's output) and remove the unicode escape sequence \\u0000 which will cause unexpected behavior in Hatchet.
28
+
29
+ Needed as Hatchet does not support \\u0000 in task outputs
30
+
31
+ :param data: The task output (a JSON-serializable dictionary or mapping)
32
+ :return: The same dictionary with all \\u0000 characters removed from strings, and nested dictionaries/lists processed recursively.
33
+ :raises TypeError: If the input is not a string, dictionary, list, or tuple.
34
+ """
35
+ if isinstance(data, str):
36
+ return data.replace("\u0000", "")
37
+
38
+ if isinstance(data, dict):
39
+ return {
40
+ key: remove_null_unicode_character(cast(Any, value))
41
+ for key, value in data.items()
42
+ }
43
+
44
+ if isinstance(data, list):
45
+ return [remove_null_unicode_character(cast(Any, item)) for item in data]
46
+
47
+ if isinstance(data, tuple):
48
+ return tuple(remove_null_unicode_character(cast(Any, item)) for item in data)
49
+
50
+ raise TypeError(
51
+ f"Unsupported type {type(data)}. Expected str, dict, list, or tuple."
52
+ )
@@ -4,9 +4,9 @@ import functools
4
4
  import json
5
5
  from collections.abc import Callable
6
6
  from concurrent.futures import ThreadPoolExecutor
7
- from contextlib import suppress
8
7
  from enum import Enum
9
8
  from multiprocessing import Queue
9
+ from textwrap import dedent
10
10
  from threading import Thread, current_thread
11
11
  from typing import Any, Literal, cast, overload
12
12
 
@@ -49,6 +49,7 @@ from hatchet_sdk.runnables.contextvars import (
49
49
  )
50
50
  from hatchet_sdk.runnables.task import Task
51
51
  from hatchet_sdk.runnables.types import R, TWorkflowInput
52
+ from hatchet_sdk.utils.serde import remove_null_unicode_character
52
53
  from hatchet_sdk.worker.action_listener_process import ActionEvent
53
54
  from hatchet_sdk.worker.runner.utils.capture_logs import (
54
55
  AsyncLogSender,
@@ -410,7 +411,7 @@ class Runner:
410
411
  )
411
412
 
412
413
  ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
413
- async def handle_start_step_run(self, action: Action) -> None:
414
+ async def handle_start_step_run(self, action: Action) -> Exception | None:
414
415
  action_name = action.action_id
415
416
 
416
417
  # Find the corresponding action function from the registry
@@ -444,8 +445,11 @@ class Runner:
444
445
 
445
446
  ## FIXME: Handle cancelled exceptions and other special exceptions
446
447
  ## that we don't want to suppress here
447
- with suppress(Exception):
448
+ try:
448
449
  await task
450
+ except Exception as e:
451
+ ## Used for the OTel instrumentor to capture exceptions
452
+ return e
449
453
 
450
454
  ## Once the step run completes, we need to remove the workflow spawn index
451
455
  ## so we don't leak memory
@@ -453,6 +457,8 @@ class Runner:
453
457
  async with spawn_index_lock:
454
458
  workflow_spawn_indices.pop(action.key)
455
459
 
460
+ return None
461
+
456
462
  ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor
457
463
  async def handle_start_group_key_run(self, action: Action) -> Exception | None:
458
464
  action_name = action.action_id
@@ -550,21 +556,37 @@ class Runner:
550
556
  return ""
551
557
 
552
558
  if isinstance(output, BaseModel):
553
- output = output.model_dump()
559
+ output = output.model_dump(mode="json")
554
560
 
555
561
  if not isinstance(output, dict):
556
562
  raise IllegalTaskOutputError(
557
563
  f"Tasks must return either a dictionary or a Pydantic BaseModel which can be serialized to a JSON object. Got object of type {type(output)} instead."
558
564
  )
559
565
 
560
- if output is not None:
561
- try:
562
- return json.dumps(output, default=str)
563
- except Exception as e:
564
- logger.error(f"Could not serialize output: {e}")
565
- return str(output)
566
+ if output is None:
567
+ return ""
568
+
569
+ try:
570
+ serialized_output = json.dumps(output, default=str)
571
+ except Exception as e:
572
+ logger.error(f"Could not serialize output: {e}")
573
+ serialized_output = str(output)
574
+
575
+ if "\\u0000" in serialized_output:
576
+ raise IllegalTaskOutputError(
577
+ dedent(
578
+ f"""
579
+ Task outputs cannot contain the unicode null character \\u0000
580
+
581
+ Please see this Discord thread: https://discord.com/channels/1088927970518909068/1384324576166678710/1386714014565928992
582
+ Relevant Postgres documentation: https://www.postgresql.org/docs/current/datatype-json.html
583
+
584
+ Use `hatchet_sdk.{remove_null_unicode_character.__name__}` to sanitize your output if you'd like to remove the character.
585
+ """
586
+ )
587
+ )
566
588
 
567
- return ""
589
+ return serialized_output
568
590
 
569
591
  async def wait_for_tasks(self) -> None:
570
592
  running = len(self.tasks.keys())
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hatchet-sdk
3
- Version: 1.14.1
3
+ Version: 1.14.3
4
4
  Summary:
5
5
  License: MIT
6
6
  Author: Alexander Belanger
@@ -1,4 +1,4 @@
1
- hatchet_sdk/__init__.py,sha256=3iVKyZ4OnGxZWU0YtXQG9imlljPNBdPARqqCm6Ft03o,10640
1
+ hatchet_sdk/__init__.py,sha256=LdT8xJ7-95gwksRZuCzuKDM5bfLx8uW8gXGPL7VMqwc,10743
2
2
  hatchet_sdk/client.py,sha256=OXb2hOJ5p7pY5QMlM4cydb4aGyf6bDdbyWQjPMVCe64,2413
3
3
  hatchet_sdk/clients/admin.py,sha256=t8GXhjMiFp9iL0ofISNxSOKlfwaOTX2iQbZfz1G0pHU,16936
4
4
  hatchet_sdk/clients/dispatcher/action_listener.py,sha256=tFknXH9iSP0OFYYVcKeDZVrcDNIz00ZQVTxSbHpbKhI,13863
@@ -229,7 +229,7 @@ hatchet_sdk/clients/rest/models/workflow_version_meta.py,sha256=TW4R7bAuYAg_LraN
229
229
  hatchet_sdk/clients/rest/models/workflow_workers_count.py,sha256=qhzqfvjjIDyARkiiLGluMIqEmqO-diHTsjlu0Doi0yg,2875
230
230
  hatchet_sdk/clients/rest/rest.py,sha256=zZHTzgl-NBdcK6XhG23m_s9RKRONGPPItzGe407s7GA,9262
231
231
  hatchet_sdk/clients/rest/tenacity_utils.py,sha256=SQuy4x9scgBpqygRrGdjP7dJz4u4VVAEbjnieid4KcU,1029
232
- hatchet_sdk/clients/v1/api_client.py,sha256=mJQUZ3cOxlFJiwWKK5F8jBxcpNZ7A2292HucrBqurbg,1205
232
+ hatchet_sdk/clients/v1/api_client.py,sha256=Gs7OX47YsZ4mpJlItEg-MSeHk6wWb3iCwPQu1cffGZk,1974
233
233
  hatchet_sdk/conditions.py,sha256=CnhpkXgVXM3wc0kAX8KZQA6tp8NFAbdzAN2xFbw7Hb0,4522
234
234
  hatchet_sdk/config.py,sha256=DKOSCyOhFMx9d3Rvu5z9aImbOgZgwdNSg3XVzyVHn3g,5185
235
235
  hatchet_sdk/connection.py,sha256=oRxLs_lBRgHfE4jGLZJimr25ymlEJnK1ZXlh-CasjPo,2696
@@ -255,16 +255,16 @@ hatchet_sdk/contracts/workflows_pb2.py,sha256=daEsUwZnlDQ5GGLJ8WHgLdI1Tgr3lBXxGV
255
255
  hatchet_sdk/contracts/workflows_pb2.pyi,sha256=WJ3b45pWvoNmmWTWjBJt61IiAoVn61F62AG5OrRsnd8,15538
256
256
  hatchet_sdk/contracts/workflows_pb2_grpc.py,sha256=2V8E72DlJx5qlH2yiQpVCu5cQbKUba5X7T1yNrQDF_s,10819
257
257
  hatchet_sdk/exceptions.py,sha256=DG-mS0wZiB-4Pnyr-BgY-LRrAEAdgP2nqQlIheU99t4,2646
258
- hatchet_sdk/features/cron.py,sha256=42QJiSOXI9xAJw_d3nKhg0f_LLnwGQaLxBZ5JPhoT_w,9690
259
- hatchet_sdk/features/filters.py,sha256=a75EDS-gLguPEs31eVsn48mvczS2Robqju4Wr3o5B2E,6414
260
- hatchet_sdk/features/logs.py,sha256=OcmgtmNyqFJI03_5ncuSy6M-Ho7AVTa8hnO0CDE3wi4,1172
261
- hatchet_sdk/features/metrics.py,sha256=TzAEB4Ogmgcq-EB7lEWQ9V8y-15d23ZuhAgPH6It92Y,4519
258
+ hatchet_sdk/features/cron.py,sha256=k6Y-JJBPaf2Dtx-fwvNA2j7lTzHLBZpwVMA_u-p6Lvw,9723
259
+ hatchet_sdk/features/filters.py,sha256=n6PPeRiqd5SOFlcx8V2strUaCGma9JPRAOLx44XpC0o,6443
260
+ hatchet_sdk/features/logs.py,sha256=H_vQnOqiN5q_wQWVoOGAJp7eOPKFYZsLJ1Hb63LriRA,1190
261
+ hatchet_sdk/features/metrics.py,sha256=mUz51fQsrmyp_UUoZ5KxSrOmUl2mEGoYYHRuXGxl_7w,4563
262
262
  hatchet_sdk/features/rate_limits.py,sha256=eh55Z3w75cYUthqTyoWmNxj_6tN3rjebMKm3of-vxv0,2155
263
- hatchet_sdk/features/runs.py,sha256=mmarF1kXFVWz6ayoYBAzZTkXBQVNb2Xq60d_atnkj-U,16120
264
- hatchet_sdk/features/scheduled.py,sha256=bp5h6QP8B9keOsCTG1laB2GWIaXQ7ylBSNdsRFD3VU8,8906
265
- hatchet_sdk/features/tenant.py,sha256=vU6buEKVPCydpgrHFsQ_gbKgO5lRmlZG2ypsT7-O4S8,868
266
- hatchet_sdk/features/workers.py,sha256=vD6j7GCttu0fm23_XmBMdE0IuX4mUbL0adgMoC8Sk_E,2571
267
- hatchet_sdk/features/workflows.py,sha256=15MSYNIjlN1Ilk8sHq_DjLS7XuqlvcAMFuAdFLdjPGY,4012
263
+ hatchet_sdk/features/runs.py,sha256=8geS5l1CU3GCLudFORmCvXyjVVcgNKsNAPV9mLcI8sU,23004
264
+ hatchet_sdk/features/scheduled.py,sha256=t7YA9CoJrzBhH82ChTSFWaTF_dyoC9i1O_wf9ywsphc,8939
265
+ hatchet_sdk/features/tenant.py,sha256=xkhh5mRKCWbunk_S1iBmGR-DYR-F4mjxk8jLyYUyzNE,886
266
+ hatchet_sdk/features/workers.py,sha256=DVdno28RmtlfhMJUkaPcOMHNKXCPV0RFrXtLqV6zWyE,2600
267
+ hatchet_sdk/features/workflows.py,sha256=a_0UwX_Y-6DX8TcQ7yuiugWHvKzQmMqafKanq4HEiSg,4052
268
268
  hatchet_sdk/hatchet.py,sha256=rOuE7YPqu4setE_zGbmgvb2WN-q8HNXTDFYcp9N0Wj8,25759
269
269
  hatchet_sdk/labels.py,sha256=nATgxWE3lFxRTnfISEpoIRLGbMfAZsHF4lZTuG4Mfic,182
270
270
  hatchet_sdk/logger.py,sha256=5uOr52T4mImSQm1QvWT8HvZFK5WfPNh3Y1cBQZRFgUQ,333
@@ -276,11 +276,14 @@ hatchet_sdk/runnables/action.py,sha256=zrVHpyzIQ9XZgWwY69b_6uhZd53An4trRoLd9b3os
276
276
  hatchet_sdk/runnables/contextvars.py,sha256=T2LWiXhcSyQYJY_-pfqMjDNjf6PdtDwyXyCZ6zIyWK0,929
277
277
  hatchet_sdk/runnables/task.py,sha256=VEYabAl38U9L_igSDgZSzUL7-c-sX6YFRP0wsqGTadU,6714
278
278
  hatchet_sdk/runnables/types.py,sha256=QFayOJ_Oi8tOYI6Sjl9bwjftM96QZh9XIlfFnSNgEXI,4359
279
- hatchet_sdk/runnables/workflow.py,sha256=AVYMg_QTN9YkddFuOBa8iq9oQinRPdHo9UJsgI2TWHQ,49327
279
+ hatchet_sdk/runnables/workflow.py,sha256=SxxNhv1SAMh4Rw68_fyQJh3Su-e_zscPmILo3ghzzaU,49225
280
280
  hatchet_sdk/token.py,sha256=KjIiInwG5Kqd_FO4BSW1x_5Uc7PFbnzIVJqr50-ZldE,779
281
+ hatchet_sdk/utils/aio.py,sha256=cu1rD_UZkShtfzi7iXMYwBBaCRdxJQTdUC0_mf8nU2E,499
281
282
  hatchet_sdk/utils/backoff.py,sha256=6B5Rb5nLKw_TqqgpJMYjIBV1PTTtbOMRZCveisVhg_I,353
283
+ hatchet_sdk/utils/datetimes.py,sha256=vIZNEX8tt-bknaIuTmoLEmKVt18dBjClH3urYtCJAys,775
282
284
  hatchet_sdk/utils/opentelemetry.py,sha256=64TVwCLrUzEmcL2BUNPV_QubfiR5jajOZtVeGYLnEEA,1226
283
285
  hatchet_sdk/utils/proto_enums.py,sha256=v2gp_ZmIhPxURVXwz5lscllXwZXDl5XGXeL6gezw3o0,1241
286
+ hatchet_sdk/utils/serde.py,sha256=d2iypdK2UQCPA19NgYa0Tr7CTbk25KPbaCMqcqN3CYk,1645
284
287
  hatchet_sdk/utils/timedelta_to_expression.py,sha256=YujnBnGn7lxtkUdKIeqmOiN_ZCGBpRPjCCSzcD3jxzA,644
285
288
  hatchet_sdk/utils/typing.py,sha256=FgYnZyJSoRjNVFodxlI9gn0X8ST1KFed7xfUynIxa2U,978
286
289
  hatchet_sdk/v0/__init__.py,sha256=r3Q7l2RsLgdIkK2jjiz7-JJpD1T_Zy--Oa9MN5n_yEs,9654
@@ -507,11 +510,11 @@ hatchet_sdk/v0/workflow_run.py,sha256=jsEZprXshrSV7i_TtL5uoCL03D18zQ3NeJCq7mp97D
507
510
  hatchet_sdk/worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
508
511
  hatchet_sdk/worker/action_listener_process.py,sha256=Xzzn1dDFJrqnC9HBsh3fYI8lfpOD4Ecze47qmm_XUWE,12923
509
512
  hatchet_sdk/worker/runner/run_loop_manager.py,sha256=GeILClNXaDbsjXCQb0bBdgeyAwZGem2JdaH0t6wz__I,4053
510
- hatchet_sdk/worker/runner/runner.py,sha256=5dOLFJIEWHed2sQxSeWmqMFWVViYTnfvk70n1FRLoU4,20770
513
+ hatchet_sdk/worker/runner/runner.py,sha256=Btz7wd0DQ-5aPEZLN3T5X5__8DjuxN6H4yQEVX0eZiI,21678
511
514
  hatchet_sdk/worker/runner/utils/capture_logs.py,sha256=DKw6boqVsSCM1XvBWYuc833MZxCdSpMxg3l4aAqKPyw,3465
512
515
  hatchet_sdk/worker/worker.py,sha256=nDuRo_LishRuOCTnDonc7G7qeOoW93nRHGd-fQOE_bs,16541
513
516
  hatchet_sdk/workflow_run.py,sha256=KcylcqRwKADtnzOTjoiVr1vdr7qTZFtDeD5aRS6A4Y8,2823
514
- hatchet_sdk-1.14.1.dist-info/METADATA,sha256=Eygcnag5XEsRLK-kC1LDbF5L3YJmLIazhyLFTOZeLBY,3636
515
- hatchet_sdk-1.14.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
516
- hatchet_sdk-1.14.1.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
517
- hatchet_sdk-1.14.1.dist-info/RECORD,,
517
+ hatchet_sdk-1.14.3.dist-info/METADATA,sha256=3D-GMkNjhRG37vaE8W4lXoG1MMu_2MpnKcAwK5elTkY,3636
518
+ hatchet_sdk-1.14.3.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
519
+ hatchet_sdk-1.14.3.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
520
+ hatchet_sdk-1.14.3.dist-info/RECORD,,