hatchet-sdk 1.8.1__py3-none-any.whl → 1.8.2__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of hatchet-sdk might be problematic.

hatchet_sdk/config.py CHANGED
@@ -64,13 +64,20 @@ class ClientConfig(BaseSettings):
     )
 
     worker_preset_labels: dict[str, str] = Field(default_factory=dict)
+
     enable_force_kill_sync_threads: bool = False
+    enable_thread_pool_monitoring: bool = False
 
     @model_validator(mode="after")
     def validate_token_and_tenant(self) -> "ClientConfig":
         if not self.token:
             raise ValueError("Token must be set")
 
+        if not self.token.startswith("ey"):
+            raise ValueError(
+                f"Token must be a valid JWT. Hint: These are the first few characters of the token provided: {self.token[:5]}"
+            )
+
         if not self.tenant_id:
             self.tenant_id = get_tenant_id_from_jwt(self.token)
 
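For orientation, a minimal sketch of how the new config surface behaves. The direct-constructor usage below is an assumption (configs are normally populated from environment variables), the token value is a placeholder, and pydantic may surface the error wrapped in a ValidationError:

from hatchet_sdk.config import ClientConfig

try:
    config = ClientConfig(
        token="not-a-jwt",                   # fails the new "ey" JWT prefix check
        enable_thread_pool_monitoring=True,  # new flag in 1.8.2, defaults to False
    )
except Exception as exc:  # the ValueError raised by the model validator
    print(exc)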
@@ -2,7 +2,7 @@ import asyncio
 from enum import Enum
 from typing import Any, Awaitable, Callable, ParamSpec, Type, TypeGuard, TypeVar, Union
 
-from pydantic import BaseModel, ConfigDict, Field, model_validator
+from pydantic import BaseModel, ConfigDict, Field
 
 from hatchet_sdk.context.context import Context, DurableContext
 from hatchet_sdk.contracts.v1.workflows_pb2 import Concurrency
@@ -26,8 +26,6 @@ class StickyStrategy(str, Enum):
 
 class ConcurrencyLimitStrategy(str, Enum):
     CANCEL_IN_PROGRESS = "CANCEL_IN_PROGRESS"
-    DROP_NEWEST = "DROP_NEWEST"
-    QUEUE_NEWEST = "QUEUE_NEWEST"
     GROUP_ROUND_ROBIN = "GROUP_ROUND_ROBIN"
     CANCEL_NEWEST = "CANCEL_NEWEST"
 
@@ -82,31 +80,6 @@ class WorkflowConfig(BaseModel):
 
     task_defaults: TaskDefaults = TaskDefaults()
 
-    def _raise_for_invalid_expression(self, expr: str) -> None:
-        if not expr.startswith("input."):
-            return None
-
-        _, field = expr.split(".", maxsplit=2)
-
-        if field not in self.input_validator.model_fields.keys():
-            raise ValueError(
-                f"The concurrency expression provided relies on the `{field}` field, which was not present in `{self.input_validator.__name__}`."
-            )
-
-    @model_validator(mode="after")
-    def validate_concurrency_expression(self) -> "WorkflowConfig":
-        if not self.concurrency:
-            return self
-
-        if isinstance(self.concurrency, list):
-            for item in self.concurrency:
-                self._raise_for_invalid_expression(item.expression)
-
-        if isinstance(self.concurrency, ConcurrencyExpression):
-            self._raise_for_invalid_expression(self.concurrency.expression)
-
-        return self
-
 
 class StepType(str, Enum):
     DEFAULT = "default"
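The removed validator rejected concurrency expressions of the form input.<field> whose field was not declared on the workflow's input model; after this release the SDK no longer raises for that case at definition time. A rough sketch of the kind of definition it used to guard against (the ConcurrencyExpression import path and constructor arguments are assumed from the rest of the SDK):

from pydantic import BaseModel

from hatchet_sdk import ConcurrencyExpression, ConcurrencyLimitStrategy


class MyInput(BaseModel):
    user_id: str


# "input.group" refers to a field MyInput does not define. The removed
# model validator would have raised a ValueError when the workflow was built.
concurrency = ConcurrencyExpression(
    expression="input.group",
    max_runs=1,
    limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
)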
@@ -132,34 +132,6 @@ class BaseWorkflow(Generic[TWorkflowInput]):
     def _create_action_name(self, step: Task[TWorkflowInput, Any]) -> str:
         return self.service_name + ":" + step.name
 
-    def _raise_for_invalid_concurrency(
-        self, concurrency: ConcurrencyExpression
-    ) -> bool:
-        expr = concurrency.expression
-
-        if not expr.startswith("input."):
-            return True
-
-        _, field = expr.split(".", maxsplit=2)
-
-        if field not in self.config.input_validator.model_fields.keys():
-            raise ValueError(
-                f"The concurrency expression provided relies on the `{field}` field, which was not present in `{self.config.input_validator.__name__}`."
-            )
-
-        return True
-
-    def _validate_priority(self, default_priority: int | None) -> int | None:
-        validated_priority = (
-            max(1, min(3, default_priority)) if default_priority else None
-        )
-        if validated_priority != default_priority:
-            logger.warning(
-                "Warning: Default Priority Must be between 1 and 3 -- inclusively. Adjusted to be within the range."
-            )
-
-        return validated_priority
-
     def _is_leaf_task(self, task: Task[TWorkflowInput, Any]) -> bool:
         return not any(task in t.parents for t in self.tasks if task != t)
 
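For reference, the clamping behavior the removed _validate_priority helper implemented, as a standalone sketch. Note that falsy inputs (0 or None) pass through as None rather than being clamped up to 1:

def clamp_priority(default_priority: int | None) -> int | None:
    # Mirrors the removed helper: clamp into the inclusive range [1, 3].
    return max(1, min(3, default_priority)) if default_priority else None


assert clamp_priority(5) == 3
assert clamp_priority(2) == 2
assert clamp_priority(0) is None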
@@ -291,6 +291,9 @@ class WorkerActionListenerProcess:
         self.event_queue.put(STOP_LOOP)
 
     async def exit_gracefully(self) -> None:
+        if self.listener:
+            self.listener.stop_signal = True
+
         await self.pause_task_assignment()
 
         if self.killing:
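The added lines flip a stop flag on the listener before task assignment is paused, so its receive loop can exit cooperatively. A generic sketch of that pattern, using illustrative names rather than the SDK's internals:

import asyncio


class Listener:
    def __init__(self) -> None:
        self.stop_signal = False

    async def run(self) -> None:
        while not self.stop_signal:
            # ... fetch and dispatch one action ...
            await asyncio.sleep(0.1)


async def exit_gracefully(listener: Listener) -> None:
    listener.stop_signal = True  # the loop observes this on its next iteration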
@@ -105,6 +105,9 @@ class Runner:
 
         self.lifespan_context = lifespan_context
 
+        if self.config.enable_thread_pool_monitoring:
+            self.start_background_monitoring()
+
     def create_workflow_run_url(self, action: Action) -> str:
         return f"{self.config.server_url}/workflow-runs/{action.workflow_run_id}?tenant={action.tenant_id}"
 
@@ -270,6 +273,47 @@ class Runner:
         finally:
             self.cleanup_run_id(action.key)
 
+    async def log_thread_pool_status(self) -> None:
+        thread_pool_details = {
+            "max_workers": self.slots,
+            "total_threads": len(self.thread_pool._threads),
+            "idle_threads": self.thread_pool._idle_semaphore._value,
+            "active_threads": len(self.threads),
+            "pending_tasks": len(self.tasks),
+            "queue_size": self.thread_pool._work_queue.qsize(),
+            "threads_alive": sum(1 for t in self.thread_pool._threads if t.is_alive()),
+            "threads_daemon": sum(1 for t in self.thread_pool._threads if t.daemon),
+        }
+
+        logger.warning("Thread pool detailed status %s", thread_pool_details)
+
+    async def _start_monitoring(self) -> None:
+        logger.debug("Thread pool monitoring started")
+        try:
+            while True:
+                await self.log_thread_pool_status()
+
+                for key in self.threads.keys():
+                    if key not in self.tasks:
+                        logger.debug(f"Potential zombie thread found for key {key}")
+
+                for key, task in self.tasks.items():
+                    if task.done() and key in self.threads:
+                        logger.debug(
+                            f"Task is done but thread still exists for key {key}"
+                        )
+
+                await asyncio.sleep(60)
+        except asyncio.CancelledError:
+            logger.warning("Thread pool monitoring task cancelled")
+        except Exception as e:
+            logger.exception(f"Error in thread pool monitoring: {e}")
+
+    def start_background_monitoring(self) -> None:
+        loop = asyncio.get_event_loop()
+        self.monitoring_task = loop.create_task(self._start_monitoring())
+        logger.debug("Started thread pool monitoring background task")
+
     def cleanup_run_id(self, key: ActionKey) -> None:
         if key in self.tasks:
             del self.tasks[key]
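The monitoring added above is an asyncio background task that periodically reads the ThreadPoolExecutor's private bookkeeping (_threads, _idle_semaphore, _work_queue) and logs it. A self-contained sketch of the same approach; these attributes are CPython implementation details rather than public API, so they may change between Python versions:

import asyncio
import logging
from concurrent.futures import ThreadPoolExecutor

logger = logging.getLogger(__name__)


async def monitor_pool(pool: ThreadPoolExecutor, interval: float = 60.0) -> None:
    # Periodically log pool internals; relies on CPython-private attributes.
    while True:
        logger.warning(
            "thread pool status %s",
            {
                "total_threads": len(pool._threads),
                "idle_threads": pool._idle_semaphore._value,
                "queue_size": pool._work_queue.qsize(),
                "threads_alive": sum(1 for t in pool._threads if t.is_alive()),
            },
        )
        await asyncio.sleep(interval)


# Typical wiring from async code already running on the event loop:
# monitoring_task = asyncio.get_running_loop().create_task(monitor_pool(pool))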
@@ -419,23 +463,18 @@ class Runner:
         try:
             # call cancel to signal the context to stop
             if key in self.contexts:
-                context = self.contexts.get(key)
-
-                if context:
-                    context._set_cancellation_flag()
+                self.contexts[key]._set_cancellation_flag()
 
             await asyncio.sleep(1)
 
             if key in self.tasks:
-                future = self.tasks.get(key)
-
-                if future:
-                    future.cancel()
+                self.tasks[key].cancel()
 
             # check if thread is still running, if so, print a warning
             if key in self.threads:
-                thread = self.threads.get(key)
-                if thread and self.config.enable_force_kill_sync_threads:
+                thread = self.threads[key]
+
+                if self.config.enable_force_kill_sync_threads:
                     self.force_kill_thread(thread)
                     await asyncio.sleep(1)
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hatchet-sdk
-Version: 1.8.1
+Version: 1.8.2
 Summary:
 License: MIT
 Author: Alexander Belanger
@@ -220,7 +220,7 @@ hatchet_sdk/clients/rest/models/workflow_workers_count.py,sha256=qhzqfvjjIDyARki
 hatchet_sdk/clients/rest/rest.py,sha256=zZHTzgl-NBdcK6XhG23m_s9RKRONGPPItzGe407s7GA,9262
 hatchet_sdk/clients/rest/tenacity_utils.py,sha256=n6QvwuGwinLQpiWNU5GxrDNhFBE8_wZdg3WNur21rJ0,1055
 hatchet_sdk/clients/v1/api_client.py,sha256=mJQUZ3cOxlFJiwWKK5F8jBxcpNZ7A2292HucrBqurbg,1205
-hatchet_sdk/config.py,sha256=jJA76BOvVdfOQHy6TKclAvr2qyblcM-Pz5J-hVAdpQ4,3588
+hatchet_sdk/config.py,sha256=Jm-3ja29cpDf2MwfJAJSpL1qCLmYg_CPC29GhN-eUBY,3853
 hatchet_sdk/connection.py,sha256=B5gT5NL9BBB5-l9U_cN6pMlraQk880rEYMnqaK_dgL0,2590
 hatchet_sdk/context/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hatchet_sdk/context/context.py,sha256=c45AadcE4mm-1ahSoj2khjcdUcYZzqYBQkndxbs2ock,9519
@@ -262,8 +262,8 @@ hatchet_sdk/rate_limit.py,sha256=TwbCuggiZaWpYuo4mjVLlE-z1OfQ2mRBiVvCSaG3lv4,391
 hatchet_sdk/runnables/contextvars.py,sha256=6MDocAMmlyiRW37oQ1jyx10tAlJs-xgDjR3xPoPz05g,426
 hatchet_sdk/runnables/standalone.py,sha256=pCAIS40q9ltkK7K97ff79MzE73_k22ymZdrLdFdFwD8,15233
 hatchet_sdk/runnables/task.py,sha256=5VOgi413eH8Gz9_XBxFTfbfLITTpPJYwRB2ZXshysW8,7014
-hatchet_sdk/runnables/types.py,sha256=OBhqa6rvEaY4ypKtUpKHHyQxOXKYvxao_-Hknu5jVns,4802
-hatchet_sdk/runnables/workflow.py,sha256=Ucjguf3SxyJzYe7R427U76iUOLDoHQMzwOQ7g9xYuQA,39765
+hatchet_sdk/runnables/types.py,sha256=hVO4AI5OmzgWU5X74oeccLIbSfj1G7FgZKDtyyVohfw,3803
+hatchet_sdk/runnables/workflow.py,sha256=is61MQz6qBdzF48pixCTz_ILtpqVv6tiQAAbWEJtYbI,38775
 hatchet_sdk/token.py,sha256=KjIiInwG5Kqd_FO4BSW1x_5Uc7PFbnzIVJqr50-ZldE,779
 hatchet_sdk/utils/backoff.py,sha256=6B5Rb5nLKw_TqqgpJMYjIBV1PTTtbOMRZCveisVhg_I,353
 hatchet_sdk/utils/proto_enums.py,sha256=0UybwE3s7TcqmzoQSO8YnhgAKOS8WZXsyPchB8-eksw,1247
@@ -502,13 +502,13 @@ hatchet_sdk/v0/workflow.py,sha256=d4o425efk7J3JgLIge34MW_A3pzwnwSRtwEOgIqM2pc,93
 hatchet_sdk/v0/workflow_run.py,sha256=jsEZprXshrSV7i_TtL5uoCL03D18zQ3NeJCq7mp97Dg,1752
 hatchet_sdk/waits.py,sha256=L2xZUcmrQX-pTVXWv1W8suMoYU_eA0uowpollauQmOM,3893
 hatchet_sdk/worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hatchet_sdk/worker/action_listener_process.py,sha256=KxS7-wBpfKnsq0LNSvk-MG442Lh60iQMy3VpD1FW3mU,11703
+hatchet_sdk/worker/action_listener_process.py,sha256=t6COI8KmYoYooFLMZY5KLNPQmJrIrs4luoVZxPnKN_I,11775
 hatchet_sdk/worker/runner/run_loop_manager.py,sha256=RNWKDCjR57nJ0LCoLUMi0_3pnmpqyo80mz_RaxHYGIc,3812
-hatchet_sdk/worker/runner/runner.py,sha256=z8ri-viK_avAfF6zgbVNBc-rztFDbxSwng3RHsof92w,17063
+hatchet_sdk/worker/runner/runner.py,sha256=CdsWl0l4tFp8Yy35uLsR74jNsBs-fIHiDAJZwPSGrKg,18805
 hatchet_sdk/worker/runner/utils/capture_logs.py,sha256=nHRPSiDBqzhObM7i2X7t03OupVFnE7kQBdR2Ckgg-2w,2709
 hatchet_sdk/worker/worker.py,sha256=SfUeYYGfPDVa7Hr1Tdgrzn_A0T-e_apIzW26BhsiB70,16101
 hatchet_sdk/workflow_run.py,sha256=ZwH0HLFGFVXz6jbiqSv4w0Om2XuR52Tzzw6LH4y65jQ,2765
-hatchet_sdk-1.8.1.dist-info/METADATA,sha256=OGkuC1TmJprUiDEHQ9i1eDjhDS6SxMsaX7A22oApNSA,3635
-hatchet_sdk-1.8.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-hatchet_sdk-1.8.1.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
-hatchet_sdk-1.8.1.dist-info/RECORD,,
+hatchet_sdk-1.8.2.dist-info/METADATA,sha256=dFJDcLYWDvetxbfsX-ljK5816vmuGNvsC2exJbq4_40,3635
+hatchet_sdk-1.8.2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+hatchet_sdk-1.8.2.dist-info/entry_points.txt,sha256=Un_76pcLse-ZGBlwebhQpnTPyQrripeHW8J7qmEpGOk,1400
+hatchet_sdk-1.8.2.dist-info/RECORD,,