hatchet-sdk 1.0.2__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hatchet-sdk might be problematic. See the registry's advisory page for more details.

Files changed (35)
  1. hatchet_sdk/client.py +3 -16
  2. hatchet_sdk/clients/admin.py +13 -33
  3. hatchet_sdk/clients/dispatcher/action_listener.py +7 -11
  4. hatchet_sdk/clients/dispatcher/dispatcher.py +20 -5
  5. hatchet_sdk/clients/durable_event_listener.py +11 -12
  6. hatchet_sdk/clients/events.py +11 -15
  7. hatchet_sdk/clients/rest/models/tenant_resource.py +2 -0
  8. hatchet_sdk/clients/rest/models/workflow_runs_metrics.py +1 -5
  9. hatchet_sdk/clients/run_event_listener.py +62 -66
  10. hatchet_sdk/clients/v1/api_client.py +1 -38
  11. hatchet_sdk/clients/workflow_listener.py +9 -10
  12. hatchet_sdk/context/context.py +9 -0
  13. hatchet_sdk/contracts/dispatcher_pb2_grpc.py +1 -1
  14. hatchet_sdk/contracts/events_pb2_grpc.py +1 -1
  15. hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py +1 -1
  16. hatchet_sdk/contracts/v1/workflows_pb2_grpc.py +1 -1
  17. hatchet_sdk/contracts/workflows_pb2_grpc.py +1 -1
  18. hatchet_sdk/features/cron.py +5 -4
  19. hatchet_sdk/features/logs.py +2 -1
  20. hatchet_sdk/features/metrics.py +4 -3
  21. hatchet_sdk/features/rate_limits.py +1 -1
  22. hatchet_sdk/features/runs.py +8 -7
  23. hatchet_sdk/features/scheduled.py +5 -4
  24. hatchet_sdk/features/workers.py +4 -3
  25. hatchet_sdk/features/workflows.py +4 -3
  26. hatchet_sdk/metadata.py +2 -2
  27. hatchet_sdk/runnables/standalone.py +3 -18
  28. hatchet_sdk/utils/aio.py +43 -0
  29. hatchet_sdk/worker/runner/run_loop_manager.py +1 -1
  30. hatchet_sdk/workflow_run.py +7 -20
  31. {hatchet_sdk-1.0.2.dist-info → hatchet_sdk-1.1.0.dist-info}/METADATA +1 -1
  32. {hatchet_sdk-1.0.2.dist-info → hatchet_sdk-1.1.0.dist-info}/RECORD +34 -34
  33. {hatchet_sdk-1.0.2.dist-info → hatchet_sdk-1.1.0.dist-info}/entry_points.txt +1 -0
  34. hatchet_sdk/utils/aio_utils.py +0 -18
  35. {hatchet_sdk-1.0.2.dist-info → hatchet_sdk-1.1.0.dist-info}/WHEEL +0 -0
@@ -1,6 +1,4 @@
1
- import asyncio
2
- from concurrent.futures import ThreadPoolExecutor
3
- from typing import AsyncContextManager, Callable, Coroutine, ParamSpec, TypeVar
1
+ from typing import AsyncContextManager, ParamSpec, TypeVar
4
2
 
5
3
  from hatchet_sdk.clients.rest.api_client import ApiClient
6
4
  from hatchet_sdk.clients.rest.configuration import Configuration
@@ -44,38 +42,3 @@ class BaseRestClient:
44
42
 
45
43
  def client(self) -> AsyncContextManager[ApiClient]:
46
44
  return ApiClient(self.api_config)
47
-
48
- def _run_async_function_do_not_use_directly(
49
- self,
50
- async_func: Callable[P, Coroutine[Y, S, R]],
51
- *args: P.args,
52
- **kwargs: P.kwargs,
53
- ) -> R:
54
- loop = asyncio.new_event_loop()
55
- asyncio.set_event_loop(loop)
56
- try:
57
- return loop.run_until_complete(async_func(*args, **kwargs))
58
- finally:
59
- loop.close()
60
-
61
- def _run_async_from_sync(
62
- self,
63
- async_func: Callable[P, Coroutine[Y, S, R]],
64
- *args: P.args,
65
- **kwargs: P.kwargs,
66
- ) -> R:
67
- try:
68
- loop = asyncio.get_event_loop()
69
- except RuntimeError:
70
- loop = None
71
-
72
- if loop and loop.is_running():
73
- return loop.run_until_complete(async_func(*args, **kwargs))
74
- else:
75
- with ThreadPoolExecutor() as executor:
76
- future = executor.submit(
77
- lambda: self._run_async_function_do_not_use_directly(
78
- async_func, *args, **kwargs
79
- )
80
- )
81
- return future.result()
@@ -54,14 +54,6 @@ class _Subscription:
54
54
 
55
55
  class PooledWorkflowRunListener:
56
56
  def __init__(self, config: ClientConfig):
57
- try:
58
- asyncio.get_running_loop()
59
- except RuntimeError:
60
- loop = asyncio.new_event_loop()
61
- asyncio.set_event_loop(loop)
62
-
63
- conn = new_conn(config, True)
64
- self.client = DispatcherStub(conn) # type: ignore[no-untyped-call]
65
57
  self.token = config.token
66
58
  self.config = config
67
59
 
@@ -91,6 +83,10 @@ class PooledWorkflowRunListener:
91
83
 
92
84
  self.interrupter: asyncio.Task[None] | None = None
93
85
 
86
+ ## IMPORTANT: This needs to be created lazily so we don't require
87
+ ## an event loop to instantiate the client.
88
+ self.client: DispatcherStub | None = None
89
+
94
90
  async def _interrupter(self) -> None:
95
91
  """
96
92
  _interrupter runs in a separate thread and interrupts the listener according to a configurable duration.
@@ -239,7 +235,7 @@ class PooledWorkflowRunListener:
239
235
  if subscription_id:
240
236
  self.cleanup_subscription(subscription_id)
241
237
 
242
- async def result(self, workflow_run_id: str) -> dict[str, Any]:
238
+ async def aio_result(self, workflow_run_id: str) -> dict[str, Any]:
243
239
  from hatchet_sdk.clients.admin import DedupeViolationErr
244
240
 
245
241
  event = await self.subscribe(workflow_run_id)
@@ -261,6 +257,9 @@ class PooledWorkflowRunListener:
261
257
  self,
262
258
  ) -> grpc.aio.UnaryStreamCall[SubscribeToWorkflowRunsRequest, WorkflowRunEvent]:
263
259
  retries = 0
260
+ if self.client is None:
261
+ conn = new_conn(self.config, True)
262
+ self.client = DispatcherStub(conn)
264
263
 
265
264
  while retries < DEFAULT_WORKFLOW_LISTENER_RETRY_COUNT:
266
265
  try:
@@ -276,7 +275,7 @@ class PooledWorkflowRunListener:
276
275
  SubscribeToWorkflowRunsRequest, WorkflowRunEvent
277
276
  ],
278
277
  self.client.SubscribeToWorkflowRuns(
279
- self._request(),
278
+ self._request(), # type: ignore[arg-type]
280
279
  metadata=get_metadata(self.token),
281
280
  ),
282
281
  )
@@ -113,6 +113,14 @@ class Context:
113
113
 
114
114
  return parent_step_data
115
115
 
116
+ def aio_task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
117
+ if task.is_async_function:
118
+ return self.task_output(task)
119
+
120
+ raise ValueError(
121
+ f"Task '{task.name}' is not an async function. Use `task_output` instead."
122
+ )
123
+
116
124
  @property
117
125
  def was_triggered_by_event(self) -> bool:
118
126
  return self.data.triggered_by == "event"
@@ -157,6 +165,7 @@ class Context:
157
165
 
158
166
  def handle_result(future: Future[tuple[bool, Exception | None]]) -> None:
159
167
  success, exception = future.result()
168
+
160
169
  if not success and exception:
161
170
  if raise_on_error:
162
171
  raise exception
@@ -33,7 +33,7 @@ if _version_not_supported:
33
33
  class DispatcherStub(object):
34
34
  """Missing associated documentation comment in .proto file."""
35
35
 
36
- def __init__(self, channel):
36
+ def __init__(self, channel: grpc.Channel | grpc.aio.Channel) -> None:
37
37
  """Constructor.
38
38
 
39
39
  Args:
@@ -33,7 +33,7 @@ if _version_not_supported:
33
33
  class EventsServiceStub(object):
34
34
  """Missing associated documentation comment in .proto file."""
35
35
 
36
- def __init__(self, channel):
36
+ def __init__(self, channel: grpc.Channel | grpc.aio.Channel) -> None:
37
37
  """Constructor.
38
38
 
39
39
  Args:
@@ -33,7 +33,7 @@ if _version_not_supported:
33
33
  class V1DispatcherStub(object):
34
34
  """Missing associated documentation comment in .proto file."""
35
35
 
36
- def __init__(self, channel):
36
+ def __init__(self, channel: grpc.Channel | grpc.aio.Channel) -> None:
37
37
  """Constructor.
38
38
 
39
39
  Args:
@@ -34,7 +34,7 @@ class AdminServiceStub(object):
34
34
  """AdminService represents a set of RPCs for admin management of tasks, workflows, etc.
35
35
  """
36
36
 
37
- def __init__(self, channel):
37
+ def __init__(self, channel: grpc.Channel | grpc.aio.Channel) -> None:
38
38
  """Constructor.
39
39
 
40
40
  Args:
@@ -34,7 +34,7 @@ class WorkflowServiceStub(object):
34
34
  """WorkflowService represents a set of RPCs for managing workflows.
35
35
  """
36
36
 
37
- def __init__(self, channel):
37
+ def __init__(self, channel: grpc.Channel | grpc.aio.Channel) -> None:
38
38
  """Constructor.
39
39
 
40
40
  Args:
@@ -18,6 +18,7 @@ from hatchet_sdk.clients.v1.api_client import (
18
18
  BaseRestClient,
19
19
  maybe_additional_metadata_to_kv,
20
20
  )
21
+ from hatchet_sdk.utils.aio import run_async_from_sync
21
22
  from hatchet_sdk.utils.typing import JSONSerializableMapping
22
23
 
23
24
 
@@ -121,7 +122,7 @@ class CronClient(BaseRestClient):
121
122
  input: JSONSerializableMapping,
122
123
  additional_metadata: JSONSerializableMapping,
123
124
  ) -> CronWorkflows:
124
- return self._run_async_from_sync(
125
+ return run_async_from_sync(
125
126
  self.aio_create,
126
127
  workflow_name,
127
128
  cron_name,
@@ -143,7 +144,7 @@ class CronClient(BaseRestClient):
143
144
  )
144
145
 
145
146
  def delete(self, cron_id: str) -> None:
146
- return self._run_async_from_sync(self.aio_delete, cron_id)
147
+ return run_async_from_sync(self.aio_delete, cron_id)
147
148
 
148
149
  async def aio_list(
149
150
  self,
@@ -204,7 +205,7 @@ class CronClient(BaseRestClient):
204
205
  Returns:
205
206
  CronWorkflowsList: A list of cron workflows.
206
207
  """
207
- return self._run_async_from_sync(
208
+ return run_async_from_sync(
208
209
  self.aio_list,
209
210
  offset=offset,
210
211
  limit=limit,
@@ -239,4 +240,4 @@ class CronClient(BaseRestClient):
239
240
  Returns:
240
241
  CronWorkflows: The requested cron workflow instance.
241
242
  """
242
- return self._run_async_from_sync(self.aio_get, cron_id)
243
+ return run_async_from_sync(self.aio_get, cron_id)
@@ -2,6 +2,7 @@ from hatchet_sdk.clients.rest.api.log_api import LogApi
2
2
  from hatchet_sdk.clients.rest.api_client import ApiClient
3
3
  from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList
4
4
  from hatchet_sdk.clients.v1.api_client import BaseRestClient
5
+ from hatchet_sdk.utils.aio import run_async_from_sync
5
6
 
6
7
 
7
8
  class LogsClient(BaseRestClient):
@@ -13,4 +14,4 @@ class LogsClient(BaseRestClient):
13
14
  return await self._la(client).v1_log_line_list(task=task_run_id)
14
15
 
15
16
  def list(self, task_run_id: str) -> V1LogLineList:
16
- return self._run_async_from_sync(self.aio_list, task_run_id)
17
+ return run_async_from_sync(self.aio_list, task_run_id)
@@ -11,6 +11,7 @@ from hatchet_sdk.clients.v1.api_client import (
11
11
  BaseRestClient,
12
12
  maybe_additional_metadata_to_kv,
13
13
  )
14
+ from hatchet_sdk.utils.aio import run_async_from_sync
14
15
  from hatchet_sdk.utils.typing import JSONSerializableMapping
15
16
 
16
17
 
@@ -38,7 +39,7 @@ class MetricsClient(BaseRestClient):
38
39
  status: WorkflowRunStatus | None = None,
39
40
  group_key: str | None = None,
40
41
  ) -> WorkflowMetrics:
41
- return self._run_async_from_sync(
42
+ return run_async_from_sync(
42
43
  self.aio_get_workflow_metrics, workflow_id, status, group_key
43
44
  )
44
45
 
@@ -61,7 +62,7 @@ class MetricsClient(BaseRestClient):
61
62
  workflow_ids: list[str] | None = None,
62
63
  additional_metadata: JSONSerializableMapping | None = None,
63
64
  ) -> TenantQueueMetrics:
64
- return self._run_async_from_sync(
65
+ return run_async_from_sync(
65
66
  self.aio_get_queue_metrics, workflow_ids, additional_metadata
66
67
  )
67
68
 
@@ -72,4 +73,4 @@ class MetricsClient(BaseRestClient):
72
73
  )
73
74
 
74
75
  def get_task_metrics(self) -> TenantStepRunQueueMetrics:
75
- return self._run_async_from_sync(self.aio_get_task_metrics)
76
+ return run_async_from_sync(self.aio_get_task_metrics)
@@ -24,7 +24,7 @@ class RateLimitsClient(BaseRestClient):
24
24
  )
25
25
 
26
26
  conn = new_conn(self.client_config, False)
27
- client = WorkflowServiceStub(conn) # type: ignore[no-untyped-call]
27
+ client = WorkflowServiceStub(conn)
28
28
 
29
29
  client.PutRateLimit(
30
30
  v0_workflow_protos.PutRateLimitRequest(
@@ -19,6 +19,7 @@ from hatchet_sdk.clients.v1.api_client import (
19
19
  BaseRestClient,
20
20
  maybe_additional_metadata_to_kv,
21
21
  )
22
+ from hatchet_sdk.utils.aio import run_async_from_sync
22
23
  from hatchet_sdk.utils.typing import JSONSerializableMapping
23
24
 
24
25
 
@@ -93,7 +94,7 @@ class RunsClient(BaseRestClient):
93
94
  return await self._wra(client).v1_workflow_run_get(str(workflow_run_id))
94
95
 
95
96
  def get(self, workflow_run_id: str) -> V1WorkflowRunDetails:
96
- return self._run_async_from_sync(self.aio_get, workflow_run_id)
97
+ return run_async_from_sync(self.aio_get, workflow_run_id)
97
98
 
98
99
  async def aio_list(
99
100
  self,
@@ -138,7 +139,7 @@ class RunsClient(BaseRestClient):
138
139
  worker_id: str | None = None,
139
140
  parent_task_external_id: str | None = None,
140
141
  ) -> V1TaskSummaryList:
141
- return self._run_async_from_sync(
142
+ return run_async_from_sync(
142
143
  self.aio_list,
143
144
  since=since,
144
145
  only_tasks=only_tasks,
@@ -174,7 +175,7 @@ class RunsClient(BaseRestClient):
174
175
  input: JSONSerializableMapping,
175
176
  additional_metadata: JSONSerializableMapping = {},
176
177
  ) -> V1WorkflowRunDetails:
177
- return self._run_async_from_sync(
178
+ return run_async_from_sync(
178
179
  self.aio_create, workflow_name, input, additional_metadata
179
180
  )
180
181
 
@@ -182,7 +183,7 @@ class RunsClient(BaseRestClient):
182
183
  await self.aio_bulk_replay(opts=BulkCancelReplayOpts(ids=[run_id]))
183
184
 
184
185
  def replay(self, run_id: str) -> None:
185
- return self._run_async_from_sync(self.aio_replay, run_id)
186
+ return run_async_from_sync(self.aio_replay, run_id)
186
187
 
187
188
  async def aio_bulk_replay(self, opts: BulkCancelReplayOpts) -> None:
188
189
  async with self.client() as client:
@@ -192,13 +193,13 @@ class RunsClient(BaseRestClient):
192
193
  )
193
194
 
194
195
  def bulk_replay(self, opts: BulkCancelReplayOpts) -> None:
195
- return self._run_async_from_sync(self.aio_bulk_replay, opts)
196
+ return run_async_from_sync(self.aio_bulk_replay, opts)
196
197
 
197
198
  async def aio_cancel(self, run_id: str) -> None:
198
199
  await self.aio_bulk_cancel(opts=BulkCancelReplayOpts(ids=[run_id]))
199
200
 
200
201
  def cancel(self, run_id: str) -> None:
201
- return self._run_async_from_sync(self.aio_cancel, run_id)
202
+ return run_async_from_sync(self.aio_cancel, run_id)
202
203
 
203
204
  async def aio_bulk_cancel(self, opts: BulkCancelReplayOpts) -> None:
204
205
  async with self.client() as client:
@@ -208,7 +209,7 @@ class RunsClient(BaseRestClient):
208
209
  )
209
210
 
210
211
  def bulk_cancel(self, opts: BulkCancelReplayOpts) -> None:
211
- return self._run_async_from_sync(self.aio_bulk_cancel, opts)
212
+ return run_async_from_sync(self.aio_bulk_cancel, opts)
212
213
 
213
214
  async def aio_get_result(self, run_id: str) -> JSONSerializableMapping:
214
215
  details = await self.aio_get(run_id)
@@ -22,6 +22,7 @@ from hatchet_sdk.clients.v1.api_client import (
22
22
  BaseRestClient,
23
23
  maybe_additional_metadata_to_kv,
24
24
  )
25
+ from hatchet_sdk.utils.aio import run_async_from_sync
25
26
  from hatchet_sdk.utils.typing import JSONSerializableMapping
26
27
 
27
28
 
@@ -82,7 +83,7 @@ class ScheduledClient(BaseRestClient):
82
83
  ScheduledWorkflows: The created scheduled workflow instance.
83
84
  """
84
85
 
85
- return self._run_async_from_sync(
86
+ return run_async_from_sync(
86
87
  self.aio_create,
87
88
  workflow_name,
88
89
  trigger_at,
@@ -104,7 +105,7 @@ class ScheduledClient(BaseRestClient):
104
105
  )
105
106
 
106
107
  def delete(self, scheduled_id: str) -> None:
107
- self._run_async_from_sync(self.aio_delete, scheduled_id)
108
+ run_async_from_sync(self.aio_delete, scheduled_id)
108
109
 
109
110
  async def aio_list(
110
111
  self,
@@ -175,7 +176,7 @@ class ScheduledClient(BaseRestClient):
175
176
  Returns:
176
177
  List[ScheduledWorkflows]: A list of scheduled workflows matching the criteria.
177
178
  """
178
- return self._run_async_from_sync(
179
+ return run_async_from_sync(
179
180
  self.aio_list,
180
181
  offset=offset,
181
182
  limit=limit,
@@ -214,4 +215,4 @@ class ScheduledClient(BaseRestClient):
214
215
  Returns:
215
216
  ScheduledWorkflows: The requested scheduled workflow instance.
216
217
  """
217
- return self._run_async_from_sync(self.aio_get, scheduled_id)
218
+ return run_async_from_sync(self.aio_get, scheduled_id)
@@ -4,6 +4,7 @@ from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRe
4
4
  from hatchet_sdk.clients.rest.models.worker import Worker
5
5
  from hatchet_sdk.clients.rest.models.worker_list import WorkerList
6
6
  from hatchet_sdk.clients.v1.api_client import BaseRestClient
7
+ from hatchet_sdk.utils.aio import run_async_from_sync
7
8
 
8
9
 
9
10
  class WorkersClient(BaseRestClient):
@@ -15,7 +16,7 @@ class WorkersClient(BaseRestClient):
15
16
  return await self._wa(client).worker_get(worker_id)
16
17
 
17
18
  def get(self, worker_id: str) -> Worker:
18
- return self._run_async_from_sync(self.aio_get, worker_id)
19
+ return run_async_from_sync(self.aio_get, worker_id)
19
20
 
20
21
  async def aio_list(
21
22
  self,
@@ -28,7 +29,7 @@ class WorkersClient(BaseRestClient):
28
29
  def list(
29
30
  self,
30
31
  ) -> WorkerList:
31
- return self._run_async_from_sync(self.aio_list)
32
+ return run_async_from_sync(self.aio_list)
32
33
 
33
34
  async def aio_update(self, worker_id: str, opts: UpdateWorkerRequest) -> Worker:
34
35
  async with self.client() as client:
@@ -38,4 +39,4 @@ class WorkersClient(BaseRestClient):
38
39
  )
39
40
 
40
41
  def update(self, worker_id: str, opts: UpdateWorkerRequest) -> Worker:
41
- return self._run_async_from_sync(self.aio_update, worker_id, opts)
42
+ return run_async_from_sync(self.aio_update, worker_id, opts)
@@ -5,6 +5,7 @@ from hatchet_sdk.clients.rest.models.workflow import Workflow
5
5
  from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
6
6
  from hatchet_sdk.clients.rest.models.workflow_version import WorkflowVersion
7
7
  from hatchet_sdk.clients.v1.api_client import BaseRestClient
8
+ from hatchet_sdk.utils.aio import run_async_from_sync
8
9
 
9
10
 
10
11
  class WorkflowsClient(BaseRestClient):
@@ -19,7 +20,7 @@ class WorkflowsClient(BaseRestClient):
19
20
  return await self._wa(client).workflow_get(workflow_id)
20
21
 
21
22
  def get(self, workflow_id: str) -> Workflow:
22
- return self._run_async_from_sync(self.aio_get, workflow_id)
23
+ return run_async_from_sync(self.aio_get, workflow_id)
23
24
 
24
25
  async def aio_list(
25
26
  self,
@@ -41,7 +42,7 @@ class WorkflowsClient(BaseRestClient):
41
42
  limit: int | None = None,
42
43
  offset: int | None = None,
43
44
  ) -> WorkflowList:
44
- return self._run_async_from_sync(self.aio_list, workflow_name, limit, offset)
45
+ return run_async_from_sync(self.aio_list, workflow_name, limit, offset)
45
46
 
46
47
  async def aio_get_version(
47
48
  self, workflow_id: str, version: str | None = None
@@ -52,4 +53,4 @@ class WorkflowsClient(BaseRestClient):
52
53
  def get_version(
53
54
  self, workflow_id: str, version: str | None = None
54
55
  ) -> WorkflowVersion:
55
- return self._run_async_from_sync(self.aio_get_version, workflow_id, version)
56
+ return run_async_from_sync(self.aio_get_version, workflow_id, version)
hatchet_sdk/metadata.py CHANGED
@@ -1,2 +1,2 @@
1
- def get_metadata(token: str) -> list[tuple[str, str]]:
2
- return [("authorization", "bearer " + token)]
1
+ def get_metadata(token: str) -> tuple[tuple[str, str]]:
2
+ return (("authorization", "bearer " + token),)
@@ -1,4 +1,3 @@
1
- import asyncio
2
1
  from datetime import datetime
3
2
  from typing import Any, Generic, cast, get_type_hints
4
3
 
@@ -12,7 +11,7 @@ from hatchet_sdk.contracts.workflows_pb2 import WorkflowVersion
12
11
  from hatchet_sdk.runnables.task import Task
13
12
  from hatchet_sdk.runnables.types import EmptyModel, R, TWorkflowInput
14
13
  from hatchet_sdk.runnables.workflow import BaseWorkflow, Workflow
15
- from hatchet_sdk.utils.aio_utils import get_active_event_loop
14
+ from hatchet_sdk.utils.aio import run_async_from_sync
16
15
  from hatchet_sdk.utils.typing import JSONSerializableMapping, is_basemodel_subclass
17
16
  from hatchet_sdk.workflow_run import WorkflowRunRef
18
17
 
@@ -27,25 +26,11 @@ class TaskRunRef(Generic[TWorkflowInput, R]):
27
26
  self._wrr = workflow_run_ref
28
27
 
29
28
  async def aio_result(self) -> R:
30
- result = await self._wrr.workflow_listener.result(self._wrr.workflow_run_id)
29
+ result = await self._wrr.workflow_listener.aio_result(self._wrr.workflow_run_id)
31
30
  return self._s._extract_result(result)
32
31
 
33
32
  def result(self) -> R:
34
- coro = self._wrr.workflow_listener.result(self._wrr.workflow_run_id)
35
-
36
- loop = get_active_event_loop()
37
-
38
- if loop is None:
39
- loop = asyncio.new_event_loop()
40
- asyncio.set_event_loop(loop)
41
- try:
42
- result = loop.run_until_complete(coro)
43
- finally:
44
- asyncio.set_event_loop(None)
45
- else:
46
- result = loop.run_until_complete(coro)
47
-
48
- return self._s._extract_result(result)
33
+ return run_async_from_sync(self.aio_result)
49
34
 
50
35
 
51
36
  class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
@@ -0,0 +1,43 @@
1
+ import asyncio
2
+ from concurrent.futures import ThreadPoolExecutor
3
+ from typing import Callable, Coroutine, ParamSpec, TypeVar
4
+
5
+ P = ParamSpec("P")
6
+ R = TypeVar("R")
7
+ Y = TypeVar("Y")
8
+ S = TypeVar("S")
9
+
10
+
11
+ def _run_async_function_do_not_use_directly(
12
+ async_func: Callable[P, Coroutine[Y, S, R]],
13
+ *args: P.args,
14
+ **kwargs: P.kwargs,
15
+ ) -> R:
16
+ loop = asyncio.new_event_loop()
17
+ asyncio.set_event_loop(loop)
18
+ try:
19
+ return loop.run_until_complete(async_func(*args, **kwargs))
20
+ finally:
21
+ loop.close()
22
+
23
+
24
+ def run_async_from_sync(
25
+ async_func: Callable[P, Coroutine[Y, S, R]],
26
+ *args: P.args,
27
+ **kwargs: P.kwargs,
28
+ ) -> R:
29
+ try:
30
+ loop = asyncio.get_event_loop()
31
+ except RuntimeError:
32
+ loop = None
33
+
34
+ if loop and loop.is_running():
35
+ return loop.run_until_complete(async_func(*args, **kwargs))
36
+ else:
37
+ with ThreadPoolExecutor() as executor:
38
+ future = executor.submit(
39
+ lambda: _run_async_function_do_not_use_directly(
40
+ async_func, *args, **kwargs
41
+ )
42
+ )
43
+ return future.result()
@@ -60,7 +60,7 @@ class WorkerActionRunLoopManager:
60
60
 
61
61
  async def aio_start(self, retry_count: int = 1) -> None:
62
62
  await capture_logs(
63
- self.client.logInterceptor,
63
+ self.client.log_interceptor,
64
64
  self.client.event,
65
65
  self._async_start,
66
66
  )(retry_count=retry_count)
@@ -1,4 +1,3 @@
1
- import asyncio
2
1
  from typing import Any
3
2
 
4
3
  from hatchet_sdk.clients.run_event_listener import (
@@ -6,19 +5,19 @@ from hatchet_sdk.clients.run_event_listener import (
6
5
  RunEventListenerClient,
7
6
  )
8
7
  from hatchet_sdk.clients.workflow_listener import PooledWorkflowRunListener
9
- from hatchet_sdk.utils.aio_utils import get_active_event_loop
8
+ from hatchet_sdk.config import ClientConfig
9
+ from hatchet_sdk.utils.aio import run_async_from_sync
10
10
 
11
11
 
12
12
  class WorkflowRunRef:
13
13
  def __init__(
14
14
  self,
15
15
  workflow_run_id: str,
16
- workflow_listener: PooledWorkflowRunListener,
17
- workflow_run_event_listener: RunEventListenerClient,
16
+ config: ClientConfig,
18
17
  ):
19
18
  self.workflow_run_id = workflow_run_id
20
- self.workflow_listener = workflow_listener
21
- self.workflow_run_event_listener = workflow_run_event_listener
19
+ self.workflow_listener = PooledWorkflowRunListener(config)
20
+ self.workflow_run_event_listener = RunEventListenerClient(config=config)
22
21
 
23
22
  def __str__(self) -> str:
24
23
  return self.workflow_run_id
@@ -27,19 +26,7 @@ class WorkflowRunRef:
27
26
  return self.workflow_run_event_listener.stream(self.workflow_run_id)
28
27
 
29
28
  async def aio_result(self) -> dict[str, Any]:
30
- return await self.workflow_listener.result(self.workflow_run_id)
29
+ return await self.workflow_listener.aio_result(self.workflow_run_id)
31
30
 
32
31
  def result(self) -> dict[str, Any]:
33
- coro = self.workflow_listener.result(self.workflow_run_id)
34
-
35
- loop = get_active_event_loop()
36
-
37
- if loop is None:
38
- loop = asyncio.new_event_loop()
39
- asyncio.set_event_loop(loop)
40
- try:
41
- return loop.run_until_complete(coro)
42
- finally:
43
- asyncio.set_event_loop(None)
44
- else:
45
- return loop.run_until_complete(coro)
32
+ return run_async_from_sync(self.aio_result)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hatchet-sdk
3
- Version: 1.0.2
3
+ Version: 1.1.0
4
4
  Summary:
5
5
  Author: Alexander Belanger
6
6
  Author-email: alexander@hatchet.run