hatchet-sdk 1.2.6__py3-none-any.whl → 1.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hatchet-sdk might be problematic; see the registry's advisory page for more details.

Files changed (60)
  1. hatchet_sdk/__init__.py +7 -5
  2. hatchet_sdk/client.py +14 -6
  3. hatchet_sdk/clients/admin.py +57 -15
  4. hatchet_sdk/clients/dispatcher/action_listener.py +2 -2
  5. hatchet_sdk/clients/dispatcher/dispatcher.py +20 -7
  6. hatchet_sdk/clients/event_ts.py +25 -5
  7. hatchet_sdk/clients/listeners/durable_event_listener.py +125 -0
  8. hatchet_sdk/clients/listeners/pooled_listener.py +255 -0
  9. hatchet_sdk/clients/listeners/workflow_listener.py +62 -0
  10. hatchet_sdk/clients/rest/api/api_token_api.py +24 -24
  11. hatchet_sdk/clients/rest/api/default_api.py +64 -64
  12. hatchet_sdk/clients/rest/api/event_api.py +64 -64
  13. hatchet_sdk/clients/rest/api/github_api.py +8 -8
  14. hatchet_sdk/clients/rest/api/healthcheck_api.py +16 -16
  15. hatchet_sdk/clients/rest/api/log_api.py +16 -16
  16. hatchet_sdk/clients/rest/api/metadata_api.py +24 -24
  17. hatchet_sdk/clients/rest/api/rate_limits_api.py +8 -8
  18. hatchet_sdk/clients/rest/api/slack_api.py +16 -16
  19. hatchet_sdk/clients/rest/api/sns_api.py +24 -24
  20. hatchet_sdk/clients/rest/api/step_run_api.py +56 -56
  21. hatchet_sdk/clients/rest/api/task_api.py +56 -56
  22. hatchet_sdk/clients/rest/api/tenant_api.py +128 -128
  23. hatchet_sdk/clients/rest/api/user_api.py +96 -96
  24. hatchet_sdk/clients/rest/api/worker_api.py +24 -24
  25. hatchet_sdk/clients/rest/api/workflow_api.py +144 -144
  26. hatchet_sdk/clients/rest/api/workflow_run_api.py +48 -48
  27. hatchet_sdk/clients/rest/api/workflow_runs_api.py +40 -40
  28. hatchet_sdk/clients/rest/api_client.py +5 -8
  29. hatchet_sdk/clients/rest/configuration.py +7 -3
  30. hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py +2 -2
  31. hatchet_sdk/clients/rest/models/v1_task_summary.py +5 -0
  32. hatchet_sdk/clients/rest/models/v1_workflow_run.py +5 -0
  33. hatchet_sdk/clients/rest/rest.py +160 -111
  34. hatchet_sdk/clients/v1/api_client.py +2 -2
  35. hatchet_sdk/context/context.py +22 -21
  36. hatchet_sdk/features/cron.py +41 -40
  37. hatchet_sdk/features/logs.py +7 -6
  38. hatchet_sdk/features/metrics.py +19 -18
  39. hatchet_sdk/features/runs.py +88 -68
  40. hatchet_sdk/features/scheduled.py +42 -42
  41. hatchet_sdk/features/workers.py +17 -16
  42. hatchet_sdk/features/workflows.py +15 -14
  43. hatchet_sdk/hatchet.py +1 -1
  44. hatchet_sdk/runnables/standalone.py +12 -9
  45. hatchet_sdk/runnables/task.py +66 -2
  46. hatchet_sdk/runnables/types.py +8 -0
  47. hatchet_sdk/runnables/workflow.py +26 -125
  48. hatchet_sdk/waits.py +8 -8
  49. hatchet_sdk/worker/runner/run_loop_manager.py +4 -4
  50. hatchet_sdk/worker/runner/runner.py +22 -11
  51. hatchet_sdk/worker/worker.py +29 -25
  52. hatchet_sdk/workflow_run.py +58 -9
  53. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/METADATA +1 -1
  54. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/RECORD +57 -57
  55. hatchet_sdk/clients/durable_event_listener.py +0 -329
  56. hatchet_sdk/clients/workflow_listener.py +0 -288
  57. hatchet_sdk/utils/aio.py +0 -43
  58. /hatchet_sdk/clients/{run_event_listener.py → listeners/run_event_listener.py} +0 -0
  59. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/WHEEL +0 -0
  60. {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.1.dist-info}/entry_points.txt +0 -0
@@ -15,6 +15,7 @@
15
15
  import copy
16
16
  import http.client as httplib
17
17
  import logging
18
+ import multiprocessing
18
19
  import sys
19
20
  from logging import FileHandler
20
21
  from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict, Union
@@ -304,9 +305,12 @@ class Configuration:
304
305
  Set this to the SNI value expected by the server.
305
306
  """
306
307
 
307
- self.connection_pool_maxsize = 100
308
- """This value is passed to the aiohttp to limit simultaneous connections.
309
- Default values is 100, None means no-limit.
308
+ self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
309
+ """urllib3 connection pool's maximum number of connections saved
310
+ per pool. urllib3 uses 1 connection as default value, but this is
311
+ not the best value when you are making a lot of possibly parallel
312
+ requests to the same host, which is often the case here.
313
+ cpu_count * 5 is used as default value to increase performance.
310
314
  """
311
315
 
312
316
  self.proxy: Optional[str] = None
@@ -19,7 +19,7 @@ import pprint
19
19
  import re # noqa: F401
20
20
  from typing import Any, ClassVar, Dict, List, Optional, Set
21
21
 
22
- from pydantic import BaseModel, ConfigDict, StrictInt
22
+ from pydantic import BaseModel, ConfigDict
23
23
  from typing_extensions import Self
24
24
 
25
25
 
@@ -28,7 +28,7 @@ class TenantStepRunQueueMetrics(BaseModel):
28
28
  TenantStepRunQueueMetrics
29
29
  """ # noqa: E501
30
30
 
31
- queues: Optional[Dict[str, StrictInt]] = None
31
+ queues: Optional[Dict[str, Any]] = None
32
32
  __properties: ClassVar[List[str]] = ["queues"]
33
33
 
34
34
  model_config = ConfigDict(
@@ -34,6 +34,9 @@ class V1TaskSummary(BaseModel):
34
34
  """ # noqa: E501
35
35
 
36
36
  metadata: APIResourceMeta
37
+ action_id: Optional[StrictStr] = Field(
38
+ default=None, description="The action ID of the task.", alias="actionId"
39
+ )
37
40
  additional_metadata: Optional[Dict[str, Any]] = Field(
38
41
  default=None,
39
42
  description="Additional metadata for the task run.",
@@ -104,6 +107,7 @@ class V1TaskSummary(BaseModel):
104
107
  )
105
108
  __properties: ClassVar[List[str]] = [
106
109
  "metadata",
110
+ "actionId",
107
111
  "additionalMetadata",
108
112
  "children",
109
113
  "createdAt",
@@ -193,6 +197,7 @@ class V1TaskSummary(BaseModel):
193
197
  if obj.get("metadata") is not None
194
198
  else None
195
199
  ),
200
+ "actionId": obj.get("actionId"),
196
201
  "additionalMetadata": obj.get("additionalMetadata"),
197
202
  "children": (
198
203
  [V1TaskSummary.from_dict(_item) for _item in obj["children"]]
@@ -78,6 +78,9 @@ class V1WorkflowRun(BaseModel):
78
78
  description="The timestamp the task run was created.",
79
79
  alias="createdAt",
80
80
  )
81
+ parent_task_external_id: Optional[
82
+ Annotated[str, Field(min_length=36, strict=True, max_length=36)]
83
+ ] = Field(default=None, alias="parentTaskExternalId")
81
84
  __properties: ClassVar[List[str]] = [
82
85
  "metadata",
83
86
  "status",
@@ -93,6 +96,7 @@ class V1WorkflowRun(BaseModel):
93
96
  "workflowVersionId",
94
97
  "input",
95
98
  "createdAt",
99
+ "parentTaskExternalId",
96
100
  ]
97
101
 
98
102
  model_config = ConfigDict(
@@ -166,6 +170,7 @@ class V1WorkflowRun(BaseModel):
166
170
  "workflowVersionId": obj.get("workflowVersionId"),
167
171
  "input": obj.get("input"),
168
172
  "createdAt": obj.get("createdAt"),
173
+ "parentTaskExternalId": obj.get("parentTaskExternalId"),
169
174
  }
170
175
  )
171
176
  return _obj
@@ -16,16 +16,23 @@ import io
16
16
  import json
17
17
  import re
18
18
  import ssl
19
- from typing import Optional, Union
20
19
 
21
- import aiohttp
22
- import aiohttp_retry
20
+ import urllib3
23
21
 
24
22
  from hatchet_sdk.clients.rest.exceptions import ApiException, ApiValueError
25
23
 
26
- RESTResponseType = aiohttp.ClientResponse
24
+ SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"}
25
+ RESTResponseType = urllib3.HTTPResponse
27
26
 
28
- ALLOW_RETRY_METHODS = frozenset({"DELETE", "GET", "HEAD", "OPTIONS", "PUT", "TRACE"})
27
+
28
+ def is_socks_proxy_url(url):
29
+ if url is None:
30
+ return False
31
+ split_section = url.split("://")
32
+ if len(split_section) < 2:
33
+ return False
34
+ else:
35
+ return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES
29
36
 
30
37
 
31
38
  class RESTResponse(io.IOBase):
@@ -36,13 +43,13 @@ class RESTResponse(io.IOBase):
36
43
  self.reason = resp.reason
37
44
  self.data = None
38
45
 
39
- async def read(self):
46
+ def read(self):
40
47
  if self.data is None:
41
- self.data = await self.response.read()
48
+ self.data = self.response.data
42
49
  return self.data
43
50
 
44
51
  def getheaders(self):
45
- """Returns a CIMultiDictProxy of the response headers."""
52
+ """Returns a dictionary of the response headers."""
46
53
  return self.response.headers
47
54
 
48
55
  def getheader(self, name, default=None):
@@ -53,38 +60,57 @@ class RESTResponse(io.IOBase):
53
60
  class RESTClientObject:
54
61
 
55
62
  def __init__(self, configuration) -> None:
63
+ # urllib3.PoolManager will pass all kw parameters to connectionpool
64
+ # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
65
+ # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
66
+ # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
67
+
68
+ # cert_reqs
69
+ if configuration.verify_ssl:
70
+ cert_reqs = ssl.CERT_REQUIRED
71
+ else:
72
+ cert_reqs = ssl.CERT_NONE
73
+
74
+ pool_args = {
75
+ "cert_reqs": cert_reqs,
76
+ "ca_certs": configuration.ssl_ca_cert,
77
+ "cert_file": configuration.cert_file,
78
+ "key_file": configuration.key_file,
79
+ "ca_cert_data": configuration.ca_cert_data,
80
+ }
81
+ if configuration.assert_hostname is not None:
82
+ pool_args["assert_hostname"] = configuration.assert_hostname
83
+
84
+ if configuration.retries is not None:
85
+ pool_args["retries"] = configuration.retries
86
+
87
+ if configuration.tls_server_name:
88
+ pool_args["server_hostname"] = configuration.tls_server_name
89
+
90
+ if configuration.socket_options is not None:
91
+ pool_args["socket_options"] = configuration.socket_options
92
+
93
+ if configuration.connection_pool_maxsize is not None:
94
+ pool_args["maxsize"] = configuration.connection_pool_maxsize
56
95
 
57
- # maxsize is number of requests to host that are allowed in parallel
58
- self.maxsize = configuration.connection_pool_maxsize
59
-
60
- self.ssl_context = ssl.create_default_context(
61
- cafile=configuration.ssl_ca_cert,
62
- cadata=configuration.ca_cert_data,
63
- )
64
- if configuration.cert_file:
65
- self.ssl_context.load_cert_chain(
66
- configuration.cert_file, keyfile=configuration.key_file
67
- )
68
-
69
- if not configuration.verify_ssl:
70
- self.ssl_context.check_hostname = False
71
- self.ssl_context.verify_mode = ssl.CERT_NONE
72
-
73
- self.proxy = configuration.proxy
74
- self.proxy_headers = configuration.proxy_headers
75
-
76
- self.retries = configuration.retries
96
+ # https pool manager
97
+ self.pool_manager: urllib3.PoolManager
77
98
 
78
- self.pool_manager: Optional[aiohttp.ClientSession] = None
79
- self.retry_client: Optional[aiohttp_retry.RetryClient] = None
99
+ if configuration.proxy:
100
+ if is_socks_proxy_url(configuration.proxy):
101
+ from urllib3.contrib.socks import SOCKSProxyManager
80
102
 
81
- async def close(self) -> None:
82
- if self.pool_manager:
83
- await self.pool_manager.close()
84
- if self.retry_client is not None:
85
- await self.retry_client.close()
103
+ pool_args["proxy_url"] = configuration.proxy
104
+ pool_args["headers"] = configuration.proxy_headers
105
+ self.pool_manager = SOCKSProxyManager(**pool_args)
106
+ else:
107
+ pool_args["proxy_url"] = configuration.proxy
108
+ pool_args["proxy_headers"] = configuration.proxy_headers
109
+ self.pool_manager = urllib3.ProxyManager(**pool_args)
110
+ else:
111
+ self.pool_manager = urllib3.PoolManager(**pool_args)
86
112
 
87
- async def request(
113
+ def request(
88
114
  self,
89
115
  method,
90
116
  url,
@@ -93,7 +119,7 @@ class RESTClientObject:
93
119
  post_params=None,
94
120
  _request_timeout=None,
95
121
  ):
96
- """Execute request
122
+ """Perform requests.
97
123
 
98
124
  :param method: http request method
99
125
  :param url: http request url
@@ -117,82 +143,105 @@ class RESTClientObject:
117
143
 
118
144
  post_params = post_params or {}
119
145
  headers = headers or {}
120
- # url already contains the URL query string
121
- timeout = _request_timeout or 5 * 60
122
-
123
- if "Content-Type" not in headers:
124
- headers["Content-Type"] = "application/json"
125
-
126
- args = {"method": method, "url": url, "timeout": timeout, "headers": headers}
127
-
128
- if self.proxy:
129
- args["proxy"] = self.proxy
130
- if self.proxy_headers:
131
- args["proxy_headers"] = self.proxy_headers
132
-
133
- # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
134
- if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]:
135
- if re.search("json", headers["Content-Type"], re.IGNORECASE):
136
- if body is not None:
137
- body = json.dumps(body)
138
- args["data"] = body
139
- elif headers["Content-Type"] == "application/x-www-form-urlencoded":
140
- args["data"] = aiohttp.FormData(post_params)
141
- elif headers["Content-Type"] == "multipart/form-data":
142
- # must del headers['Content-Type'], or the correct
143
- # Content-Type which generated by aiohttp
144
- del headers["Content-Type"]
145
- data = aiohttp.FormData()
146
- for param in post_params:
147
- k, v = param
148
- if isinstance(v, tuple) and len(v) == 3:
149
- data.add_field(k, value=v[1], filename=v[0], content_type=v[2])
150
- else:
151
- # Ensures that dict objects are serialized
152
- if isinstance(v, dict):
153
- v = json.dumps(v)
154
- elif isinstance(v, int):
155
- v = str(v)
156
- data.add_field(k, v)
157
- args["data"] = data
158
-
159
- # Pass a `bytes` or `str` parameter directly in the body to support
160
- # other content types than Json when `body` argument is provided
161
- # in serialized form
162
- elif isinstance(body, str) or isinstance(body, bytes):
163
- args["data"] = body
164
- else:
165
- # Cannot generate the request from given parameters
166
- msg = """Cannot prepare a request message for provided
167
- arguments. Please check that your arguments match
168
- declared content type."""
169
- raise ApiException(status=0, reason=msg)
170
146
 
171
- pool_manager: Union[aiohttp.ClientSession, aiohttp_retry.RetryClient]
172
-
173
- # https pool manager
174
- if self.pool_manager is None:
175
- self.pool_manager = aiohttp.ClientSession(
176
- connector=aiohttp.TCPConnector(
177
- limit=self.maxsize, ssl=self.ssl_context
178
- ),
179
- trust_env=True,
180
- )
181
- pool_manager = self.pool_manager
182
-
183
- if self.retries is not None and method in ALLOW_RETRY_METHODS:
184
- if self.retry_client is None:
185
- self.retry_client = aiohttp_retry.RetryClient(
186
- client_session=self.pool_manager,
187
- retry_options=aiohttp_retry.ExponentialRetry(
188
- attempts=self.retries,
189
- factor=2.0,
190
- start_timeout=0.1,
191
- max_timeout=120.0,
192
- ),
147
+ timeout = None
148
+ if _request_timeout:
149
+ if isinstance(_request_timeout, (int, float)):
150
+ timeout = urllib3.Timeout(total=_request_timeout)
151
+ elif isinstance(_request_timeout, tuple) and len(_request_timeout) == 2:
152
+ timeout = urllib3.Timeout(
153
+ connect=_request_timeout[0], read=_request_timeout[1]
193
154
  )
194
- pool_manager = self.retry_client
195
155
 
196
- r = await pool_manager.request(**args)
156
+ try:
157
+ # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
158
+ if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]:
159
+
160
+ # no content type provided or payload is json
161
+ content_type = headers.get("Content-Type")
162
+ if not content_type or re.search("json", content_type, re.IGNORECASE):
163
+ request_body = None
164
+ if body is not None:
165
+ request_body = json.dumps(body)
166
+ r = self.pool_manager.request(
167
+ method,
168
+ url,
169
+ body=request_body,
170
+ timeout=timeout,
171
+ headers=headers,
172
+ preload_content=False,
173
+ )
174
+ elif content_type == "application/x-www-form-urlencoded":
175
+ r = self.pool_manager.request(
176
+ method,
177
+ url,
178
+ fields=post_params,
179
+ encode_multipart=False,
180
+ timeout=timeout,
181
+ headers=headers,
182
+ preload_content=False,
183
+ )
184
+ elif content_type == "multipart/form-data":
185
+ # must del headers['Content-Type'], or the correct
186
+ # Content-Type which generated by urllib3 will be
187
+ # overwritten.
188
+ del headers["Content-Type"]
189
+ # Ensures that dict objects are serialized
190
+ post_params = [
191
+ (a, json.dumps(b)) if isinstance(b, dict) else (a, b)
192
+ for a, b in post_params
193
+ ]
194
+ r = self.pool_manager.request(
195
+ method,
196
+ url,
197
+ fields=post_params,
198
+ encode_multipart=True,
199
+ timeout=timeout,
200
+ headers=headers,
201
+ preload_content=False,
202
+ )
203
+ # Pass a `string` parameter directly in the body to support
204
+ # other content types than JSON when `body` argument is
205
+ # provided in serialized form.
206
+ elif isinstance(body, str) or isinstance(body, bytes):
207
+ r = self.pool_manager.request(
208
+ method,
209
+ url,
210
+ body=body,
211
+ timeout=timeout,
212
+ headers=headers,
213
+ preload_content=False,
214
+ )
215
+ elif headers["Content-Type"].startswith("text/") and isinstance(
216
+ body, bool
217
+ ):
218
+ request_body = "true" if body else "false"
219
+ r = self.pool_manager.request(
220
+ method,
221
+ url,
222
+ body=request_body,
223
+ preload_content=False,
224
+ timeout=timeout,
225
+ headers=headers,
226
+ )
227
+ else:
228
+ # Cannot generate the request from given parameters
229
+ msg = """Cannot prepare a request message for provided
230
+ arguments. Please check that your arguments match
231
+ declared content type."""
232
+ raise ApiException(status=0, reason=msg)
233
+ # For `GET`, `HEAD`
234
+ else:
235
+ r = self.pool_manager.request(
236
+ method,
237
+ url,
238
+ fields={},
239
+ timeout=timeout,
240
+ headers=headers,
241
+ preload_content=False,
242
+ )
243
+ except urllib3.exceptions.SSLError as e:
244
+ msg = "\n".join([type(e).__name__, str(e)])
245
+ raise ApiException(status=0, reason=msg)
197
246
 
198
247
  return RESTResponse(r)
@@ -1,4 +1,4 @@
1
- from typing import AsyncContextManager, ParamSpec, TypeVar
1
+ from typing import ParamSpec, TypeVar
2
2
 
3
3
  from hatchet_sdk.clients.rest.api_client import ApiClient
4
4
  from hatchet_sdk.clients.rest.configuration import Configuration
@@ -40,5 +40,5 @@ class BaseRestClient:
40
40
 
41
41
  self.api_config.datetime_format = "%Y-%m-%dT%H:%M:%S.%fZ"
42
42
 
43
- def client(self) -> AsyncContextManager[ApiClient]:
43
+ def client(self) -> ApiClient:
44
44
  return ApiClient(self.api_config)
@@ -1,22 +1,20 @@
1
- import inspect
2
1
  import json
3
2
  import traceback
4
3
  from concurrent.futures import Future, ThreadPoolExecutor
5
4
  from datetime import timedelta
6
5
  from typing import TYPE_CHECKING, Any, cast
7
-
8
- from pydantic import BaseModel
6
+ from warnings import warn
9
7
 
10
8
  from hatchet_sdk.clients.admin import AdminClient
11
9
  from hatchet_sdk.clients.dispatcher.dispatcher import ( # type: ignore[attr-defined]
12
10
  Action,
13
11
  DispatcherClient,
14
12
  )
15
- from hatchet_sdk.clients.durable_event_listener import (
13
+ from hatchet_sdk.clients.events import EventClient
14
+ from hatchet_sdk.clients.listeners.durable_event_listener import (
16
15
  DurableEventListener,
17
16
  RegisterDurableEventRequest,
18
17
  )
19
- from hatchet_sdk.clients.events import EventClient
20
18
  from hatchet_sdk.context.worker_context import WorkerContext
21
19
  from hatchet_sdk.features.runs import RunsClient
22
20
  from hatchet_sdk.logger import logger
@@ -29,21 +27,6 @@ if TYPE_CHECKING:
29
27
  from hatchet_sdk.runnables.types import R, TWorkflowInput
30
28
 
31
29
 
32
- DEFAULT_WORKFLOW_POLLING_INTERVAL = 5 # Seconds
33
-
34
-
35
- def get_caller_file_path() -> str:
36
- caller_frame = inspect.stack()[2]
37
-
38
- return caller_frame.filename
39
-
40
-
41
- class StepRunError(BaseModel):
42
- step_id: str
43
- step_run_action_name: str
44
- error: str
45
-
46
-
47
30
  class Context:
48
31
  def __init__(
49
32
  self,
@@ -85,7 +68,7 @@ class Context:
85
68
  def trigger_data(self) -> JSONSerializableMapping:
86
69
  return self.data.triggers
87
70
 
88
- def task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
71
+ def _task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
89
72
  from hatchet_sdk.runnables.types import R
90
73
 
91
74
  if self.was_skipped(task):
@@ -116,7 +99,25 @@ class Context:
116
99
 
117
100
  return parent_step_data
118
101
 
102
+ def task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
103
+ from hatchet_sdk.runnables.types import R
104
+
105
+ ## If the task is async, we need to wrap its output in a coroutine
106
+ ## so that the type checker behaves right
107
+ async def _aio_output() -> "R":
108
+ return self._task_output(task)
109
+
110
+ if task.is_async_function:
111
+ return cast(R, _aio_output())
112
+
113
+ return self._task_output(task)
114
+
119
115
  def aio_task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
116
+ warn(
117
+ "`aio_task_output` is deprecated. Use `task_output` instead.",
118
+ DeprecationWarning,
119
+ )
120
+
120
121
  if task.is_async_function:
121
122
  return self.task_output(task)
122
123