hatchet-sdk 1.2.6__py3-none-any.whl → 1.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hatchet-sdk might be problematic.
- hatchet_sdk/__init__.py +7 -5
- hatchet_sdk/client.py +14 -6
- hatchet_sdk/clients/admin.py +57 -15
- hatchet_sdk/clients/dispatcher/action_listener.py +2 -2
- hatchet_sdk/clients/dispatcher/dispatcher.py +20 -7
- hatchet_sdk/clients/event_ts.py +25 -5
- hatchet_sdk/clients/listeners/durable_event_listener.py +125 -0
- hatchet_sdk/clients/listeners/pooled_listener.py +255 -0
- hatchet_sdk/clients/listeners/workflow_listener.py +62 -0
- hatchet_sdk/clients/rest/api/api_token_api.py +24 -24
- hatchet_sdk/clients/rest/api/default_api.py +64 -64
- hatchet_sdk/clients/rest/api/event_api.py +64 -64
- hatchet_sdk/clients/rest/api/github_api.py +8 -8
- hatchet_sdk/clients/rest/api/healthcheck_api.py +16 -16
- hatchet_sdk/clients/rest/api/log_api.py +16 -16
- hatchet_sdk/clients/rest/api/metadata_api.py +24 -24
- hatchet_sdk/clients/rest/api/rate_limits_api.py +8 -8
- hatchet_sdk/clients/rest/api/slack_api.py +16 -16
- hatchet_sdk/clients/rest/api/sns_api.py +24 -24
- hatchet_sdk/clients/rest/api/step_run_api.py +56 -56
- hatchet_sdk/clients/rest/api/task_api.py +56 -56
- hatchet_sdk/clients/rest/api/tenant_api.py +128 -128
- hatchet_sdk/clients/rest/api/user_api.py +96 -96
- hatchet_sdk/clients/rest/api/worker_api.py +24 -24
- hatchet_sdk/clients/rest/api/workflow_api.py +144 -144
- hatchet_sdk/clients/rest/api/workflow_run_api.py +48 -48
- hatchet_sdk/clients/rest/api/workflow_runs_api.py +40 -40
- hatchet_sdk/clients/rest/api_client.py +5 -8
- hatchet_sdk/clients/rest/configuration.py +7 -3
- hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py +2 -2
- hatchet_sdk/clients/rest/models/v1_task_summary.py +5 -0
- hatchet_sdk/clients/rest/models/workflow_runs_metrics.py +5 -1
- hatchet_sdk/clients/rest/rest.py +160 -111
- hatchet_sdk/clients/v1/api_client.py +2 -2
- hatchet_sdk/context/context.py +22 -21
- hatchet_sdk/features/cron.py +41 -40
- hatchet_sdk/features/logs.py +7 -6
- hatchet_sdk/features/metrics.py +19 -18
- hatchet_sdk/features/runs.py +88 -68
- hatchet_sdk/features/scheduled.py +42 -42
- hatchet_sdk/features/workers.py +17 -16
- hatchet_sdk/features/workflows.py +15 -14
- hatchet_sdk/hatchet.py +1 -1
- hatchet_sdk/runnables/standalone.py +12 -9
- hatchet_sdk/runnables/task.py +66 -2
- hatchet_sdk/runnables/types.py +8 -0
- hatchet_sdk/runnables/workflow.py +26 -125
- hatchet_sdk/waits.py +8 -8
- hatchet_sdk/worker/runner/run_loop_manager.py +4 -4
- hatchet_sdk/worker/runner/runner.py +22 -11
- hatchet_sdk/worker/worker.py +29 -25
- hatchet_sdk/workflow_run.py +55 -9
- {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.0.dist-info}/METADATA +1 -1
- {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.0.dist-info}/RECORD +57 -57
- hatchet_sdk/clients/durable_event_listener.py +0 -329
- hatchet_sdk/clients/workflow_listener.py +0 -288
- hatchet_sdk/utils/aio.py +0 -43
- /hatchet_sdk/clients/{run_event_listener.py → listeners/run_event_listener.py} +0 -0
- {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.0.dist-info}/WHEEL +0 -0
- {hatchet_sdk-1.2.6.dist-info → hatchet_sdk-1.3.0.dist-info}/entry_points.txt +0 -0
hatchet_sdk/clients/rest/configuration.py
CHANGED

@@ -15,6 +15,7 @@
 import copy
 import http.client as httplib
 import logging
+import multiprocessing
 import sys
 from logging import FileHandler
 from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict, Union
@@ -304,9 +305,12 @@ class Configuration:
         Set this to the SNI value expected by the server.
         """
 
-        self.connection_pool_maxsize =
-        """
-
+        self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
+        """urllib3 connection pool's maximum number of connections saved
+        per pool. urllib3 uses 1 connection as default value, but this is
+        not the best value when you are making a lot of possibly parallel
+        requests to the same host, which is often the case here.
+        cpu_count * 5 is used as default value to increase performance.
         """
 
         self.proxy: Optional[str] = None

hatchet_sdk/clients/rest/models/tenant_step_run_queue_metrics.py
CHANGED

@@ -19,7 +19,7 @@ import pprint
 import re  # noqa: F401
 from typing import Any, ClassVar, Dict, List, Optional, Set
 
-from pydantic import BaseModel, ConfigDict
+from pydantic import BaseModel, ConfigDict
 from typing_extensions import Self
 
 
@@ -28,7 +28,7 @@ class TenantStepRunQueueMetrics(BaseModel):
     TenantStepRunQueueMetrics
     """  # noqa: E501
 
-    queues: Optional[Dict[str,
+    queues: Optional[Dict[str, Any]] = None
     __properties: ClassVar[List[str]] = ["queues"]
 
     model_config = ConfigDict(

hatchet_sdk/clients/rest/models/v1_task_summary.py
CHANGED

@@ -34,6 +34,9 @@ class V1TaskSummary(BaseModel):
     """  # noqa: E501
 
     metadata: APIResourceMeta
+    action_id: StrictStr = Field(
+        description="The action ID of the task.", alias="actionId"
+    )
     additional_metadata: Optional[Dict[str, Any]] = Field(
         default=None,
         description="Additional metadata for the task run.",
@@ -104,6 +107,7 @@ class V1TaskSummary(BaseModel):
     )
     __properties: ClassVar[List[str]] = [
         "metadata",
+        "actionId",
         "additionalMetadata",
         "children",
         "createdAt",
@@ -193,6 +197,7 @@ class V1TaskSummary(BaseModel):
                 if obj.get("metadata") is not None
                 else None
             ),
+            "actionId": obj.get("actionId"),
             "additionalMetadata": obj.get("additionalMetadata"),
             "children": (
                 [V1TaskSummary.from_dict(_item) for _item in obj["children"]]

hatchet_sdk/clients/rest/models/workflow_runs_metrics.py
CHANGED

@@ -22,13 +22,17 @@ from typing import Any, ClassVar, Dict, List, Optional, Set
 from pydantic import BaseModel, ConfigDict
 from typing_extensions import Self
 
+from hatchet_sdk.clients.rest.models.workflow_runs_metrics_counts import (
+    WorkflowRunsMetricsCounts,
+)
+
 
 class WorkflowRunsMetrics(BaseModel):
     """
     WorkflowRunsMetrics
     """  # noqa: E501
 
-    counts: Optional[
+    counts: Optional[WorkflowRunsMetricsCounts] = None
     __properties: ClassVar[List[str]] = ["counts"]
 
     model_config = ConfigDict(
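Note on the configuration.py hunk above: connection_pool_maxsize now defaults to multiprocessing.cpu_count() * 5, which rest.py passes to urllib3 as the pool's maxsize. A minimal sketch of inspecting or overriding it, assuming the generated Configuration class can be constructed with its defaults (the import path is the one shown in this diff):

import multiprocessing

from hatchet_sdk.clients.rest.configuration import Configuration

config = Configuration()

# Per the hunk above, the default is now cpu_count * 5 connections kept per pool.
assert config.connection_pool_maxsize == multiprocessing.cpu_count() * 5

# The attribute can still be overridden before the REST client is built
# (20 is a hypothetical tuning value).
config.connection_pool_maxsize = 20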
hatchet_sdk/clients/rest/rest.py
CHANGED
@@ -16,16 +16,23 @@ import io
 import json
 import re
 import ssl
-from typing import Optional, Union
 
-import
-import aiohttp_retry
+import urllib3
 
 from hatchet_sdk.clients.rest.exceptions import ApiException, ApiValueError
 
-
+SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"}
+RESTResponseType = urllib3.HTTPResponse
 
-
+
+def is_socks_proxy_url(url):
+    if url is None:
+        return False
+    split_section = url.split("://")
+    if len(split_section) < 2:
+        return False
+    else:
+        return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES
 
 
 class RESTResponse(io.IOBase):
@@ -36,13 +43,13 @@ class RESTResponse(io.IOBase):
         self.reason = resp.reason
         self.data = None
 
-
+    def read(self):
         if self.data is None:
-            self.data =
+            self.data = self.response.data
         return self.data
 
     def getheaders(self):
-        """Returns a
+        """Returns a dictionary of the response headers."""
         return self.response.headers
 
     def getheader(self, name, default=None):
@@ -53,38 +60,57 @@ class RESTResponse(io.IOBase):
 class RESTClientObject:
 
     def __init__(self, configuration) -> None:
+        # urllib3.PoolManager will pass all kw parameters to connectionpool
+        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75  # noqa: E501
+        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680  # noqa: E501
+        # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html  # noqa: E501
+
+        # cert_reqs
+        if configuration.verify_ssl:
+            cert_reqs = ssl.CERT_REQUIRED
+        else:
+            cert_reqs = ssl.CERT_NONE
+
+        pool_args = {
+            "cert_reqs": cert_reqs,
+            "ca_certs": configuration.ssl_ca_cert,
+            "cert_file": configuration.cert_file,
+            "key_file": configuration.key_file,
+            "ca_cert_data": configuration.ca_cert_data,
+        }
+        if configuration.assert_hostname is not None:
+            pool_args["assert_hostname"] = configuration.assert_hostname
+
+        if configuration.retries is not None:
+            pool_args["retries"] = configuration.retries
+
+        if configuration.tls_server_name:
+            pool_args["server_hostname"] = configuration.tls_server_name
+
+        if configuration.socket_options is not None:
+            pool_args["socket_options"] = configuration.socket_options
+
+        if configuration.connection_pool_maxsize is not None:
+            pool_args["maxsize"] = configuration.connection_pool_maxsize
 
-        #
-        self.
-
-        self.ssl_context = ssl.create_default_context(
-            cafile=configuration.ssl_ca_cert,
-            cadata=configuration.ca_cert_data,
-        )
-        if configuration.cert_file:
-            self.ssl_context.load_cert_chain(
-                configuration.cert_file, keyfile=configuration.key_file
-            )
-
-        if not configuration.verify_ssl:
-            self.ssl_context.check_hostname = False
-            self.ssl_context.verify_mode = ssl.CERT_NONE
-
-        self.proxy = configuration.proxy
-        self.proxy_headers = configuration.proxy_headers
-
-        self.retries = configuration.retries
+        # https pool manager
+        self.pool_manager: urllib3.PoolManager
 
-
-
+        if configuration.proxy:
+            if is_socks_proxy_url(configuration.proxy):
+                from urllib3.contrib.socks import SOCKSProxyManager
 
-
-
-
-
-
+                pool_args["proxy_url"] = configuration.proxy
+                pool_args["headers"] = configuration.proxy_headers
+                self.pool_manager = SOCKSProxyManager(**pool_args)
+            else:
+                pool_args["proxy_url"] = configuration.proxy
+                pool_args["proxy_headers"] = configuration.proxy_headers
+                self.pool_manager = urllib3.ProxyManager(**pool_args)
+        else:
+            self.pool_manager = urllib3.PoolManager(**pool_args)
 
-
+    def request(
         self,
         method,
         url,
@@ -93,7 +119,7 @@ class RESTClientObject:
         post_params=None,
         _request_timeout=None,
     ):
-        """
+        """Perform requests.
 
         :param method: http request method
         :param url: http request url
@@ -117,82 +143,105 @@
 
         post_params = post_params or {}
         headers = headers or {}
-        # url already contains the URL query string
-        timeout = _request_timeout or 5 * 60
-
-        if "Content-Type" not in headers:
-            headers["Content-Type"] = "application/json"
-
-        args = {"method": method, "url": url, "timeout": timeout, "headers": headers}
-
-        if self.proxy:
-            args["proxy"] = self.proxy
-        if self.proxy_headers:
-            args["proxy_headers"] = self.proxy_headers
-
-        # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
-        if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]:
-            if re.search("json", headers["Content-Type"], re.IGNORECASE):
-                if body is not None:
-                    body = json.dumps(body)
-                args["data"] = body
-            elif headers["Content-Type"] == "application/x-www-form-urlencoded":
-                args["data"] = aiohttp.FormData(post_params)
-            elif headers["Content-Type"] == "multipart/form-data":
-                # must del headers['Content-Type'], or the correct
-                # Content-Type which generated by aiohttp
-                del headers["Content-Type"]
-                data = aiohttp.FormData()
-                for param in post_params:
-                    k, v = param
-                    if isinstance(v, tuple) and len(v) == 3:
-                        data.add_field(k, value=v[1], filename=v[0], content_type=v[2])
-                    else:
-                        # Ensures that dict objects are serialized
-                        if isinstance(v, dict):
-                            v = json.dumps(v)
-                        elif isinstance(v, int):
-                            v = str(v)
-                        data.add_field(k, v)
-                args["data"] = data
-
-            # Pass a `bytes` or `str` parameter directly in the body to support
-            # other content types than Json when `body` argument is provided
-            # in serialized form
-            elif isinstance(body, str) or isinstance(body, bytes):
-                args["data"] = body
-            else:
-                # Cannot generate the request from given parameters
-                msg = """Cannot prepare a request message for provided
-                arguments. Please check that your arguments match
-                declared content type."""
-                raise ApiException(status=0, reason=msg)
 
-
-
-
-
-
-
-                ),
-                trust_env=True,
-            )
-            pool_manager = self.pool_manager
-
-        if self.retries is not None and method in ALLOW_RETRY_METHODS:
-            if self.retry_client is None:
-                self.retry_client = aiohttp_retry.RetryClient(
-                    client_session=self.pool_manager,
-                    retry_options=aiohttp_retry.ExponentialRetry(
-                        attempts=self.retries,
-                        factor=2.0,
-                        start_timeout=0.1,
-                        max_timeout=120.0,
-                    ),
+        timeout = None
+        if _request_timeout:
+            if isinstance(_request_timeout, (int, float)):
+                timeout = urllib3.Timeout(total=_request_timeout)
+            elif isinstance(_request_timeout, tuple) and len(_request_timeout) == 2:
+                timeout = urllib3.Timeout(
+                    connect=_request_timeout[0], read=_request_timeout[1]
                 )
-            pool_manager = self.retry_client
 
-
+        try:
+            # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
+            if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]:
+
+                # no content type provided or payload is json
+                content_type = headers.get("Content-Type")
+                if not content_type or re.search("json", content_type, re.IGNORECASE):
+                    request_body = None
+                    if body is not None:
+                        request_body = json.dumps(body)
+                    r = self.pool_manager.request(
+                        method,
+                        url,
+                        body=request_body,
+                        timeout=timeout,
+                        headers=headers,
+                        preload_content=False,
+                    )
+                elif content_type == "application/x-www-form-urlencoded":
+                    r = self.pool_manager.request(
+                        method,
+                        url,
+                        fields=post_params,
+                        encode_multipart=False,
+                        timeout=timeout,
+                        headers=headers,
+                        preload_content=False,
+                    )
+                elif content_type == "multipart/form-data":
+                    # must del headers['Content-Type'], or the correct
+                    # Content-Type which generated by urllib3 will be
+                    # overwritten.
+                    del headers["Content-Type"]
+                    # Ensures that dict objects are serialized
+                    post_params = [
+                        (a, json.dumps(b)) if isinstance(b, dict) else (a, b)
+                        for a, b in post_params
+                    ]
+                    r = self.pool_manager.request(
+                        method,
+                        url,
+                        fields=post_params,
+                        encode_multipart=True,
+                        timeout=timeout,
+                        headers=headers,
+                        preload_content=False,
+                    )
+                # Pass a `string` parameter directly in the body to support
+                # other content types than JSON when `body` argument is
+                # provided in serialized form.
+                elif isinstance(body, str) or isinstance(body, bytes):
+                    r = self.pool_manager.request(
+                        method,
+                        url,
+                        body=body,
+                        timeout=timeout,
+                        headers=headers,
+                        preload_content=False,
+                    )
+                elif headers["Content-Type"].startswith("text/") and isinstance(
+                    body, bool
+                ):
+                    request_body = "true" if body else "false"
+                    r = self.pool_manager.request(
+                        method,
+                        url,
+                        body=request_body,
+                        preload_content=False,
+                        timeout=timeout,
+                        headers=headers,
+                    )
+                else:
+                    # Cannot generate the request from given parameters
+                    msg = """Cannot prepare a request message for provided
+                    arguments. Please check that your arguments match
+                    declared content type."""
+                    raise ApiException(status=0, reason=msg)
+            # For `GET`, `HEAD`
+            else:
+                r = self.pool_manager.request(
+                    method,
+                    url,
+                    fields={},
+                    timeout=timeout,
+                    headers=headers,
+                    preload_content=False,
+                )
+        except urllib3.exceptions.SSLError as e:
+            msg = "\n".join([type(e).__name__, str(e)])
+            raise ApiException(status=0, reason=msg)
 
         return RESTResponse(r)

hatchet_sdk/clients/v1/api_client.py
CHANGED

@@ -1,4 +1,4 @@
-from typing import
+from typing import ParamSpec, TypeVar
 
 from hatchet_sdk.clients.rest.api_client import ApiClient
 from hatchet_sdk.clients.rest.configuration import Configuration
@@ -40,5 +40,5 @@ class BaseRestClient:
 
         self.api_config.datetime_format = "%Y-%m-%dT%H:%M:%S.%fZ"
 
-    def client(self) ->
+    def client(self) -> ApiClient:
         return ApiClient(self.api_config)
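The rest.py rewrite above swaps the aiohttp/aiohttp_retry transport for urllib3 and adds a module-level is_socks_proxy_url helper that decides between SOCKSProxyManager and ProxyManager/PoolManager. A small sketch of the helper's behavior, using the module path shown in this diff:

from hatchet_sdk.clients.rest.rest import is_socks_proxy_url

# Schemes listed in SUPPORTED_SOCKS_PROXIES route through SOCKSProxyManager.
assert is_socks_proxy_url("socks5h://proxy.internal:1080") is True
# HTTP(S) proxy URLs (or no proxy at all) fall back to ProxyManager / PoolManager.
assert is_socks_proxy_url("http://proxy.internal:3128") is False
assert is_socks_proxy_url(None) is False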
hatchet_sdk/context/context.py
CHANGED
@@ -1,22 +1,20 @@
-import inspect
 import json
 import traceback
 from concurrent.futures import Future, ThreadPoolExecutor
 from datetime import timedelta
 from typing import TYPE_CHECKING, Any, cast
-
-from pydantic import BaseModel
+from warnings import warn
 
 from hatchet_sdk.clients.admin import AdminClient
 from hatchet_sdk.clients.dispatcher.dispatcher import (  # type: ignore[attr-defined]
     Action,
     DispatcherClient,
 )
-from hatchet_sdk.clients.
+from hatchet_sdk.clients.events import EventClient
+from hatchet_sdk.clients.listeners.durable_event_listener import (
     DurableEventListener,
     RegisterDurableEventRequest,
 )
-from hatchet_sdk.clients.events import EventClient
 from hatchet_sdk.context.worker_context import WorkerContext
 from hatchet_sdk.features.runs import RunsClient
 from hatchet_sdk.logger import logger
@@ -29,21 +27,6 @@ if TYPE_CHECKING:
     from hatchet_sdk.runnables.types import R, TWorkflowInput
 
 
-DEFAULT_WORKFLOW_POLLING_INTERVAL = 5  # Seconds
-
-
-def get_caller_file_path() -> str:
-    caller_frame = inspect.stack()[2]
-
-    return caller_frame.filename
-
-
-class StepRunError(BaseModel):
-    step_id: str
-    step_run_action_name: str
-    error: str
-
-
 class Context:
     def __init__(
         self,
@@ -85,7 +68,7 @@
     def trigger_data(self) -> JSONSerializableMapping:
         return self.data.triggers
 
-    def
+    def _task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
         from hatchet_sdk.runnables.types import R
 
         if self.was_skipped(task):
@@ -116,7 +99,25 @@
 
         return parent_step_data
 
+    def task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
+        from hatchet_sdk.runnables.types import R
+
+        ## If the task is async, we need to wrap its output in a coroutine
+        ## so that the type checker behaves right
+        async def _aio_output() -> "R":
+            return self._task_output(task)
+
+        if task.is_async_function:
+            return cast(R, _aio_output())
+
+        return self._task_output(task)
+
     def aio_task_output(self, task: "Task[TWorkflowInput, R]") -> "R":
+        warn(
+            "`aio_task_output` is deprecated. Use `task_output` instead.",
+            DeprecationWarning,
+        )
+
         if task.is_async_function:
             return self.task_output(task)
 
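In the context.py changes above, task_output takes over the async-aware behavior (wrapping a parent's output in a coroutine when that parent task is an async function), and aio_task_output now only emits a DeprecationWarning before delegating. An illustrative sketch, not taken from the package: the workflow and task names are hypothetical, and only the two Context methods come from this diff:

from hatchet_sdk import Context  # assumed re-export at the package root

# `wf`, `sync_parent` and `async_parent` are hypothetical tasks defined elsewhere.
@wf.task(parents=[sync_parent, async_parent])
async def child(input: dict, ctx: Context) -> dict:
    a = ctx.task_output(sync_parent)         # value returned directly for a sync parent
    b = await ctx.task_output(async_parent)  # wrapped in a coroutine for an async parent
    # ctx.aio_task_output(async_parent) still works but emits a DeprecationWarning.
    return {"a": a, "b": b}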
hatchet_sdk/features/cron.py
CHANGED
@@ -1,3 +1,5 @@
+import asyncio
+
 from pydantic import BaseModel, Field, field_validator
 
 from hatchet_sdk.clients.rest.api.workflow_api import WorkflowApi
@@ -18,7 +20,6 @@ from hatchet_sdk.clients.v1.api_client import (
     BaseRestClient,
     maybe_additional_metadata_to_kv,
 )
-from hatchet_sdk.utils.aio import run_async_from_sync
 from hatchet_sdk.utils.typing import JSONSerializableMapping
 
 
@@ -77,7 +78,7 @@ class CronClient(BaseRestClient):
     def _wa(self, client: ApiClient) -> WorkflowApi:
         return WorkflowApi(client)
 
-
+    def create(
         self,
         workflow_name: str,
         cron_name: str,
@@ -102,8 +103,8 @@ class CronClient(BaseRestClient):
             expression=expression, input=input, additional_metadata=additional_metadata
         )
 
-
-        return
+        with self.client() as client:
+            return self._wra(client).cron_workflow_trigger_create(
                 tenant=self.client_config.tenant_id,
                 workflow=workflow_name,
                 create_cron_workflow_trigger_request=CreateCronWorkflowTriggerRequest(
@@ -114,7 +115,7 @@ class CronClient(BaseRestClient):
                 ),
             )
 
-    def
+    async def aio_create(
         self,
         workflow_name: str,
         cron_name: str,
@@ -122,8 +123,8 @@ class CronClient(BaseRestClient):
         input: JSONSerializableMapping,
         additional_metadata: JSONSerializableMapping,
     ) -> CronWorkflows:
-        return
-            self.
+        return await asyncio.to_thread(
+            self.create,
             workflow_name,
             cron_name,
             expression,
@@ -131,20 +132,20 @@ class CronClient(BaseRestClient):
             additional_metadata,
         )
 
-
+    def delete(self, cron_id: str) -> None:
         """
         Asynchronously deletes a workflow cron trigger.
 
         Args:
             cron_id (str): The cron trigger ID or CronWorkflows instance to delete.
         """
-
-
+        with self.client() as client:
+            self._wa(client).workflow_cron_delete(
                 tenant=self.client_config.tenant_id, cron_workflow=str(cron_id)
             )
 
-    def
-        return
+    async def aio_delete(self, cron_id: str) -> None:
+        return await asyncio.to_thread(self.delete, cron_id)
 
     async def aio_list(
         self,
@@ -156,7 +157,7 @@ class CronClient(BaseRestClient):
         order_by_direction: WorkflowRunOrderByDirection | None = None,
     ) -> CronWorkflowsList:
         """
-
+        Synchronously retrieves a list of all workflow cron triggers matching the criteria.
 
         Args:
             offset (int | None): The offset to start the list from.
@@ -169,18 +170,15 @@ class CronClient(BaseRestClient):
         Returns:
             CronWorkflowsList: A list of cron workflows.
         """
-
-
-
-
-
-
-
-
-
-            order_by_field=order_by_field,
-            order_by_direction=order_by_direction,
-        )
+        return await asyncio.to_thread(
+            self.list,
+            offset=offset,
+            limit=limit,
+            workflow_id=workflow_id,
+            additional_metadata=additional_metadata,
+            order_by_field=order_by_field,
+            order_by_direction=order_by_direction,
+        )
 
     def list(
         self,
@@ -192,7 +190,7 @@ class CronClient(BaseRestClient):
        order_by_direction: WorkflowRunOrderByDirection | None = None,
     ) -> CronWorkflowsList:
         """
-
+        Asynchronously retrieves a list of all workflow cron triggers matching the criteria.
 
         Args:
             offset (int | None): The offset to start the list from.
@@ -205,17 +203,20 @@ class CronClient(BaseRestClient):
         Returns:
             CronWorkflowsList: A list of cron workflows.
         """
-
-        self.
-
-
-
-
-
-
-
+        with self.client() as client:
+            return self._wa(client).cron_workflow_list(
+                tenant=self.client_config.tenant_id,
+                offset=offset,
+                limit=limit,
+                workflow_id=workflow_id,
+                additional_metadata=maybe_additional_metadata_to_kv(
+                    additional_metadata
+                ),
+                order_by_field=order_by_field,
+                order_by_direction=order_by_direction,
+            )
 
-
+    def get(self, cron_id: str) -> CronWorkflows:
         """
         Asynchronously retrieves a specific workflow cron trigger by ID.
 
@@ -225,12 +226,12 @@ class CronClient(BaseRestClient):
         Returns:
             CronWorkflows: The requested cron workflow instance.
         """
-
-        return
+        with self.client() as client:
+            return self._wa(client).workflow_cron_get(
                 tenant=self.client_config.tenant_id, cron_workflow=str(cron_id)
             )
 
-    def
+    async def aio_get(self, cron_id: str) -> CronWorkflows:
         """
         Synchronously retrieves a specific workflow cron trigger by ID.
 
@@ -240,4 +241,4 @@ class CronClient(BaseRestClient):
         Returns:
             CronWorkflows: The requested cron workflow instance.
         """
-        return
+        return await asyncio.to_thread(self.get, cron_id)
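The cron.py changes follow a single pattern: the removed run_async_from_sync import (hatchet_sdk/utils/aio.py is deleted in this release) is replaced by asyncio.to_thread, so each aio_* method now offloads its blocking sync counterpart to a worker thread. A self-contained sketch of that pattern with stand-in functions (nothing below is hatchet API):

import asyncio


def list_crons(limit: int) -> list[str]:
    # Stand-in for the blocking REST call a sync client method would make.
    return [f"cron-{i}" for i in range(limit)]


async def aio_list_crons(limit: int) -> list[str]:
    # The same delegation the diff adds: run the sync method in a worker thread.
    return await asyncio.to_thread(list_crons, limit)


print(asyncio.run(aio_list_crons(3)))  # ['cron-0', 'cron-1', 'cron-2']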