hatchet-sdk 1.0.0__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release of hatchet-sdk has been flagged as potentially problematic.

Files changed (73)
  1. hatchet_sdk/__init__.py +32 -16
  2. hatchet_sdk/client.py +25 -63
  3. hatchet_sdk/clients/admin.py +203 -142
  4. hatchet_sdk/clients/dispatcher/action_listener.py +42 -42
  5. hatchet_sdk/clients/dispatcher/dispatcher.py +18 -16
  6. hatchet_sdk/clients/durable_event_listener.py +327 -0
  7. hatchet_sdk/clients/rest/__init__.py +12 -1
  8. hatchet_sdk/clients/rest/api/log_api.py +258 -0
  9. hatchet_sdk/clients/rest/api/task_api.py +32 -6
  10. hatchet_sdk/clients/rest/api/workflow_runs_api.py +626 -0
  11. hatchet_sdk/clients/rest/models/__init__.py +12 -1
  12. hatchet_sdk/clients/rest/models/v1_log_line.py +94 -0
  13. hatchet_sdk/clients/rest/models/v1_log_line_level.py +39 -0
  14. hatchet_sdk/clients/rest/models/v1_log_line_list.py +110 -0
  15. hatchet_sdk/clients/rest/models/v1_task_summary.py +80 -64
  16. hatchet_sdk/clients/rest/models/v1_trigger_workflow_run_request.py +95 -0
  17. hatchet_sdk/clients/rest/models/v1_workflow_run_display_name.py +98 -0
  18. hatchet_sdk/clients/rest/models/v1_workflow_run_display_name_list.py +114 -0
  19. hatchet_sdk/clients/rest/models/workflow_run_shape_item_for_workflow_run_details.py +9 -4
  20. hatchet_sdk/clients/rest/models/workflow_runs_metrics.py +5 -1
  21. hatchet_sdk/clients/run_event_listener.py +0 -1
  22. hatchet_sdk/clients/v1/api_client.py +81 -0
  23. hatchet_sdk/context/context.py +86 -159
  24. hatchet_sdk/contracts/dispatcher_pb2_grpc.py +1 -1
  25. hatchet_sdk/contracts/events_pb2.py +2 -2
  26. hatchet_sdk/contracts/events_pb2_grpc.py +1 -1
  27. hatchet_sdk/contracts/v1/dispatcher_pb2.py +36 -0
  28. hatchet_sdk/contracts/v1/dispatcher_pb2.pyi +38 -0
  29. hatchet_sdk/contracts/v1/dispatcher_pb2_grpc.py +145 -0
  30. hatchet_sdk/contracts/v1/shared/condition_pb2.py +39 -0
  31. hatchet_sdk/contracts/v1/shared/condition_pb2.pyi +72 -0
  32. hatchet_sdk/contracts/v1/shared/condition_pb2_grpc.py +29 -0
  33. hatchet_sdk/contracts/v1/workflows_pb2.py +67 -0
  34. hatchet_sdk/contracts/v1/workflows_pb2.pyi +228 -0
  35. hatchet_sdk/contracts/v1/workflows_pb2_grpc.py +234 -0
  36. hatchet_sdk/contracts/workflows_pb2_grpc.py +1 -1
  37. hatchet_sdk/features/cron.py +91 -121
  38. hatchet_sdk/features/logs.py +16 -0
  39. hatchet_sdk/features/metrics.py +75 -0
  40. hatchet_sdk/features/rate_limits.py +45 -0
  41. hatchet_sdk/features/runs.py +221 -0
  42. hatchet_sdk/features/scheduled.py +114 -131
  43. hatchet_sdk/features/workers.py +41 -0
  44. hatchet_sdk/features/workflows.py +55 -0
  45. hatchet_sdk/hatchet.py +463 -165
  46. hatchet_sdk/opentelemetry/instrumentor.py +8 -13
  47. hatchet_sdk/rate_limit.py +33 -39
  48. hatchet_sdk/runnables/contextvars.py +12 -0
  49. hatchet_sdk/runnables/standalone.py +192 -0
  50. hatchet_sdk/runnables/task.py +144 -0
  51. hatchet_sdk/runnables/types.py +138 -0
  52. hatchet_sdk/runnables/workflow.py +771 -0
  53. hatchet_sdk/utils/aio_utils.py +0 -79
  54. hatchet_sdk/utils/proto_enums.py +0 -7
  55. hatchet_sdk/utils/timedelta_to_expression.py +23 -0
  56. hatchet_sdk/utils/typing.py +2 -2
  57. hatchet_sdk/v0/clients/rest_client.py +9 -0
  58. hatchet_sdk/v0/worker/action_listener_process.py +18 -2
  59. hatchet_sdk/waits.py +120 -0
  60. hatchet_sdk/worker/action_listener_process.py +64 -30
  61. hatchet_sdk/worker/runner/run_loop_manager.py +35 -26
  62. hatchet_sdk/worker/runner/runner.py +72 -55
  63. hatchet_sdk/worker/runner/utils/capture_logs.py +3 -11
  64. hatchet_sdk/worker/worker.py +155 -118
  65. hatchet_sdk/workflow_run.py +4 -5
  66. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/METADATA +1 -2
  67. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/RECORD +69 -43
  68. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/entry_points.txt +2 -0
  69. hatchet_sdk/clients/rest_client.py +0 -636
  70. hatchet_sdk/semver.py +0 -30
  71. hatchet_sdk/worker/runner/utils/error_with_traceback.py +0 -6
  72. hatchet_sdk/workflow.py +0 -527
  73. {hatchet_sdk-1.0.0.dist-info → hatchet_sdk-1.0.1.dist-info}/WHEEL +0 -0
@@ -55,17 +55,19 @@ class DispatcherClient:
         for key, value in preset_labels.items():
             req.labels[key] = WorkerLabels(strValue=str(value))

-        # Register the worker
-        response: WorkerRegisterResponse = await self.aio_client.Register(
-            WorkerRegisterRequest(
-                workerName=req.worker_name,
-                actions=req.actions,
-                services=req.services,
-                maxRuns=req.max_runs,
-                labels=req.labels,
+        response = cast(
+            WorkerRegisterResponse,
+            await self.aio_client.Register(
+                WorkerRegisterRequest(
+                    workerName=req.worker_name,
+                    actions=req.actions,
+                    services=req.services,
+                    maxRuns=req.slots,
+                    labels=req.labels,
+                ),
+                timeout=DEFAULT_REGISTER_TIMEOUT,
+                metadata=get_metadata(self.token),
             ),
-            timeout=DEFAULT_REGISTER_TIMEOUT,
-            metadata=get_metadata(self.token),
         )

         return ActionListener(self.config, response.workerId)
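
Two things change in this hunk: the register request now reads req.slots (previously req.max_runs), and the awaited stub call is wrapped in typing.cast instead of relying on a bare annotation, since the generated grpc.aio stubs are effectively untyped. As a minimal, self-contained sketch of that cast pattern (the stub and response class below are illustrative stand-ins, not Hatchet APIs):

import asyncio
from typing import Any, cast


class FakeRegisterResponse:
    """Stand-in for a protobuf response message."""

    workerId = "worker-123"


async def untyped_register(*args: Any, **kwargs: Any) -> Any:
    # Stand-in for an untyped grpc.aio stub method, which returns Any.
    return FakeRegisterResponse()


async def register_worker() -> str:
    # cast() tells the type checker what the awaited value actually is,
    # without changing runtime behavior.
    response = cast(FakeRegisterResponse, await untyped_register())
    return response.workerId


print(asyncio.run(register_worker()))  # -> "worker-123"
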
@@ -93,8 +95,8 @@ class DispatcherClient:
     async def _try_send_step_action_event(
         self, action: Action, event_type: StepActionEventType, payload: str
     ) -> grpc.aio.UnaryUnaryCall[StepActionEvent, ActionEventResponse]:
-        eventTimestamp = Timestamp()
-        eventTimestamp.GetCurrentTime()
+        event_timestamp = Timestamp()
+        event_timestamp.GetCurrentTime()

         event = StepActionEvent(
             workerId=action.worker_id,
@@ -103,7 +105,7 @@ class DispatcherClient:
             stepId=action.step_id,
             stepRunId=action.step_run_id,
             actionId=action.action_id,
-            eventTimestamp=eventTimestamp,
+            eventTimestamp=event_timestamp,
             eventType=event_type,
             eventPayload=payload,
             retryCount=action.retry_count,
@@ -120,15 +122,15 @@ class DispatcherClient:
     async def send_group_key_action_event(
         self, action: Action, event_type: GroupKeyActionEventType, payload: str
     ) -> grpc.aio.UnaryUnaryCall[GroupKeyActionEvent, ActionEventResponse]:
-        eventTimestamp = Timestamp()
-        eventTimestamp.GetCurrentTime()
+        event_timestamp = Timestamp()
+        event_timestamp.GetCurrentTime()

         event = GroupKeyActionEvent(
             workerId=action.worker_id,
             workflowRunId=action.workflow_run_id,
             getGroupKeyRunId=action.get_group_key_run_id,
             actionId=action.action_id,
-            eventTimestamp=eventTimestamp,
+            eventTimestamp=event_timestamp,
             eventType=event_type,
             eventPayload=payload,
         )
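
These hunks only rename the local variable to snake_case. For reference, Timestamp here is the protobuf well-known type, and GetCurrentTime() stamps it with the current wall-clock time; a tiny standalone illustration:

from google.protobuf.timestamp_pb2 import Timestamp

# Create a Timestamp and set it to "now"; this is what the renamed
# event_timestamp variable holds before it is attached to the event proto.
event_timestamp = Timestamp()
event_timestamp.GetCurrentTime()

print(event_timestamp.ToDatetime())  # naive UTC datetime
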
@@ -0,0 +1,327 @@
+import asyncio
+import json
+from collections.abc import AsyncIterator
+from typing import Any, Literal, cast
+
+import grpc
+import grpc.aio
+from grpc._cython import cygrpc  # type: ignore[attr-defined]
+from pydantic import BaseModel, ConfigDict
+
+from hatchet_sdk.clients.event_ts import ThreadSafeEvent, read_with_interrupt
+from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry
+from hatchet_sdk.config import ClientConfig
+from hatchet_sdk.connection import new_conn
+from hatchet_sdk.contracts.v1.dispatcher_pb2 import (
+    DurableEvent,
+    ListenForDurableEventRequest,
+)
+from hatchet_sdk.contracts.v1.dispatcher_pb2 import (
+    RegisterDurableEventRequest as RegisterDurableEventRequestProto,
+)
+from hatchet_sdk.contracts.v1.dispatcher_pb2_grpc import V1DispatcherStub
+from hatchet_sdk.contracts.v1.shared.condition_pb2 import DurableEventListenerConditions
+from hatchet_sdk.logger import logger
+from hatchet_sdk.metadata import get_metadata
+from hatchet_sdk.waits import SleepCondition, UserEventCondition
+
+DEFAULT_DURABLE_EVENT_LISTENER_RETRY_INTERVAL = 3  # seconds
+DEFAULT_DURABLE_EVENT_LISTENER_RETRY_COUNT = 5
+DEFAULT_DURABLE_EVENT_LISTENER_INTERRUPT_INTERVAL = 1800  # 30 minutes
+
+
+class _Subscription:
+    def __init__(self, id: int, task_id: str, signal_key: str):
+        self.id = id
+        self.task_id = task_id
+        self.signal_key = signal_key
+        self.queue: asyncio.Queue[DurableEvent | None] = asyncio.Queue()
+
+    async def __aiter__(self) -> "_Subscription":
+        return self
+
+    async def __anext__(self) -> DurableEvent | None:
+        return await self.queue.get()
+
+    async def get(self) -> DurableEvent:
+        event = await self.queue.get()
+
+        if event is None:
+            raise StopAsyncIteration
+
+        return event
+
+    async def put(self, item: DurableEvent) -> None:
+        await self.queue.put(item)
+
+    async def close(self) -> None:
+        await self.queue.put(None)
+
+
+class RegisterDurableEventRequest(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    task_id: str
+    signal_key: str
+    conditions: list[SleepCondition | UserEventCondition]
+
+    def to_proto(self) -> RegisterDurableEventRequestProto:
+        return RegisterDurableEventRequestProto(
+            task_id=self.task_id,
+            signal_key=self.signal_key,
+            conditions=DurableEventListenerConditions(
+                sleep_conditions=[
+                    c.to_pb() for c in self.conditions if isinstance(c, SleepCondition)
+                ],
+                user_event_conditions=[
+                    c.to_pb()
+                    for c in self.conditions
+                    if isinstance(c, UserEventCondition)
+                ],
+            ),
+        )
+
+
+class DurableEventListener:
+    def __init__(self, config: ClientConfig):
+        try:
+            asyncio.get_running_loop()
+        except RuntimeError:
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+
+        conn = new_conn(config, True)
+        self.client = V1DispatcherStub(conn)  # type: ignore[no-untyped-call]
+        self.token = config.token
+        self.config = config
+
+        # list of all active subscriptions, mapping from a subscription id to a task id and signal key
+        self.subscriptions_to_task_id_signal_key: dict[int, tuple[str, str]] = {}
+
+        # task id-signal key tuples mapped to an array of subscription ids
+        self.task_id_signal_key_to_subscriptions: dict[tuple[str, str], list[int]] = {}
+
+        self.subscription_counter: int = 0
+        self.subscription_counter_lock: asyncio.Lock = asyncio.Lock()
+
+        self.requests: asyncio.Queue[ListenForDurableEventRequest | int] = (
+            asyncio.Queue()
+        )
+
+        self.listener: (
+            grpc.aio.UnaryStreamCall[ListenForDurableEventRequest, DurableEvent] | None
+        ) = None
+        self.listener_task: asyncio.Task[None] | None = None
+
+        self.curr_requester: int = 0
+
+        self.events: dict[int, _Subscription] = {}
+
+        self.interrupter: asyncio.Task[None] | None = None
+
+    async def _interrupter(self) -> None:
+        """
+        _interrupter runs in a separate thread and interrupts the listener according to a configurable duration.
+        """
+        await asyncio.sleep(DEFAULT_DURABLE_EVENT_LISTENER_INTERRUPT_INTERVAL)
+
+        if self.interrupt is not None:
+            self.interrupt.set()
+
+    async def _init_producer(self) -> None:
+        try:
+            if not self.listener:
+                while True:
+                    try:
+                        self.listener = await self._retry_subscribe()
+
+                        logger.debug("Workflow run listener connected.")
+
+                        # spawn an interrupter task
+                        if self.interrupter is not None and not self.interrupter.done():
+                            self.interrupter.cancel()
+
+                        self.interrupter = asyncio.create_task(self._interrupter())
+
+                        while True:
+                            self.interrupt = ThreadSafeEvent()
+                            if self.listener is None:
+                                continue
+
+                            t = asyncio.create_task(
+                                read_with_interrupt(self.listener, self.interrupt)
+                            )
+                            await self.interrupt.wait()
+
+                            if not t.done():
+                                logger.warning(
+                                    "Interrupted read_with_interrupt task of durable event listener"
+                                )
+
+                                t.cancel()
+                                if self.listener:
+                                    self.listener.cancel()
+                                await asyncio.sleep(
+                                    DEFAULT_DURABLE_EVENT_LISTENER_RETRY_INTERVAL
+                                )
+                                break
+
+                            event = t.result()
+
+                            if event is cygrpc.EOF:
+                                break
+
+                            # get a list of subscriptions for this task-signal pair
+                            subscriptions = (
+                                self.task_id_signal_key_to_subscriptions.get(
+                                    (event.task_id, event.signal_key), []
+                                )
+                            )
+
+                            for subscription_id in subscriptions:
+                                await self.events[subscription_id].put(event)
+
+                    except grpc.RpcError as e:
+                        logger.debug(f"grpc error in durable event listener: {e}")
+                        await asyncio.sleep(
+                            DEFAULT_DURABLE_EVENT_LISTENER_RETRY_INTERVAL
+                        )
+                        continue
+
+        except Exception as e:
+            logger.error(f"Error in durable event listener: {e}")
+
+            self.listener = None
+
+            # close all subscriptions
+            for subscription_id in self.events:
+                await self.events[subscription_id].close()
+
+            raise e
+
+    async def _request(self) -> AsyncIterator[ListenForDurableEventRequest]:
+        self.curr_requester = self.curr_requester + 1
+
+        # replay all existing subscriptions
+        for task_id, signal_key in set(
+            self.subscriptions_to_task_id_signal_key.values()
+        ):
+            yield ListenForDurableEventRequest(
+                task_id=task_id,
+                signal_key=signal_key,
+            )
+
+        while True:
+            request = await self.requests.get()
+
+            # if the request is an int which matches the current requester, then we should stop
+            if request == self.curr_requester:
+                break
+
+            # if we've gotten an int that doesn't match the current requester, then we should ignore it
+            if isinstance(request, int):
+                continue
+
+            yield request
+            self.requests.task_done()
+
+    def cleanup_subscription(self, subscription_id: int) -> None:
+        task_id_signal_key = self.subscriptions_to_task_id_signal_key[subscription_id]
+
+        if task_id_signal_key in self.task_id_signal_key_to_subscriptions:
+            self.task_id_signal_key_to_subscriptions[task_id_signal_key].remove(
+                subscription_id
+            )
+
+        del self.subscriptions_to_task_id_signal_key[subscription_id]
+        del self.events[subscription_id]
+
+    async def subscribe(self, task_id: str, signal_key: str) -> DurableEvent:
+        try:
+            # create a new subscription id, place a mutex on the counter
+            async with self.subscription_counter_lock:
+                self.subscription_counter += 1
+                subscription_id = self.subscription_counter
+
+            self.subscriptions_to_task_id_signal_key[subscription_id] = (
+                task_id,
+                signal_key,
+            )
+
+            if (task_id, signal_key) not in self.task_id_signal_key_to_subscriptions:
+                self.task_id_signal_key_to_subscriptions[(task_id, signal_key)] = [
+                    subscription_id
+                ]
+            else:
+                self.task_id_signal_key_to_subscriptions[(task_id, signal_key)].append(
+                    subscription_id
+                )
+
+            self.events[subscription_id] = _Subscription(
+                subscription_id, task_id, signal_key
+            )
+
+            await self.requests.put(
+                ListenForDurableEventRequest(
+                    task_id=task_id,
+                    signal_key=signal_key,
+                )
+            )
+
+            if not self.listener_task or self.listener_task.done():
+                self.listener_task = asyncio.create_task(self._init_producer())
+
+            return await self.events[subscription_id].get()
+        except asyncio.CancelledError:
+            raise
+        finally:
+            self.cleanup_subscription(subscription_id)
+
+    async def _retry_subscribe(
+        self,
+    ) -> grpc.aio.UnaryStreamCall[ListenForDurableEventRequest, DurableEvent]:
+        retries = 0
+
+        while retries < DEFAULT_DURABLE_EVENT_LISTENER_RETRY_COUNT:
+            try:
+                if retries > 0:
+                    await asyncio.sleep(DEFAULT_DURABLE_EVENT_LISTENER_RETRY_INTERVAL)
+
+                # signal previous async iterator to stop
+                if self.curr_requester != 0:
+                    self.requests.put_nowait(self.curr_requester)
+
+                return cast(
+                    grpc.aio.UnaryStreamCall[
+                        ListenForDurableEventRequest, DurableEvent
+                    ],
+                    self.client.ListenForDurableEvent(
+                        self._request(),
+                        metadata=get_metadata(self.token),
+                    ),
+                )
+            except grpc.RpcError as e:
+                if e.code() == grpc.StatusCode.UNAVAILABLE:
+                    retries = retries + 1
+                else:
+                    raise ValueError(f"gRPC error: {e}")
+
+        raise ValueError("Failed to connect to durable event listener")
+
+    @tenacity_retry
+    def register_durable_event(
+        self, request: RegisterDurableEventRequest
+    ) -> Literal[True]:
+        self.client.RegisterDurableEvent(
+            request.to_proto(),
+            timeout=5,
+            metadata=get_metadata(self.token),
+        )
+
+        return True
+
+    @tenacity_retry
+    async def result(self, task_id: str, signal_key: str) -> dict[str, Any]:
+        event = await self.subscribe(task_id, signal_key)
+
+        return cast(dict[str, Any], json.loads(event.data.decode("utf-8")))
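
This new module (hatchet_sdk/clients/durable_event_listener.py) multiplexes a single ListenForDurableEvent gRPC stream across per-(task_id, signal_key) subscriptions. A hedged usage sketch follows; the DurableEventListener methods and RegisterDurableEventRequest fields come from the diff above, but the ClientConfig construction, the SleepCondition constructor arguments, and the placeholder ids are assumptions.

import asyncio
from datetime import timedelta

from hatchet_sdk.clients.durable_event_listener import (
    DurableEventListener,
    RegisterDurableEventRequest,
)
from hatchet_sdk.config import ClientConfig
from hatchet_sdk.waits import SleepCondition


async def main() -> None:
    # Assumes ClientConfig picks up the token and server address from the environment.
    listener = DurableEventListener(ClientConfig())

    task_id = "00000000-0000-0000-0000-000000000000"  # placeholder task id
    signal_key = "sleep:10s"

    # Register interest in a durable event for the task/signal pair (synchronous call).
    listener.register_durable_event(
        RegisterDurableEventRequest(
            task_id=task_id,
            signal_key=signal_key,
            # SleepCondition constructor arguments are assumed here.
            conditions=[SleepCondition(duration=timedelta(seconds=10))],
        )
    )

    # Block until the matching DurableEvent arrives, then decode its JSON payload.
    payload = await listener.result(task_id=task_id, signal_key=signal_key)
    print(payload)


if __name__ == "__main__":
    asyncio.run(main())
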
@@ -230,8 +230,10 @@ from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import (
 from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic
 from hatchet_sdk.clients.rest.models.v1_cancel_task_request import V1CancelTaskRequest
 from hatchet_sdk.clients.rest.models.v1_dag_children import V1DagChildren
+from hatchet_sdk.clients.rest.models.v1_log_line import V1LogLine
+from hatchet_sdk.clients.rest.models.v1_log_line_level import V1LogLineLevel
+from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList
 from hatchet_sdk.clients.rest.models.v1_replay_task_request import V1ReplayTaskRequest
-from hatchet_sdk.clients.rest.models.v1_task import V1Task
 from hatchet_sdk.clients.rest.models.v1_task_event import V1TaskEvent
 from hatchet_sdk.clients.rest.models.v1_task_event_list import V1TaskEventList
 from hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType
@@ -243,8 +245,17 @@ from hatchet_sdk.clients.rest.models.v1_task_run_status import V1TaskRunStatus
 from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus
 from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary
 from hatchet_sdk.clients.rest.models.v1_task_summary_list import V1TaskSummaryList
+from hatchet_sdk.clients.rest.models.v1_trigger_workflow_run_request import (
+    V1TriggerWorkflowRunRequest,
+)
 from hatchet_sdk.clients.rest.models.v1_workflow_run import V1WorkflowRun
 from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails
+from hatchet_sdk.clients.rest.models.v1_workflow_run_display_name import (
+    V1WorkflowRunDisplayName,
+)
+from hatchet_sdk.clients.rest.models.v1_workflow_run_display_name_list import (
+    V1WorkflowRunDisplayNameList,
+)
 from hatchet_sdk.clients.rest.models.v1_workflow_type import V1WorkflowType
 from hatchet_sdk.clients.rest.models.webhook_worker import WebhookWorker
 from hatchet_sdk.clients.rest.models.webhook_worker_create_request import (
@@ -25,6 +25,7 @@ from hatchet_sdk.clients.rest.models.log_line_order_by_direction import (
     LogLineOrderByDirection,
 )
 from hatchet_sdk.clients.rest.models.log_line_order_by_field import LogLineOrderByField
+from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList
 from hatchet_sdk.clients.rest.rest import RESTResponseType


@@ -445,3 +446,260 @@ class LogApi:
             _host=_host,
             _request_auth=_request_auth,
         )
+
+    @validate_call
+    async def v1_log_line_list(
+        self,
+        task: Annotated[
+            str,
+            Field(min_length=36, strict=True, max_length=36, description="The task id"),
+        ],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+            ],
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> V1LogLineList:
+        """List log lines
+
+        Lists log lines for a task
+
+        :param task: The task id (required)
+        :type task: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for an a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """  # noqa: E501
+
+        _param = self._v1_log_line_list_serialize(
+            task=task,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index,
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            "200": "V1LogLineList",
+            "400": "APIErrors",
+            "403": "APIErrors",
+        }
+        response_data = await self.api_client.call_api(
+            *_param, _request_timeout=_request_timeout
+        )
+        await response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        ).data
+
+    @validate_call
+    async def v1_log_line_list_with_http_info(
+        self,
+        task: Annotated[
+            str,
+            Field(min_length=36, strict=True, max_length=36, description="The task id"),
+        ],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+            ],
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> ApiResponse[V1LogLineList]:
+        """List log lines
+
+        Lists log lines for a task
+
+        :param task: The task id (required)
+        :type task: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for an a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """  # noqa: E501
+
+        _param = self._v1_log_line_list_serialize(
+            task=task,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index,
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            "200": "V1LogLineList",
+            "400": "APIErrors",
+            "403": "APIErrors",
+        }
+        response_data = await self.api_client.call_api(
+            *_param, _request_timeout=_request_timeout
+        )
+        await response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        )
+
+    @validate_call
+    async def v1_log_line_list_without_preload_content(
+        self,
+        task: Annotated[
+            str,
+            Field(min_length=36, strict=True, max_length=36, description="The task id"),
+        ],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
+            ],
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> RESTResponseType:
+        """List log lines
+
+        Lists log lines for a task
+
+        :param task: The task id (required)
+        :type task: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for an a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """  # noqa: E501
+
+        _param = self._v1_log_line_list_serialize(
+            task=task,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index,
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            "200": "V1LogLineList",
+            "400": "APIErrors",
+            "403": "APIErrors",
+        }
+        response_data = await self.api_client.call_api(
+            *_param, _request_timeout=_request_timeout
+        )
+        return response_data.response
+
+    def _v1_log_line_list_serialize(
+        self,
+        task,
+        _request_auth,
+        _content_type,
+        _headers,
+        _host_index,
+    ) -> RequestSerialized:
+
+        _host = None
+
+        _collection_formats: Dict[str, str] = {}
+
+        _path_params: Dict[str, str] = {}
+        _query_params: List[Tuple[str, str]] = []
+        _header_params: Dict[str, Optional[str]] = _headers or {}
+        _form_params: List[Tuple[str, str]] = []
+        _files: Dict[
+            str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
+        ] = {}
+        _body_params: Optional[bytes] = None
+
+        # process the path parameters
+        if task is not None:
+            _path_params["task"] = task
+        # process the query parameters
+        # process the header parameters
+        # process the form parameters
+        # process the body parameter
+
+        # set the HTTP header `Accept`
+        if "Accept" not in _header_params:
+            _header_params["Accept"] = self.api_client.select_header_accept(
+                ["application/json"]
+            )
+
+        # authentication setting
+        _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
+
+        return self.api_client.param_serialize(
+            method="GET",
+            resource_path="/api/v1/stable/tasks/{task}/logs",
+            path_params=_path_params,
+            query_params=_query_params,
+            header_params=_header_params,
+            body=_body_params,
+            post_params=_form_params,
+            files=_files,
+            auth_settings=_auth_settings,
+            collection_formats=_collection_formats,
+            _host=_host,
+            _request_auth=_request_auth,
+        )
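
The new LogApi.v1_log_line_list methods expose GET /api/v1/stable/tasks/{task}/logs through the generated async client. A hedged calling sketch follows; only LogApi, v1_log_line_list, and its 36-character task parameter come from this diff, while the ApiClient/Configuration import paths and fields are assumptions based on the usual openapi-generator layout.

import asyncio

from hatchet_sdk.clients.rest.api.log_api import LogApi
from hatchet_sdk.clients.rest.api_client import ApiClient          # assumed module path
from hatchet_sdk.clients.rest.configuration import Configuration   # assumed module path


async def fetch_task_logs(api_token: str, task_id: str) -> None:
    # Host and bearer-token wiring are assumptions, not taken from this diff.
    config = Configuration(host="https://app.example-hatchet.dev")
    config.access_token = api_token

    async with ApiClient(config) as api_client:
        log_api = LogApi(api_client)
        # task must be a 36-character task id (enforced by the Field validator above).
        log_lines = await log_api.v1_log_line_list(task=task_id)
        print(log_lines)


if __name__ == "__main__":
    asyncio.run(fetch_task_logs("<token>", "<task-uuid>"))
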