hatchet-sdk 0.43.2__py3-none-any.whl → 0.44.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -252,11 +252,11 @@ class AdminClientAioImpl(AdminClientBase):
                 workflow_listener=self.pooled_workflow_listener,
                 workflow_run_event_listener=self.listener_client,
             )
-        except grpc.RpcError as e:
+        except (grpc.RpcError, grpc.aio.AioRpcError) as e:
             if e.code() == grpc.StatusCode.ALREADY_EXISTS:
                 raise DedupeViolationErr(e.details())

-            raise ValueError(f"gRPC error: {e}")
+            raise e

     @tenacity_retry
     async def run_workflows(
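Note: as of this hunk, a failed trigger is no longer wrapped in ValueError; the underlying gRPC error propagates (the async client now also matches grpc.aio.AioRpcError), and duplicate dedupe keys still surface as DedupeViolationErr. A minimal caller-side sketch, assuming an already-configured `hatchet` client whose admin client exposes run_workflow(workflow_name, input), and assuming DedupeViolationErr is importable from hatchet_sdk.clients.admin:

    import grpc
    from hatchet_sdk.clients.admin import DedupeViolationErr  # import path assumed

    try:
        ref = hatchet.admin.run_workflow("MyWorkflow", {"key": "value"})
    except DedupeViolationErr as e:
        # the engine reported ALREADY_EXISTS for the dedupe key
        print("duplicate run:", e)
    except grpc.RpcError as e:
        # 0.44.0 re-raises the gRPC error instead of ValueError(f"gRPC error: {e}")
        print("trigger failed:", e.code(), e.details())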
@@ -266,56 +266,49 @@ class AdminClientAioImpl(AdminClientBase):
     ) -> List[WorkflowRunRef]:
         if len(workflows) == 0:
             raise ValueError("No workflows to run")
-        try:
-            if not self.pooled_workflow_listener:
-                self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)

-            namespace = self.namespace
+        if not self.pooled_workflow_listener:
+            self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)

-            if (
-                options is not None
-                and "namespace" in options
-                and options["namespace"] is not None
-            ):
-                namespace = options["namespace"]
-                del options["namespace"]
+        namespace = self.namespace

-            workflow_run_requests: TriggerWorkflowRequest = []
+        if (
+            options is not None
+            and "namespace" in options
+            and options["namespace"] is not None
+        ):
+            namespace = options["namespace"]
+            del options["namespace"]

-            for workflow in workflows:
-                workflow_name = workflow["workflow_name"]
-                input_data = workflow["input"]
-                options = workflow["options"]
+        workflow_run_requests: TriggerWorkflowRequest = []

-                if namespace != "" and not workflow_name.startswith(self.namespace):
-                    workflow_name = f"{namespace}{workflow_name}"
+        for workflow in workflows:
+            workflow_name = workflow["workflow_name"]
+            input_data = workflow["input"]
+            options = workflow["options"]

-                # Prepare and trigger workflow for each workflow name and input
-                request = self._prepare_workflow_request(
-                    workflow_name, input_data, options
-                )
-                workflow_run_requests.append(request)
+            if namespace != "" and not workflow_name.startswith(self.namespace):
+                workflow_name = f"{namespace}{workflow_name}"

-            request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests)
+            # Prepare and trigger workflow for each workflow name and input
+            request = self._prepare_workflow_request(workflow_name, input_data, options)
+            workflow_run_requests.append(request)

-            resp: BulkTriggerWorkflowResponse = (
-                await self.aio_client.BulkTriggerWorkflow(
-                    request,
-                    metadata=get_metadata(self.token),
-                )
-            )
+        request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests)

-            return [
-                WorkflowRunRef(
-                    workflow_run_id=workflow_run_id,
-                    workflow_listener=self.pooled_workflow_listener,
-                    workflow_run_event_listener=self.listener_client,
-                )
-                for workflow_run_id in resp.workflow_run_ids
-            ]
+        resp: BulkTriggerWorkflowResponse = await self.aio_client.BulkTriggerWorkflow(
+            request,
+            metadata=get_metadata(self.token),
+        )

-        except grpc.RpcError as e:
-            raise ValueError(f"gRPC error: {e}")
+        return [
+            WorkflowRunRef(
+                workflow_run_id=workflow_run_id,
+                workflow_listener=self.pooled_workflow_listener,
+                workflow_run_event_listener=self.listener_client,
+            )
+            for workflow_run_id in resp.workflow_run_ids
+        ]

     @tenacity_retry
     async def put_workflow(
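Note: run_workflows builds one TriggerWorkflowRequest per entry and sends them in a single BulkTriggerWorkflow call, returning a WorkflowRunRef for each run; an options dict passed as the second argument may carry a "namespace" override that is prefixed onto each workflow name. A hedged sketch of the input shape (the admin_client variable stands for whichever AdminClient or AdminClientAioImpl instance you hold; it is not defined in this diff):

    # Each entry mirrors the WorkflowRunDict keys read by run_workflows.
    workflow_runs = [
        {"workflow_name": "MyWorkflow", "input": {"order_id": 1}, "options": {}},
        {"workflow_name": "MyWorkflow", "input": {"order_id": 2}, "options": {}},
    ]

    refs = await admin_client.run_workflows(workflow_runs)  # sync AdminClient: no await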
@@ -324,15 +317,12 @@ class AdminClientAioImpl(AdminClientBase):
         workflow: CreateWorkflowVersionOpts | WorkflowMeta,
         overrides: CreateWorkflowVersionOpts | None = None,
     ) -> WorkflowVersion:
-        try:
-            opts = self._prepare_put_workflow_request(name, workflow, overrides)
+        opts = self._prepare_put_workflow_request(name, workflow, overrides)

-            return await self.aio_client.PutWorkflow(
-                opts,
-                metadata=get_metadata(self.token),
-            )
-        except grpc.RpcError as e:
-            raise ValueError(f"Could not put workflow: {e}")
+        return await self.aio_client.PutWorkflow(
+            opts,
+            metadata=get_metadata(self.token),
+        )

     @tenacity_retry
     async def put_rate_limit(
@@ -341,17 +331,14 @@ class AdminClientAioImpl(AdminClientBase):
         limit: int,
         duration: RateLimitDuration = RateLimitDuration.SECOND,
     ):
-        try:
-            await self.aio_client.PutRateLimit(
-                PutRateLimitRequest(
-                    key=key,
-                    limit=limit,
-                    duration=duration,
-                ),
-                metadata=get_metadata(self.token),
-            )
-        except grpc.RpcError as e:
-            raise ValueError(f"Could not put rate limit: {e}")
+        await self.aio_client.PutRateLimit(
+            PutRateLimitRequest(
+                key=key,
+                limit=limit,
+                duration=duration,
+            ),
+            metadata=get_metadata(self.token),
+        )

     @tenacity_retry
     async def schedule_workflow(
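Note: put_workflow and put_rate_limit no longer swallow gRPC failures into ValueError, so transient errors can now reach the @tenacity_retry predicate and be retried. A small usage sketch for the rate-limit upsert, assuming a configured client and that RateLimitDuration is exported from the package root (import path assumed):

    from hatchet_sdk import RateLimitDuration  # import path assumed

    # Upsert a named limit; steps can then consume units against the "external-api" key.
    hatchet.admin.put_rate_limit("external-api", 10, RateLimitDuration.SECOND)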
@@ -383,11 +370,11 @@ class AdminClientAioImpl(AdminClientBase):
                 request,
                 metadata=get_metadata(self.token),
             )
-        except grpc.RpcError as e:
+        except (grpc.aio.AioRpcError, grpc.RpcError) as e:
             if e.code() == grpc.StatusCode.ALREADY_EXISTS:
                 raise DedupeViolationErr(e.details())

-            raise ValueError(f"gRPC error: {e}")
+            raise e


 class AdminClient(AdminClientBase):
@@ -408,17 +395,14 @@ class AdminClient(AdminClientBase):
         workflow: CreateWorkflowVersionOpts | WorkflowMeta,
         overrides: CreateWorkflowVersionOpts | None = None,
     ) -> WorkflowVersion:
-        try:
-            opts = self._prepare_put_workflow_request(name, workflow, overrides)
+        opts = self._prepare_put_workflow_request(name, workflow, overrides)

-            resp: WorkflowVersion = self.client.PutWorkflow(
-                opts,
-                metadata=get_metadata(self.token),
-            )
+        resp: WorkflowVersion = self.client.PutWorkflow(
+            opts,
+            metadata=get_metadata(self.token),
+        )

-            return resp
-        except grpc.RpcError as e:
-            raise ValueError(f"Could not put workflow: {e}")
+        return resp

     @tenacity_retry
     def put_rate_limit(
@@ -427,17 +411,14 @@ class AdminClient(AdminClientBase):
         limit: int,
         duration: Union[RateLimitDuration.Value, str] = RateLimitDuration.SECOND,
     ):
-        try:
-            self.client.PutRateLimit(
-                PutRateLimitRequest(
-                    key=key,
-                    limit=limit,
-                    duration=duration,
-                ),
-                metadata=get_metadata(self.token),
-            )
-        except grpc.RpcError as e:
-            raise ValueError(f"Could not put rate limit: {e}")
+        self.client.PutRateLimit(
+            PutRateLimitRequest(
+                key=key,
+                limit=limit,
+                duration=duration,
+            ),
+            metadata=get_metadata(self.token),
+        )

     @tenacity_retry
     def schedule_workflow(
@@ -469,11 +450,11 @@ class AdminClient(AdminClientBase):
                 request,
                 metadata=get_metadata(self.token),
             )
-        except grpc.RpcError as e:
+        except (grpc.RpcError, grpc.aio.AioRpcError) as e:
             if e.code() == grpc.StatusCode.ALREADY_EXISTS:
                 raise DedupeViolationErr(e.details())

-            raise ValueError(f"gRPC error: {e}")
+            raise e

     ## TODO: `options` is treated as a dict (wrong type hint)
     ## TODO: `any` type hint should come from `typing`
@@ -541,55 +522,49 @@ class AdminClient(AdminClientBase):
                 workflow_listener=self.pooled_workflow_listener,
                 workflow_run_event_listener=self.listener_client,
             )
-        except grpc.RpcError as e:
+        except (grpc.RpcError, grpc.aio.AioRpcError) as e:
             if e.code() == grpc.StatusCode.ALREADY_EXISTS:
                 raise DedupeViolationErr(e.details())

-            raise ValueError(f"gRPC error: {e}")
+            raise e

     @tenacity_retry
     def run_workflows(
         self, workflows: List[WorkflowRunDict], options: TriggerWorkflowOptions = None
     ) -> list[WorkflowRunRef]:
         workflow_run_requests: TriggerWorkflowRequest = []
-        try:
-            if not self.pooled_workflow_listener:
-                self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)
+        if not self.pooled_workflow_listener:
+            self.pooled_workflow_listener = PooledWorkflowRunListener(self.config)

-            for workflow in workflows:
-                workflow_name = workflow["workflow_name"]
-                input_data = workflow["input"]
-                options = workflow["options"]
+        for workflow in workflows:
+            workflow_name = workflow["workflow_name"]
+            input_data = workflow["input"]
+            options = workflow["options"]

-                namespace = self.namespace
-
-                if (
-                    options is not None
-                    and "namespace" in options
-                    and options["namespace"] is not None
-                ):
-                    namespace = options["namespace"]
-                    del options["namespace"]
+            namespace = self.namespace

-                if namespace != "" and not workflow_name.startswith(self.namespace):
-                    workflow_name = f"{namespace}{workflow_name}"
+            if (
+                options is not None
+                and "namespace" in options
+                and options["namespace"] is not None
+            ):
+                namespace = options["namespace"]
+                del options["namespace"]

-                # Prepare and trigger workflow for each workflow name and input
-                request = self._prepare_workflow_request(
-                    workflow_name, input_data, options
-                )
+            if namespace != "" and not workflow_name.startswith(self.namespace):
+                workflow_name = f"{namespace}{workflow_name}"

-                workflow_run_requests.append(request)
+            # Prepare and trigger workflow for each workflow name and input
+            request = self._prepare_workflow_request(workflow_name, input_data, options)

-            request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests)
+            workflow_run_requests.append(request)

-            resp: BulkTriggerWorkflowResponse = self.client.BulkTriggerWorkflow(
-                request,
-                metadata=get_metadata(self.token),
-            )
+        request = BulkTriggerWorkflowRequest(workflows=workflow_run_requests)

-        except grpc.RpcError as e:
-            raise ValueError(f"gRPC error: {e}")
+        resp: BulkTriggerWorkflowResponse = self.client.BulkTriggerWorkflow(
+            request,
+            metadata=get_metadata(self.token),
+        )

         return [
             WorkflowRunRef(
@@ -125,10 +125,7 @@ class EventClient:

             span.add_event("Pushing event", attributes={"key": namespaced_event_key})

-            try:
-                return self.client.Push(request, metadata=get_metadata(self.token))
-            except grpc.RpcError as e:
-                raise ValueError(f"gRPC error: {e}")
+            return self.client.Push(request, metadata=get_metadata(self.token))

     @tenacity_retry
     def bulk_push(
@@ -188,13 +185,9 @@ class EventClient:
             bulk_request = BulkPushEventRequest(events=bulk_events)

             span.add_event("Pushing bulk events")
-            try:
-                response = self.client.BulkPush(
-                    bulk_request, metadata=get_metadata(self.token)
-                )
-                return response.events
-            except grpc.RpcError as e:
-                raise ValueError(f"gRPC error: {e}")
+            response = self.client.BulkPush(bulk_request, metadata=get_metadata(self.token))
+
+            return response.events

     def log(self, message: str, step_run_id: str):
         try:
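Note: the same pattern applies to event publishing; Push and BulkPush errors are no longer converted to ValueError, so a failed publish surfaces as grpc.RpcError (after any @tenacity_retry retries shown in the surrounding code). A caller-side sketch, assuming the usual hatchet.event.push entry point on a configured client:

    import grpc

    try:
        hatchet.event.push("user:created", {"id": "123"})
    except grpc.RpcError as e:
        # previously this surfaced as ValueError(f"gRPC error: {e}")
        print("event push failed:", e.code(), e.details())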
@@ -28,8 +28,8 @@ def tenacity_alert_retry(retry_state: tenacity.RetryCallState) -> None:


 def tenacity_should_retry(ex: Exception) -> bool:
-    if isinstance(ex, grpc.aio.AioRpcError):
-        if ex.code in [
+    if isinstance(ex, (grpc.aio.AioRpcError, grpc.RpcError)):
+        if ex.code() in [
             grpc.StatusCode.UNIMPLEMENTED,
             grpc.StatusCode.NOT_FOUND,
         ]:
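Note: this is the substantive fix in tenacity_utils.py. Without parentheses, `ex.code` is a bound method and never equals a StatusCode, so the old membership test could not exclude UNIMPLEMENTED or NOT_FOUND and those errors were retried anyway; calling `ex.code()` compares the actual status, and plain grpc.RpcError from the sync client is now inspected too. A condensed sketch of such a predicate and how it typically plugs into tenacity (the function name, retry, stop, and wait parameters below are illustrative assumptions, not taken from this diff):

    import grpc
    import grpc.aio
    import tenacity

    def should_retry(ex: Exception) -> bool:
        # Condensed restatement for illustration; the real predicate lives in
        # hatchet_sdk/clients/rest/tenacity_utils.py.
        if isinstance(ex, (grpc.aio.AioRpcError, grpc.RpcError)):
            # Do not retry codes that will never succeed on a retry.
            return ex.code() not in [
                grpc.StatusCode.UNIMPLEMENTED,
                grpc.StatusCode.NOT_FOUND,
            ]
        return False

    # Illustrative wiring only; the actual decorator parameters may differ.
    retry_grpc = tenacity.retry(
        reraise=True,
        retry=tenacity.retry_if_exception(should_retry),
        stop=tenacity.stop_after_attempt(5),
        wait=tenacity.wait_exponential(multiplier=1, max=5),
    )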
@@ -376,6 +376,16 @@ class Context(BaseContext):
     def parent_workflow_run_id(self) -> str | None:
         return self.action.parent_workflow_run_id

+    def step_run_errors(self) -> dict[str, str]:
+        errors = cast(dict[str, str], self.data.get("step_run_errors", {}))
+
+        if not errors:
+            logger.error(
+                "No step run errors found. `context.step_run_errors` is intended to be run in an on-failure step, and will only work on engine versions more recent than v0.53.10"
+            )
+
+        return errors
+
     def fetch_run_failures(self) -> list[dict[str, StrictStr]]:
         data = self.rest_client.workflow_run_get(self.action.workflow_run_id)
         other_job_runs = [
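Note: step_run_errors is new in this release; it reads the "step_run_errors" map from the action payload data and logs an error when the map is empty, since it is only populated for on-failure steps on engine versions newer than v0.53.10. A hedged usage sketch; the workflow and step decorator names below follow common hatchet-sdk patterns and are assumptions, not part of this diff (worker registration is omitted):

    from hatchet_sdk import Context  # import path assumed

    @hatchet.workflow(on_events=["order:created"])  # decorator API assumed
    class OrderWorkflow:
        @hatchet.step()
        def charge(self, context: Context) -> dict:
            raise RuntimeError("card declined")

        @hatchet.on_failure_step()
        def notify(self, context: Context) -> None:
            # New in 0.44.0: step name -> error message for the failed run.
            for step_name, error in context.step_run_errors().items():
                context.log(f"{step_name} failed: {error}")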
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hatchet-sdk
-Version: 0.43.2
+Version: 0.44.0
 Summary:
 Author: Alexander Belanger
 Author-email: alexander@hatchet.run
@@ -1,10 +1,10 @@
 hatchet_sdk/__init__.py,sha256=R5ogd_Dn_6gA_u9a5W2URNq6eDtN1i56cObBv1tOwyU,9408
 hatchet_sdk/client.py,sha256=ajjLd-gZptVuAx25gG_SdAW8xDA4V7HMIhgYuh9MkVY,3486
-hatchet_sdk/clients/admin.py,sha256=RGi-cXGTn54EmgiqbUsnb9a9gbA3WFr9Qo2VdBHdSzQ,21530
+hatchet_sdk/clients/admin.py,sha256=1YsLK5AzpgFBKPblcPY5ACA0xLysynA4tihNJkcNyl4,20419
 hatchet_sdk/clients/dispatcher/action_listener.py,sha256=eOq5z29MhC7ynbOOegDRQr-RqDhpS3gfeLswIlZbuGg,15378
 hatchet_sdk/clients/dispatcher/dispatcher.py,sha256=qTcfIXn9EpfUlafff8Wun_fWTvz-UxOa4i4m6ZVIAnM,6402
 hatchet_sdk/clients/event_ts.py,sha256=ACGvDdfhvK6ZLKdsPxy-PksLhjIU69P9cdH3AxX-X10,728
-hatchet_sdk/clients/events.py,sha256=8QOHv4ObSNXb6t5EnaXPwkwr1hN4D087iWn02TRtXb0,7505
+hatchet_sdk/clients/events.py,sha256=pz68MOFEdvKV9ywu0e4DIaWFgqjwzm8zUV747Fim9ac,7258
 hatchet_sdk/clients/rest/__init__.py,sha256=AQBp3fX79IKrcjUmzLRGFaFKMYBnEbQD9DnwYOHU0jQ,14157
 hatchet_sdk/clients/rest/api/__init__.py,sha256=LaTEK7cYklb8R0iYj727C-jVk-MGHenADN8TJpoIASs,1067
 hatchet_sdk/clients/rest/api/api_token_api.py,sha256=C10FEIHHGBpwq-bIKkrBhvPlg6az4aHlREWEUlJHWl0,33577
@@ -184,13 +184,13 @@ hatchet_sdk/clients/rest/models/workflow_version_definition.py,sha256=e18BUh1XO0
 hatchet_sdk/clients/rest/models/workflow_version_meta.py,sha256=TW4R7bAuYAg_LraN-8psdZqp2E8wH9hYyL5Sji86aLk,3791
 hatchet_sdk/clients/rest/models/workflow_workers_count.py,sha256=qhzqfvjjIDyARkiiLGluMIqEmqO-diHTsjlu0Doi0yg,2875
 hatchet_sdk/clients/rest/rest.py,sha256=G83F1k4g_ePzvXW95rApzvaRDQPcaxrj-JmZyq1LvGw,6606
-hatchet_sdk/clients/rest/tenacity_utils.py,sha256=AmVMjML3pd8qzsWyvaEnaI33zkidou5EK_EQXF69SAE,1032
+hatchet_sdk/clients/rest/tenacity_utils.py,sha256=gy500kHXQ-4ZrZH1biHGmavhfh0NarCJcaIfW-A9Qd0,1051
 hatchet_sdk/clients/rest_client.py,sha256=wYCRQjjZha9XcYg6pdVgrFV4pcli89Y_G45EDEDteCk,21874
 hatchet_sdk/clients/run_event_listener.py,sha256=51WTg52_aISgYPOFPHJ21rb4IO6aEd7Ugp7FCf4HnfM,10184
 hatchet_sdk/clients/workflow_listener.py,sha256=Q_WJcGlZNHJGSpxzDac9wELjgxhP0vLaNTXRy_xnxc8,9466
 hatchet_sdk/connection.py,sha256=593aUGAj7Ouf00lcVwx_pmhdQ9NOC5ANT1Jrf8nwkHs,2165
 hatchet_sdk/context/__init__.py,sha256=Pl_seJ_SJpW34BBZp4KixuZ8GiRK9sJFfegf9u3m7zk,29
-hatchet_sdk/context/context.py,sha256=R2RsPgzBNYi1FJvg4MOowSY-V1Yf69qUmXe3MsAWkyo,13518
+hatchet_sdk/context/context.py,sha256=sRAih-dBqxkmQUxgo_IpQP2YIJJinzJ2C09RP-EM94o,13910
 hatchet_sdk/context/worker_context.py,sha256=OVcEWvdT_Kpd0nlg61VAPUgIPSFzSLs0aSrXWj-1GX4,974
 hatchet_sdk/contracts/dispatcher_pb2.py,sha256=-9YGf7pfuMyRpkjjcmFa6jlr-95gha4CJw9hG3seNFI,14573
 hatchet_sdk/contracts/dispatcher_pb2.pyi,sha256=2dvxvN4OETOc-OhLNWDntN7GvI4QUxbMiGLdK7FM1wE,18224
@@ -230,7 +230,7 @@ hatchet_sdk/worker/runner/utils/error_with_traceback.py,sha256=Iih_s8JNqrinXETFJ
 hatchet_sdk/worker/worker.py,sha256=7UPm3qTzNYSSm9QTNX6zBBMJqVA6nKFeCbAdqLLjUBs,13007
 hatchet_sdk/workflow.py,sha256=XRj5jcCQSvPQMXxBipf-ZlARua2E8Z9igRzGcQ5alkI,9375
 hatchet_sdk/workflow_run.py,sha256=BwK5cefvXXvyQ1Ednj_7LeejMwQJqWnvUC_FTBmJNxk,1805
-hatchet_sdk-0.43.2.dist-info/METADATA,sha256=4X-r_xOmoJR1IfkCwPuRybgiMjt2Z1l0bkI-6EpMBBk,1736
-hatchet_sdk-0.43.2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-hatchet_sdk-0.43.2.dist-info/entry_points.txt,sha256=LTtQRABmSGYOxRI68cUVEz5dp9Qb57eqXGic9lU8RMo,1023
-hatchet_sdk-0.43.2.dist-info/RECORD,,
+hatchet_sdk-0.44.0.dist-info/METADATA,sha256=pJrEPNQ7Lyx6R7FVf2gaUNlWGCa0y9fz5qVwMi1UcuA,1736
+hatchet_sdk-0.44.0.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+hatchet_sdk-0.44.0.dist-info/entry_points.txt,sha256=LTtQRABmSGYOxRI68cUVEz5dp9Qb57eqXGic9lU8RMo,1023
+hatchet_sdk-0.44.0.dist-info/RECORD,,