hatchet-sdk 1.9.1__py3-none-any.whl → 1.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hatchet-sdk might be problematic. Click here for more details.

Files changed (40):
  1. hatchet_sdk/client.py +2 -0
  2. hatchet_sdk/clients/admin.py +2 -6
  3. hatchet_sdk/clients/dispatcher/action_listener.py +14 -1
  4. hatchet_sdk/clients/events.py +58 -8
  5. hatchet_sdk/clients/rest/__init__.py +11 -0
  6. hatchet_sdk/clients/rest/api/__init__.py +1 -0
  7. hatchet_sdk/clients/rest/api/event_api.py +335 -0
  8. hatchet_sdk/clients/rest/api/filter_api.py +1305 -0
  9. hatchet_sdk/clients/rest/api/task_api.py +51 -0
  10. hatchet_sdk/clients/rest/api/workflow_runs_api.py +34 -0
  11. hatchet_sdk/clients/rest/models/__init__.py +10 -0
  12. hatchet_sdk/clients/rest/models/create_event_request.py +16 -2
  13. hatchet_sdk/clients/rest/models/v1_create_filter_request.py +99 -0
  14. hatchet_sdk/clients/rest/models/v1_event.py +142 -0
  15. hatchet_sdk/clients/rest/models/v1_event_list.py +110 -0
  16. hatchet_sdk/clients/rest/models/v1_event_workflow_run_summary.py +101 -0
  17. hatchet_sdk/clients/rest/models/v1_filter.py +127 -0
  18. hatchet_sdk/clients/rest/models/v1_filter_list.py +110 -0
  19. hatchet_sdk/clients/rest/models/v1_log_line.py +21 -2
  20. hatchet_sdk/clients/rest/models/v1_task_event.py +12 -0
  21. hatchet_sdk/clients/rest/models/v1_task_summary.py +12 -0
  22. hatchet_sdk/clients/rest/models/v1_task_timing.py +19 -0
  23. hatchet_sdk/clients/rest/models/workflow.py +5 -0
  24. hatchet_sdk/config.py +29 -0
  25. hatchet_sdk/context/context.py +1 -0
  26. hatchet_sdk/contracts/events_pb2.py +20 -20
  27. hatchet_sdk/contracts/events_pb2.pyi +14 -6
  28. hatchet_sdk/features/cron.py +1 -1
  29. hatchet_sdk/features/filters.py +181 -0
  30. hatchet_sdk/features/runs.py +7 -1
  31. hatchet_sdk/features/scheduled.py +1 -1
  32. hatchet_sdk/features/workflows.py +1 -1
  33. hatchet_sdk/hatchet.py +82 -71
  34. hatchet_sdk/runnables/standalone.py +6 -0
  35. hatchet_sdk/runnables/workflow.py +29 -2
  36. hatchet_sdk/worker/worker.py +1 -1
  37. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.0.dist-info}/METADATA +1 -1
  38. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.0.dist-info}/RECORD +40 -32
  39. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.0.dist-info}/WHEEL +0 -0
  40. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.0.dist-info}/entry_points.txt +0 -0
@@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default()
15
15
  from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
16
16
 
17
17
 
18
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb4\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\x92\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\tB\x08\n\x06_level\"\x10\n\x0ePutLogResponse\"|\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\x9c\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 
\x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3')
18
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd2\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05scope\x18\x07 \x01(\tH\x01\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x08\n\x06_scope\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\xc2\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12\x1b\n\x0etaskRetryCount\x18\x06 \x01(\x05H\x01\x88\x01\x01\x42\x08\n\x06_levelB\x11\n\x0f_taskRetryCount\"\x10\n\x0ePutLogResponse\"|\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\xde\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08priority\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x12\n\x05scope\x18\x06 \x01(\tH\x02\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x0b\n\t_priorityB\x08\n\x06_scope\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 
\x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3')
19
19
 
20
20
  _globals = globals()
21
21
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -24,23 +24,23 @@ if not _descriptor._USE_C_DESCRIPTORS:
24
24
  _globals['DESCRIPTOR']._loaded_options = None
25
25
  _globals['DESCRIPTOR']._serialized_options = b'ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contracts'
26
26
  _globals['_EVENT']._serialized_start=50
27
- _globals['_EVENT']._serialized_end=230
28
- _globals['_EVENTS']._serialized_start=232
29
- _globals['_EVENTS']._serialized_end=264
30
- _globals['_PUTLOGREQUEST']._serialized_start=267
31
- _globals['_PUTLOGREQUEST']._serialized_end=413
32
- _globals['_PUTLOGRESPONSE']._serialized_start=415
33
- _globals['_PUTLOGRESPONSE']._serialized_end=431
34
- _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=433
35
- _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=557
36
- _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=559
37
- _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=583
38
- _globals['_BULKPUSHEVENTREQUEST']._serialized_start=585
39
- _globals['_BULKPUSHEVENTREQUEST']._serialized_end=642
40
- _globals['_PUSHEVENTREQUEST']._serialized_start=645
41
- _globals['_PUSHEVENTREQUEST']._serialized_end=801
42
- _globals['_REPLAYEVENTREQUEST']._serialized_start=803
43
- _globals['_REPLAYEVENTREQUEST']._serialized_end=840
44
- _globals['_EVENTSSERVICE']._serialized_start=843
45
- _globals['_EVENTSSERVICE']._serialized_end=1107
27
+ _globals['_EVENT']._serialized_end=260
28
+ _globals['_EVENTS']._serialized_start=262
29
+ _globals['_EVENTS']._serialized_end=294
30
+ _globals['_PUTLOGREQUEST']._serialized_start=297
31
+ _globals['_PUTLOGREQUEST']._serialized_end=491
32
+ _globals['_PUTLOGRESPONSE']._serialized_start=493
33
+ _globals['_PUTLOGRESPONSE']._serialized_end=509
34
+ _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=511
35
+ _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=635
36
+ _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=637
37
+ _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=661
38
+ _globals['_BULKPUSHEVENTREQUEST']._serialized_start=663
39
+ _globals['_BULKPUSHEVENTREQUEST']._serialized_end=720
40
+ _globals['_PUSHEVENTREQUEST']._serialized_start=723
41
+ _globals['_PUSHEVENTREQUEST']._serialized_end=945
42
+ _globals['_REPLAYEVENTREQUEST']._serialized_start=947
43
+ _globals['_REPLAYEVENTREQUEST']._serialized_end=984
44
+ _globals['_EVENTSSERVICE']._serialized_start=987
45
+ _globals['_EVENTSSERVICE']._serialized_end=1251
46
46
  # @@protoc_insertion_point(module_scope)
@@ -7,20 +7,22 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map
7
7
  DESCRIPTOR: _descriptor.FileDescriptor
8
8
 
9
9
  class Event(_message.Message):
10
- __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata")
10
+ __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata", "scope")
11
11
  TENANTID_FIELD_NUMBER: _ClassVar[int]
12
12
  EVENTID_FIELD_NUMBER: _ClassVar[int]
13
13
  KEY_FIELD_NUMBER: _ClassVar[int]
14
14
  PAYLOAD_FIELD_NUMBER: _ClassVar[int]
15
15
  EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
16
16
  ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int]
17
+ SCOPE_FIELD_NUMBER: _ClassVar[int]
17
18
  tenantId: str
18
19
  eventId: str
19
20
  key: str
20
21
  payload: str
21
22
  eventTimestamp: _timestamp_pb2.Timestamp
22
23
  additionalMetadata: str
23
- def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) -> None: ...
24
+ scope: str
25
+ def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ..., scope: _Optional[str] = ...) -> None: ...
24
26
 
25
27
  class Events(_message.Message):
26
28
  __slots__ = ("events",)
@@ -29,18 +31,20 @@ class Events(_message.Message):
29
31
  def __init__(self, events: _Optional[_Iterable[_Union[Event, _Mapping]]] = ...) -> None: ...
30
32
 
31
33
  class PutLogRequest(_message.Message):
32
- __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata")
34
+ __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata", "taskRetryCount")
33
35
  STEPRUNID_FIELD_NUMBER: _ClassVar[int]
34
36
  CREATEDAT_FIELD_NUMBER: _ClassVar[int]
35
37
  MESSAGE_FIELD_NUMBER: _ClassVar[int]
36
38
  LEVEL_FIELD_NUMBER: _ClassVar[int]
37
39
  METADATA_FIELD_NUMBER: _ClassVar[int]
40
+ TASKRETRYCOUNT_FIELD_NUMBER: _ClassVar[int]
38
41
  stepRunId: str
39
42
  createdAt: _timestamp_pb2.Timestamp
40
43
  message: str
41
44
  level: str
42
45
  metadata: str
43
- def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ...) -> None: ...
46
+ taskRetryCount: int
47
+ def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ..., taskRetryCount: _Optional[int] = ...) -> None: ...
44
48
 
45
49
  class PutLogResponse(_message.Message):
46
50
  __slots__ = ()
@@ -69,16 +73,20 @@ class BulkPushEventRequest(_message.Message):
69
73
  def __init__(self, events: _Optional[_Iterable[_Union[PushEventRequest, _Mapping]]] = ...) -> None: ...
70
74
 
71
75
  class PushEventRequest(_message.Message):
72
- __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata")
76
+ __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata", "priority", "scope")
73
77
  KEY_FIELD_NUMBER: _ClassVar[int]
74
78
  PAYLOAD_FIELD_NUMBER: _ClassVar[int]
75
79
  EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
76
80
  ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int]
81
+ PRIORITY_FIELD_NUMBER: _ClassVar[int]
82
+ SCOPE_FIELD_NUMBER: _ClassVar[int]
77
83
  key: str
78
84
  payload: str
79
85
  eventTimestamp: _timestamp_pb2.Timestamp
80
86
  additionalMetadata: str
81
- def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) -> None: ...
87
+ priority: int
88
+ scope: str
89
+ def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ..., priority: _Optional[int] = ..., scope: _Optional[str] = ...) -> None: ...
82
90
 
83
91
  class ReplayEventRequest(_message.Message):
84
92
  __slots__ = ("eventId",)
@@ -102,7 +102,7 @@ class CronClient(BaseRestClient):
102
102
  with self.client() as client:
103
103
  return self._wra(client).cron_workflow_trigger_create(
104
104
  tenant=self.client_config.tenant_id,
105
- workflow=workflow_name,
105
+ workflow=self.client_config.apply_namespace(workflow_name),
106
106
  create_cron_workflow_trigger_request=CreateCronWorkflowTriggerRequest(
107
107
  cronName=cron_name,
108
108
  cronExpression=validated_input.expression,
@@ -0,0 +1,181 @@
1
+ import asyncio
2
+
3
+ from hatchet_sdk.clients.rest.api.filter_api import FilterApi
4
+ from hatchet_sdk.clients.rest.api_client import ApiClient
5
+ from hatchet_sdk.clients.rest.models.v1_create_filter_request import (
6
+ V1CreateFilterRequest,
7
+ )
8
+ from hatchet_sdk.clients.rest.models.v1_filter import V1Filter
9
+ from hatchet_sdk.clients.rest.models.v1_filter_list import V1FilterList
10
+ from hatchet_sdk.clients.v1.api_client import BaseRestClient
11
+ from hatchet_sdk.utils.typing import JSONSerializableMapping
12
+
13
+
14
class FiltersClient(BaseRestClient):
    """
    The filters client is a client for interacting with Hatchet's filters API.

    Synchronous methods (`list`, `get`, `create`, `delete`) issue blocking REST
    calls; each has an `aio_`-prefixed async counterpart that delegates to the
    sync method via `asyncio.to_thread` so it never blocks the event loop.
    """

    def _fa(self, client: ApiClient) -> FilterApi:
        # Thin factory: bind a FilterApi to the per-call ApiClient obtained
        # from the `self.client()` context manager.
        return FilterApi(client)

    async def aio_list(
        self,
        limit: int | None = None,
        offset: int | None = None,
        workflow_id_scope_pairs: list[tuple[str, str]] | None = None,
    ) -> V1FilterList:
        """
        List filters for a given tenant.

        :param limit: The maximum number of filters to return.
        :param offset: The number of filters to skip before starting to collect the result set.
        :param workflow_id_scope_pairs: A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second.

        :return: A list of filters matching the specified criteria.
        """
        return await asyncio.to_thread(
            self.list, limit, offset, workflow_id_scope_pairs
        )

    def list(
        self,
        limit: int | None = None,
        offset: int | None = None,
        workflow_id_scope_pairs: list[tuple[str, str]] | None = None,
    ) -> V1FilterList:
        """
        List filters for a given tenant.

        :param limit: The maximum number of filters to return.
        :param offset: The number of filters to skip before starting to collect the result set.
        :param workflow_id_scope_pairs: A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second.

        :return: A list of filters matching the specified criteria.
        """
        # Split (workflow_id, scope) pairs into the two parallel lists the
        # REST API expects; pass None (not empty lists) when unfiltered.
        workflow_ids = (
            [pair[0] for pair in workflow_id_scope_pairs]
            if workflow_id_scope_pairs
            else None
        )
        scopes = (
            [pair[1] for pair in workflow_id_scope_pairs]
            if workflow_id_scope_pairs
            else None
        )

        with self.client() as client:
            return self._fa(client).v1_filter_list(
                tenant=self.tenant_id,
                limit=limit,
                offset=offset,
                workflow_ids=workflow_ids,
                scopes=scopes,
            )

    def get(
        self,
        filter_id: str,
    ) -> V1Filter:
        """
        Get a filter by its ID.

        :param filter_id: The ID of the filter to retrieve.

        :return: The filter with the specified ID.
        """
        with self.client() as client:
            return self._fa(client).v1_filter_get(
                tenant=self.tenant_id,
                v1_filter=filter_id,
            )

    async def aio_get(
        self,
        filter_id: str,
    ) -> V1Filter:
        """
        Get a filter by its ID.

        :param filter_id: The ID of the filter to retrieve.

        :return: The filter with the specified ID.
        """
        return await asyncio.to_thread(self.get, filter_id)

    def create(
        self,
        workflow_id: str,
        expression: str,
        scope: str,
        payload: JSONSerializableMapping | None = None,
    ) -> V1Filter:
        """
        Create a new filter.

        :param workflow_id: The ID of the workflow to associate with the filter.
        :param expression: The expression to evaluate for the filter.
        :param scope: The scope for the filter.
        :param payload: The payload to send with the filter. Defaults to an empty mapping.

        :return: The created filter.
        """
        # NOTE: `payload` previously defaulted to a shared mutable `{}`; a
        # `None` sentinel avoids the mutable-default-argument pitfall while
        # remaining backward compatible (omitting it still sends `{}`).
        with self.client() as client:
            return self._fa(client).v1_filter_create(
                tenant=self.tenant_id,
                v1_create_filter_request=V1CreateFilterRequest(
                    workflowId=workflow_id,
                    expression=expression,
                    scope=scope,
                    payload=dict(payload) if payload is not None else {},
                ),
            )

    async def aio_create(
        self,
        workflow_id: str,
        expression: str,
        scope: str,
        payload: JSONSerializableMapping | None = None,
    ) -> V1Filter:
        """
        Create a new filter.

        :param workflow_id: The ID of the workflow to associate with the filter.
        :param expression: The expression to evaluate for the filter.
        :param scope: The scope for the filter.
        :param payload: The payload to send with the filter. Defaults to an empty mapping.

        :return: The created filter.
        """
        return await asyncio.to_thread(
            self.create, workflow_id, expression, scope, payload
        )

    def delete(
        self,
        filter_id: str,
    ) -> V1Filter:
        """
        Delete a filter by its ID.

        :param filter_id: The ID of the filter to delete.
        :return: The deleted filter.
        """
        with self.client() as client:
            return self._fa(client).v1_filter_delete(
                tenant=self.tenant_id,
                v1_filter=filter_id,
            )

    async def aio_delete(
        self,
        filter_id: str,
    ) -> V1Filter:
        """
        Delete a filter by its ID.

        :param filter_id: The ID of the filter to delete.
        :return: The deleted filter.
        """
        return await asyncio.to_thread(self.delete, filter_id)
@@ -141,6 +141,7 @@ class RunsClient(BaseRestClient):
141
141
  workflow_ids: list[str] | None = None,
142
142
  worker_id: str | None = None,
143
143
  parent_task_external_id: str | None = None,
144
+ triggering_event_external_id: str | None = None,
144
145
  ) -> V1TaskSummaryList:
145
146
  """
146
147
  List task runs according to a set of filters.
@@ -155,6 +156,7 @@ class RunsClient(BaseRestClient):
155
156
  :param workflow_ids: The workflow IDs to filter task runs by.
156
157
  :param worker_id: The worker ID to filter task runs by.
157
158
  :param parent_task_external_id: The parent task external ID to filter task runs by.
159
+ :param triggering_event_external_id: The event id that triggered the task run.
158
160
 
159
161
  :return: A list of task runs matching the specified filters.
160
162
  """
@@ -170,6 +172,7 @@ class RunsClient(BaseRestClient):
170
172
  workflow_ids=workflow_ids,
171
173
  worker_id=worker_id,
172
174
  parent_task_external_id=parent_task_external_id,
175
+ triggering_event_external_id=triggering_event_external_id,
173
176
  )
174
177
 
175
178
  def list(
@@ -184,6 +187,7 @@ class RunsClient(BaseRestClient):
184
187
  workflow_ids: list[str] | None = None,
185
188
  worker_id: str | None = None,
186
189
  parent_task_external_id: str | None = None,
190
+ triggering_event_external_id: str | None = None,
187
191
  ) -> V1TaskSummaryList:
188
192
  """
189
193
  List task runs according to a set of filters.
@@ -198,6 +202,7 @@ class RunsClient(BaseRestClient):
198
202
  :param workflow_ids: The workflow IDs to filter task runs by.
199
203
  :param worker_id: The worker ID to filter task runs by.
200
204
  :param parent_task_external_id: The parent task external ID to filter task runs by.
205
+ :param triggering_event_external_id: The event id that triggered the task run.
201
206
 
202
207
  :return: A list of task runs matching the specified filters.
203
208
  """
@@ -216,6 +221,7 @@ class RunsClient(BaseRestClient):
216
221
  workflow_ids=workflow_ids,
217
222
  worker_id=worker_id,
218
223
  parent_task_external_id=parent_task_external_id,
224
+ triggering_event_external_id=triggering_event_external_id,
219
225
  )
220
226
 
221
227
  def create(
@@ -241,7 +247,7 @@ class RunsClient(BaseRestClient):
241
247
  return self._wra(client).v1_workflow_run_create(
242
248
  tenant=self.client_config.tenant_id,
243
249
  v1_trigger_workflow_run_request=V1TriggerWorkflowRunRequest(
244
- workflowName=workflow_name,
250
+ workflowName=self.client_config.apply_namespace(workflow_name),
245
251
  input=dict(input),
246
252
  additionalMetadata=dict(additional_metadata),
247
253
  priority=priority,
@@ -59,7 +59,7 @@ class ScheduledClient(BaseRestClient):
59
59
  with self.client() as client:
60
60
  return self._wra(client).scheduled_workflow_run_create(
61
61
  tenant=self.client_config.tenant_id,
62
- workflow=workflow_name,
62
+ workflow=self.client_config.apply_namespace(workflow_name),
63
63
  schedule_workflow_run_request=ScheduleWorkflowRunRequest(
64
64
  triggerAt=trigger_at,
65
65
  input=dict(input),
@@ -61,7 +61,7 @@ class WorkflowsClient(BaseRestClient):
61
61
  tenant=self.client_config.tenant_id,
62
62
  limit=limit,
63
63
  offset=offset,
64
- name=workflow_name,
64
+ name=self.client_config.apply_namespace(workflow_name),
65
65
  )
66
66
 
67
67
  async def aio_list(
hatchet_sdk/hatchet.py CHANGED
@@ -10,6 +10,7 @@ from hatchet_sdk.clients.events import EventClient
10
10
  from hatchet_sdk.clients.listeners.run_event_listener import RunEventListenerClient
11
11
  from hatchet_sdk.config import ClientConfig
12
12
  from hatchet_sdk.features.cron import CronClient
13
+ from hatchet_sdk.features.filters import FiltersClient
13
14
  from hatchet_sdk.features.logs import LogsClient
14
15
  from hatchet_sdk.features.metrics import MetricsClient
15
16
  from hatchet_sdk.features.rate_limits import RateLimitsClient
@@ -64,6 +65,13 @@ class Hatchet:
64
65
  """
65
66
  return self._client.cron
66
67
 
68
+ @property
69
+ def filters(self) -> FiltersClient:
70
+ """
71
+ The filters client is a client for interacting with Hatchet's filters API.
72
+ """
73
+ return self._client.filters
74
+
67
75
  @property
68
76
  def logs(self) -> LogsClient:
69
77
  """
@@ -285,7 +293,7 @@ class Hatchet:
285
293
  def task(
286
294
  self,
287
295
  *,
288
- name: str,
296
+ name: str | None = None,
289
297
  description: str | None = None,
290
298
  input_validator: None = None,
291
299
  on_events: list[str] = [],
@@ -310,7 +318,7 @@ class Hatchet:
310
318
  def task(
311
319
  self,
312
320
  *,
313
- name: str,
321
+ name: str | None = None,
314
322
  description: str | None = None,
315
323
  input_validator: Type[TWorkflowInput],
316
324
  on_events: list[str] = [],
@@ -334,7 +342,7 @@ class Hatchet:
334
342
  def task(
335
343
  self,
336
344
  *,
337
- name: str,
345
+ name: str | None = None,
338
346
  description: str | None = None,
339
347
  input_validator: Type[TWorkflowInput] | None = None,
340
348
  on_events: list[str] = [],
@@ -398,45 +406,47 @@ class Hatchet:
398
406
  :returns: A decorator which creates a `Standalone` task object.
399
407
  """
400
408
 
401
- workflow = Workflow[TWorkflowInput](
402
- WorkflowConfig(
403
- name=name,
404
- version=version,
405
- description=description,
406
- on_events=on_events,
407
- on_crons=on_crons,
408
- sticky=sticky,
409
- concurrency=concurrency,
410
- default_priority=default_priority,
411
- input_validator=input_validator
412
- or cast(Type[TWorkflowInput], EmptyModel),
413
- ),
414
- self,
415
- )
416
-
417
- if isinstance(concurrency, list):
418
- _concurrency = concurrency
419
- elif isinstance(concurrency, ConcurrencyExpression):
420
- _concurrency = [concurrency]
421
- else:
422
- _concurrency = []
423
-
424
- task_wrapper = workflow.task(
425
- name=name,
426
- schedule_timeout=schedule_timeout,
427
- execution_timeout=execution_timeout,
428
- parents=[],
429
- retries=retries,
430
- rate_limits=rate_limits,
431
- desired_worker_labels=desired_worker_labels,
432
- backoff_factor=backoff_factor,
433
- backoff_max_seconds=backoff_max_seconds,
434
- concurrency=_concurrency,
435
- )
436
-
437
409
  def inner(
438
410
  func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]],
439
411
  ) -> Standalone[TWorkflowInput, R]:
412
+ inferred_name = name or func.__name__
413
+
414
+ workflow = Workflow[TWorkflowInput](
415
+ WorkflowConfig(
416
+ name=inferred_name,
417
+ version=version,
418
+ description=description,
419
+ on_events=on_events,
420
+ on_crons=on_crons,
421
+ sticky=sticky,
422
+ concurrency=concurrency,
423
+ default_priority=default_priority,
424
+ input_validator=input_validator
425
+ or cast(Type[TWorkflowInput], EmptyModel),
426
+ ),
427
+ self,
428
+ )
429
+
430
+ if isinstance(concurrency, list):
431
+ _concurrency = concurrency
432
+ elif isinstance(concurrency, ConcurrencyExpression):
433
+ _concurrency = [concurrency]
434
+ else:
435
+ _concurrency = []
436
+
437
+ task_wrapper = workflow.task(
438
+ name=inferred_name,
439
+ schedule_timeout=schedule_timeout,
440
+ execution_timeout=execution_timeout,
441
+ parents=[],
442
+ retries=retries,
443
+ rate_limits=rate_limits,
444
+ desired_worker_labels=desired_worker_labels,
445
+ backoff_factor=backoff_factor,
446
+ backoff_max_seconds=backoff_max_seconds,
447
+ concurrency=_concurrency,
448
+ )
449
+
440
450
  created_task = task_wrapper(func)
441
451
 
442
452
  return Standalone[TWorkflowInput, R](
@@ -450,7 +460,7 @@ class Hatchet:
450
460
  def durable_task(
451
461
  self,
452
462
  *,
453
- name: str,
463
+ name: str | None = None,
454
464
  description: str | None = None,
455
465
  input_validator: None = None,
456
466
  on_events: list[str] = [],
@@ -475,7 +485,7 @@ class Hatchet:
475
485
  def durable_task(
476
486
  self,
477
487
  *,
478
- name: str,
488
+ name: str | None = None,
479
489
  description: str | None = None,
480
490
  input_validator: Type[TWorkflowInput],
481
491
  on_events: list[str] = [],
@@ -499,7 +509,7 @@ class Hatchet:
499
509
  def durable_task(
500
510
  self,
501
511
  *,
502
- name: str,
512
+ name: str | None = None,
503
513
  description: str | None = None,
504
514
  input_validator: Type[TWorkflowInput] | None = None,
505
515
  on_events: list[str] = [],
@@ -563,38 +573,39 @@ class Hatchet:
563
573
  :returns: A decorator which creates a `Standalone` task object.
564
574
  """
565
575
 
566
- workflow = Workflow[TWorkflowInput](
567
- WorkflowConfig(
568
- name=name,
569
- version=version,
570
- description=description,
571
- on_events=on_events,
572
- on_crons=on_crons,
573
- sticky=sticky,
574
- concurrency=concurrency,
575
- input_validator=input_validator
576
- or cast(Type[TWorkflowInput], EmptyModel),
577
- default_priority=default_priority,
578
- ),
579
- self,
580
- )
581
-
582
- task_wrapper = workflow.durable_task(
583
- name=name,
584
- schedule_timeout=schedule_timeout,
585
- execution_timeout=execution_timeout,
586
- parents=[],
587
- retries=retries,
588
- rate_limits=rate_limits,
589
- desired_worker_labels=desired_worker_labels,
590
- backoff_factor=backoff_factor,
591
- backoff_max_seconds=backoff_max_seconds,
592
- concurrency=[concurrency] if concurrency else [],
593
- )
594
-
595
576
  def inner(
596
577
  func: Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]],
597
578
  ) -> Standalone[TWorkflowInput, R]:
579
+ inferred_name = name or func.__name__
580
+ workflow = Workflow[TWorkflowInput](
581
+ WorkflowConfig(
582
+ name=inferred_name,
583
+ version=version,
584
+ description=description,
585
+ on_events=on_events,
586
+ on_crons=on_crons,
587
+ sticky=sticky,
588
+ concurrency=concurrency,
589
+ input_validator=input_validator
590
+ or cast(Type[TWorkflowInput], EmptyModel),
591
+ default_priority=default_priority,
592
+ ),
593
+ self,
594
+ )
595
+
596
+ task_wrapper = workflow.durable_task(
597
+ name=inferred_name,
598
+ schedule_timeout=schedule_timeout,
599
+ execution_timeout=execution_timeout,
600
+ parents=[],
601
+ retries=retries,
602
+ rate_limits=rate_limits,
603
+ desired_worker_labels=desired_worker_labels,
604
+ backoff_factor=backoff_factor,
605
+ backoff_max_seconds=backoff_max_seconds,
606
+ concurrency=[concurrency] if concurrency else [],
607
+ )
608
+
598
609
  created_task = task_wrapper(func)
599
610
 
600
611
  return Standalone[TWorkflowInput, R](
@@ -309,6 +309,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
309
309
  additional_metadata: dict[str, str] | None = None,
310
310
  worker_id: str | None = None,
311
311
  parent_task_external_id: str | None = None,
312
+ triggering_event_external_id: str | None = None,
312
313
  ) -> list[V1TaskSummary]:
313
314
  """
314
315
  List runs of the workflow.
@@ -321,6 +322,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
321
322
  :param additional_metadata: Additional metadata for filtering the runs.
322
323
  :param worker_id: The ID of the worker that ran the tasks.
323
324
  :param parent_task_external_id: The external ID of the parent task.
325
+ :param triggering_event_external_id: The event id that triggered the task run.
324
326
 
325
327
  :returns: A list of `V1TaskSummary` objects representing the runs of the workflow.
326
328
  """
@@ -343,6 +345,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
343
345
  additional_metadata=additional_metadata,
344
346
  worker_id=worker_id,
345
347
  parent_task_external_id=parent_task_external_id,
348
+ triggering_event_external_id=triggering_event_external_id,
346
349
  )
347
350
 
348
351
  return response.rows
@@ -357,6 +360,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
357
360
  additional_metadata: dict[str, str] | None = None,
358
361
  worker_id: str | None = None,
359
362
  parent_task_external_id: str | None = None,
363
+ triggering_event_external_id: str | None = None,
360
364
  ) -> list[V1TaskSummary]:
361
365
  """
362
366
  List runs of the workflow.
@@ -369,6 +373,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
369
373
  :param additional_metadata: Additional metadata for filtering the runs.
370
374
  :param worker_id: The ID of the worker that ran the tasks.
371
375
  :param parent_task_external_id: The external ID of the parent task.
376
+ :param triggering_event_external_id: The event id that triggered the task run.
372
377
 
373
378
  :returns: A list of `V1TaskSummary` objects representing the runs of the workflow.
374
379
  """
@@ -382,4 +387,5 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
382
387
  additional_metadata=additional_metadata,
383
388
  worker_id=worker_id,
384
389
  parent_task_external_id=parent_task_external_id,
390
+ triggering_event_external_id=triggering_event_external_id,
385
391
  )