hatchet-sdk 1.9.1__py3-none-any.whl → 1.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of hatchet-sdk might be problematic.

Files changed (43)
  1. hatchet_sdk/__init__.py +5 -1
  2. hatchet_sdk/client.py +2 -0
  3. hatchet_sdk/clients/admin.py +2 -6
  4. hatchet_sdk/clients/dispatcher/action_listener.py +43 -23
  5. hatchet_sdk/clients/events.py +58 -8
  6. hatchet_sdk/clients/rest/__init__.py +11 -0
  7. hatchet_sdk/clients/rest/api/__init__.py +1 -0
  8. hatchet_sdk/clients/rest/api/event_api.py +335 -0
  9. hatchet_sdk/clients/rest/api/filter_api.py +1305 -0
  10. hatchet_sdk/clients/rest/api/task_api.py +51 -0
  11. hatchet_sdk/clients/rest/api/workflow_runs_api.py +34 -0
  12. hatchet_sdk/clients/rest/models/__init__.py +10 -0
  13. hatchet_sdk/clients/rest/models/create_event_request.py +16 -2
  14. hatchet_sdk/clients/rest/models/v1_create_filter_request.py +99 -0
  15. hatchet_sdk/clients/rest/models/v1_event.py +142 -0
  16. hatchet_sdk/clients/rest/models/v1_event_list.py +110 -0
  17. hatchet_sdk/clients/rest/models/v1_event_workflow_run_summary.py +101 -0
  18. hatchet_sdk/clients/rest/models/v1_filter.py +127 -0
  19. hatchet_sdk/clients/rest/models/v1_filter_list.py +110 -0
  20. hatchet_sdk/clients/rest/models/v1_log_line.py +21 -2
  21. hatchet_sdk/clients/rest/models/v1_task_event.py +12 -0
  22. hatchet_sdk/clients/rest/models/v1_task_summary.py +12 -0
  23. hatchet_sdk/clients/rest/models/v1_task_timing.py +19 -0
  24. hatchet_sdk/clients/rest/models/workflow.py +5 -0
  25. hatchet_sdk/config.py +42 -0
  26. hatchet_sdk/context/context.py +1 -0
  27. hatchet_sdk/contracts/events_pb2.py +20 -20
  28. hatchet_sdk/contracts/events_pb2.pyi +14 -6
  29. hatchet_sdk/features/cron.py +1 -1
  30. hatchet_sdk/features/filters.py +181 -0
  31. hatchet_sdk/features/runs.py +7 -1
  32. hatchet_sdk/features/scheduled.py +1 -1
  33. hatchet_sdk/features/workflows.py +1 -1
  34. hatchet_sdk/hatchet.py +82 -71
  35. hatchet_sdk/opentelemetry/instrumentor.py +7 -2
  36. hatchet_sdk/runnables/standalone.py +6 -0
  37. hatchet_sdk/runnables/workflow.py +29 -2
  38. hatchet_sdk/utils/opentelemetry.py +19 -0
  39. hatchet_sdk/worker/worker.py +1 -1
  40. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.1.dist-info}/METADATA +1 -1
  41. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.1.dist-info}/RECORD +43 -34
  42. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.1.dist-info}/WHEEL +0 -0
  43. {hatchet_sdk-1.9.1.dist-info → hatchet_sdk-1.10.1.dist-info}/entry_points.txt +0 -0
hatchet_sdk/config.py CHANGED
@@ -1,10 +1,12 @@
 import json
 from logging import Logger, getLogger
+from typing import overload
 
 from pydantic import Field, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
 from hatchet_sdk.token import get_addresses_from_jwt, get_tenant_id_from_jwt
+from hatchet_sdk.utils.opentelemetry import OTelAttribute
 
 
 def create_settings_config(env_prefix: str) -> SettingsConfigDict:
@@ -36,6 +38,17 @@ class HealthcheckConfig(BaseSettings):
     enabled: bool = False
 
 
+class OpenTelemetryConfig(BaseSettings):
+    model_config = create_settings_config(
+        env_prefix="HATCHET_CLIENT_OPENTELEMETRY_",
+    )
+
+    excluded_attributes: list[OTelAttribute] = Field(
+        default_factory=list,
+        description='Note that if specifying this field via an environment variable, the variable must be a valid JSON array. For example: \'["action_name", "action_payload"]\'',
+    )
+
+
 DEFAULT_HOST_PORT = "localhost:7070"
 
 
@@ -54,6 +67,7 @@ class ClientConfig(BaseSettings):
 
     tls_config: ClientTLSConfig = Field(default_factory=lambda: ClientTLSConfig())
     healthcheck: HealthcheckConfig = Field(default_factory=lambda: HealthcheckConfig())
+    otel: OpenTelemetryConfig = Field(default_factory=lambda: OpenTelemetryConfig())
 
     listener_v2_timeout: int | None = None
     grpc_max_recv_message_length: int = Field(
@@ -121,9 +135,37 @@ class ClientConfig(BaseSettings):
     def validate_namespace(cls, namespace: str) -> str:
         if not namespace:
             return ""
+
         if not namespace.endswith("_"):
             namespace = f"{namespace}_"
+
         return namespace.lower()
 
     def __hash__(self) -> int:
         return hash(json.dumps(self.model_dump(), default=str))
+
+    @overload
+    def apply_namespace(
+        self, resource_name: str, namespace_override: str | None = None
+    ) -> str: ...
+
+    @overload
+    def apply_namespace(
+        self, resource_name: None, namespace_override: str | None = None
+    ) -> None: ...
+
+    def apply_namespace(
+        self, resource_name: str | None, namespace_override: str | None = None
+    ) -> str | None:
+        if resource_name is None:
+            return None
+
+        namespace = namespace_override or self.namespace
+
+        if not namespace:
+            return resource_name
+
+        if resource_name.startswith(namespace):
+            return resource_name
+
+        return namespace + resource_name
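
The new apply_namespace helper prefixes resource names with the configured namespace unless they are already prefixed or no namespace is set. A minimal sketch of that behaviour, assuming a ClientConfig can be constructed directly with just a namespace (in practice it is usually populated from HATCHET_CLIENT_* environment variables):

from hatchet_sdk.config import ClientConfig

# Hypothetical direct construction, for illustration only.
config = ClientConfig(namespace="MyApp")  # the validator normalizes this to "myapp_"

config.apply_namespace("daily-report")            # -> "myapp_daily-report"
config.apply_namespace("myapp_daily-report")      # already prefixed -> returned unchanged
config.apply_namespace(None)                      # -> None (the overloads preserve None)
config.apply_namespace("daily-report", "other_")  # override wins -> "other_daily-report"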
hatchet_sdk/context/context.py CHANGED
@@ -59,6 +59,7 @@ class Context:
         self.stream_event_thread_pool = ThreadPoolExecutor(max_workers=1)
 
         self.input = self.data.input
+        self.filter_payload = self.data.filter_payload
 
         self._lifespan_context = lifespan_context
 
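The one-line change above surfaces the triggering filter's payload on the Context object. A rough sketch of reading it from a task, assuming the standard @hatchet.task decorator and credentials supplied via environment variables; the task name and event key are placeholders:

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()  # assumes HATCHET_CLIENT_TOKEN etc. are configured

@hatchet.task(name="on-filtered-event", on_events=["user:created"])
def on_filtered_event(input: EmptyModel, ctx: Context) -> dict:
    # ctx.filter_payload mirrors self.data.filter_payload; presumably the payload
    # stored on the filter that matched the triggering event.
    return {"filter_payload": ctx.filter_payload}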
hatchet_sdk/contracts/events_pb2.py CHANGED
@@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default()
 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb4\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\x92\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\tB\x08\n\x06_level\"\x10\n\x0ePutLogResponse\"|\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\x9c\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd2\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05scope\x18\x07 \x01(\tH\x01\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x08\n\x06_scope\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\xc2\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12\x1b\n\x0etaskRetryCount\x18\x06 \x01(\x05H\x01\x88\x01\x01\x42\x08\n\x06_levelB\x11\n\x0f_taskRetryCount\"\x10\n\x0ePutLogResponse\"|\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\xde\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08priority\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x12\n\x05scope\x18\x06 \x01(\tH\x02\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x0b\n\t_priorityB\x08\n\x06_scope\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3')
 
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -24,23 +24,23 @@ if not _descriptor._USE_C_DESCRIPTORS:
   _globals['DESCRIPTOR']._loaded_options = None
   _globals['DESCRIPTOR']._serialized_options = b'ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contracts'
   _globals['_EVENT']._serialized_start=50
-  _globals['_EVENT']._serialized_end=230
-  _globals['_EVENTS']._serialized_start=232
-  _globals['_EVENTS']._serialized_end=264
-  _globals['_PUTLOGREQUEST']._serialized_start=267
-  _globals['_PUTLOGREQUEST']._serialized_end=413
-  _globals['_PUTLOGRESPONSE']._serialized_start=415
-  _globals['_PUTLOGRESPONSE']._serialized_end=431
-  _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=433
-  _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=557
-  _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=559
-  _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=583
-  _globals['_BULKPUSHEVENTREQUEST']._serialized_start=585
-  _globals['_BULKPUSHEVENTREQUEST']._serialized_end=642
-  _globals['_PUSHEVENTREQUEST']._serialized_start=645
-  _globals['_PUSHEVENTREQUEST']._serialized_end=801
-  _globals['_REPLAYEVENTREQUEST']._serialized_start=803
-  _globals['_REPLAYEVENTREQUEST']._serialized_end=840
-  _globals['_EVENTSSERVICE']._serialized_start=843
-  _globals['_EVENTSSERVICE']._serialized_end=1107
+  _globals['_EVENT']._serialized_end=260
+  _globals['_EVENTS']._serialized_start=262
+  _globals['_EVENTS']._serialized_end=294
+  _globals['_PUTLOGREQUEST']._serialized_start=297
+  _globals['_PUTLOGREQUEST']._serialized_end=491
+  _globals['_PUTLOGRESPONSE']._serialized_start=493
+  _globals['_PUTLOGRESPONSE']._serialized_end=509
+  _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=511
+  _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=635
+  _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=637
+  _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=661
+  _globals['_BULKPUSHEVENTREQUEST']._serialized_start=663
+  _globals['_BULKPUSHEVENTREQUEST']._serialized_end=720
+  _globals['_PUSHEVENTREQUEST']._serialized_start=723
+  _globals['_PUSHEVENTREQUEST']._serialized_end=945
+  _globals['_REPLAYEVENTREQUEST']._serialized_start=947
+  _globals['_REPLAYEVENTREQUEST']._serialized_end=984
+  _globals['_EVENTSSERVICE']._serialized_start=987
+  _globals['_EVENTSSERVICE']._serialized_end=1251
 # @@protoc_insertion_point(module_scope)
hatchet_sdk/contracts/events_pb2.pyi CHANGED
@@ -7,20 +7,22 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map
 DESCRIPTOR: _descriptor.FileDescriptor
 
 class Event(_message.Message):
-    __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata")
+    __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata", "scope")
     TENANTID_FIELD_NUMBER: _ClassVar[int]
     EVENTID_FIELD_NUMBER: _ClassVar[int]
     KEY_FIELD_NUMBER: _ClassVar[int]
     PAYLOAD_FIELD_NUMBER: _ClassVar[int]
     EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
     ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int]
+    SCOPE_FIELD_NUMBER: _ClassVar[int]
     tenantId: str
     eventId: str
     key: str
     payload: str
     eventTimestamp: _timestamp_pb2.Timestamp
     additionalMetadata: str
-    def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) -> None: ...
+    scope: str
+    def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ..., scope: _Optional[str] = ...) -> None: ...
 
 class Events(_message.Message):
     __slots__ = ("events",)
@@ -29,18 +31,20 @@ class Events(_message.Message):
     def __init__(self, events: _Optional[_Iterable[_Union[Event, _Mapping]]] = ...) -> None: ...
 
 class PutLogRequest(_message.Message):
-    __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata")
+    __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata", "taskRetryCount")
     STEPRUNID_FIELD_NUMBER: _ClassVar[int]
     CREATEDAT_FIELD_NUMBER: _ClassVar[int]
     MESSAGE_FIELD_NUMBER: _ClassVar[int]
     LEVEL_FIELD_NUMBER: _ClassVar[int]
     METADATA_FIELD_NUMBER: _ClassVar[int]
+    TASKRETRYCOUNT_FIELD_NUMBER: _ClassVar[int]
     stepRunId: str
     createdAt: _timestamp_pb2.Timestamp
     message: str
     level: str
     metadata: str
-    def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ...) -> None: ...
+    taskRetryCount: int
+    def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ..., taskRetryCount: _Optional[int] = ...) -> None: ...
 
 class PutLogResponse(_message.Message):
     __slots__ = ()
@@ -69,16 +73,20 @@ class BulkPushEventRequest(_message.Message):
     def __init__(self, events: _Optional[_Iterable[_Union[PushEventRequest, _Mapping]]] = ...) -> None: ...
 
 class PushEventRequest(_message.Message):
-    __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata")
+    __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata", "priority", "scope")
     KEY_FIELD_NUMBER: _ClassVar[int]
     PAYLOAD_FIELD_NUMBER: _ClassVar[int]
     EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int]
     ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int]
+    PRIORITY_FIELD_NUMBER: _ClassVar[int]
+    SCOPE_FIELD_NUMBER: _ClassVar[int]
     key: str
     payload: str
     eventTimestamp: _timestamp_pb2.Timestamp
     additionalMetadata: str
-    def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) -> None: ...
+    priority: int
+    scope: str
+    def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ..., priority: _Optional[int] = ..., scope: _Optional[str] = ...) -> None: ...
 
 class ReplayEventRequest(_message.Message):
     __slots__ = ("eventId",)
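
These regenerated stubs add optional scope and priority fields to PushEventRequest, scope to Event, and taskRetryCount to PutLogRequest. A minimal sketch constructing the low-level message directly from the generated contracts (the higher-level events client also changed in this release, but that diff is not shown here):

from google.protobuf.timestamp_pb2 import Timestamp

from hatchet_sdk.contracts.events_pb2 import PushEventRequest

ts = Timestamp()
ts.GetCurrentTime()

# scope and priority are proto3 optionals, so both can simply be omitted when unused.
req = PushEventRequest(
    key="user:created",
    payload='{"user_id": "123"}',
    eventTimestamp=ts,
    additionalMetadata='{"source": "example"}',
    priority=1,
    scope="customer-acme",
)
print(req.scope, req.HasField("priority"))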
hatchet_sdk/features/cron.py CHANGED
@@ -102,7 +102,7 @@ class CronClient(BaseRestClient):
         with self.client() as client:
             return self._wra(client).cron_workflow_trigger_create(
                 tenant=self.client_config.tenant_id,
-                workflow=workflow_name,
+                workflow=self.client_config.apply_namespace(workflow_name),
                 create_cron_workflow_trigger_request=CreateCronWorkflowTriggerRequest(
                     cronName=cron_name,
                     cronExpression=validated_input.expression,
hatchet_sdk/features/filters.py ADDED
@@ -0,0 +1,181 @@
+import asyncio
+
+from hatchet_sdk.clients.rest.api.filter_api import FilterApi
+from hatchet_sdk.clients.rest.api_client import ApiClient
+from hatchet_sdk.clients.rest.models.v1_create_filter_request import (
+    V1CreateFilterRequest,
+)
+from hatchet_sdk.clients.rest.models.v1_filter import V1Filter
+from hatchet_sdk.clients.rest.models.v1_filter_list import V1FilterList
+from hatchet_sdk.clients.v1.api_client import BaseRestClient
+from hatchet_sdk.utils.typing import JSONSerializableMapping
+
+
+class FiltersClient(BaseRestClient):
+    """
+    The filters client is a client for interacting with Hatchet's filters API.
+    """
+
+    def _fa(self, client: ApiClient) -> FilterApi:
+        return FilterApi(client)
+
+    async def aio_list(
+        self,
+        limit: int | None = None,
+        offset: int | None = None,
+        workflow_id_scope_pairs: list[tuple[str, str]] | None = None,
+    ) -> V1FilterList:
+        """
+        List filters for a given tenant.
+
+        :param limit: The maximum number of filters to return.
+        :param offset: The number of filters to skip before starting to collect the result set.
+        :param workflow_id_scope_pairs: A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second.
+
+        :return: A list of filters matching the specified criteria.
+        """
+        return await asyncio.to_thread(
+            self.list, limit, offset, workflow_id_scope_pairs
+        )
+
+    def list(
+        self,
+        limit: int | None = None,
+        offset: int | None = None,
+        workflow_id_scope_pairs: list[tuple[str, str]] | None = None,
+    ) -> V1FilterList:
+        """
+        List filters for a given tenant.
+
+        :param limit: The maximum number of filters to return.
+        :param offset: The number of filters to skip before starting to collect the result set.
+        :param workflow_id_scope_pairs: A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second.
+
+        :return: A list of filters matching the specified criteria.
+        """
+        workflow_ids = (
+            [pair[0] for pair in workflow_id_scope_pairs]
+            if workflow_id_scope_pairs
+            else None
+        )
+        scopes = (
+            [pair[1] for pair in workflow_id_scope_pairs]
+            if workflow_id_scope_pairs
+            else None
+        )
+
+        with self.client() as client:
+            return self._fa(client).v1_filter_list(
+                tenant=self.tenant_id,
+                limit=limit,
+                offset=offset,
+                workflow_ids=workflow_ids,
+                scopes=scopes,
+            )
+
+    def get(
+        self,
+        filter_id: str,
+    ) -> V1Filter:
+        """
+        Get a filter by its ID.
+
+        :param filter_id: The ID of the filter to retrieve.
+
+        :return: The filter with the specified ID.
+        """
+        with self.client() as client:
+            return self._fa(client).v1_filter_get(
+                tenant=self.tenant_id,
+                v1_filter=filter_id,
+            )
+
+    async def aio_get(
+        self,
+        filter_id: str,
+    ) -> V1Filter:
+        """
+        Get a filter by its ID.
+
+        :param filter_id: The ID of the filter to retrieve.
+
+        :return: The filter with the specified ID.
+        """
+        return await asyncio.to_thread(self.get, filter_id)
+
+    def create(
+        self,
+        workflow_id: str,
+        expression: str,
+        scope: str,
+        payload: JSONSerializableMapping = {},
+    ) -> V1Filter:
+        """
+        Create a new filter.
+
+        :param workflow_id: The ID of the workflow to associate with the filter.
+        :param expression: The expression to evaluate for the filter.
+        :param scope: The scope for the filter.
+        :param payload: The payload to send with the filter.
+
+        :return: The created filter.
+        """
+        with self.client() as client:
+            return self._fa(client).v1_filter_create(
+                tenant=self.tenant_id,
+                v1_create_filter_request=V1CreateFilterRequest(
+                    workflowId=workflow_id,
+                    expression=expression,
+                    scope=scope,
+                    payload=dict(payload),
+                ),
+            )
+
+    async def aio_create(
+        self,
+        workflow_id: str,
+        expression: str,
+        scope: str,
+        payload: JSONSerializableMapping = {},
+    ) -> V1Filter:
+        """
+        Create a new filter.
+
+        :param workflow_id: The ID of the workflow to associate with the filter.
+        :param expression: The expression to evaluate for the filter.
+        :param scope: The scope for the filter.
+        :param payload: The payload to send with the filter.
+
+        :return: The created filter.
+        """
+        return await asyncio.to_thread(
+            self.create, workflow_id, expression, scope, payload
+        )
+
+    def delete(
+        self,
+        filter_id: str,
+    ) -> V1Filter:
+        """
+        Delete a filter by its ID.
+
+        :param filter_id: The ID of the filter to delete.
+        :return: The deleted filter.
+        """
+        with self.client() as client:
+            return self._fa(client).v1_filter_delete(
+                tenant=self.tenant_id,
+                v1_filter=filter_id,
+            )
+
+    async def aio_delete(
+        self,
+        filter_id: str,
+    ) -> V1Filter:
+        """
+        Delete a filter by its ID.
+
+        :param filter_id: The ID of the filter to delete.
+        :return: The deleted filter.
+        """
+        return await asyncio.to_thread(self.delete, filter_id)
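
A usage sketch for the new FiltersClient. The method signatures come from the file above; exposing the client as hatchet.filters is an assumption (hatchet.py also changed in this release but is not shown here), and the workflow ID and CEL-style expression are placeholders:

from hatchet_sdk import Hatchet

hatchet = Hatchet()  # assumes HATCHET_CLIENT_TOKEN etc. are configured

# Create a filter scoped to a single customer (placeholder IDs and expression).
hatchet.filters.create(
    workflow_id="workflow-uuid-here",
    expression="input.customer_id == 'acme'",
    scope="customer-acme",
    payload={"plan": "enterprise"},
)

# List filters for that workflow/scope pair; the async variants (aio_create,
# aio_list, aio_get, aio_delete) mirror the sync API.
print(
    hatchet.filters.list(
        workflow_id_scope_pairs=[("workflow-uuid-here", "customer-acme")]
    )
)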
hatchet_sdk/features/runs.py CHANGED
@@ -141,6 +141,7 @@ class RunsClient(BaseRestClient):
         workflow_ids: list[str] | None = None,
         worker_id: str | None = None,
         parent_task_external_id: str | None = None,
+        triggering_event_external_id: str | None = None,
     ) -> V1TaskSummaryList:
         """
         List task runs according to a set of filters.
@@ -155,6 +156,7 @@ class RunsClient(BaseRestClient):
         :param workflow_ids: The workflow IDs to filter task runs by.
         :param worker_id: The worker ID to filter task runs by.
         :param parent_task_external_id: The parent task external ID to filter task runs by.
+        :param triggering_event_external_id: The event id that triggered the task run.
 
         :return: A list of task runs matching the specified filters.
         """
@@ -170,6 +172,7 @@ class RunsClient(BaseRestClient):
                 workflow_ids=workflow_ids,
                 worker_id=worker_id,
                 parent_task_external_id=parent_task_external_id,
+                triggering_event_external_id=triggering_event_external_id,
             )
 
     def list(
@@ -184,6 +187,7 @@ class RunsClient(BaseRestClient):
         workflow_ids: list[str] | None = None,
         worker_id: str | None = None,
         parent_task_external_id: str | None = None,
+        triggering_event_external_id: str | None = None,
     ) -> V1TaskSummaryList:
         """
         List task runs according to a set of filters.
@@ -198,6 +202,7 @@ class RunsClient(BaseRestClient):
         :param workflow_ids: The workflow IDs to filter task runs by.
         :param worker_id: The worker ID to filter task runs by.
         :param parent_task_external_id: The parent task external ID to filter task runs by.
+        :param triggering_event_external_id: The event id that triggered the task run.
 
         :return: A list of task runs matching the specified filters.
         """
@@ -216,6 +221,7 @@ class RunsClient(BaseRestClient):
                 workflow_ids=workflow_ids,
                 worker_id=worker_id,
                 parent_task_external_id=parent_task_external_id,
+                triggering_event_external_id=triggering_event_external_id,
            )
 
     def create(
@@ -241,7 +247,7 @@
             return self._wra(client).v1_workflow_run_create(
                 tenant=self.client_config.tenant_id,
                 v1_trigger_workflow_run_request=V1TriggerWorkflowRunRequest(
-                    workflowName=workflow_name,
+                    workflowName=self.client_config.apply_namespace(workflow_name),
                     input=dict(input),
                     additionalMetadata=dict(additional_metadata),
                     priority=priority,
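
The new triggering_event_external_id parameter narrows run listings to runs started by a particular event. A brief sketch, assuming the runs client is exposed as hatchet.runs and using a placeholder event ID:

from hatchet_sdk import Hatchet

hatchet = Hatchet()  # assumes HATCHET_CLIENT_TOKEN etc. are configured

# Returns a V1TaskSummaryList of task runs triggered by the given event.
runs = hatchet.runs.list(triggering_event_external_id="event-uuid-here")
print(runs)

# Or asynchronously:
# runs = await hatchet.runs.aio_list(triggering_event_external_id="event-uuid-here")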
hatchet_sdk/features/scheduled.py CHANGED
@@ -59,7 +59,7 @@ class ScheduledClient(BaseRestClient):
         with self.client() as client:
             return self._wra(client).scheduled_workflow_run_create(
                 tenant=self.client_config.tenant_id,
-                workflow=workflow_name,
+                workflow=self.client_config.apply_namespace(workflow_name),
                 schedule_workflow_run_request=ScheduleWorkflowRunRequest(
                     triggerAt=trigger_at,
                     input=dict(input),
hatchet_sdk/features/workflows.py CHANGED
@@ -61,7 +61,7 @@ class WorkflowsClient(BaseRestClient):
                 tenant=self.client_config.tenant_id,
                 limit=limit,
                 offset=offset,
-                name=workflow_name,
+                name=self.client_config.apply_namespace(workflow_name),
             )
 
     async def aio_list(