zenml-nightly 0.58.2.dev20240623__py3-none-any.whl → 0.58.2.dev20240626__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries, as they appear in that registry. It is provided for informational purposes only.
Files changed (54)
  1. zenml/VERSION +1 -1
  2. zenml/actions/base_action.py +177 -174
  3. zenml/actions/pipeline_run/pipeline_run_action.py +28 -23
  4. zenml/artifact_stores/base_artifact_store.py +7 -1
  5. zenml/artifacts/utils.py +13 -10
  6. zenml/cli/service_connectors.py +1 -0
  7. zenml/client.py +234 -58
  8. zenml/config/compiler.py +10 -9
  9. zenml/config/docker_settings.py +25 -9
  10. zenml/constants.py +1 -1
  11. zenml/event_hub/base_event_hub.py +5 -5
  12. zenml/event_hub/event_hub.py +15 -6
  13. zenml/event_sources/base_event.py +0 -11
  14. zenml/event_sources/base_event_source.py +7 -0
  15. zenml/event_sources/webhooks/base_webhook_event_source.py +1 -4
  16. zenml/exceptions.py +4 -0
  17. zenml/hooks/hook_validators.py +2 -3
  18. zenml/integrations/bitbucket/plugins/event_sources/bitbucket_webhook_event_source.py +3 -3
  19. zenml/integrations/mlflow/__init__.py +1 -1
  20. zenml/integrations/s3/artifact_stores/s3_artifact_store.py +76 -3
  21. zenml/logging/step_logging.py +54 -51
  22. zenml/models/__init__.py +17 -0
  23. zenml/models/v2/core/action.py +276 -0
  24. zenml/models/v2/core/trigger.py +182 -141
  25. zenml/new/pipelines/pipeline.py +13 -3
  26. zenml/new/pipelines/pipeline_decorator.py +1 -2
  27. zenml/new/pipelines/run_utils.py +1 -12
  28. zenml/new/steps/step_decorator.py +2 -3
  29. zenml/pipelines/base_pipeline.py +0 -2
  30. zenml/pipelines/pipeline_decorator.py +1 -2
  31. zenml/stack/stack_component.py +4 -0
  32. zenml/steps/base_step.py +1 -2
  33. zenml/steps/step_decorator.py +1 -2
  34. zenml/types.py +10 -1
  35. zenml/utils/pipeline_docker_image_builder.py +20 -5
  36. zenml/zen_server/rbac/models.py +1 -0
  37. zenml/zen_server/rbac/utils.py +22 -1
  38. zenml/zen_server/routers/actions_endpoints.py +324 -0
  39. zenml/zen_server/routers/triggers_endpoints.py +30 -158
  40. zenml/zen_server/zen_server_api.py +2 -0
  41. zenml/zen_stores/migrations/versions/25155145c545_separate_actions_and_triggers.py +228 -0
  42. zenml/zen_stores/rest_zen_store.py +103 -4
  43. zenml/zen_stores/schemas/__init__.py +2 -0
  44. zenml/zen_stores/schemas/action_schemas.py +192 -0
  45. zenml/zen_stores/schemas/trigger_schemas.py +43 -50
  46. zenml/zen_stores/schemas/user_schemas.py +10 -2
  47. zenml/zen_stores/schemas/workspace_schemas.py +5 -0
  48. zenml/zen_stores/sql_zen_store.py +240 -30
  49. zenml/zen_stores/zen_store_interface.py +85 -0
  50. {zenml_nightly-0.58.2.dev20240623.dist-info → zenml_nightly-0.58.2.dev20240626.dist-info}/METADATA +2 -2
  51. {zenml_nightly-0.58.2.dev20240623.dist-info → zenml_nightly-0.58.2.dev20240626.dist-info}/RECORD +54 -50
  52. {zenml_nightly-0.58.2.dev20240623.dist-info → zenml_nightly-0.58.2.dev20240626.dist-info}/LICENSE +0 -0
  53. {zenml_nightly-0.58.2.dev20240623.dist-info → zenml_nightly-0.58.2.dev20240626.dist-info}/WHEEL +0 -0
  54. {zenml_nightly-0.58.2.dev20240623.dist-info → zenml_nightly-0.58.2.dev20240626.dist-info}/entry_points.txt +0 -0
zenml/config/docker_settings.py CHANGED
@@ -16,7 +16,7 @@
 from enum import Enum
 from typing import Any, Dict, List, Optional, Union

-from pydantic import Field, model_validator
+from pydantic import BaseModel, Field, model_validator
 from pydantic_settings import SettingsConfigDict

 from zenml.config.base_settings import BaseSettings
@@ -65,6 +65,23 @@ class PythonPackageInstaller(Enum):
     UV = "uv"


+class DockerBuildConfig(BaseModel):
+    """Configuration for a Docker build.
+
+    Attributes:
+        build_options: Additional options that will be passed unmodified to the
+            Docker build call when building an image. You can use this to for
+            example specify build args or a target stage. See
+            https://docker-py.readthedocs.io/en/stable/images.html#docker.models.images.ImageCollection.build
+            for a full list of available options.
+        dockerignore: Path to a dockerignore file to use when building the
+            Docker image.
+    """
+
+    build_options: Dict[str, Any] = {}
+    dockerignore: Optional[str] = None
+
+
 class DockerSettings(BaseSettings):
     """Settings for building Docker images to run ZenML pipelines.

@@ -116,12 +133,9 @@ class DockerSettings(BaseSettings):
         build_context_root: Build context root for the Docker build, only used
             when the `dockerfile` attribute is set. If this is left empty, the
             build context will only contain the Dockerfile.
-        build_options: Additional options that will be passed unmodified to the
-            Docker build call when building an image using the specified
-            `dockerfile`. You can use this to for example specify build
-            args or a target stage. See
-            https://docker-py.readthedocs.io/en/stable/images.html#docker.models.images.ImageCollection.build
-            for a full list of available options.
+        parent_image_build_config: Configuration for the parent image build.
+        build_options: DEPRECATED, use parent_image_build_config.build_options
+            instead.
         skip_build: If set to `True`, the parent image will be used directly to
             run the steps of your pipeline.
         target_repository: Name of the Docker repository to which the
@@ -158,8 +172,8 @@ class DockerSettings(BaseSettings):
         apt_packages: APT packages to install inside the Docker image.
         environment: Dictionary of environment variables to set inside the
             Docker image.
-        dockerignore: Path to a dockerignore file to use when building the
-            Docker image.
+        build_config: Configuration for the main image build.
+        dockerignore: DEPRECATED, use build_config.dockerignore instead.
         copy_files: DEPRECATED, use the `source_files` attribute instead.
         copy_global_config: DEPRECATED/UNUSED.
         user: If not `None`, will set the user, make it owner of the `/app`
@@ -184,6 +198,7 @@ class DockerSettings(BaseSettings):
     dockerfile: Optional[str] = None
     build_context_root: Optional[str] = None
     build_options: Dict[str, Any] = {}
+    parent_image_build_config: Optional[DockerBuildConfig] = None
     skip_build: bool = False
     target_repository: str = "zenml"
     python_package_installer: PythonPackageInstaller = (
@@ -205,6 +220,7 @@ class DockerSettings(BaseSettings):
     copy_files: bool = True
     copy_global_config: bool = True
     user: Optional[str] = None
+    build_config: Optional[DockerBuildConfig] = None

     source_files: SourceFileMode = SourceFileMode.DOWNLOAD_OR_INCLUDE

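Taken together, these hunks deprecate the flat `build_options`/`dockerignore` attributes in favor of per-build `DockerBuildConfig` objects: `build_config` for the main image and `parent_image_build_config` for a parent image built from a custom Dockerfile. A minimal sketch of how a pipeline might adopt the new fields; the `buildargs`/`target` values and the pipeline itself are illustrative, not taken from this diff:

```python
from zenml import pipeline
from zenml.config.docker_settings import DockerBuildConfig, DockerSettings

docker_settings = DockerSettings(
    # Options forwarded unmodified to the docker-py build call for the
    # main pipeline image (replaces the deprecated top-level `dockerignore`).
    build_config=DockerBuildConfig(
        build_options={"buildargs": {"MY_ARG": "value"}},
        dockerignore=".dockerignore",
    ),
    # Separate options for building the parent image from a custom Dockerfile
    # (replaces the deprecated top-level `build_options`).
    dockerfile="Dockerfile",
    parent_image_build_config=DockerBuildConfig(
        build_options={"target": "runtime"},
    ),
)


@pipeline(settings={"docker": docker_settings})
def my_pipeline() -> None:
    ...
```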
zenml/constants.py CHANGED
@@ -326,7 +326,7 @@ REQUIRES_CUSTOM_RESOURCE_REPORTING = ["pipeline", "pipeline_run"]

 # API Endpoint paths:
 ACTIVATE = "/activate"
-ACTIONS = "/action-flavors"
+ACTIONS = "/actions"
 API = "/api"
 API_KEYS = "/api_keys"
 API_KEY_ROTATE = "/rotate"
zenml/event_hub/base_event_hub.py CHANGED
@@ -121,7 +121,7 @@ class BaseEventHub(ABC):
             trigger=trigger.id, event_metadata=event.model_dump()
         )

-        action_config = trigger.get_metadata().action
+        action_config = trigger.action.configuration

         trigger_execution = self.zen_store.create_trigger_execution(request)

@@ -130,16 +130,16 @@ class BaseEventHub(ABC):
         # is associated with the service account configured for the trigger
         # and has a validity defined by the trigger's authentication window.
         token = JWTToken(
-            user_id=trigger.service_account.id,
+            user_id=trigger.action.service_account.id,
         )
         expires: Optional[datetime] = None
-        if trigger.auth_window:
+        if trigger.action.auth_window:
             expires = datetime.utcnow() + timedelta(
-                minutes=trigger.auth_window
+                minutes=trigger.action.auth_window
             )
         encoded_token = token.encode(expires=expires)
         auth_context = AuthContext(
-            user=trigger.service_account,
+            user=trigger.action.service_account,
             access_token=token,
             encoded_access_token=encoded_token,
         )
zenml/event_hub/event_hub.py CHANGED
@@ -14,7 +14,9 @@
 """Base class for all the Event Hub."""

 from functools import partial
-from typing import TYPE_CHECKING, List
+from typing import List
+
+from pydantic import ValidationError

 from zenml import EventSourceResponse
 from zenml.enums import PluginType
@@ -35,9 +37,6 @@ from zenml.zen_server.utils import plugin_flavor_registry

 logger = get_logger(__name__)

-if TYPE_CHECKING:
-    pass
-

 class InternalEventHub(BaseEventHub):
     """Internal in-server event hub implementation.
@@ -158,9 +157,19 @@ class InternalEventHub(BaseEventHub):

            assert issubclass(plugin_flavor, BaseEventSourceFlavor)

-           # Get the filter class from the plugin flavor class
            event_filter_config_class = plugin_flavor.EVENT_FILTER_CONFIG_CLASS
-           event_filter = event_filter_config_class(**trigger.event_filter)
+           try:
+               event_filter = event_filter_config_class(
+                   **trigger.event_filter if trigger.event_filter else {}
+               )
+           except ValidationError:
+               logger.exception(
+                   f"Could not instantiate event filter config class for "
+                   f"event source {event_source.id}. Skipping trigger "
+                   f"{trigger.id}."
+               )
+               continue
+
            if event_filter.event_matches_filter(event=event):
                trigger_list.append(trigger)

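The try/except around the filter instantiation means a single malformed or missing `event_filter` no longer aborts matching for all triggers: the offending trigger is logged and skipped. A standalone sketch of the same defensive pattern; the `RepoEventFilter` model and `build_filters` helper are hypothetical, for illustration only:

```python
from typing import Any, Dict, List, Optional

from pydantic import BaseModel, ValidationError


class RepoEventFilter(BaseModel):
    """Illustrative stand-in for an event filter config class."""

    repo: Optional[str] = None
    branch: Optional[str] = None


def build_filters(
    raw_filters: List[Optional[Dict[str, Any]]],
) -> List[RepoEventFilter]:
    """Instantiate filters defensively, skipping any that fail validation."""
    filters = []
    for raw in raw_filters:
        try:
            # Mirror the hunk above: treat a missing filter as an empty dict.
            filters.append(RepoEventFilter(**raw if raw else {}))
        except ValidationError:
            # A malformed filter is skipped instead of aborting dispatch
            # for every other trigger.
            continue
    return filters


# The second entry fails validation (list instead of str) and is dropped.
print(build_filters([{"repo": "zenml"}, {"branch": ["not", "a", "string"]}, None]))
```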
zenml/event_sources/base_event.py CHANGED
@@ -13,19 +13,8 @@
 # permissions and limitations under the License.
 """Base implementation for events."""

-from typing import (
-    TYPE_CHECKING,
-)
-
 from pydantic import BaseModel

-from zenml.logger import get_logger
-
-if TYPE_CHECKING:
-    pass
-
-logger = get_logger(__name__)
-
 # -------------------- Event Models -----------------------------------


zenml/event_sources/base_event_source.py CHANGED
@@ -449,6 +449,13 @@ class BaseEventSourceHandler(BasePlugin, ABC):
             event: The event to dispatch.
             event_source: The event source that produced the event.
         """
+        if not event_source.is_active:
+            logger.debug(
+                f"Event source {event_source.id} is not active. Skipping event "
+                f"dispatch."
+            )
+            return
+
         self.event_hub.publish_event(
             event=event,
             event_source=event_source,
zenml/event_sources/webhooks/base_webhook_event_source.py CHANGED
@@ -17,7 +17,7 @@ import hashlib
 import hmac
 import json
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, ClassVar, Dict, Optional, Type
+from typing import Any, ClassVar, Dict, Optional, Type

 from zenml.enums import PluginSubType
 from zenml.event_sources.base_event import BaseEvent
@@ -33,9 +33,6 @@ from zenml.models import EventSourceResponse

 logger = get_logger(__name__)

-if TYPE_CHECKING:
-    pass
-

 # -------------------- Event Models -----------------------------------

zenml/exceptions.py CHANGED
@@ -197,6 +197,10 @@ class EntityExistsError(ZenMLBaseException):
     """Raised when trying to register an entity that already exists."""


+class ActionExistsError(EntityExistsError):
+    """Raised when registering an action with a name that already exists."""
+
+
 class TriggerExistsError(EntityExistsError):
     """Raised when registering a trigger with name that already exists."""

zenml/hooks/hook_validators.py CHANGED
@@ -14,14 +14,13 @@
 """Validation functions for hooks."""

 import inspect
-from types import FunctionType
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING

 from zenml.config.source import Source
 from zenml.utils import source_utils

 if TYPE_CHECKING:
-    HookSpecification = Union[str, Source, FunctionType]
+    from zenml.types import HookSpecification


 def resolve_and_validate_hook(hook: "HookSpecification") -> Source:
zenml/integrations/bitbucket/plugins/event_sources/bitbucket_webhook_event_source.py CHANGED
@@ -152,9 +152,9 @@ class BitbucketEvent(BaseEvent):
 class BitbucketWebhookEventFilterConfiguration(WebhookEventFilterConfig):
     """Configuration for Bitbucket event filters."""

-    repo: Optional[str]
-    branch: Optional[str]
-    event_type: Optional[BitbucketEventType]
+    repo: Optional[str] = None
+    branch: Optional[str] = None
+    event_type: Optional[BitbucketEventType] = None

     def event_matches_filter(self, event: BaseEvent) -> bool:
         """Checks the filter against the inbound event.
zenml/integrations/mlflow/__init__.py CHANGED
@@ -33,7 +33,7 @@ class MlflowIntegration(Integration):
     NAME = MLFLOW

     REQUIREMENTS = [
-        "mlflow>=2.1.1,<=2.12.2",
+        "mlflow>=2.1.1,<=2.14.1",
         "mlserver>=1.3.3",
         "mlserver-mlflow>=1.3.3",
         # TODO: remove this requirement once rapidjson is fixed
zenml/integrations/s3/artifact_stores/s3_artifact_store.py CHANGED
@@ -26,6 +26,7 @@ from typing import (
 )

 import s3fs
+from fsspec.asyn import FSTimeoutError, sync, sync_wrapper

 from zenml.artifact_stores import BaseArtifactStore
 from zenml.integrations.s3.flavors.s3_artifact_store_flavor import (
@@ -38,10 +39,77 @@ from zenml.stack.authentication_mixin import AuthenticationMixin
 PathType = Union[bytes, str]


+class ZenMLS3Filesystem(s3fs.S3FileSystem):  # type: ignore[misc]
+    """Modified s3fs.S3FileSystem to disable caching.
+
+    The original s3fs.S3FileSystem caches all class instances based on the
+    constructor input arguments and it never releases them. This is problematic
+    in the context of the ZenML server, because the server is a long-running
+    process that instantiates many S3 filesystems with different credentials,
+    especially when the credentials are generated by service connectors.
+
+    The caching behavior of s3fs causes the server to slowly consume more and
+    more memory over time until it crashes. This class disables the caching
+    behavior of s3fs by setting the `cachable` attribute to `False`.
+
+    In addition to disabling instance caching, this class also provides a
+    correct cleanup implementation by overriding the `close_session` method
+    the S3 aiobotocore client. The original one provided by s3fs was causing
+    memory leaks by creating a new event loop in the destructor instead of
+    using the existing one.
+
+    A `close` method is also provided to allow for synchronous on-demand cleanup
+    of the S3 client.
+    """
+
+    cachable = False
+
+    async def _close(self) -> None:
+        """Close the S3 client."""
+        if self._s3creator is not None:  # type: ignore[has-type]
+            await self._s3creator.__aexit__(None, None, None)  # type: ignore[has-type]
+        self._s3creator = None
+        self._s3 = None
+
+    close = sync_wrapper(_close)
+
+    @staticmethod
+    def close_session(loop: Any, s3: Any) -> None:
+        """Close the S3 client session.
+
+        Args:
+            loop: The event loop to use for closing the session.
+            s3: The S3 client to close.
+        """
+        # IMPORTANT: This method is a copy of the original close_session method
+        # from s3fs.S3FileSystem. The only difference is that it uses the
+        # provided event loop instead of creating a new one.
+        if loop is not None and loop.is_running():
+            try:
+                # NOTE: this is the line in the original method that causes
+                # the memory leak
+                # loop = asyncio.get_event_loop()
+                loop.create_task(s3.__aexit__(None, None, None))
+                return
+            except RuntimeError:
+                pass
+        try:
+            sync(loop, s3.__aexit__, None, None, None, timeout=0.1)
+            return
+        except FSTimeoutError:
+            pass
+        try:
+            # close the actual socket
+            s3._client._endpoint.http_session._connector._close()
+        except AttributeError:
+            # but during shutdown, it may have gone
+            pass
+
+
 class S3ArtifactStore(BaseArtifactStore, AuthenticationMixin):
     """Artifact Store for S3 based artifacts."""

-    _filesystem: Optional[s3fs.S3FileSystem] = None
+    _filesystem: Optional[ZenMLS3Filesystem] = None

     @property
     def config(self) -> S3ArtifactStoreConfig:
@@ -98,7 +166,7 @@ class S3ArtifactStore(BaseArtifactStore, AuthenticationMixin):
         return self.config.key, self.config.secret, self.config.token

     @property
-    def filesystem(self) -> s3fs.S3FileSystem:
+    def filesystem(self) -> ZenMLS3Filesystem:
         """The s3 filesystem to access this artifact store.

         Returns:
@@ -110,7 +178,7 @@ class S3ArtifactStore(BaseArtifactStore, AuthenticationMixin):

         key, secret, token = self.get_credentials()

-        self._filesystem = s3fs.S3FileSystem(
+        self._filesystem = ZenMLS3Filesystem(
             key=key,
             secret=secret,
             token=token,
@@ -120,6 +188,11 @@ class S3ArtifactStore(BaseArtifactStore, AuthenticationMixin):
         )
         return self._filesystem

+    def cleanup(self) -> None:
+        """Close the filesystem."""
+        if self._filesystem:
+            self._filesystem.close()
+
     def open(self, path: PathType, mode: str = "r") -> Any:
         """Open a file at the given path.

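The key mechanism here is fsspec's instance cache: filesystems constructed with the same arguments are memoized and never released, which the docstring above identifies as the source of the server's memory growth. A small sketch of the opt-out, assuming default fsspec instance caching; the `UncachedS3FileSystem` name is illustrative and no network calls are made at construction time:

```python
import s3fs


class UncachedS3FileSystem(s3fs.S3FileSystem):
    """Hypothetical subclass that opts out of fsspec's instance cache."""

    cachable = False


# fsspec caches instances keyed by constructor arguments ...
cached_a = s3fs.S3FileSystem(key="abc", secret="def")
cached_b = s3fs.S3FileSystem(key="abc", secret="def")
print(cached_a is cached_b)  # True: the same cached instance is returned

# ... while the uncached subclass builds a fresh instance every time,
# which lets a long-running server release it again after use.
fresh_a = UncachedS3FileSystem(key="abc", secret="def")
fresh_b = UncachedS3FileSystem(key="abc", secret="def")
print(fresh_a is fresh_b)  # False
```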
zenml/logging/step_logging.py CHANGED
@@ -145,61 +145,64 @@ def fetch_logs(
     )

     artifact_store = _load_artifact_store(artifact_store_id, zen_store)
-    if not artifact_store.isdir(logs_uri):
-        return _read_file(logs_uri, offset, length)
-    else:
-        files = artifact_store.listdir(logs_uri)
-        if len(files) == 1:
-            return _read_file(
-                os.path.join(logs_uri, str(files[0])), offset, length
-            )
+    try:
+        if not artifact_store.isdir(logs_uri):
+            return _read_file(logs_uri, offset, length)
         else:
-            is_negative_offset = offset < 0
-            files.sort(reverse=is_negative_offset)
-
-            # search for the first file we need to read
-            latest_file_id = 0
-            for i, file in enumerate(files):
-                file_size: int = artifact_store.size(
-                    os.path.join(logs_uri, str(file))
-                )  # type: ignore[assignment]
-
-                if is_negative_offset:
-                    if file_size >= -offset:
-                        latest_file_id = -(i + 1)
-                        break
+            files = artifact_store.listdir(logs_uri)
+            if len(files) == 1:
+                return _read_file(
+                    os.path.join(logs_uri, str(files[0])), offset, length
+                )
+            else:
+                is_negative_offset = offset < 0
+                files.sort(reverse=is_negative_offset)
+
+                # search for the first file we need to read
+                latest_file_id = 0
+                for i, file in enumerate(files):
+                    file_size: int = artifact_store.size(
+                        os.path.join(logs_uri, str(file))
+                    )  # type: ignore[assignment]
+
+                    if is_negative_offset:
+                        if file_size >= -offset:
+                            latest_file_id = -(i + 1)
+                            break
+                        else:
+                            offset += file_size
                     else:
-                        offset += file_size
-                    else:
-                        if file_size > offset:
-                            latest_file_id = i
+                        if file_size > offset:
+                            latest_file_id = i
+                            break
+                        else:
+                            offset -= file_size
+
+                # read the files according to pre-filtering
+                files.sort()
+                ret = []
+                for file in files[latest_file_id:]:
+                    ret.append(
+                        _read_file(
+                            os.path.join(logs_uri, str(file)),
+                            offset,
+                            length,
+                        )
+                    )
+                    offset = 0
+                    length -= len(ret[-1])
+                    if length <= 0:
+                        # stop further reading, if the whole length is already read
                         break
-                    else:
-                        offset -= file_size
-
-            # read the files according to pre-filtering
-            files.sort()
-            ret = []
-            for file in files[latest_file_id:]:
-                ret.append(
-                    _read_file(
-                        os.path.join(logs_uri, str(file)),
-                        offset,
-                        length,
+
+                if not ret:
+                    raise DoesNotExistException(
+                        f"Folder '{logs_uri}' is empty in artifact store "
+                        f"'{artifact_store.name}'."
                     )
-                )
-                offset = 0
-                length -= len(ret[-1])
-                if length <= 0:
-                    # stop further reading, if the whole length is already read
-                    break
-
-            if not ret:
-                raise DoesNotExistException(
-                    f"Folder '{logs_uri}' is empty in artifact store "
-                    f"'{artifact_store.name}'."
-                )
-            return "".join(ret)
+                return "".join(ret)
+    finally:
+        artifact_store.cleanup()


 class StepLogsStorage:
zenml/models/__init__.py CHANGED
@@ -51,6 +51,15 @@ from zenml.models.v2.base.filter import (
 from zenml.models.v2.base.page import Page

 # V2 Core
+from zenml.models.v2.core.action import (
+    ActionFilter,
+    ActionRequest,
+    ActionResponse,
+    ActionResponseBody,
+    ActionResponseMetadata,
+    ActionResponseResources,
+    ActionUpdate,
+)
 from zenml.models.v2.core.action_flavor import (
     ActionFlavorResponse,
     ActionFlavorResponseBody,
@@ -381,6 +390,7 @@ from zenml.models.v2.core.server_settings import (
 # ----------------------------- Forward References -----------------------------

 # V2
+ActionResponseResources.model_rebuild()
 APIKeyResponseBody.model_rebuild()
 ArtifactVersionRequest.model_rebuild()
 ArtifactVersionResponseBody.model_rebuild()
@@ -470,6 +480,13 @@ __all__ = [
     "UUIDFilter",
     "Page",
     # V2 Core
+    "ActionFilter",
+    "ActionRequest",
+    "ActionResponse",
+    "ActionResponseBody",
+    "ActionResponseMetadata",
+    "ActionResponseResources",
+    "ActionUpdate",
     "ActionFlavorResponse",
     "ActionFlavorResponseBody",
     "ActionFlavorResponseMetadata",