prefect-client 2.19.2__py3-none-any.whl → 3.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (239)
  1. prefect/__init__.py +8 -56
  2. prefect/_internal/compatibility/deprecated.py +6 -115
  3. prefect/_internal/compatibility/experimental.py +4 -79
  4. prefect/_internal/concurrency/api.py +0 -34
  5. prefect/_internal/concurrency/calls.py +0 -6
  6. prefect/_internal/concurrency/cancellation.py +0 -3
  7. prefect/_internal/concurrency/event_loop.py +0 -20
  8. prefect/_internal/concurrency/inspection.py +3 -3
  9. prefect/_internal/concurrency/threads.py +35 -0
  10. prefect/_internal/concurrency/waiters.py +0 -28
  11. prefect/_internal/pydantic/__init__.py +0 -45
  12. prefect/_internal/pydantic/v1_schema.py +21 -22
  13. prefect/_internal/pydantic/v2_schema.py +0 -2
  14. prefect/_internal/pydantic/v2_validated_func.py +18 -23
  15. prefect/_internal/schemas/bases.py +44 -177
  16. prefect/_internal/schemas/fields.py +1 -43
  17. prefect/_internal/schemas/validators.py +60 -158
  18. prefect/artifacts.py +161 -14
  19. prefect/automations.py +39 -4
  20. prefect/blocks/abstract.py +1 -1
  21. prefect/blocks/core.py +268 -148
  22. prefect/blocks/fields.py +2 -57
  23. prefect/blocks/kubernetes.py +8 -12
  24. prefect/blocks/notifications.py +40 -20
  25. prefect/blocks/system.py +22 -11
  26. prefect/blocks/webhook.py +2 -9
  27. prefect/client/base.py +4 -4
  28. prefect/client/cloud.py +8 -13
  29. prefect/client/orchestration.py +347 -341
  30. prefect/client/schemas/actions.py +92 -86
  31. prefect/client/schemas/filters.py +20 -40
  32. prefect/client/schemas/objects.py +151 -145
  33. prefect/client/schemas/responses.py +16 -24
  34. prefect/client/schemas/schedules.py +47 -35
  35. prefect/client/subscriptions.py +2 -2
  36. prefect/client/utilities.py +5 -2
  37. prefect/concurrency/asyncio.py +3 -1
  38. prefect/concurrency/events.py +1 -1
  39. prefect/concurrency/services.py +6 -3
  40. prefect/context.py +195 -27
  41. prefect/deployments/__init__.py +5 -6
  42. prefect/deployments/base.py +7 -5
  43. prefect/deployments/flow_runs.py +185 -0
  44. prefect/deployments/runner.py +50 -45
  45. prefect/deployments/schedules.py +28 -23
  46. prefect/deployments/steps/__init__.py +0 -1
  47. prefect/deployments/steps/core.py +1 -0
  48. prefect/deployments/steps/pull.py +7 -21
  49. prefect/engine.py +12 -2422
  50. prefect/events/actions.py +17 -23
  51. prefect/events/cli/automations.py +19 -6
  52. prefect/events/clients.py +14 -37
  53. prefect/events/filters.py +14 -18
  54. prefect/events/related.py +2 -2
  55. prefect/events/schemas/__init__.py +0 -5
  56. prefect/events/schemas/automations.py +55 -46
  57. prefect/events/schemas/deployment_triggers.py +7 -197
  58. prefect/events/schemas/events.py +34 -65
  59. prefect/events/schemas/labelling.py +10 -14
  60. prefect/events/utilities.py +2 -3
  61. prefect/events/worker.py +2 -3
  62. prefect/filesystems.py +6 -517
  63. prefect/{new_flow_engine.py → flow_engine.py} +313 -72
  64. prefect/flow_runs.py +377 -5
  65. prefect/flows.py +307 -166
  66. prefect/futures.py +186 -345
  67. prefect/infrastructure/__init__.py +0 -27
  68. prefect/infrastructure/provisioners/__init__.py +5 -3
  69. prefect/infrastructure/provisioners/cloud_run.py +11 -6
  70. prefect/infrastructure/provisioners/container_instance.py +11 -7
  71. prefect/infrastructure/provisioners/ecs.py +6 -4
  72. prefect/infrastructure/provisioners/modal.py +8 -5
  73. prefect/input/actions.py +2 -4
  74. prefect/input/run_input.py +5 -7
  75. prefect/logging/formatters.py +0 -2
  76. prefect/logging/handlers.py +3 -11
  77. prefect/logging/loggers.py +2 -2
  78. prefect/manifests.py +2 -1
  79. prefect/records/__init__.py +1 -0
  80. prefect/records/result_store.py +42 -0
  81. prefect/records/store.py +9 -0
  82. prefect/results.py +43 -39
  83. prefect/runner/runner.py +19 -15
  84. prefect/runner/server.py +6 -10
  85. prefect/runner/storage.py +3 -8
  86. prefect/runner/submit.py +2 -2
  87. prefect/runner/utils.py +2 -2
  88. prefect/serializers.py +24 -35
  89. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
  90. prefect/settings.py +70 -133
  91. prefect/states.py +17 -47
  92. prefect/task_engine.py +697 -58
  93. prefect/task_runners.py +269 -301
  94. prefect/task_server.py +53 -34
  95. prefect/tasks.py +327 -337
  96. prefect/transactions.py +220 -0
  97. prefect/types/__init__.py +61 -82
  98. prefect/utilities/asyncutils.py +195 -136
  99. prefect/utilities/callables.py +311 -43
  100. prefect/utilities/collections.py +23 -38
  101. prefect/utilities/dispatch.py +11 -3
  102. prefect/utilities/dockerutils.py +4 -0
  103. prefect/utilities/engine.py +140 -20
  104. prefect/utilities/importtools.py +97 -27
  105. prefect/utilities/pydantic.py +128 -38
  106. prefect/utilities/schema_tools/hydration.py +5 -1
  107. prefect/utilities/templating.py +12 -2
  108. prefect/variables.py +78 -61
  109. prefect/workers/__init__.py +0 -1
  110. prefect/workers/base.py +15 -17
  111. prefect/workers/process.py +3 -8
  112. prefect/workers/server.py +2 -2
  113. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/METADATA +22 -21
  114. prefect_client-3.0.0rc1.dist-info/RECORD +176 -0
  115. prefect/_internal/pydantic/_base_model.py +0 -51
  116. prefect/_internal/pydantic/_compat.py +0 -82
  117. prefect/_internal/pydantic/_flags.py +0 -20
  118. prefect/_internal/pydantic/_types.py +0 -8
  119. prefect/_internal/pydantic/utilities/__init__.py +0 -0
  120. prefect/_internal/pydantic/utilities/config_dict.py +0 -72
  121. prefect/_internal/pydantic/utilities/field_validator.py +0 -150
  122. prefect/_internal/pydantic/utilities/model_construct.py +0 -56
  123. prefect/_internal/pydantic/utilities/model_copy.py +0 -55
  124. prefect/_internal/pydantic/utilities/model_dump.py +0 -136
  125. prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
  126. prefect/_internal/pydantic/utilities/model_fields.py +0 -50
  127. prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
  128. prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
  129. prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
  130. prefect/_internal/pydantic/utilities/model_validate.py +0 -75
  131. prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
  132. prefect/_internal/pydantic/utilities/model_validator.py +0 -87
  133. prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
  134. prefect/_vendor/__init__.py +0 -0
  135. prefect/_vendor/fastapi/__init__.py +0 -25
  136. prefect/_vendor/fastapi/applications.py +0 -946
  137. prefect/_vendor/fastapi/background.py +0 -3
  138. prefect/_vendor/fastapi/concurrency.py +0 -44
  139. prefect/_vendor/fastapi/datastructures.py +0 -58
  140. prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
  141. prefect/_vendor/fastapi/dependencies/models.py +0 -64
  142. prefect/_vendor/fastapi/dependencies/utils.py +0 -877
  143. prefect/_vendor/fastapi/encoders.py +0 -177
  144. prefect/_vendor/fastapi/exception_handlers.py +0 -40
  145. prefect/_vendor/fastapi/exceptions.py +0 -46
  146. prefect/_vendor/fastapi/logger.py +0 -3
  147. prefect/_vendor/fastapi/middleware/__init__.py +0 -1
  148. prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
  149. prefect/_vendor/fastapi/middleware/cors.py +0 -3
  150. prefect/_vendor/fastapi/middleware/gzip.py +0 -3
  151. prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
  152. prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
  153. prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
  154. prefect/_vendor/fastapi/openapi/__init__.py +0 -0
  155. prefect/_vendor/fastapi/openapi/constants.py +0 -2
  156. prefect/_vendor/fastapi/openapi/docs.py +0 -203
  157. prefect/_vendor/fastapi/openapi/models.py +0 -480
  158. prefect/_vendor/fastapi/openapi/utils.py +0 -485
  159. prefect/_vendor/fastapi/param_functions.py +0 -340
  160. prefect/_vendor/fastapi/params.py +0 -453
  161. prefect/_vendor/fastapi/requests.py +0 -4
  162. prefect/_vendor/fastapi/responses.py +0 -40
  163. prefect/_vendor/fastapi/routing.py +0 -1331
  164. prefect/_vendor/fastapi/security/__init__.py +0 -15
  165. prefect/_vendor/fastapi/security/api_key.py +0 -98
  166. prefect/_vendor/fastapi/security/base.py +0 -6
  167. prefect/_vendor/fastapi/security/http.py +0 -172
  168. prefect/_vendor/fastapi/security/oauth2.py +0 -227
  169. prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
  170. prefect/_vendor/fastapi/security/utils.py +0 -10
  171. prefect/_vendor/fastapi/staticfiles.py +0 -1
  172. prefect/_vendor/fastapi/templating.py +0 -3
  173. prefect/_vendor/fastapi/testclient.py +0 -1
  174. prefect/_vendor/fastapi/types.py +0 -3
  175. prefect/_vendor/fastapi/utils.py +0 -235
  176. prefect/_vendor/fastapi/websockets.py +0 -7
  177. prefect/_vendor/starlette/__init__.py +0 -1
  178. prefect/_vendor/starlette/_compat.py +0 -28
  179. prefect/_vendor/starlette/_exception_handler.py +0 -80
  180. prefect/_vendor/starlette/_utils.py +0 -88
  181. prefect/_vendor/starlette/applications.py +0 -261
  182. prefect/_vendor/starlette/authentication.py +0 -159
  183. prefect/_vendor/starlette/background.py +0 -43
  184. prefect/_vendor/starlette/concurrency.py +0 -59
  185. prefect/_vendor/starlette/config.py +0 -151
  186. prefect/_vendor/starlette/convertors.py +0 -87
  187. prefect/_vendor/starlette/datastructures.py +0 -707
  188. prefect/_vendor/starlette/endpoints.py +0 -130
  189. prefect/_vendor/starlette/exceptions.py +0 -60
  190. prefect/_vendor/starlette/formparsers.py +0 -276
  191. prefect/_vendor/starlette/middleware/__init__.py +0 -17
  192. prefect/_vendor/starlette/middleware/authentication.py +0 -52
  193. prefect/_vendor/starlette/middleware/base.py +0 -220
  194. prefect/_vendor/starlette/middleware/cors.py +0 -176
  195. prefect/_vendor/starlette/middleware/errors.py +0 -265
  196. prefect/_vendor/starlette/middleware/exceptions.py +0 -74
  197. prefect/_vendor/starlette/middleware/gzip.py +0 -113
  198. prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
  199. prefect/_vendor/starlette/middleware/sessions.py +0 -82
  200. prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
  201. prefect/_vendor/starlette/middleware/wsgi.py +0 -147
  202. prefect/_vendor/starlette/requests.py +0 -328
  203. prefect/_vendor/starlette/responses.py +0 -347
  204. prefect/_vendor/starlette/routing.py +0 -933
  205. prefect/_vendor/starlette/schemas.py +0 -154
  206. prefect/_vendor/starlette/staticfiles.py +0 -248
  207. prefect/_vendor/starlette/status.py +0 -199
  208. prefect/_vendor/starlette/templating.py +0 -231
  209. prefect/_vendor/starlette/testclient.py +0 -804
  210. prefect/_vendor/starlette/types.py +0 -30
  211. prefect/_vendor/starlette/websockets.py +0 -193
  212. prefect/agent.py +0 -698
  213. prefect/deployments/deployments.py +0 -1042
  214. prefect/deprecated/__init__.py +0 -0
  215. prefect/deprecated/data_documents.py +0 -350
  216. prefect/deprecated/packaging/__init__.py +0 -12
  217. prefect/deprecated/packaging/base.py +0 -96
  218. prefect/deprecated/packaging/docker.py +0 -146
  219. prefect/deprecated/packaging/file.py +0 -92
  220. prefect/deprecated/packaging/orion.py +0 -80
  221. prefect/deprecated/packaging/serializers.py +0 -171
  222. prefect/events/instrument.py +0 -135
  223. prefect/infrastructure/base.py +0 -323
  224. prefect/infrastructure/container.py +0 -818
  225. prefect/infrastructure/kubernetes.py +0 -920
  226. prefect/infrastructure/process.py +0 -289
  227. prefect/new_task_engine.py +0 -423
  228. prefect/pydantic/__init__.py +0 -76
  229. prefect/pydantic/main.py +0 -39
  230. prefect/software/__init__.py +0 -2
  231. prefect/software/base.py +0 -50
  232. prefect/software/conda.py +0 -199
  233. prefect/software/pip.py +0 -122
  234. prefect/software/python.py +0 -52
  235. prefect/workers/block.py +0 -218
  236. prefect_client-2.19.2.dist-info/RECORD +0 -292
  237. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/LICENSE +0 -0
  238. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/WHEEL +0 -0
  239. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/top_level.txt +0 -0
prefect/deprecated/packaging/orion.py
@@ -1,80 +0,0 @@
- """
- DEPRECATION WARNING:
- This module is deprecated as of March 2024 and will not be available after September 2024.
- """
-
- from uuid import UUID
-
- from prefect._internal.compatibility.deprecated import deprecated_class
- from prefect._internal.pydantic import HAS_PYDANTIC_V2
-
- if HAS_PYDANTIC_V2:
-     from pydantic.v1 import Field
- else:
-     from pydantic import Field
-
- from typing_extensions import Literal
-
- from prefect.blocks.system import JSON
- from prefect.client.orchestration import PrefectClient
- from prefect.client.utilities import inject_client
- from prefect.deprecated.packaging.base import PackageManifest, Packager, Serializer
- from prefect.deprecated.packaging.serializers import SourceSerializer
- from prefect.flows import Flow
-
-
- @deprecated_class(start_date="Mar 2024")
- class OrionPackageManifest(PackageManifest):
-     """
-     DEPRECATION WARNING:
-
-     This class is deprecated as of version March 2024 and will not be available after September 2024.
-     """
-
-     type: str = "orion"
-     serializer: Serializer
-     block_document_id: UUID
-
-     @inject_client
-     async def unpackage(self, client: PrefectClient) -> Flow:
-         document = await client.read_block_document(self.block_document_id)
-         block = JSON._from_block_document(document)
-         serialized_flow: str = block.value["flow"]
-         # Cast to bytes before deserialization
-         return self.serializer.loads(serialized_flow.encode())
-
-
- @deprecated_class(start_date="Mar 2024")
- class OrionPackager(Packager):
-     """
-     DEPRECATION WARNING:
-
-     This class is deprecated as of version March 2024 and will not be available after September 2024.
-
-     This packager stores the flow as an anonymous JSON block in the Prefect database.
-     The content of the block are encrypted at rest.
-
-     By default, the content is the source code of the module the flow is defined in.
-     Alternative serialization modes are available in `prefect.deprecated.packaging.serializers`.
-     """
-
-     type: Literal["orion"] = "orion"
-     serializer: Serializer = Field(default_factory=SourceSerializer)
-
-     async def package(self, flow: Flow) -> OrionPackageManifest:
-         """
-         Package a flow in the Prefect database as an anonymous block.
-         """
-         block_document_id = await JSON(
-             value={"flow": self.serializer.dumps(flow)}
-         )._save(is_anonymous=True)
-
-         return OrionPackageManifest(
-             **{
-                 **self.base_manifest(flow).dict(),
-                 **{
-                     "serializer": self.serializer,
-                     "block_document_id": block_document_id,
-                 },
-             }
-         )
prefect/deprecated/packaging/serializers.py
@@ -1,171 +0,0 @@
- """
- DEPRECATION WARNING:
- This module is deprecated as of March 2024 and will not be available after September 2024.
- """
-
- import base64
- import inspect
- import json
- import os.path
- from pathlib import Path
- from tempfile import TemporaryDirectory
- from typing import Any, List
-
- from prefect._internal.compatibility.deprecated import deprecated_class
- from prefect._internal.pydantic import HAS_PYDANTIC_V2
- from prefect._internal.schemas.validators import (
-     validate_picklelib,
-     validate_picklelib_and_modules,
-     validate_picklelib_version,
- )
-
- if HAS_PYDANTIC_V2:
-     import pydantic.v1 as pydantic
- else:
-     import pydantic
-
- from typing_extensions import Literal
-
- from prefect.deprecated.packaging.base import Serializer
- from prefect.utilities.importtools import (
-     from_qualified_name,
-     load_script_as_module,
-     to_qualified_name,
- )
-
-
- @deprecated_class(start_date="Mar 2024")
- class PickleSerializer(Serializer):
-     """
-     DEPRECATION WARNING:
-
-     This class is deprecated as of version March 2024 and will not be available after September 2024.
-
-     Serializes objects using the pickle protocol.
-
-     If using cloudpickle, you may specify a list of 'pickle_modules'. These modules will
-     be serialized by value instead of by reference, which means they do not have to be
-     installed in the runtime location. This is especially useful for serializing objects
-     that rely on local packages.
-
-     Wraps pickles in base64 for safe transmission.
-     """
-
-     type: Literal["pickle"] = "pickle"
-
-     picklelib: str = "cloudpickle"
-     picklelib_version: str = None
-
-     pickle_modules: List[str] = pydantic.Field(default_factory=list)
-
-     @pydantic.validator("picklelib")
-     def check_picklelib(cls, value):
-         return validate_picklelib(value)
-
-     @pydantic.root_validator
-     def check_picklelib_and_modules(cls, values):
-         return validate_picklelib_and_modules(values)
-
-     @pydantic.root_validator
-     def check_picklelib_version(cls, values):
-         return validate_picklelib_version(values)
-
-     def dumps(self, obj: Any) -> bytes:
-         pickler = from_qualified_name(self.picklelib)
-
-         for module in self.pickle_modules:
-             pickler.register_pickle_by_value(from_qualified_name(module))
-
-         blob = pickler.dumps(obj)
-
-         for module in self.pickle_modules:
-             # Restore the pickler settings
-             pickler.unregister_pickle_by_value(from_qualified_name(module))
-
-         return base64.encodebytes(blob)
-
-     def loads(self, blob: bytes) -> Any:
-         pickler = from_qualified_name(self.picklelib)
-         return pickler.loads(base64.decodebytes(blob))
-
-
- @deprecated_class(start_date="Mar 2024")
- class SourceSerializer(Serializer):
-     """
-     DEPRECATION WARNING:
-
-     This class is deprecated as of version March 2024 and will not be available after September 2024.
-
-     Serializes objects by retrieving the source code of the module they are defined in.
-
-     Creates a JSON blob with keys:
-         source: The source code
-         file_name: The name of the file the source was in
-         symbol_name: The name of the object to extract from the source code
-
-     Deserialization requires the code to run with `exec`.
-     """
-
-     type: Literal["source"] = "source"
-
-     def dumps(self, obj: Any) -> bytes:
-         module = inspect.getmodule(obj)
-
-         if module is None:
-             raise ValueError(f"Cannot determine source module for object: {obj!r}.")
-
-         if not getattr(module, "__file__", None):
-             raise ValueError(
-                 f"Found module {module!r} without source code file while serializing "
-                 f"object: {obj!r}."
-             )
-
-         source = inspect.getsource(module)
-
-         return json.dumps(
-             {
-                 "source": source,
-                 "file_name": os.path.basename(module.__file__),
-                 "symbol_name": obj.__name__,
-             }
-         ).encode()
-
-     def loads(self, blob: bytes) -> Any:
-         document = json.loads(blob)
-         if not isinstance(document, dict) or set(document.keys()) != {
-             "source",
-             "file_name",
-             "symbol_name",
-         }:
-             raise ValueError(
-                 "Invalid serialized data. "
-                 "Expected dictionary with keys 'source', 'file_name', and "
-                 "'symbol_name'. "
-                 f"Got: {document}"
-             )
-
-         with TemporaryDirectory() as tmpdir:
-             temp_script = Path(tmpdir) / document["file_name"]
-             temp_script.write_text(document["source"])
-             module = load_script_as_module(str(temp_script))
-
-         return getattr(module, document["symbol_name"])
-
-
- @deprecated_class(start_date="Mar 2024")
- class ImportSerializer(Serializer):
-     """
-     DEPRECATION WARNING:
-
-     This class is deprecated as of version March 2024 and will not be available after September 2024.
-
-     Serializes objects by storing their importable path.
-     """
-
-     type: Literal["import"] = "import"
-
-     def dumps(self, obj: Any) -> bytes:
-         return to_qualified_name(obj).encode()
-
-     def loads(self, blob: bytes) -> Any:
-         return from_qualified_name(blob.decode())
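
For reference, the removed SourceSerializer round-trips an object by shipping the source of the module it was defined in. A minimal sketch against the 2.19.2 API (the script name and greet function below are illustrative, and this import path no longer exists in 3.0):

    # demo_source_serializer.py (requires prefect-client 2.19.2)
    from prefect.deprecated.packaging.serializers import SourceSerializer

    def greet(name: str) -> str:
        return f"Hello, {name}!"

    if __name__ == "__main__":
        serializer = SourceSerializer()
        blob = serializer.dumps(greet)     # JSON blob: source, file_name, symbol_name
        restored = serializer.loads(blob)  # re-executes the module source from a temp file
        print(restored("world"))           # -> Hello, world!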
prefect/events/instrument.py
@@ -1,135 +0,0 @@
- import functools
- import inspect
- from typing import (
-     Any,
-     Callable,
-     Dict,
-     Generator,
-     List,
-     Optional,
-     Set,
-     Tuple,
-     Type,
-     TypeVar,
-     Union,
-     cast,
- )
-
- from prefect.events import emit_event
-
- ResourceTuple = Tuple[Dict[str, Any], List[Dict[str, Any]]]
-
-
- def emit_instance_method_called_event(
-     instance: Any,
-     method_name: str,
-     successful: bool,
-     payload: Optional[Dict[str, Any]] = None,
- ):
-     kind = instance._event_kind()
-     resources: Optional[ResourceTuple] = instance._event_method_called_resources()
-
-     if not resources:
-         return
-
-     resource, related = resources
-     result = "called" if successful else "failed"
-
-     emit_event(
-         event=f"{kind}.{method_name}.{result}",
-         resource=resource,
-         related=related,
-         payload=payload,
-     )
-
-
- F = TypeVar("F", bound=Callable)
-
-
- def instrument_instance_method_call(function: F) -> F:
-     if is_instrumented(function):
-         return function
-
-     if inspect.iscoroutinefunction(function):
-
-         @functools.wraps(function)
-         async def inner(self, *args, **kwargs):
-             success = True
-             try:
-                 return await function(self, *args, **kwargs)
-             except Exception as exc:
-                 success = False
-                 raise exc
-             finally:
-                 emit_instance_method_called_event(
-                     instance=self, method_name=function.__name__, successful=success
-                 )
-
-     else:
-
-         @functools.wraps(function)
-         def inner(self, *args, **kwargs):
-             success = True
-             try:
-                 return function(self, *args, **kwargs)
-             except Exception as exc:
-                 success = False
-                 raise exc
-             finally:
-                 emit_instance_method_called_event(
-                     instance=self, method_name=function.__name__, successful=success
-                 )
-
-     setattr(inner, "__events_instrumented__", True)
-     return cast(F, inner)
-
-
- def is_instrumented(function: Callable) -> bool:
-     """Indicates whether the given function is already instrumented"""
-     return getattr(function, "__events_instrumented__", False)
-
-
- def instrumentable_methods(
-     cls: Type,
-     exclude_methods: Union[List[str], Set[str], None] = None,
- ) -> Generator[Tuple[str, Callable], None, None]:
-     """Returns all of the public methods on a class."""
-
-     for name, kind, _, method in inspect.classify_class_attrs(cls):
-         if kind == "method" and callable(method):
-             if exclude_methods and name in exclude_methods:
-                 continue
-             if name.startswith("_"):
-                 continue
-
-             yield name, method
-
-
- def instrument_method_calls_on_class_instances(cls: Type) -> Type:
-     """Given a Python class, instruments all "public" methods that are
-     defined directly on the class to emit events when called.
-
-     Examples:
-
-         @instrument_class
-         class MyClass(MyBase):
-             def my_method(self):
-                 ... this method will be instrumented ...
-
-             def _my_method(self):
-                 ... this method will not ...
-     """
-
-     required_events_methods = ["_event_kind", "_event_method_called_resources"]
-     for method_name in required_events_methods:
-         if not hasattr(cls, method_name):
-             raise RuntimeError(
-                 f"Unable to instrument class {cls}. Class must define {method_name!r}."
-             )
-
-     for method_name, method in instrumentable_methods(
-         cls,
-         exclude_methods=getattr(cls, "_events_excluded_methods", []),
-     ):
-         setattr(cls, method_name, instrument_instance_method_call(method))
-     return cls
prefect/infrastructure/base.py
@@ -1,323 +0,0 @@
- """
- DEPRECATION WARNING:
-
- This module is deprecated as of March 2024 and will not be available after September 2024.
- Infrastructure blocks have been replaced by workers, which offer enhanced functionality and better performance.
-
- For upgrade instructions, see https://docs.prefect.io/latest/guides/upgrade-guide-agents-to-workers/.
- """
- import abc
- import shlex
- import warnings
- from typing import TYPE_CHECKING, Dict, List, Optional
-
- import anyio.abc
-
- from prefect._internal.compatibility.deprecated import deprecated_class
- from prefect._internal.compatibility.experimental import (
-     EXPERIMENTAL_WARNING,
-     ExperimentalFeature,
-     experiment_enabled,
- )
- from prefect._internal.pydantic import HAS_PYDANTIC_V2
- from prefect.client.schemas.actions import WorkPoolCreate
- from prefect.exceptions import ObjectAlreadyExists
-
- if HAS_PYDANTIC_V2:
-     import pydantic.v1 as pydantic
- else:
-     import pydantic
-
- from rich.console import Console
- from typing_extensions import Self
-
- import prefect
- from prefect.blocks.core import Block, BlockNotSavedError
- from prefect.logging import get_logger
- from prefect.settings import (
-     PREFECT_EXPERIMENTAL_WARN,
-     PREFECT_EXPERIMENTAL_WARN_ENHANCED_CANCELLATION,
-     PREFECT_UI_URL,
-     get_current_settings,
- )
- from prefect.utilities.asyncutils import sync_compatible
-
- MIN_COMPAT_PREFECT_VERSION = "2.0b12"
-
-
- if TYPE_CHECKING:
-     from prefect.client.schemas.objects import Deployment, Flow, FlowRun
-
-
- class InfrastructureResult(pydantic.BaseModel, abc.ABC):
-     identifier: str
-     status_code: int
-
-     def __bool__(self):
-         return self.status_code == 0
-
-
- @deprecated_class(
-     start_date="Mar 2024",
-     help="Use the `BaseWorker` class to create custom infrastructure integrations instead."
-     " Refer to the upgrade guide for more information:"
-     " https://docs.prefect.io/latest/guides/upgrade-guide-agents-to-workers/.",
- )
- class Infrastructure(Block, abc.ABC):
-     _block_schema_capabilities = ["run-infrastructure"]
-
-     type: str
-
-     env: Dict[str, Optional[str]] = pydantic.Field(
-         default_factory=dict,
-         title="Environment",
-         description="Environment variables to set in the configured infrastructure.",
-     )
-     labels: Dict[str, str] = pydantic.Field(
-         default_factory=dict,
-         description="Labels applied to the infrastructure for metadata purposes.",
-     )
-     name: Optional[str] = pydantic.Field(
-         default=None,
-         description="Name applied to the infrastructure for identification.",
-     )
-     command: Optional[List[str]] = pydantic.Field(
-         default=None,
-         description="The command to run in the infrastructure.",
-     )
-
-     async def generate_work_pool_base_job_template(self):
-         if self._block_document_id is None:
-             raise BlockNotSavedError(
-                 "Cannot publish as work pool, block has not been saved. Please call"
-                 " `.save()` on your block before publishing."
-             )
-
-         block_schema = self.__class__.schema()
-         return {
-             "job_configuration": {"block": "{{ block }}"},
-             "variables": {
-                 "type": "object",
-                 "properties": {
-                     "block": {
-                         "title": "Block",
-                         "description": (
-                             "The infrastructure block to use for job creation."
-                         ),
-                         "allOf": [{"$ref": f"#/definitions/{self.__class__.__name__}"}],
-                         "default": {
-                             "$ref": {"block_document_id": str(self._block_document_id)}
-                         },
-                     }
-                 },
-                 "required": ["block"],
-                 "definitions": {self.__class__.__name__: block_schema},
-             },
-         }
-
-     def get_corresponding_worker_type(self):
-         return "block"
-
-     @sync_compatible
-     async def publish_as_work_pool(self, work_pool_name: Optional[str] = None):
-         """
-         Creates a work pool configured to use the given block as the job creator.
-
-         Used to migrate from a agents setup to a worker setup.
-
-         Args:
-             work_pool_name: The name to give to the created work pool. If not provided, the name of the current
-                 block will be used.
-         """
-
-         base_job_template = await self.generate_work_pool_base_job_template()
-         work_pool_name = work_pool_name or self._block_document_name
-
-         if work_pool_name is None:
-             raise ValueError(
-                 "`work_pool_name` must be provided if the block has not been saved."
-             )
-
-         console = Console()
-
-         try:
-             async with prefect.get_client() as client:
-                 work_pool = await client.create_work_pool(
-                     work_pool=WorkPoolCreate(
-                         name=work_pool_name,
-                         type=self.get_corresponding_worker_type(),
-                         base_job_template=base_job_template,
-                     )
-                 )
-         except ObjectAlreadyExists:
-             console.print(
-                 (
-                     f"Work pool with name {work_pool_name!r} already exists, please use"
-                     " a different name."
-                 ),
-                 style="red",
-             )
-             return
-
-         console.print(
-             f"Work pool {work_pool.name} created!",
-             style="green",
-         )
-         if PREFECT_UI_URL:
-             console.print(
-                 "You see your new work pool in the UI at"
-                 f" {PREFECT_UI_URL.value()}/work-pools/work-pool/{work_pool.name}"
-             )
-
-         deploy_script = (
-             "my_flow.deploy(work_pool_name='{work_pool.name}', image='my_image:tag')"
-         )
-         if not hasattr(self, "image"):
-             deploy_script = (
-                 "my_flow.from_source(source='https://github.com/org/repo.git',"
-                 f" entrypoint='flow.py:my_flow').deploy(work_pool_name='{work_pool.name}')"
-             )
-         console.print(
-             "\nYou can deploy a flow to this work pool by calling"
-             f" [blue].deploy[/]:\n\n\t{deploy_script}\n"
-         )
-         console.print(
-             "\nTo start a worker to execute flow runs in this work pool run:\n"
-         )
-         console.print(f"\t[blue]prefect worker start --pool {work_pool.name}[/]\n")
-
-     @abc.abstractmethod
-     async def run(
-         self,
-         task_status: anyio.abc.TaskStatus = None,
-     ) -> InfrastructureResult:
-         """
-         Run the infrastructure.
-
-         If provided a `task_status`, the status will be reported as started when the
-         infrastructure is successfully created. The status return value will be an
-         identifier for the infrastructure.
-
-         The call will then monitor the created infrastructure, returning a result at
-         the end containing a status code indicating if the infrastructure exited cleanly
-         or encountered an error.
-         """
-         # Note: implementations should include `sync_compatible`
-
-     @abc.abstractmethod
-     def preview(self) -> str:
-         """
-         View a preview of the infrastructure that would be run.
-         """
-
-     @property
-     def logger(self):
-         return get_logger(f"prefect.infrastructure.{self.type}")
-
-     @property
-     def is_using_a_runner(self):
-         return self.command is not None and "prefect flow-run execute" in shlex.join(
-             self.command
-         )
-
-     @classmethod
-     def _base_environment(cls) -> Dict[str, str]:
-         """
-         Environment variables that should be passed to all created infrastructure.
-
-         These values should be overridable with the `env` field.
-         """
-         return get_current_settings().to_environment_variables(exclude_unset=True)
-
-     def prepare_for_flow_run(
-         self: Self,
-         flow_run: "FlowRun",
-         deployment: Optional["Deployment"] = None,
-         flow: Optional["Flow"] = None,
-     ) -> Self:
-         """
-         Return an infrastructure block that is prepared to execute a flow run.
-         """
-         if deployment is not None:
-             deployment_labels = self._base_deployment_labels(deployment)
-         else:
-             deployment_labels = {}
-
-         if flow is not None:
-             flow_labels = self._base_flow_labels(flow)
-         else:
-             flow_labels = {}
-
-         return self.copy(
-             update={
-                 "env": {**self._base_flow_run_environment(flow_run), **self.env},
-                 "labels": {
-                     **self._base_flow_run_labels(flow_run),
-                     **deployment_labels,
-                     **flow_labels,
-                     **self.labels,
-                 },
-                 "name": self.name or flow_run.name,
-                 "command": self.command or self._base_flow_run_command(),
-             }
-         )
-
-     @staticmethod
-     def _base_flow_run_command() -> List[str]:
-         """
-         Generate a command for a flow run job.
-         """
-         if experiment_enabled("enhanced_cancellation"):
-             if (
-                 PREFECT_EXPERIMENTAL_WARN
-                 and PREFECT_EXPERIMENTAL_WARN_ENHANCED_CANCELLATION
-             ):
-                 warnings.warn(
-                     EXPERIMENTAL_WARNING.format(
-                         feature="Enhanced flow run cancellation",
-                         group="enhanced_cancellation",
-                         help="",
-                     ),
-                     ExperimentalFeature,
-                     stacklevel=3,
-                 )
-             return ["prefect", "flow-run", "execute"]
-
-         return ["python", "-m", "prefect.engine"]
-
-     @staticmethod
-     def _base_flow_run_labels(flow_run: "FlowRun") -> Dict[str, str]:
-         """
-         Generate a dictionary of labels for a flow run job.
-         """
-         return {
-             "prefect.io/flow-run-id": str(flow_run.id),
-             "prefect.io/flow-run-name": flow_run.name,
-             "prefect.io/version": prefect.__version__,
-         }
-
-     @staticmethod
-     def _base_flow_run_environment(flow_run: "FlowRun") -> Dict[str, str]:
-         """
-         Generate a dictionary of environment variables for a flow run job.
-         """
-         environment = {}
-         environment["PREFECT__FLOW_RUN_ID"] = str(flow_run.id)
-         return environment
-
-     @staticmethod
-     def _base_deployment_labels(deployment: "Deployment") -> Dict[str, str]:
-         labels = {
-             "prefect.io/deployment-name": deployment.name,
-         }
-         if deployment.updated is not None:
-             labels["prefect.io/deployment-updated"] = deployment.updated.in_timezone(
-                 "utc"
-             ).to_iso8601_string()
-         return labels
-
-     @staticmethod
-     def _base_flow_labels(flow: "Flow") -> Dict[str, str]:
-         return {
-             "prefect.io/flow-name": flow.name,
-         }
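
The removed `Infrastructure.publish_as_work_pool` helper above prints migration hints pointing at the worker-based model that replaces infrastructure blocks in 3.0. A minimal sketch of that workflow, using only the calls shown in those hints (the deployment name, work pool name, and image tag are assumed placeholders):

    from prefect import flow

    @flow
    def my_flow():
        print("Hello from a work pool")

    if __name__ == "__main__":
        # Mirrors the printed hint: register a deployment against an existing work
        # pool, then run `prefect worker start --pool my-pool` to execute flow runs.
        my_flow.deploy(
            name="my-deployment",      # assumed deployment name
            work_pool_name="my-pool",  # assumed existing work pool
            image="my_image:tag",      # image tag taken from the printed hint
        )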