prefect-client 3.0.0rc1__py3-none-any.whl → 3.0.0rc3__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- prefect/_internal/compatibility/migration.py +124 -0
- prefect/_internal/concurrency/__init__.py +2 -2
- prefect/_internal/concurrency/primitives.py +1 -0
- prefect/_internal/pydantic/annotations/pendulum.py +2 -2
- prefect/_internal/pytz.py +1 -1
- prefect/blocks/core.py +1 -1
- prefect/blocks/redis.py +168 -0
- prefect/client/orchestration.py +113 -23
- prefect/client/schemas/actions.py +1 -1
- prefect/client/schemas/filters.py +6 -0
- prefect/client/schemas/objects.py +22 -11
- prefect/client/subscriptions.py +3 -2
- prefect/concurrency/asyncio.py +1 -1
- prefect/concurrency/services.py +1 -1
- prefect/context.py +1 -27
- prefect/deployments/__init__.py +3 -0
- prefect/deployments/base.py +11 -3
- prefect/deployments/deployments.py +3 -0
- prefect/deployments/steps/pull.py +1 -0
- prefect/deployments/steps/utility.py +2 -1
- prefect/engine.py +3 -0
- prefect/events/cli/automations.py +1 -1
- prefect/events/clients.py +7 -1
- prefect/events/schemas/events.py +2 -0
- prefect/exceptions.py +9 -0
- prefect/filesystems.py +22 -11
- prefect/flow_engine.py +118 -156
- prefect/flow_runs.py +2 -2
- prefect/flows.py +91 -35
- prefect/futures.py +44 -43
- prefect/infrastructure/provisioners/container_instance.py +1 -0
- prefect/infrastructure/provisioners/ecs.py +2 -2
- prefect/input/__init__.py +4 -0
- prefect/input/run_input.py +4 -2
- prefect/logging/formatters.py +2 -2
- prefect/logging/handlers.py +2 -2
- prefect/logging/loggers.py +1 -1
- prefect/plugins.py +1 -0
- prefect/records/cache_policies.py +179 -0
- prefect/records/result_store.py +10 -3
- prefect/results.py +27 -55
- prefect/runner/runner.py +1 -1
- prefect/runner/server.py +1 -1
- prefect/runtime/__init__.py +1 -0
- prefect/runtime/deployment.py +1 -0
- prefect/runtime/flow_run.py +1 -0
- prefect/runtime/task_run.py +1 -0
- prefect/settings.py +21 -5
- prefect/states.py +17 -4
- prefect/task_engine.py +337 -209
- prefect/task_runners.py +15 -5
- prefect/task_runs.py +203 -0
- prefect/{task_server.py → task_worker.py} +66 -36
- prefect/tasks.py +180 -77
- prefect/transactions.py +92 -16
- prefect/types/__init__.py +1 -1
- prefect/utilities/asyncutils.py +3 -3
- prefect/utilities/callables.py +90 -7
- prefect/utilities/dockerutils.py +5 -3
- prefect/utilities/engine.py +11 -0
- prefect/utilities/filesystem.py +4 -5
- prefect/utilities/importtools.py +34 -5
- prefect/utilities/services.py +2 -2
- prefect/utilities/urls.py +195 -0
- prefect/utilities/visualization.py +1 -0
- prefect/variables.py +19 -10
- prefect/workers/base.py +46 -1
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/METADATA +3 -2
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/RECORD +72 -66
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/LICENSE +0 -0
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/WHEEL +0 -0
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/top_level.txt +0 -0
prefect/_internal/compatibility/migration.py
ADDED
@@ -0,0 +1,124 @@
+"""
+This module provides a function to handle imports for moved or removed objects in Prefect 3.0 upgrade.
+
+The `getattr_migration` function is used to handle imports for moved or removed objects in Prefect 3.0 upgrade.
+It is used in the `__getattr__` attribute of modules that have moved or removed objects.
+
+Usage:
+```python
+from prefect._internal.compatibility.migration import getattr_migration
+
+__getattr__ = getattr_migration(__name__)
+```
+"""
+
+import sys
+from typing import Any, Callable, Dict
+
+from pydantic_core import PydanticCustomError
+
+from prefect.exceptions import PrefectImportError
+
+MOVED_IN_V3 = {
+    "prefect.deployments.deployments:load_flow_from_flow_run": "prefect.flows:load_flow_from_flow_run",
+    "prefect.deployments:load_flow_from_flow_run": "prefect.flows:load_flow_from_flow_run",
+    "prefect.variables:get": "prefect.variables:Variable.get",
+    "prefect.engine:pause_flow_run": "prefect.flow_runs:pause_flow_run",
+    "prefect.engine:resume_flow_run": "prefect.flow_runs:resume_flow_run",
+    "prefect.engine:suspend_flow_run": "prefect.flow_runs:suspend_flow_run",
+    "prefect.engine:_in_process_pause": "prefect.flow_runs:_in_process_pause",
+}
+
+REMOVED_IN_V3 = {
+    "prefect.deployments.deployments:Deployment": "Use 'flow.serve()' or `prefect deploy` instead.",
+    "prefect.deployments:Deployment": "Use 'flow.serve()' or `prefect deploy` instead.",
+    "prefect.filesystems:GCS": "Use 'prefect_gcp' instead.",
+    "prefect.filesystems:Azure": "Use 'prefect_azure' instead.",
+    "prefect.filesystems:S3": "Use 'prefect_aws' instead.",
+    "prefect.engine:_out_of_process_pause": "Use 'prefect.flow_runs.pause_flow_run' instead.",
+}
+
+# IMPORTANT FOR USAGE: When adding new modules to MOVED_IN_V3 or REMOVED_IN_V3, include the following lines at the bottom of that module:
+# from prefect._internal.compatibility.migration import getattr_migration
+# __getattr__ = getattr_migration(__name__)
+# See src/prefect/filesystems.py for an example
+
+
+def import_string_class_method(new_location: str) -> Callable:
+    """
+    Handle moved class methods.
+
+    `import_string` does not account for moved class methods. This function handles cases where a method has been
+    moved to a class. For example, if `new_location` is 'prefect.variables:Variable.get', `import_string(new_location)`
+    will raise an error because it does not handle class methods. This function will import the class and get the
+    method from the class.
+
+    Args:
+        new_location (str): The new location of the method.
+
+    Returns:
+        method: The resolved method from the class.
+
+    Raises:
+        PrefectImportError: If the method is not found in the class.
+    """
+    from pydantic._internal._validators import import_string
+
+    class_name, method_name = new_location.rsplit(".", 1)
+
+    cls = import_string(class_name)
+    method = getattr(cls, method_name, None)
+
+    if method is not None and callable(method):
+        return method
+
+    raise PrefectImportError(f"Unable to import {new_location!r}")
+
+
+def getattr_migration(module_name: str) -> Callable[[str], Any]:
+    """
+    Handle imports for moved or removed objects in Prefect 3.0 upgrade
+
+    Args:
+        module_name (str): The name of the module to handle imports for.
+    """
+
+    def wrapper(name: str) -> object:
+        """
+        Raise a PrefectImportError if the object is not found, moved, or removed.
+        """
+
+        if name == "__path__":
+            raise AttributeError(f"{module_name!r} object has no attribute {name!r}")
+        import warnings
+
+        from pydantic._internal._validators import import_string
+
+        import_path = f"{module_name}:{name}"
+
+        # Check if the attribute name corresponds to a moved or removed class or module
+        if import_path in MOVED_IN_V3.keys():
+            new_location = MOVED_IN_V3[import_path]
+            warnings.warn(
+                f"{import_path!r} has been moved to {new_location!r}. Importing from {new_location!r} instead. This warning will raise an error in a future release.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            try:
+                return import_string(new_location)
+            except PydanticCustomError:
+                return import_string_class_method(new_location)
+
+        if import_path in REMOVED_IN_V3.keys():
+            error_message = REMOVED_IN_V3[import_path]
+            raise PrefectImportError(
+                f"{import_path!r} has been removed. {error_message}"
+            )
+
+        globals: Dict[str, Any] = sys.modules[module_name].__dict__
+        if name in globals:
+            return globals[name]
+
+        raise AttributeError(f"module {module_name!r} has no attribute {name!r}")
+
+    return wrapper
prefect/_internal/concurrency/__init__.py
CHANGED
@@ -5,7 +5,7 @@ both asynchronous and synchronous calls.
 Much of the complexity managed here arises from ensuring that a thread of execution is
 not blocked.

-The main data structure is a `Call` which is created from a function call capturing
+The main data structure is a `Call` which is created from a function call capturing
 local context variables. The call is then submitted to run somewhere via a `Portal`.
 The primary portal used is the `WorkerThread`, which executes work on a thread running
 concurrently to the one that created the call. A singleton `EventLoopThread` portal
@@ -13,7 +13,7 @@ is also used to schedule work on a dedicated event loop.

 The result of the call can be retrieved asynchronously using `Call.result()`. Behind
 the scenes, a `Future` is used to report the result of the call. Retrieving the result
-of a call is a blocking operation.
+of a call is a blocking operation.

 Sometimes, it is important not to block the current thread while retrieving the result
 of a call. For this purpose, there is the `Waiter`. Waiters attach to a call and provide
prefect/_internal/pytz.py
CHANGED
prefect/blocks/core.py
CHANGED
@@ -1122,7 +1122,7 @@ class Block(BaseModel, ABC):
         cls,
         by_alias: bool = True,
         ref_template: str = "#/definitions/{model}",
-        schema_generator:
+        schema_generator: Type[GenerateJsonSchema] = GenerateJsonSchema,
         mode: Literal["validation", "serialization"] = "validation",
     ) -> Dict[str, Any]:
         """TODO: stop overriding this method - use GenerateSchema in ConfigDict instead?"""
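With the restored annotation and default, JSON schema generation on block subclasses can again be called with no arguments. A minimal hedged sketch with a made-up block follows, assuming (as the parameters suggest) that this is the `model_json_schema` override:

```python
# Made-up block subclass; only the restored `schema_generator` default above is
# taken from the diff, and the overridden method is assumed to be model_json_schema.
from prefect.blocks.core import Block


class GreetingBlock(Block):
    greeting: str = "hello"


schema = GreetingBlock.model_json_schema()  # GenerateJsonSchema is used by default
print(sorted(schema.get("properties", {})))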
prefect/blocks/redis.py
ADDED
@@ -0,0 +1,168 @@
+from contextlib import asynccontextmanager
+from pathlib import Path
+from typing import AsyncGenerator, Optional, Union
+
+try:
+    import redis.asyncio as redis
+except ImportError:
+    raise ImportError(
+        "`redis-py` must be installed to use the `RedisStorageContainer` block. "
+        "You can install it with `pip install redis>=5.0.1"
+    )
+
+from pydantic import Field
+from pydantic.types import SecretStr
+from typing_extensions import Self
+
+from prefect.filesystems import WritableFileSystem
+from prefect.utilities.asyncutils import sync_compatible
+
+
+class RedisStorageContainer(WritableFileSystem):
+    """
+    Block used to interact with Redis as a filesystem
+
+    Attributes:
+        host (str): The value to store.
+        port (int): The value to store.
+        db (int): The value to store.
+        username (str): The value to store.
+        password (str): The value to store.
+        connection_string (str): The value to store.
+
+    Example:
+        Create a new block from hostname, username and password:
+        ```python
+        from prefect.blocks.redis import RedisStorageContainer
+
+        block = RedisStorageContainer.from_host(
+            host="myredishost.com", username="redis", password="SuperSecret")
+        block.save("BLOCK_NAME")
+        ```
+
+        Create a new block from a connection string
+        ```python
+        from prefect.blocks.redis import RedisStorageContainer
+        block = RedisStorageContainer.from_url(""redis://redis:SuperSecret@myredishost.com:6379")
+        block.save("BLOCK_NAME")
+        ```
+    """
+
+    _logo_url = "https://stprododpcmscdnendpoint.azureedge.net/assets/icons/redis.png"
+
+    host: Optional[str] = Field(default=None, description="Redis hostname")
+    port: int = Field(default=6379, description="Redis port")
+    db: int = Field(default=0, description="Redis DB index")
+    username: Optional[SecretStr] = Field(default=None, description="Redis username")
+    password: Optional[SecretStr] = Field(default=None, description="Redis password")
+    connection_string: Optional[SecretStr] = Field(
+        default=None, description="Redis connection string"
+    )
+
+    def block_initialization(self) -> None:
+        if self.connection_string:
+            return
+        if not self.host:
+            raise ValueError("Initialization error: 'host' is required but missing.")
+        if self.username and not self.password:
+            raise ValueError(
+                "Initialization error: 'username' is provided, but 'password' is missing. Both are required."
+            )
+
+    @sync_compatible
+    async def read_path(self, path: Union[Path, str]):
+        """Read the redis content at `path`
+
+        Args:
+            path: Redis key to read from
+
+        Returns:
+            Contents at key as bytes
+        """
+        async with self._client() as client:
+            return await client.get(str(path))
+
+    @sync_compatible
+    async def write_path(self, path: Union[Path, str], content: bytes):
+        """Write `content` to the redis at `path`
+
+        Args:
+            path: Redis key to write to
+            content: Binary object to write
+        """
+
+        async with self._client() as client:
+            return await client.set(str(path), content)
+
+    @asynccontextmanager
+    async def _client(self) -> AsyncGenerator[redis.Redis, None]:
+        if self.connection_string:
+            client = redis.Redis.from_url(self.connection_string.get_secret_value())
+        else:
+            assert self.host
+            client = redis.Redis(
+                host=self.host,
+                port=self.port,
+                username=self.username.get_secret_value() if self.username else None,
+                password=self.password.get_secret_value() if self.password else None,
+                db=self.db,
+            )
+
+        try:
+            yield client
+        finally:
+            await client.aclose()
+
+    @classmethod
+    def from_host(
+        cls,
+        host: str,
+        port: int = 6379,
+        db: int = 0,
+        username: Union[None, str, SecretStr] = None,
+        password: Union[None, str, SecretStr] = None,
+    ) -> Self:
+        """Create block from hostname, username and password
+
+        Args:
+            host: Redis hostname
+            username: Redis username
+            password: Redis password
+            port: Redis port
+
+        Returns:
+            `RedisStorageContainer` instance
+        """
+
+        username = SecretStr(username) if isinstance(username, str) else username
+        password = SecretStr(password) if isinstance(password, str) else password
+
+        return cls(host=host, port=port, db=db, username=username, password=password)
+
+    @classmethod
+    def from_connection_string(cls, connection_string: Union[str, SecretStr]) -> Self:
+        """Create block from a Redis connection string
+
+        Supports the following URL schemes:
+        - `redis://` creates a TCP socket connection
+        - `rediss://` creates a SSL wrapped TCP socket connection
+        - `unix://` creates a Unix Domain Socket connection
+
+        See [Redis docs](https://redis.readthedocs.io/en/stable/examples
+        /connection_examples.html#Connecting-to-Redis-instances-by-specifying-a-URL
+        -scheme.) for more info.
+
+        Args:
+            connection_string: Redis connection string
+
+        Returns:
+            `RedisStorageContainer` instance
+        """
+
+        connection_string = (
+            SecretStr(connection_string)
+            if isinstance(connection_string, str)
+            else connection_string
+        )
+
+        return cls(connection_string=connection_string)
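A hedged usage sketch of the new block: the hostname, credentials, and key below are placeholders, a reachable Redis server is assumed, and the `@sync_compatible` read/write methods are called from synchronous code.

```python
# Placeholders throughout; assumes a reachable Redis server. `read_path` and
# `write_path` are @sync_compatible, so they can be called from sync code.
from prefect.blocks.redis import RedisStorageContainer

block = RedisStorageContainer.from_host(
    host="myredishost.com", username="redis", password="SuperSecret"
)
block.write_path("results/run-1", b"hello from prefect")  # SET results/run-1
print(block.read_path("results/run-1"))                   # b"hello from prefect"
```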
prefect/client/orchestration.py
CHANGED
@@ -8,11 +8,13 @@ from typing import (
     Dict,
     Iterable,
     List,
+    Literal,
     Optional,
     Set,
     Tuple,
     TypeVar,
     Union,
+    overload,
 )
 from uuid import UUID, uuid4

@@ -156,9 +158,23 @@ class ServerType(AutoEnum):
     CLOUD = AutoEnum.auto()


+@overload
+def get_client(
+    httpx_settings: Optional[Dict[str, Any]] = None, sync_client: Literal[False] = False
+) -> "PrefectClient":
+    ...
+
+
+@overload
+def get_client(
+    httpx_settings: Optional[Dict[str, Any]] = None, sync_client: Literal[True] = True
+) -> "SyncPrefectClient":
+    ...
+
+
 def get_client(
     httpx_settings: Optional[Dict[str, Any]] = None, sync_client: bool = False
-)
+):
     """
     Retrieve a HTTP client for communicating with the Prefect REST API.

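The two `@overload` declarations above let a static type checker pick the return type of `get_client` from the literal value passed for `sync_client`; runtime behavior is unchanged. A minimal sketch of what a caller gains, assuming an API is configured (or the default local settings are in effect):

```python
# Minimal sketch: the overloads narrow the return type from the literal value of
# `sync_client`; there is no runtime change.
from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client

async_client = get_client()                  # a type checker sees: PrefectClient
sync_client = get_client(sync_client=True)   # a type checker sees: SyncPrefectClient

assert isinstance(async_client, PrefectClient)
assert isinstance(sync_client, SyncPrefectClient)
```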
@@ -254,8 +270,8 @@ class PrefectClient:
         self,
         api: Union[str, ASGIApp],
         *,
-        api_key: str = None,
-        api_version: str = None,
+        api_key: Optional[str] = None,
+        api_version: Optional[str] = None,
         httpx_settings: Optional[Dict[str, Any]] = None,
     ) -> None:
         httpx_settings = httpx_settings.copy() if httpx_settings else {}
@@ -490,7 +506,7 @@ class PrefectClient:
         work_pool_filter: WorkPoolFilter = None,
         work_queue_filter: WorkQueueFilter = None,
         sort: FlowSort = None,
-        limit: int = None,
+        limit: Optional[int] = None,
         offset: int = 0,
     ) -> List[Flow]:
         """
@@ -560,12 +576,12 @@ class PrefectClient:
         *,
         parameters: Optional[Dict[str, Any]] = None,
         context: Optional[Dict[str, Any]] = None,
-        state: prefect.states.State = None,
-        name: str = None,
-        tags: Iterable[str] = None,
-        idempotency_key: str = None,
-        parent_task_run_id: UUID = None,
-        work_queue_name: str = None,
+        state: Optional[prefect.states.State] = None,
+        name: Optional[str] = None,
+        tags: Optional[Iterable[str]] = None,
+        idempotency_key: Optional[str] = None,
+        parent_task_run_id: Optional[UUID] = None,
+        work_queue_name: Optional[str] = None,
         job_variables: Optional[Dict[str, Any]] = None,
     ) -> FlowRun:
         """
@@ -1694,7 +1710,7 @@ class PrefectClient:
         self,
         deployment: Deployment,
         schedule: SCHEDULE_TYPES = None,
-        is_schedule_active: bool = None,
+        is_schedule_active: Optional[bool] = None,
     ):
         deployment_update = DeploymentUpdate(
             version=deployment.version,
@@ -2044,7 +2060,7 @@ class PrefectClient:
         work_pool_filter: WorkPoolFilter = None,
         work_queue_filter: WorkQueueFilter = None,
         sort: FlowRunSort = None,
-        limit: int = None,
+        limit: Optional[int] = None,
         offset: int = 0,
     ) -> List[FlowRun]:
         """
@@ -2117,7 +2133,6 @@ class PrefectClient:
         state_create = state.to_state_create()
         state_create.state_details.flow_run_id = flow_run_id
         state_create.state_details.transition_id = uuid4()
-        print(repr(state_create))
         try:
             response = await self._client.post(
                 f"/flow_runs/{flow_run_id}/set_state",
@@ -2252,7 +2267,7 @@ class PrefectClient:
         task_run_filter: TaskRunFilter = None,
         deployment_filter: DeploymentFilter = None,
         sort: TaskRunSort = None,
-        limit: int = None,
+        limit: Optional[int] = None,
         offset: int = 0,
     ) -> List[TaskRun]:
         """
@@ -2516,8 +2531,8 @@ class PrefectClient:
     async def read_logs(
         self,
         log_filter: LogFilter = None,
-        limit: int = None,
-        offset: int = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
         sort: LogSort = LogSort.TIMESTAMP_ASC,
     ) -> List[Log]:
         """
@@ -2848,7 +2863,7 @@ class PrefectClient:
         flow_run_filter: FlowRunFilter = None,
         task_run_filter: TaskRunFilter = None,
         sort: ArtifactSort = None,
-        limit: int = None,
+        limit: Optional[int] = None,
         offset: int = 0,
     ) -> List[Artifact]:
         """
@@ -2888,7 +2903,7 @@ class PrefectClient:
         flow_run_filter: FlowRunFilter = None,
         task_run_filter: TaskRunFilter = None,
         sort: ArtifactCollectionSort = None,
-        limit: int = None,
+        limit: Optional[int] = None,
         offset: int = 0,
     ) -> List[ArtifactCollection]:
         """
@@ -2988,7 +3003,7 @@ class PrefectClient:
         else:
             raise

-    async def read_variables(self, limit: int = None) -> List[Variable]:
+    async def read_variables(self, limit: Optional[int] = None) -> List[Variable]:
         """Reads all variables."""
         response = await self._client.post("/variables/filter", json={"limit": limit})
         return pydantic.TypeAdapter(List[Variable]).validate_python(response.json())
@@ -3355,8 +3370,8 @@ class SyncPrefectClient:
         self,
         api: Union[str, ASGIApp],
         *,
-        api_key: str = None,
-        api_version: str = None,
+        api_key: Optional[str] = None,
+        api_version: Optional[str] = None,
         httpx_settings: Optional[Dict[str, Any]] = None,
     ) -> None:
         httpx_settings = httpx_settings.copy() if httpx_settings else {}
@@ -3653,6 +3668,61 @@ class SyncPrefectClient:

         return flow_run

+    def update_flow_run(
+        self,
+        flow_run_id: UUID,
+        flow_version: Optional[str] = None,
+        parameters: Optional[dict] = None,
+        name: Optional[str] = None,
+        tags: Optional[Iterable[str]] = None,
+        empirical_policy: Optional[FlowRunPolicy] = None,
+        infrastructure_pid: Optional[str] = None,
+        job_variables: Optional[dict] = None,
+    ) -> httpx.Response:
+        """
+        Update a flow run's details.
+
+        Args:
+            flow_run_id: The identifier for the flow run to update.
+            flow_version: A new version string for the flow run.
+            parameters: A dictionary of parameter values for the flow run. This will not
+                be merged with any existing parameters.
+            name: A new name for the flow run.
+            empirical_policy: A new flow run orchestration policy. This will not be
+                merged with any existing policy.
+            tags: An iterable of new tags for the flow run. These will not be merged with
+                any existing tags.
+            infrastructure_pid: The id of flow run as returned by an
+                infrastructure block.
+
+        Returns:
+            an `httpx.Response` object from the PATCH request
+        """
+        params = {}
+        if flow_version is not None:
+            params["flow_version"] = flow_version
+        if parameters is not None:
+            params["parameters"] = parameters
+        if name is not None:
+            params["name"] = name
+        if tags is not None:
+            params["tags"] = tags
+        if empirical_policy is not None:
+            params["empirical_policy"] = empirical_policy.model_dump(
+                mode="json", exclude_unset=True
+            )
+        if infrastructure_pid:
+            params["infrastructure_pid"] = infrastructure_pid
+        if job_variables is not None:
+            params["job_variables"] = job_variables
+
+        flow_run_data = FlowRunUpdate(**params)
+
+        return self._client.patch(
+            f"/flow_runs/{flow_run_id}",
+            json=flow_run_data.model_dump(mode="json", exclude_unset=True),
+        )
+
     def read_flow_run(self, flow_run_id: UUID) -> FlowRun:
         """
         Query the Prefect API for a flow run by id.
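A hedged usage sketch of the new synchronous `update_flow_run` method: the flow run id and values are placeholders, and it assumes a reachable Prefect API with an existing flow run and the sync client used as a context manager.

```python
# Placeholder id and values; assumes a reachable Prefect API and an existing flow run.
from uuid import UUID

from prefect.client.orchestration import get_client

with get_client(sync_client=True) as client:
    response = client.update_flow_run(
        flow_run_id=UUID("00000000-0000-0000-0000-000000000000"),
        name="renamed-run",
        tags=["backfill"],
        job_variables={"env": {"LOG_LEVEL": "DEBUG"}},
    )
    response.raise_for_status()  # the method returns the raw httpx.Response from the PATCH
```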
@@ -3682,7 +3752,7 @@ class SyncPrefectClient:
         work_pool_filter: WorkPoolFilter = None,
         work_queue_filter: WorkQueueFilter = None,
         sort: FlowRunSort = None,
-        limit: int = None,
+        limit: Optional[int] = None,
         offset: int = 0,
     ) -> List[FlowRun]:
         """
@@ -3874,7 +3944,7 @@ class SyncPrefectClient:
         task_run_filter: TaskRunFilter = None,
         deployment_filter: DeploymentFilter = None,
         sort: TaskRunSort = None,
-        limit: int = None,
+        limit: Optional[int] = None,
         offset: int = 0,
     ) -> List[TaskRun]:
         """
@@ -3978,3 +4048,23 @@ class SyncPrefectClient:
         else:
             raise
         return DeploymentResponse.model_validate(response.json())
+
+    def create_artifact(
+        self,
+        artifact: ArtifactCreate,
+    ) -> Artifact:
+        """
+        Creates an artifact with the provided configuration.
+
+        Args:
+            artifact: Desired configuration for the new artifact.
+        Returns:
+            Information about the newly created artifact.
+        """
+
+        response = self._client.post(
+            "/artifacts/",
+            json=artifact.model_dump_json(exclude_unset=True),
+        )
+
+        return Artifact.model_validate(response.json())
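And a hedged sketch of the new synchronous `create_artifact` method: the artifact key and data are made up, a reachable Prefect API is assumed, and `ArtifactCreate` is assumed to be importable from `prefect.client.schemas.actions` as elsewhere in the client.

```python
# Made-up artifact key and data; assumes a reachable Prefect API.
from prefect.client.orchestration import get_client
from prefect.client.schemas.actions import ArtifactCreate

with get_client(sync_client=True) as client:
    artifact = client.create_artifact(
        ArtifactCreate(
            key="backfill-summary",
            type="markdown",
            description="Rows processed per partition",
            data="| partition | rows |\n| --- | --- |\n| 2024-01 | 1200 |",
        )
    )
    print(artifact.id)
```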
prefect/client/schemas/actions.py
CHANGED
@@ -788,6 +788,6 @@ class GlobalConcurrencyLimitUpdate(ActionBaseModel):

    name: Optional[Name] = Field(None)
    limit: Optional[NonNegativeInteger] = Field(None)
-    active: Optional[
+    active: Optional[bool] = Field(None)
    active_slots: Optional[NonNegativeInteger] = Field(None)
    slot_decay_per_second: Optional[NonNegativeFloat] = Field(None)
prefect/client/schemas/filters.py
CHANGED
@@ -166,12 +166,18 @@ class FlowRunFilterStateType(PrefectBaseModel):
     any_: Optional[List[StateType]] = Field(
         default=None, description="A list of flow run state types to include"
     )
+    not_any_: Optional[List[StateType]] = Field(
+        default=None, description="A list of flow run state types to exclude"
+    )


 class FlowRunFilterStateName(PrefectBaseModel):
     any_: Optional[List[str]] = Field(
         default=None, description="A list of flow run state names to include"
     )
+    not_any_: Optional[List[str]] = Field(
+        default=None, description="A list of flow run state names to exclude"
+    )


 class FlowRunFilterState(PrefectBaseModel, OperatorMixin):
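A hedged sketch of how the new `not_any_` exclusion fields might be used when querying flow runs; it assumes a reachable Prefect API and uses the existing `FlowRunFilter` and `StateType` schema objects.

```python
# Sketch only: assumes a reachable Prefect API; StateType and FlowRunFilter
# come from the existing client schemas.
from prefect.client.orchestration import get_client
from prefect.client.schemas.filters import (
    FlowRunFilter,
    FlowRunFilterState,
    FlowRunFilterStateType,
)
from prefect.client.schemas.objects import StateType

not_finished = FlowRunFilter(
    state=FlowRunFilterState(
        type=FlowRunFilterStateType(not_any_=[StateType.COMPLETED, StateType.CANCELLED])
    )
)

with get_client(sync_client=True) as client:
    runs = client.read_flow_runs(flow_run_filter=not_finished)
    print(f"{len(runs)} flow runs that are neither completed nor cancelled")
```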
|