prefect-client 3.0.0rc1__py3-none-any.whl → 3.0.0rc3__py3-none-any.whl
This diff compares publicly released package versions as published to their public registry. It is provided for informational purposes only.
- prefect/_internal/compatibility/migration.py +124 -0
- prefect/_internal/concurrency/__init__.py +2 -2
- prefect/_internal/concurrency/primitives.py +1 -0
- prefect/_internal/pydantic/annotations/pendulum.py +2 -2
- prefect/_internal/pytz.py +1 -1
- prefect/blocks/core.py +1 -1
- prefect/blocks/redis.py +168 -0
- prefect/client/orchestration.py +113 -23
- prefect/client/schemas/actions.py +1 -1
- prefect/client/schemas/filters.py +6 -0
- prefect/client/schemas/objects.py +22 -11
- prefect/client/subscriptions.py +3 -2
- prefect/concurrency/asyncio.py +1 -1
- prefect/concurrency/services.py +1 -1
- prefect/context.py +1 -27
- prefect/deployments/__init__.py +3 -0
- prefect/deployments/base.py +11 -3
- prefect/deployments/deployments.py +3 -0
- prefect/deployments/steps/pull.py +1 -0
- prefect/deployments/steps/utility.py +2 -1
- prefect/engine.py +3 -0
- prefect/events/cli/automations.py +1 -1
- prefect/events/clients.py +7 -1
- prefect/events/schemas/events.py +2 -0
- prefect/exceptions.py +9 -0
- prefect/filesystems.py +22 -11
- prefect/flow_engine.py +118 -156
- prefect/flow_runs.py +2 -2
- prefect/flows.py +91 -35
- prefect/futures.py +44 -43
- prefect/infrastructure/provisioners/container_instance.py +1 -0
- prefect/infrastructure/provisioners/ecs.py +2 -2
- prefect/input/__init__.py +4 -0
- prefect/input/run_input.py +4 -2
- prefect/logging/formatters.py +2 -2
- prefect/logging/handlers.py +2 -2
- prefect/logging/loggers.py +1 -1
- prefect/plugins.py +1 -0
- prefect/records/cache_policies.py +179 -0
- prefect/records/result_store.py +10 -3
- prefect/results.py +27 -55
- prefect/runner/runner.py +1 -1
- prefect/runner/server.py +1 -1
- prefect/runtime/__init__.py +1 -0
- prefect/runtime/deployment.py +1 -0
- prefect/runtime/flow_run.py +1 -0
- prefect/runtime/task_run.py +1 -0
- prefect/settings.py +21 -5
- prefect/states.py +17 -4
- prefect/task_engine.py +337 -209
- prefect/task_runners.py +15 -5
- prefect/task_runs.py +203 -0
- prefect/{task_server.py → task_worker.py} +66 -36
- prefect/tasks.py +180 -77
- prefect/transactions.py +92 -16
- prefect/types/__init__.py +1 -1
- prefect/utilities/asyncutils.py +3 -3
- prefect/utilities/callables.py +90 -7
- prefect/utilities/dockerutils.py +5 -3
- prefect/utilities/engine.py +11 -0
- prefect/utilities/filesystem.py +4 -5
- prefect/utilities/importtools.py +34 -5
- prefect/utilities/services.py +2 -2
- prefect/utilities/urls.py +195 -0
- prefect/utilities/visualization.py +1 -0
- prefect/variables.py +19 -10
- prefect/workers/base.py +46 -1
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/METADATA +3 -2
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/RECORD +72 -66
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/LICENSE +0 -0
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/WHEEL +0 -0
- {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/top_level.txt +0 -0
prefect/flows.py
CHANGED
@@ -4,7 +4,6 @@ Module containing the base workflow class and decorator - for most use cases, us

 # This file requires type-checking with pyright because mypy does not yet support PEP612
 # See https://github.com/python/mypy/issues/8645
-
 import ast
 import datetime
 import importlib.util
@@ -15,6 +14,7 @@ import re
 import sys
 import tempfile
 import warnings
+from copy import copy
 from functools import partial, update_wrapper
 from pathlib import Path
 from tempfile import NamedTemporaryFile
@@ -102,7 +102,7 @@ from prefect.utilities.callables import (
 from prefect.utilities.collections import listrepr
 from prefect.utilities.filesystem import relative_path_to_current_platform
 from prefect.utilities.hashing import file_hash
-from prefect.utilities.importtools import import_object
+from prefect.utilities.importtools import import_object, safe_load_namespace

 from ._internal.pydantic.v2_schema import is_v2_type
 from ._internal.pydantic.v2_validated_func import V2ValidatedFunction
@@ -354,6 +354,28 @@ class Flow(Generic[P, R]):

         self._entrypoint = f"{module}:{fn.__name__}"

+    @property
+    def ismethod(self) -> bool:
+        return hasattr(self.fn, "__prefect_self__")
+
+    def __get__(self, instance, owner):
+        """
+        Implement the descriptor protocol so that the flow can be used as an instance method.
+        When an instance method is loaded, this method is called with the "self" instance as
+        an argument. We return a copy of the flow with that instance bound to the flow's function.
+        """
+
+        # if no instance is provided, it's being accessed on the class
+        if instance is None:
+            return self
+
+        # if the flow is being accessed on an instance, bind the instance to the __prefect_self__ attribute
+        # of the flow's function. This will allow it to be automatically added to the flow's parameters
+        else:
+            bound_flow = copy(self)
+            bound_flow.fn.__prefect_self__ = instance
+            return bound_flow
+
     def with_options(
         self,
         *,
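The new `ismethod` property and `__get__` descriptor let a `@flow`-decorated function live on a class and be accessed through an instance; the bound object is stashed on `fn.__prefect_self__` and, per the `serialize_parameters` change below, excluded from serialized parameters. A minimal sketch of what this enables — the `Greeter` class is hypothetical, and this assumes the engine injects the bound instance when the flow runs:

```python
from prefect import flow


class Greeter:
    def __init__(self, greeting: str):
        self.greeting = greeting

    @flow
    def greet(self, name: str) -> str:
        # `self` here is the Greeter instance bound via Flow.__get__
        return f"{self.greeting}, {name}!"


if __name__ == "__main__":
    # Accessing `greet` on the instance returns a copy of the Flow with
    # Greeter("Hello") attached as fn.__prefect_self__
    print(Greeter("Hello").greet("Marvin"))
```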
@@ -555,6 +577,9 @@ class Flow(Generic[P, R]):
         """
         serialized_parameters = {}
         for key, value in parameters.items():
+            # do not serialize the bound self object
+            if self.ismethod and value is self.fn.__prefect_self__:
+                continue
             try:
                 serialized_parameters[key] = jsonable_encoder(value)
             except (TypeError, ValueError):
@@ -1241,19 +1266,14 @@ class Flow(Generic[P, R]):
             # we can add support for exploring subflows for tasks in the future.
             return track_viz_task(self.isasync, self.name, parameters)

-        from prefect.flow_engine import run_flow, run_flow_sync
+        from prefect.flow_engine import run_flow

-        run_kwargs = dict(
+        return run_flow(
             flow=self,
             parameters=parameters,
             wait_for=wait_for,
             return_type=return_type,
         )
-        if self.isasync:
-            # this returns an awaitable coroutine
-            return run_flow(**run_kwargs)
-        else:
-            return run_flow_sync(**run_kwargs)

     @sync_compatible
     async def visualize(self, *args, **kwargs):
@@ -1329,8 +1349,8 @@ def flow(
     retries: Optional[int] = None,
     retry_delay_seconds: Optional[Union[int, float]] = None,
     task_runner: Optional[TaskRunner] = None,
-    description: str = None,
-    timeout_seconds: Union[int, float] = None,
+    description: Optional[str] = None,
+    timeout_seconds: Union[int, float, None] = None,
     validate_parameters: bool = True,
     persist_result: Optional[bool] = None,
     result_storage: Optional[ResultStorage] = None,
@@ -1358,11 +1378,11 @@ def flow(
     name: Optional[str] = None,
     version: Optional[str] = None,
     flow_run_name: Optional[Union[Callable[[], str], str]] = None,
-    retries: int = None,
-    retry_delay_seconds: Union[int, float] = None,
+    retries: Optional[int] = None,
+    retry_delay_seconds: Union[int, float, None] = None,
     task_runner: Optional[TaskRunner] = None,
-    description: str = None,
-    timeout_seconds: Union[int, float] = None,
+    description: Optional[str] = None,
+    timeout_seconds: Union[int, float, None] = None,
     validate_parameters: bool = True,
     persist_result: Optional[bool] = None,
     result_storage: Optional[ResultStorage] = None,
@@ -1485,6 +1505,9 @@ def flow(
     >>> pass
     """
     if __fn:
+        if isinstance(__fn, (classmethod, staticmethod)):
+            method_decorator = type(__fn).__name__
+            raise TypeError(f"@{method_decorator} should be applied on top of @flow")
        return cast(
            Flow[P, R],
            Flow(
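The guard above rejects applying `@flow` directly to a `classmethod` or `staticmethod` object, i.e. stacking `@flow` above the method decorator. A quick illustrative sketch of the failure mode it reports — calling the decorator on a pre-wrapped function is equivalent to the bad stacking order:

```python
from prefect import flow


def build() -> str:
    return "ok"


try:
    # Same as decorating with @flow placed above @staticmethod
    flow(staticmethod(build))
except TypeError as exc:
    print(exc)  # "@staticmethod should be applied on top of @flow"
```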
@@ -1560,7 +1583,9 @@ flow.from_source = Flow.from_source


 def select_flow(
-    flows: Iterable[Flow], flow_name: str = None, from_message: str = None
+    flows: Iterable[Flow],
+    flow_name: Optional[str] = None,
+    from_message: Optional[str] = None,
 ) -> Flow:
     """
     Select the only flow in an iterable or a flow specified by name.
@@ -1574,33 +1599,33 @@ def select_flow(
         UnspecifiedFlowError: If multiple flows exist but no flow name was provided
     """
     # Convert to flows by name
-    flows = {f.name: f for f in flows}
+    flows_dict = {f.name: f for f in flows}

     # Add a leading space if given, otherwise use an empty string
     from_message = (" " + from_message) if from_message else ""
-    if not flows:
+    if not flows_dict:
         raise MissingFlowError(f"No flows found{from_message}.")

-    elif flow_name and flow_name not in flows:
+    elif flow_name and flow_name not in flows_dict:
         raise MissingFlowError(
             f"Flow {flow_name!r} not found{from_message}. "
-            f"Found the following flows: {listrepr(flows.keys())}. "
+            f"Found the following flows: {listrepr(flows_dict.keys())}. "
             "Check to make sure that your flow function is decorated with `@flow`."
         )

-    elif not flow_name and len(flows) > 1:
+    elif not flow_name and len(flows_dict) > 1:
         raise UnspecifiedFlowError(
             (
-                f"Found {len(flows)} flows{from_message}:"
-                f" {listrepr(sorted(flows.keys()))}. Specify a flow name to select a"
+                f"Found {len(flows_dict)} flows{from_message}:"
+                f" {listrepr(sorted(flows_dict.keys()))}. Specify a flow name to select a"
                 " flow."
             ),
         )

     if flow_name:
-        return flows[flow_name]
+        return flows_dict[flow_name]
     else:
-        return list(flows.values())[0]
+        return list(flows_dict.values())[0]


 def load_flows_from_script(path: str) -> List[Flow]:
@@ -1617,7 +1642,7 @@ def load_flows_from_script(path: str) -> List[Flow]:
     return registry_from_script(path).get_instances(Flow)


-def load_flow_from_script(path: str, flow_name: str = None) -> Flow:
+def load_flow_from_script(path: str, flow_name: Optional[str] = None) -> Flow:
     """
     Extract a flow object from a script by running all of the code in the file.

@@ -1661,7 +1686,7 @@ def load_flow_from_entrypoint(
         FlowScriptError: If an exception is encountered while running the script
         MissingFlowError: If the flow function specified in the entrypoint does not exist
     """
-    with PrefectObjectRegistry(
+    with PrefectObjectRegistry(  # type: ignore
         block_code_execution=True,
         capture_failures=True,
     ):
@@ -1686,7 +1711,7 @@ def load_flow_from_entrypoint(
     return flow


-def load_flow_from_text(script_contents: AnyStr, flow_name: str):
+def load_flow_from_text(script_contents: AnyStr, flow_name: str) -> Flow:
     """
     Load a flow from a text script.

@@ -1717,7 +1742,7 @@ async def serve(
     print_starting_message: bool = True,
     limit: Optional[int] = None,
     **kwargs,
-):
+) -> NoReturn:
     """
     Serve the provided list of deployments.

@@ -1807,7 +1832,7 @@ async def load_flow_from_flow_run(
     flow_run: "FlowRun",
     ignore_storage: bool = False,
     storage_base_path: Optional[str] = None,
-) ->
+) -> Flow:
     """
     Load a flow from the location/script provided in a deployment's storage document.

@@ -1861,7 +1886,9 @@ async def load_flow_from_flow_run(
         await storage_block.get_directory(from_path=from_path, local_path=".")

     if deployment.pull_steps:
-        run_logger.debug(
+        run_logger.debug(
+            f"Running {len(deployment.pull_steps)} deployment pull step(s)"
+        )
         output = await run_steps(deployment.pull_steps)
         if output.get("directory"):
             run_logger.debug(f"Changing working directory to {output['directory']!r}")
@@ -1913,7 +1940,14 @@ def load_flow_argument_from_entrypoint(
         (
             node
             for node in ast.walk(parsed_code)
-            if isinstance(node, ast.FunctionDef) and node.name == func_name
+            if isinstance(
+                node,
+                (
+                    ast.FunctionDef,
+                    ast.AsyncFunctionDef,
+                ),
+            )
+            and node.name == func_name
         ),
         None,
     )
@@ -1926,11 +1960,33 @@ def load_flow_argument_from_entrypoint(
     ):
         for keyword in decorator.keywords:
             if keyword.arg == arg:
-                return (
-                    keyword.value.value
-                )  # Return the string value of the argument
+                if isinstance(keyword.value, ast.Constant):
+                    return (
+                        keyword.value.value
+                    )  # Return the string value of the argument
+
+                # if the arg value is not a raw str (i.e. a variable or expression),
+                # then attempt to evaluate it
+                namespace = safe_load_namespace(source_code)
+                literal_arg_value = ast.get_source_segment(
+                    source_code, keyword.value
+                )
+                try:
+                    evaluated_value = eval(literal_arg_value, namespace)  # type: ignore
+                except Exception as e:
+                    logger.info(
+                        "Failed to parse @flow argument: `%s=%s` due to the following error. Ignoring and falling back to default behavior.",
+                        arg,
+                        literal_arg_value,
+                        exc_info=e,
+                    )
+                    # ignore the decorator arg and fallback to default behavior
+                    break
+                return str(evaluated_value)

     if arg == "name":
         return func_name.replace(
             "_", "-"
         )  # If no matching decorator or keyword argument is found
+
+    return None
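With `safe_load_namespace` in place, `load_flow_argument_from_entrypoint` can now resolve decorator arguments that are not plain string literals by evaluating their source segment against the module's loaded namespace. A hedged sketch of what that covers — the module path `my_flows.py` is illustrative, and this assumes the rc3 signature `load_flow_argument_from_entrypoint(entrypoint, arg="name")`:

```python
# my_flows.py -- hypothetical module whose flow name is built from a variable
from prefect import flow

BASE = "etl"

@flow(name=f"{BASE}-daily")  # not an ast.Constant, so rc1 could not extract it
def daily():
    ...
```

```python
from prefect.flows import load_flow_argument_from_entrypoint

# The f-string is evaluated in a namespace built by safe_load_namespace,
# so the resolved name "etl-daily" is returned instead of the fallback "daily".
print(load_flow_argument_from_entrypoint("my_flows.py:daily", arg="name"))
```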
prefect/futures.py
CHANGED
@@ -1,7 +1,6 @@
 import abc
 import concurrent.futures
 import inspect
-import time
 import uuid
 from functools import partial
 from typing import Any, Generic, Optional, Set, Union, cast
@@ -11,13 +10,17 @@ from typing_extensions import TypeVar
 from prefect.client.orchestration import get_client
 from prefect.client.schemas.objects import TaskRun
 from prefect.exceptions import ObjectNotFound
+from prefect.logging.loggers import get_logger
 from prefect.states import Pending, State
+from prefect.task_runs import TaskRunWaiter
 from prefect.utilities.annotations import quote
 from prefect.utilities.asyncutils import run_coro_as_sync
 from prefect.utilities.collections import StopVisiting, visit_collection

 F = TypeVar("F")

+logger = get_logger(__name__)
+

 class PrefectFuture(abc.ABC):
     """
@@ -146,68 +149,66 @@ class PrefectDistributedFuture(PrefectFuture):
     Represents the result of a computation happening anywhere.

     This class is typically used to interact with the result of a task run
-    scheduled to run in a Prefect task server but can be used to interact with
+    scheduled to run in a Prefect task worker but can be used to interact with
     any task run scheduled in Prefect's API.
     """

-    def __init__(self, task_run_id: uuid.UUID):
-        self._task_run = None
-        self._client = None
-        super().__init__(task_run_id=task_run_id)
-
-    @property
-    def client(self):
-        if self._client is None:
-            self._client = get_client(sync_client=True)
-        return self._client
+    def wait(self, timeout: Optional[float] = None) -> None:
+        return run_coro_as_sync(self.wait_async(timeout=timeout))

-    @property
-    def task_run(self):
-        if self._task_run is None:
-            self._task_run = self.client.read_task_run(task_run_id=self.task_run_id)
-        return self._task_run
-
-    @task_run.setter
-    def task_run(self, task_run):
-        self._task_run = task_run
-
-    def wait(
-        self, timeout: Optional[float] = None, polling_interval: Optional[float] = 0.2
-    ) -> None:
-        start_time = time.time()
-        # TODO: Websocket implementation?
-        while True:
-            self.task_run = cast(
-                TaskRun, self.client.read_task_run(task_run_id=self.task_run_id)
+    async def wait_async(self, timeout: Optional[float] = None):
+        if self._final_state:
+            logger.debug(
+                "Final state already set for %s. Returning...", self.task_run_id
             )
-            if self.task_run.state.is_final():
-                self._final_state = self.task_run.state
-                return
-            if timeout is not None and time.time() - start_time > timeout:
+            return
+
+        # Read task run to see if it is still running
+        async with get_client() as client:
+            task_run = await client.read_task_run(task_run_id=self._task_run_id)
+            if task_run.state.is_final():
+                logger.debug(
+                    "Task run %s already finished. Returning...",
+                    self.task_run_id,
+                )
+                self._final_state = task_run.state
                 return
-            time.sleep(polling_interval)
+
+            # If still running, wait for a completed event from the server
+            logger.debug(
+                "Waiting for completed event for task run %s...",
+                self.task_run_id,
+            )
+            await TaskRunWaiter.wait_for_task_run(self._task_run_id, timeout=timeout)
+            task_run = await client.read_task_run(task_run_id=self._task_run_id)
+            if task_run.state.is_final():
+                self._final_state = task_run.state
+            return

     def result(
         self,
         timeout: Optional[float] = None,
         raise_on_failure: bool = True,
-        polling_interval: Optional[float] = 0.2,
     ) -> Any:
+        return run_coro_as_sync(
+            self.result_async(timeout=timeout, raise_on_failure=raise_on_failure)
+        )
+
+    async def result_async(
+        self,
+        timeout: Optional[float] = None,
+        raise_on_failure: bool = True,
+    ):
         if not self._final_state:
-            self.wait(timeout=timeout, polling_interval=polling_interval)
+            await self.wait_async(timeout=timeout)
         if not self._final_state:
             raise TimeoutError(
                 f"Task run {self.task_run_id} did not complete within {timeout} seconds"
             )

-        _result = self._final_state.result(
+        return await self._final_state.result(
             raise_on_failure=raise_on_failure, fetch=True
         )
-        # state.result is a `sync_compatible` function that may or may not return an awaitable
-        # depending on whether the parent frame is sync or not
-        if inspect.isawaitable(_result):
-            _result = run_coro_as_sync(_result)
-        return _result

     def __eq__(self, other):
         if not isinstance(other, PrefectDistributedFuture):
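The rework above replaces the polling loop with an event-driven wait: the synchronous `wait()`/`result()` are now thin wrappers over `wait_async()`/`result_async()`, which check the API once and then block on `TaskRunWaiter` instead of sleeping between reads. A rough usage sketch, assuming the base `PrefectFuture` constructor still accepts `task_run_id` (not shown in this diff):

```python
from prefect.futures import PrefectDistributedFuture


def wait_for_task(task_run_id):
    # Hypothetical helper: task_run_id is the UUID of a task run already
    # scheduled via Prefect's API (e.g. by a task worker submission).
    future = PrefectDistributedFuture(task_run_id=task_run_id)
    future.wait(timeout=60)           # sync wrapper around wait_async()
    return future.result(timeout=60)  # raises TimeoutError if no final state was reached
```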
prefect/infrastructure/provisioners/ecs.py
CHANGED
@@ -367,7 +367,7 @@ class AuthenticationResource:
         work_pool_name: str,
         user_name: str = "prefect-ecs-user",
         policy_name: str = "prefect-ecs-policy",
-        credentials_block_name: str = None,
+        credentials_block_name: Optional[str] = None,
     ):
         self._user_name = user_name
         self._credentials_block_name = (
@@ -1130,7 +1130,7 @@ class ElasticContainerServicePushProvisioner:
         work_pool_name: str,
         user_name: str = "prefect-ecs-user",
         policy_name: str = "prefect-ecs-policy",
-        credentials_block_name: str = None,
+        credentials_block_name: Optional[str] = None,
         cluster_name: str = "prefect-ecs-cluster",
         vpc_name: str = "prefect-ecs-vpc",
         ecs_security_group_name: str = "prefect-ecs-security-group",
prefect/input/__init__.py
CHANGED
@@ -12,6 +12,8 @@ from .run_input import (
     RunInputMetadata,
     keyset_from_base_key,
     keyset_from_paused_state,
+    receive_input,
+    send_input,
 )

 __all__ = [
@@ -26,4 +28,6 @@ __all__ = [
     "keyset_from_base_key",
     "keyset_from_paused_state",
     "read_flow_run_input",
+    "receive_input",
+    "send_input",
 ]
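These changes re-export `receive_input` and `send_input` from `prefect.input`, so both halves of the run-input workflow can be imported from the package directly. A hedged sketch of the typical pairing (the flow names and the bare `str` input type below are illustrative; exact keyword arguments may differ):

```python
from prefect import flow
from prefect.input import receive_input, send_input


@flow
async def greeter():
    # Iterate over inputs sent to this flow run as they arrive
    async for name in receive_input(str, timeout=60):
        print(f"Hello, {name}!")


@flow
async def sender(greeter_flow_run_id):
    # Send a value to the receiving flow run by its ID
    await send_input("Marvin", flow_run_id=greeter_flow_run_id)
```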
prefect/input/run_input.py
CHANGED
@@ -18,7 +18,8 @@ Sender flow:
 ```python
 import random
 from uuid import UUID
-from prefect import flow, get_run_logger
+from prefect import flow
+from prefect.logging import get_run_logger
 from prefect.input import RunInput

 class NumberData(RunInput):
@@ -43,7 +44,8 @@ Receiver flow:
 ```python
 import random
 from uuid import UUID
-from prefect import flow, get_run_logger
+from prefect import flow
+from prefect.logging import get_run_logger
 from prefect.input import RunInput

 class NumberData(RunInput):
prefect/logging/formatters.py
CHANGED
@@ -78,8 +78,8 @@ class PrefectFormatter(logging.Formatter):
         validate=True,
         *,
         defaults=None,
-        task_run_fmt: str = None,
-        flow_run_fmt: str = None,
+        task_run_fmt: Optional[str] = None,
+        flow_run_fmt: Optional[str] = None,
     ) -> None:
         """
         Implementation of the standard Python formatter with support for multiple
prefect/logging/handlers.py
CHANGED
@@ -108,8 +108,8 @@ class APILogHandler(logging.Handler):
             )

            # Not ideal, but this method is called by the stdlib and cannot return a
-            # coroutine so we just schedule the drain in
-            from_sync.
+            # coroutine so we just schedule the drain in the global loop thread and continue
+            from_sync.call_soon_in_loop_thread(create_call(APILogWorker.drain_all))
             return None
         else:
             # We set a timeout of 5s because we don't want to block forever if the worker
prefect/logging/loggers.py
CHANGED
@@ -69,7 +69,7 @@ class PrefectLogAdapter(logging.LoggerAdapter):


 @lru_cache()
-def get_logger(name: str = None) -> logging.Logger:
+def get_logger(name: Optional[str] = None) -> logging.Logger:
     """
     Get a `prefect` logger. These loggers are intended for internal use within the
     `prefect` package.
prefect/plugins.py
CHANGED