prefect-client 3.0.10__py3-none-any.whl → 3.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/__init__.py +17 -14
- prefect/_internal/schemas/bases.py +1 -0
- prefect/_internal/schemas/validators.py +5 -3
- prefect/_version.py +3 -3
- prefect/client/cloud.py +2 -2
- prefect/client/orchestration.py +4 -4
- prefect/client/schemas/filters.py +14 -0
- prefect/context.py +3 -2
- prefect/deployments/runner.py +15 -6
- prefect/events/schemas/automations.py +3 -3
- prefect/events/schemas/deployment_triggers.py +10 -5
- prefect/flow_engine.py +4 -4
- prefect/flows.py +24 -9
- prefect/futures.py +4 -4
- prefect/logging/handlers.py +1 -1
- prefect/logging/highlighters.py +2 -0
- prefect/logging/logging.yml +82 -83
- prefect/runner/runner.py +1 -2
- prefect/runner/server.py +12 -1
- prefect/settings/__init__.py +59 -0
- prefect/settings/base.py +131 -0
- prefect/settings/constants.py +8 -0
- prefect/settings/context.py +65 -0
- prefect/settings/legacy.py +167 -0
- prefect/settings/models/__init__.py +0 -0
- prefect/settings/models/api.py +41 -0
- prefect/settings/models/cli.py +31 -0
- prefect/settings/models/client.py +90 -0
- prefect/settings/models/cloud.py +58 -0
- prefect/settings/models/deployments.py +40 -0
- prefect/settings/models/flows.py +37 -0
- prefect/settings/models/internal.py +21 -0
- prefect/settings/models/logging.py +137 -0
- prefect/settings/models/results.py +47 -0
- prefect/settings/models/root.py +447 -0
- prefect/settings/models/runner.py +65 -0
- prefect/settings/models/server/__init__.py +1 -0
- prefect/settings/models/server/api.py +133 -0
- prefect/settings/models/server/database.py +202 -0
- prefect/settings/models/server/deployments.py +24 -0
- prefect/settings/models/server/ephemeral.py +34 -0
- prefect/settings/models/server/events.py +140 -0
- prefect/settings/models/server/flow_run_graph.py +34 -0
- prefect/settings/models/server/root.py +143 -0
- prefect/settings/models/server/services.py +485 -0
- prefect/settings/models/server/tasks.py +86 -0
- prefect/settings/models/server/ui.py +52 -0
- prefect/settings/models/tasks.py +91 -0
- prefect/settings/models/testing.py +52 -0
- prefect/settings/models/ui.py +0 -0
- prefect/settings/models/worker.py +46 -0
- prefect/settings/profiles.py +390 -0
- prefect/settings/sources.py +162 -0
- prefect/task_engine.py +24 -29
- prefect/task_runners.py +6 -1
- prefect/tasks.py +63 -28
- prefect/utilities/asyncutils.py +1 -1
- prefect/utilities/engine.py +11 -3
- prefect/utilities/services.py +3 -3
- prefect/workers/base.py +8 -2
- {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/METADATA +2 -2
- {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/RECORD +66 -33
- prefect/settings.py +0 -2172
- /prefect/{profiles.toml → settings/profiles.toml} +0 -0
- {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/LICENSE +0 -0
- {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/WHEEL +0 -0
- {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/top_level.txt +0 -0
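The listing shows the monolithic `prefect/settings.py` being split into a `prefect/settings/` package (`base.py`, `legacy.py`, `profiles.py`, `sources.py`, and per-area models). As orientation, here is a hedged sketch of the access pattern the removed module's docstring recommends, assuming `prefect.settings` keeps exporting the same names (`PREFECT_API_URL`, `get_current_settings`) after the reorganization:

```python
# Hedged sketch: assumes prefect.settings still exposes the legacy Setting
# objects and get_current_settings() after the 3.0.11 reorganization.
import os

os.environ["PREFECT_API_URL"] = "http://127.0.0.1:4200/api"  # settings read from env vars

from prefect.settings import PREFECT_API_URL, get_current_settings

# Legacy-style access through a Setting object, as the removed docstring recommends.
print(PREFECT_API_URL.value())         # http://127.0.0.1:4200/api

# Equivalent access through the nested settings model.
print(get_current_settings().api.url)  # same value, via the APISettings child model
```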
prefect/settings.py  DELETED  @@ -1,2172 +0,0 @@
The removed module opened with this docstring:

    """
    Prefect settings are defined using `BaseSettings` from `pydantic_settings`. `BaseSettings` can load setting values
    from system environment variables and each additionally specified `env_file`.

    The recommended user-facing way to access Prefect settings at this time is to import specific setting objects directly,
    like `from prefect.settings import PREFECT_API_URL; print(PREFECT_API_URL.value())`.

    Importantly, we replace the `callback` mechanism for updating settings with an "after" model_validator that updates dependent settings.
    After https://github.com/pydantic/pydantic/issues/9789 is resolved, we will be able to define context-aware defaults
    for settings, at which point we will not need to use the "after" model_validator.
    """

The opening portion of the file then defined:

- the standard-library, `pydantic`, `pydantic_settings`, and `prefect` imports
- module constants: `DEFAULT_PREFECT_HOME` (`~/.prefect`), `DEFAULT_PROFILES_PATH` (the `profiles.toml` shipped next to the module), and `_SECRET_TYPES` (`Secret`, `SecretStr`)
- the helpers `env_var_to_accessor()` and `is_test_mode()`
- `class Setting`, a compatibility shim for the legacy `Setting` object exposing `name`, `is_secret`, `default()`, `value()`, and `value_from(settings)`; values are resolved by walking the setting's dotted accessor path on the active `Settings` instance and unwrapping secret values, as sketched below
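The lookup performed by `Setting.value_from()` is small enough to restate on its own. The sketch below mirrors the removed logic; `DemoAPI` and `DemoSettings` are stand-in models invented for the example, and the real code mapped environment-variable names to dotted accessors through a `SETTING_VARIABLES` registry.

```python
# Sketch of the resolution the removed Setting.value_from() performed:
# walk a dotted accessor path on the settings model and unwrap secrets.
from typing import Any, Optional

from pydantic import BaseModel, SecretStr


class DemoAPI(BaseModel):
    url: Optional[str] = None
    key: Optional[SecretStr] = None


class DemoSettings(BaseModel):
    api: DemoAPI


def value_from(accessor: str, settings: BaseModel) -> Any:
    current: Any = settings
    for key in accessor.split("."):
        current = getattr(current, key, None)
    if isinstance(current, SecretStr):
        return current.get_secret_value()  # secrets are unwrapped on access
    return current


settings = DemoSettings(api=DemoAPI(url="http://127.0.0.1:4200/api", key="sekrit"))
print(value_from("api.url", settings))  # http://127.0.0.1:4200/api
print(value_from("api.key", settings))  # sekrit
```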
|
164
|
-
########################################################################
|
165
|
-
# Define post init validators for use in an "after" model_validator,
|
166
|
-
# core logic will remain similar after context-aware defaults are supported
|
167
|
-
|
168
|
-
|
169
|
-
def default_ui_url(settings: "Settings") -> Optional[str]:
|
170
|
-
value = settings.ui_url
|
171
|
-
if value is not None:
|
172
|
-
return value
|
173
|
-
|
174
|
-
# Otherwise, infer a value from the API URL
|
175
|
-
ui_url = api_url = settings.api.url
|
176
|
-
|
177
|
-
if not api_url:
|
178
|
-
return None
|
179
|
-
|
180
|
-
cloud_url = settings.cloud_api_url
|
181
|
-
cloud_ui_url = settings.cloud_ui_url
|
182
|
-
if api_url.startswith(cloud_url):
|
183
|
-
ui_url = ui_url.replace(cloud_url, cloud_ui_url)
|
184
|
-
|
185
|
-
if ui_url.endswith("/api"):
|
186
|
-
# Handles open-source APIs
|
187
|
-
ui_url = ui_url[:-4]
|
188
|
-
|
189
|
-
# Handles Cloud APIs with content after `/api`
|
190
|
-
ui_url = ui_url.replace("/api/", "/")
|
191
|
-
|
192
|
-
# Update routing
|
193
|
-
ui_url = ui_url.replace("/accounts/", "/account/")
|
194
|
-
ui_url = ui_url.replace("/workspaces/", "/workspace/")
|
195
|
-
|
196
|
-
return ui_url
|
197
|
-
|
198
|
-
|
199
|
-
def default_cloud_ui_url(settings) -> Optional[str]:
|
200
|
-
value = settings.cloud_ui_url
|
201
|
-
if value is not None:
|
202
|
-
return value
|
203
|
-
|
204
|
-
# Otherwise, infer a value from the API URL
|
205
|
-
ui_url = api_url = settings.cloud_api_url
|
206
|
-
|
207
|
-
if re.match(r"^https://api[\.\w]*.prefect.[^\.]+/", api_url):
|
208
|
-
ui_url = ui_url.replace("https://api", "https://app", 1)
|
209
|
-
|
210
|
-
if ui_url.endswith("/api"):
|
211
|
-
ui_url = ui_url[:-4]
|
212
|
-
|
213
|
-
return ui_url
|
214
|
-
|
215
|
-
|
216
|
-
def max_log_size_smaller_than_batch_size(values):
|
217
|
-
"""
|
218
|
-
Validator for settings asserting the batch size and match log size are compatible
|
219
|
-
"""
|
220
|
-
if values["logging_to_api_batch_size"] < values["logging_to_api_max_log_size"]:
|
221
|
-
raise ValueError(
|
222
|
-
"`PREFECT_LOGGING_TO_API_MAX_LOG_SIZE` cannot be larger than"
|
223
|
-
" `PREFECT_LOGGING_TO_API_BATCH_SIZE`"
|
224
|
-
)
|
225
|
-
return values
|
226
|
-
|
227
|
-
|
228
|
-
def warn_on_database_password_value_without_usage(values):
|
229
|
-
"""
|
230
|
-
Validator for settings warning if the database password is set but not used.
|
231
|
-
"""
|
232
|
-
db_password = (
|
233
|
-
v.get_secret_value()
|
234
|
-
if (v := values["api_database_password"]) and hasattr(v, "get_secret_value")
|
235
|
-
else None
|
236
|
-
)
|
237
|
-
api_db_connection_url = (
|
238
|
-
values["api_database_connection_url"].get_secret_value()
|
239
|
-
if hasattr(values["api_database_connection_url"], "get_secret_value")
|
240
|
-
else values["api_database_connection_url"]
|
241
|
-
)
|
242
|
-
if (
|
243
|
-
db_password
|
244
|
-
and api_db_connection_url is not None
|
245
|
-
and ("PREFECT_API_DATABASE_PASSWORD" not in api_db_connection_url)
|
246
|
-
and db_password not in api_db_connection_url
|
247
|
-
and quote_plus(db_password) not in api_db_connection_url
|
248
|
-
):
|
249
|
-
warnings.warn(
|
250
|
-
"PREFECT_API_DATABASE_PASSWORD is set but not included in the "
|
251
|
-
"PREFECT_API_DATABASE_CONNECTION_URL. "
|
252
|
-
"The provided password will be ignored."
|
253
|
-
)
|
254
|
-
return values
|
255
|
-
|
256
|
-
|
257
|
-
def warn_on_misconfigured_api_url(values):
|
258
|
-
"""
|
259
|
-
Validator for settings warning if the API URL is misconfigured.
|
260
|
-
"""
|
261
|
-
api_url = values.get("api", {}).get("url")
|
262
|
-
if api_url is not None:
|
263
|
-
misconfigured_mappings = {
|
264
|
-
"app.prefect.cloud": (
|
265
|
-
"`PREFECT_API_URL` points to `app.prefect.cloud`. Did you"
|
266
|
-
" mean `api.prefect.cloud`?"
|
267
|
-
),
|
268
|
-
"account/": (
|
269
|
-
"`PREFECT_API_URL` uses `/account/` but should use `/accounts/`."
|
270
|
-
),
|
271
|
-
"workspace/": (
|
272
|
-
"`PREFECT_API_URL` uses `/workspace/` but should use `/workspaces/`."
|
273
|
-
),
|
274
|
-
}
|
275
|
-
warnings_list = []
|
276
|
-
|
277
|
-
for misconfig, warning in misconfigured_mappings.items():
|
278
|
-
if misconfig in api_url:
|
279
|
-
warnings_list.append(warning)
|
280
|
-
|
281
|
-
parsed_url = urlparse(api_url)
|
282
|
-
if parsed_url.path and not parsed_url.path.startswith("/api"):
|
283
|
-
warnings_list.append(
|
284
|
-
"`PREFECT_API_URL` should have `/api` after the base URL."
|
285
|
-
)
|
286
|
-
|
287
|
-
if warnings_list:
|
288
|
-
example = 'e.g. PREFECT_API_URL="https://api.prefect.cloud/api/accounts/[ACCOUNT-ID]/workspaces/[WORKSPACE-ID]"'
|
289
|
-
warnings_list.append(example)
|
290
|
-
|
291
|
-
warnings.warn("\n".join(warnings_list), stacklevel=2)
|
292
|
-
|
293
|
-
return values
|
294
|
-
|
295
|
-
|
296
|
-
def default_database_connection_url(settings: "Settings") -> SecretStr:
|
297
|
-
value = None
|
298
|
-
if settings.api_database_driver == "postgresql+asyncpg":
|
299
|
-
required = [
|
300
|
-
"api_database_host",
|
301
|
-
"api_database_user",
|
302
|
-
"api_database_name",
|
303
|
-
"api_database_password",
|
304
|
-
]
|
305
|
-
missing = [attr for attr in required if getattr(settings, attr) is None]
|
306
|
-
if missing:
|
307
|
-
raise ValueError(
|
308
|
-
f"Missing required database connection settings: {', '.join(missing)}"
|
309
|
-
)
|
310
|
-
|
311
|
-
from sqlalchemy import URL
|
312
|
-
|
313
|
-
return URL(
|
314
|
-
drivername=settings.api_database_driver,
|
315
|
-
host=settings.api_database_host,
|
316
|
-
port=settings.api_database_port or 5432,
|
317
|
-
username=settings.api_database_user,
|
318
|
-
password=(
|
319
|
-
settings.api_database_password.get_secret_value()
|
320
|
-
if settings.api_database_password
|
321
|
-
else None
|
322
|
-
),
|
323
|
-
database=settings.api_database_name,
|
324
|
-
query=[], # type: ignore
|
325
|
-
).render_as_string(hide_password=False)
|
326
|
-
|
327
|
-
elif settings.api_database_driver == "sqlite+aiosqlite":
|
328
|
-
if settings.api_database_name:
|
329
|
-
value = f"{settings.api_database_driver}:///{settings.api_database_name}"
|
330
|
-
else:
|
331
|
-
value = f"sqlite+aiosqlite:///{settings.home}/prefect.db"
|
332
|
-
|
333
|
-
elif settings.api_database_driver:
|
334
|
-
raise ValueError(f"Unsupported database driver: {settings.api_database_driver}")
|
335
|
-
|
336
|
-
value = value if value else f"sqlite+aiosqlite:///{settings.home}/prefect.db"
|
337
|
-
return SecretStr(value)
|
338
|
-
|
339
|
-
|
340
|
-
###########################################################################
|
341
|
-
# Settings Loader
|
342
|
-
|
343
|
-
|
344
|
-
def _get_profiles_path() -> Path:
|
345
|
-
"""Helper to get the profiles path"""
|
346
|
-
|
347
|
-
if is_test_mode():
|
348
|
-
return DEFAULT_PROFILES_PATH
|
349
|
-
if env_path := os.getenv("PREFECT_PROFILES_PATH"):
|
350
|
-
return Path(env_path)
|
351
|
-
if not (DEFAULT_PREFECT_HOME / "profiles.toml").exists():
|
352
|
-
return DEFAULT_PROFILES_PATH
|
353
|
-
return DEFAULT_PREFECT_HOME / "profiles.toml"
|
354
|
-
|
355
|
-
|
356
|
-
class ProfileSettingsTomlLoader(PydanticBaseSettingsSource):
|
357
|
-
"""
|
358
|
-
Custom pydantic settings source to load profile settings from a toml file.
|
359
|
-
|
360
|
-
See https://docs.pydantic.dev/latest/concepts/pydantic_settings/#customise-settings-sources
|
361
|
-
"""
|
362
|
-
|
363
|
-
def __init__(self, settings_cls: Type[BaseSettings]):
|
364
|
-
super().__init__(settings_cls)
|
365
|
-
self.settings_cls = settings_cls
|
366
|
-
self.profiles_path = _get_profiles_path()
|
367
|
-
self.profile_settings = self._load_profile_settings()
|
368
|
-
|
369
|
-
def _load_profile_settings(self) -> Dict[str, Any]:
|
370
|
-
"""Helper method to load the profile settings from the profiles.toml file"""
|
371
|
-
|
372
|
-
if not self.profiles_path.exists():
|
373
|
-
return {}
|
374
|
-
|
375
|
-
try:
|
376
|
-
all_profile_data = toml.load(self.profiles_path)
|
377
|
-
except toml.TomlDecodeError:
|
378
|
-
warnings.warn(
|
379
|
-
f"Failed to load profiles from {self.profiles_path}. Please ensure the file is valid TOML."
|
380
|
-
)
|
381
|
-
return {}
|
382
|
-
|
383
|
-
if (
|
384
|
-
sys.argv[0].endswith("/prefect")
|
385
|
-
and len(sys.argv) >= 3
|
386
|
-
and sys.argv[1] == "--profile"
|
387
|
-
):
|
388
|
-
active_profile = sys.argv[2]
|
389
|
-
|
390
|
-
else:
|
391
|
-
active_profile = os.environ.get("PREFECT_PROFILE") or all_profile_data.get(
|
392
|
-
"active"
|
393
|
-
)
|
394
|
-
|
395
|
-
profiles_data = all_profile_data.get("profiles", {})
|
396
|
-
|
397
|
-
if not active_profile or active_profile not in profiles_data:
|
398
|
-
return {}
|
399
|
-
return profiles_data[active_profile]
|
400
|
-
|
401
|
-
def get_field_value(
|
402
|
-
self, field: FieldInfo, field_name: str
|
403
|
-
) -> Tuple[Any, str, bool]:
|
404
|
-
"""Concrete implementation to get the field value from the profile settings"""
|
405
|
-
value = self.profile_settings.get(
|
406
|
-
f"{self.config.get('env_prefix','')}{field_name.upper()}"
|
407
|
-
)
|
408
|
-
return value, field_name, self.field_is_complex(field)
|
409
|
-
|
410
|
-
def __call__(self) -> Dict[str, Any]:
|
411
|
-
"""Called by pydantic to get the settings from our custom source"""
|
412
|
-
if is_test_mode():
|
413
|
-
return {}
|
414
|
-
profile_settings: Dict[str, Any] = {}
|
415
|
-
for field_name, field in self.settings_cls.model_fields.items():
|
416
|
-
value, key, is_complex = self.get_field_value(field, field_name)
|
417
|
-
if value is not None:
|
418
|
-
prepared_value = self.prepare_field_value(
|
419
|
-
field_name, field, value, is_complex
|
420
|
-
)
|
421
|
-
profile_settings[key] = prepared_value
|
422
|
-
return profile_settings
|
423
|
-
|
424
|
-
|
425
|
-
###########################################################################
|
426
|
-
# Settings
|
427
|
-
class PrefectBaseSettings(BaseSettings):
|
428
|
-
@classmethod
|
429
|
-
def settings_customise_sources(
|
430
|
-
cls,
|
431
|
-
settings_cls: Type[BaseSettings],
|
432
|
-
init_settings: PydanticBaseSettingsSource,
|
433
|
-
env_settings: PydanticBaseSettingsSource,
|
434
|
-
dotenv_settings: PydanticBaseSettingsSource,
|
435
|
-
file_secret_settings: PydanticBaseSettingsSource,
|
436
|
-
) -> Tuple[PydanticBaseSettingsSource, ...]:
|
437
|
-
"""
|
438
|
-
Define an order for Prefect settings sources.
|
439
|
-
|
440
|
-
The order of the returned callables decides the priority of inputs; first item is the highest priority.
|
441
|
-
|
442
|
-
See https://docs.pydantic.dev/latest/concepts/pydantic_settings/#customise-settings-sources
|
443
|
-
"""
|
444
|
-
return (
|
445
|
-
init_settings,
|
446
|
-
env_settings,
|
447
|
-
dotenv_settings,
|
448
|
-
file_secret_settings,
|
449
|
-
ProfileSettingsTomlLoader(settings_cls),
|
450
|
-
)
|
451
|
-
|
452
|
-
@classmethod
|
453
|
-
def valid_setting_names(cls) -> Set[str]:
|
454
|
-
"""
|
455
|
-
A set of valid setting names, e.g. "PREFECT_API_URL" or "PREFECT_API_KEY".
|
456
|
-
"""
|
457
|
-
settings_fields = set()
|
458
|
-
for field_name, field in cls.model_fields.items():
|
459
|
-
if inspect.isclass(field.annotation) and issubclass(
|
460
|
-
field.annotation, PrefectBaseSettings
|
461
|
-
):
|
462
|
-
settings_fields.update(field.annotation.valid_setting_names())
|
463
|
-
else:
|
464
|
-
settings_fields.add(
|
465
|
-
f"{cls.model_config.get('env_prefix')}{field_name.upper()}"
|
466
|
-
)
|
467
|
-
return settings_fields
|
468
|
-
|
469
|
-
def to_environment_variables(
|
470
|
-
self,
|
471
|
-
exclude_unset: bool = False,
|
472
|
-
include_secrets: bool = True,
|
473
|
-
) -> Dict[str, str]:
|
474
|
-
"""Convert the settings object to a dictionary of environment variables."""
|
475
|
-
|
476
|
-
env: Dict[str, Any] = self.model_dump(
|
477
|
-
exclude_unset=exclude_unset,
|
478
|
-
mode="json",
|
479
|
-
context={"include_secrets": include_secrets},
|
480
|
-
)
|
481
|
-
env_variables = {}
|
482
|
-
for key in self.model_fields.keys():
|
483
|
-
if isinstance(child_settings := getattr(self, key), PrefectBaseSettings):
|
484
|
-
child_env = child_settings.to_environment_variables(
|
485
|
-
exclude_unset=exclude_unset,
|
486
|
-
include_secrets=include_secrets,
|
487
|
-
)
|
488
|
-
env_variables.update(child_env)
|
489
|
-
elif (value := env.get(key)) is not None:
|
490
|
-
env_variables[
|
491
|
-
f"{self.model_config.get('env_prefix')}{key.upper()}"
|
492
|
-
] = str(value)
|
493
|
-
return env_variables
|
494
|
-
|
495
|
-
@model_serializer(
|
496
|
-
mode="wrap", when_used="always"
|
497
|
-
) # TODO: reconsider `when_used` default for more control
|
498
|
-
def ser_model(
|
499
|
-
self, handler: SerializerFunctionWrapHandler, info: SerializationInfo
|
500
|
-
) -> Any:
|
501
|
-
ctx = info.context
|
502
|
-
jsonable_self = handler(self)
|
503
|
-
if ctx and ctx.get("include_secrets") is True:
|
504
|
-
dump_kwargs = dict(
|
505
|
-
include=info.include,
|
506
|
-
exclude=info.exclude,
|
507
|
-
exclude_unset=info.exclude_unset,
|
508
|
-
)
|
509
|
-
jsonable_self.update(
|
510
|
-
{
|
511
|
-
field_name: visit_collection(
|
512
|
-
expr=getattr(self, field_name),
|
513
|
-
visit_fn=partial(handle_secret_render, context=ctx),
|
514
|
-
return_data=True,
|
515
|
-
)
|
516
|
-
for field_name in set(self.model_dump(**dump_kwargs).keys()) # type: ignore
|
517
|
-
}
|
518
|
-
)
|
519
|
-
|
520
|
-
return jsonable_self
|
521
|
-
|
522
|
-
|
523
|
-
class APISettings(PrefectBaseSettings):
|
524
|
-
"""
|
525
|
-
Settings for interacting with the Prefect API
|
526
|
-
"""
|
527
|
-
|
528
|
-
model_config = SettingsConfigDict(
|
529
|
-
env_prefix="PREFECT_API_", env_file=".env", extra="ignore"
|
530
|
-
)
|
531
|
-
url: Optional[str] = Field(
|
532
|
-
default=None,
|
533
|
-
description="The URL of the Prefect API. If not set, the client will attempt to infer it.",
|
534
|
-
)
|
535
|
-
key: Optional[SecretStr] = Field(
|
536
|
-
default=None,
|
537
|
-
description="The API key used for authentication with the Prefect API. Should be kept secret.",
|
538
|
-
)
|
539
|
-
tls_insecure_skip_verify: bool = Field(
|
540
|
-
default=False,
|
541
|
-
description="If `True`, disables SSL checking to allow insecure requests. This is recommended only during development, e.g. when using self-signed certificates.",
|
542
|
-
)
|
543
|
-
ssl_cert_file: Optional[str] = Field(
|
544
|
-
default=os.environ.get("SSL_CERT_FILE"),
|
545
|
-
description="This configuration settings option specifies the path to an SSL certificate file.",
|
546
|
-
)
|
547
|
-
enable_http2: bool = Field(
|
548
|
-
default=False,
|
549
|
-
description="If true, enable support for HTTP/2 for communicating with an API. If the API does not support HTTP/2, this will have no effect and connections will be made via HTTP/1.1.",
|
550
|
-
)
|
551
|
-
request_timeout: float = Field(
|
552
|
-
default=60.0,
|
553
|
-
description="The default timeout for requests to the API",
|
554
|
-
)
|
555
|
-
default_limit: int = Field(
|
556
|
-
default=200,
|
557
|
-
description="The default limit applied to queries that can return multiple objects, such as `POST /flow_runs/filter`.",
|
558
|
-
)
|
559
|
-
|
560
|
-
|
561
|
-
class Settings(PrefectBaseSettings):
|
562
|
-
"""
|
563
|
-
Settings for Prefect using Pydantic settings.
|
564
|
-
|
565
|
-
See https://docs.pydantic.dev/latest/concepts/pydantic_settings
|
566
|
-
"""
|
567
|
-
|
568
|
-
model_config = SettingsConfigDict(
|
569
|
-
env_file=".env",
|
570
|
-
env_prefix="PREFECT_",
|
571
|
-
env_nested_delimiter=None,
|
572
|
-
extra="ignore",
|
573
|
-
)
|
574
|
-
|
575
|
-
###########################################################################
|
576
|
-
# CLI
|
577
|
-
|
578
|
-
cli_colors: bool = Field(
|
579
|
-
default=True,
|
580
|
-
description="If True, use colors in CLI output. If `False`, output will not include colors codes.",
|
581
|
-
)
|
582
|
-
|
583
|
-
cli_prompt: Optional[bool] = Field(
|
584
|
-
default=None,
|
585
|
-
description="If `True`, use interactive prompts in CLI commands. If `False`, no interactive prompts will be used. If `None`, the value will be dynamically determined based on the presence of an interactive-enabled terminal.",
|
586
|
-
)
|
587
|
-
|
588
|
-
cli_wrap_lines: bool = Field(
|
589
|
-
default=True,
|
590
|
-
description="If `True`, wrap text by inserting new lines in long lines in CLI output. If `False`, output will not be wrapped.",
|
591
|
-
)
|
592
|
-
|
593
|
-
###########################################################################
|
594
|
-
# Testing
|
595
|
-
|
596
|
-
test_mode: bool = Field(
|
597
|
-
default=False,
|
598
|
-
description="If `True`, places the API in test mode. This may modify behavior to facilitate testing.",
|
599
|
-
)
|
600
|
-
|
601
|
-
unit_test_mode: bool = Field(
|
602
|
-
default=False,
|
603
|
-
description="This setting only exists to facilitate unit testing. If `True`, code is executing in a unit test context. Defaults to `False`.",
|
604
|
-
)
|
605
|
-
|
606
|
-
unit_test_loop_debug: bool = Field(
|
607
|
-
default=True,
|
608
|
-
description="If `True` turns on debug mode for the unit testing event loop.",
|
609
|
-
)
|
610
|
-
|
611
|
-
test_setting: Optional[Any] = Field(
|
612
|
-
default="FOO",
|
613
|
-
description="This setting only exists to facilitate unit testing. If in test mode, this setting will return its value. Otherwise, it returns `None`.",
|
614
|
-
)
|
615
|
-
|
616
|
-
###########################################################################
|
617
|
-
# Results settings
|
618
|
-
|
619
|
-
results_default_serializer: str = Field(
|
620
|
-
default="pickle",
|
621
|
-
description="The default serializer to use when not otherwise specified.",
|
622
|
-
)
|
623
|
-
|
624
|
-
results_persist_by_default: bool = Field(
|
625
|
-
default=False,
|
626
|
-
description="The default setting for persisting results when not otherwise specified.",
|
627
|
-
)
|
628
|
-
|
629
|
-
###########################################################################
|
630
|
-
# API settings
|
631
|
-
|
632
|
-
api: APISettings = Field(
|
633
|
-
default_factory=APISettings,
|
634
|
-
description="Settings for interacting with the Prefect API",
|
635
|
-
)
|
636
|
-
|
637
|
-
api_blocks_register_on_start: bool = Field(
|
638
|
-
default=True,
|
639
|
-
description="If set, any block types that have been imported will be registered with the backend on application startup. If not set, block types must be manually registered.",
|
640
|
-
)
|
641
|
-
|
642
|
-
api_log_retryable_errors: bool = Field(
|
643
|
-
default=False,
|
644
|
-
description="If `True`, log retryable errors in the API and it's services.",
|
645
|
-
)
|
646
|
-
|
647
|
-
api_default_limit: int = Field(
|
648
|
-
default=200,
|
649
|
-
description="The default limit applied to queries that can return multiple objects, such as `POST /flow_runs/filter`.",
|
650
|
-
)
|
651
|
-
|
652
|
-
api_task_cache_key_max_length: int = Field(
|
653
|
-
default=2000,
|
654
|
-
description="The maximum number of characters allowed for a task run cache key.",
|
655
|
-
)
|
656
|
-
|
657
|
-
api_max_flow_run_graph_nodes: int = Field(
|
658
|
-
default=10000,
|
659
|
-
description="The maximum size of a flow run graph on the v2 API",
|
660
|
-
)
|
661
|
-
|
662
|
-
api_max_flow_run_graph_artifacts: int = Field(
|
663
|
-
default=10000,
|
664
|
-
description="The maximum number of artifacts to show on a flow run graph on the v2 API",
|
665
|
-
)
|
666
|
-
|
667
|
-
###########################################################################
|
668
|
-
# API Database settings
|
669
|
-
|
670
|
-
api_database_connection_url: Optional[SecretStr] = Field(
|
671
|
-
default=None,
|
672
|
-
description="""
|
673
|
-
A database connection URL in a SQLAlchemy-compatible
|
674
|
-
format. Prefect currently supports SQLite and Postgres. Note that all
|
675
|
-
Prefect database engines must use an async driver - for SQLite, use
|
676
|
-
`sqlite+aiosqlite` and for Postgres use `postgresql+asyncpg`.
|
677
|
-
|
678
|
-
SQLite in-memory databases can be used by providing the url
|
679
|
-
`sqlite+aiosqlite:///file::memory:?cache=shared&uri=true&check_same_thread=false`,
|
680
|
-
which will allow the database to be accessed by multiple threads. Note
|
681
|
-
that in-memory databases can not be accessed from multiple processes and
|
682
|
-
should only be used for simple tests.
|
683
|
-
""",
|
684
|
-
)
|
685
|
-
|
686
|
-
api_database_driver: Optional[
|
687
|
-
Literal["postgresql+asyncpg", "sqlite+aiosqlite"]
|
688
|
-
] = Field(
|
689
|
-
default=None,
|
690
|
-
description=(
|
691
|
-
"The database driver to use when connecting to the database. "
|
692
|
-
"If not set, the driver will be inferred from the connection URL."
|
693
|
-
),
|
694
|
-
)
|
695
|
-
|
696
|
-
api_database_host: Optional[str] = Field(
|
697
|
-
default=None,
|
698
|
-
description="The database server host.",
|
699
|
-
)
|
700
|
-
|
701
|
-
api_database_port: Optional[int] = Field(
|
702
|
-
default=None,
|
703
|
-
description="The database server port.",
|
704
|
-
)
|
705
|
-
|
706
|
-
api_database_user: Optional[str] = Field(
|
707
|
-
default=None,
|
708
|
-
description="The user to use when connecting to the database.",
|
709
|
-
)
|
710
|
-
|
711
|
-
api_database_name: Optional[str] = Field(
|
712
|
-
default=None,
|
713
|
-
description="The name of the Prefect database on the remote server, or the path to the database file for SQLite.",
|
714
|
-
)
|
715
|
-
|
716
|
-
api_database_password: Optional[SecretStr] = Field(
|
717
|
-
default=None,
|
718
|
-
description="The password to use when connecting to the database. Should be kept secret.",
|
719
|
-
)
|
720
|
-
|
721
|
-
api_database_echo: bool = Field(
|
722
|
-
default=False,
|
723
|
-
description="If `True`, SQLAlchemy will log all SQL issued to the database. Defaults to `False`.",
|
724
|
-
)
|
725
|
-
|
726
|
-
api_database_migrate_on_start: bool = Field(
|
727
|
-
default=True,
|
728
|
-
description="If `True`, the database will be migrated on application startup.",
|
729
|
-
)
|
730
|
-
|
731
|
-
api_database_timeout: Optional[float] = Field(
|
732
|
-
default=10.0,
|
733
|
-
description="A statement timeout, in seconds, applied to all database interactions made by the API. Defaults to 10 seconds.",
|
734
|
-
)
|
735
|
-
|
736
|
-
api_database_connection_timeout: Optional[float] = Field(
|
737
|
-
default=5,
|
738
|
-
description="A connection timeout, in seconds, applied to database connections. Defaults to `5`.",
|
739
|
-
)
|
740
|
-
|
741
|
-
###########################################################################
|
742
|
-
# API Services settings
|
743
|
-
|
744
|
-
api_services_scheduler_enabled: bool = Field(
|
745
|
-
default=True,
|
746
|
-
description="Whether or not to start the scheduler service in the server application.",
|
747
|
-
)
|
748
|
-
|
749
|
-
api_services_scheduler_loop_seconds: float = Field(
|
750
|
-
default=60,
|
751
|
-
description="""
|
752
|
-
The scheduler loop interval, in seconds. This determines
|
753
|
-
how often the scheduler will attempt to schedule new flow runs, but has no
|
754
|
-
impact on how quickly either flow runs or task runs are actually executed.
|
755
|
-
Defaults to `60`.
|
756
|
-
""",
|
757
|
-
)
|
758
|
-
|
759
|
-
api_services_scheduler_deployment_batch_size: int = Field(
|
760
|
-
default=100,
|
761
|
-
description="""
|
762
|
-
The number of deployments the scheduler will attempt to
|
763
|
-
schedule in a single batch. If there are more deployments than the batch
|
764
|
-
size, the scheduler immediately attempts to schedule the next batch; it
|
765
|
-
does not sleep for `scheduler_loop_seconds` until it has visited every
|
766
|
-
deployment once. Defaults to `100`.
|
767
|
-
""",
|
768
|
-
)
|
769
|
-
|
770
|
-
api_services_scheduler_max_runs: int = Field(
|
771
|
-
default=100,
|
772
|
-
description="""
|
773
|
-
The scheduler will attempt to schedule up to this many
|
774
|
-
auto-scheduled runs in the future. Note that runs may have fewer than
|
775
|
-
this many scheduled runs, depending on the value of
|
776
|
-
`scheduler_max_scheduled_time`. Defaults to `100`.
|
777
|
-
""",
|
778
|
-
)
|
779
|
-
|
780
|
-
api_services_scheduler_min_runs: int = Field(
|
781
|
-
default=3,
|
782
|
-
description="""
|
783
|
-
The scheduler will attempt to schedule at least this many
|
784
|
-
auto-scheduled runs in the future. Note that runs may have more than
|
785
|
-
this many scheduled runs, depending on the value of
|
786
|
-
`scheduler_min_scheduled_time`. Defaults to `3`.
|
787
|
-
""",
|
788
|
-
)
|
789
|
-
|
790
|
-
api_services_scheduler_max_scheduled_time: timedelta = Field(
|
791
|
-
default=timedelta(days=100),
|
792
|
-
description="""
|
793
|
-
The scheduler will create new runs up to this far in the
|
794
|
-
future. Note that this setting will take precedence over
|
795
|
-
`scheduler_max_runs`: if a flow runs once a month and
|
796
|
-
`scheduler_max_scheduled_time` is three months, then only three runs will be
|
797
|
-
scheduled. Defaults to 100 days (`8640000` seconds).
|
798
|
-
""",
|
799
|
-
)
|
800
|
-
|
801
|
-
api_services_scheduler_min_scheduled_time: timedelta = Field(
|
802
|
-
default=timedelta(hours=1),
|
803
|
-
description="""
|
804
|
-
The scheduler will create new runs at least this far in the
|
805
|
-
future. Note that this setting will take precedence over `scheduler_min_runs`:
|
806
|
-
if a flow runs every hour and `scheduler_min_scheduled_time` is three hours,
|
807
|
-
then three runs will be scheduled even if `scheduler_min_runs` is 1. Defaults to
|
808
|
-
""",
|
809
|
-
)
|
810
|
-
|
811
|
-
api_services_scheduler_insert_batch_size: int = Field(
|
812
|
-
default=500,
|
813
|
-
description="""
|
814
|
-
The number of runs the scheduler will attempt to insert in a single batch.
|
815
|
-
Defaults to `500`.
|
816
|
-
""",
|
817
|
-
)
|
818
|
-
|
819
|
-
api_services_late_runs_enabled: bool = Field(
|
820
|
-
default=True,
|
821
|
-
description="Whether or not to start the late runs service in the server application.",
|
822
|
-
)
|
823
|
-
|
824
|
-
api_services_late_runs_loop_seconds: float = Field(
|
825
|
-
default=5,
|
826
|
-
description="""
|
827
|
-
The late runs service will look for runs to mark as late this often. Defaults to `5`.
|
828
|
-
""",
|
829
|
-
)
|
830
|
-
|
831
|
-
api_services_late_runs_after_seconds: timedelta = Field(
|
832
|
-
default=timedelta(seconds=15),
|
833
|
-
description="""
|
834
|
-
The late runs service will mark runs as late after they have exceeded their scheduled start time by this many seconds. Defaults to `5` seconds.
|
835
|
-
""",
|
836
|
-
)
|
837
|
-
|
838
|
-
api_services_pause_expirations_loop_seconds: float = Field(
|
839
|
-
default=5,
|
840
|
-
description="""
|
841
|
-
The pause expiration service will look for runs to mark as failed this often. Defaults to `5`.
|
842
|
-
""",
|
843
|
-
)
|
844
|
-
|
845
|
-
api_services_cancellation_cleanup_enabled: bool = Field(
|
846
|
-
default=True,
|
847
|
-
description="Whether or not to start the cancellation cleanup service in the server application.",
|
848
|
-
)
|
849
|
-
|
850
|
-
api_services_cancellation_cleanup_loop_seconds: float = Field(
|
851
|
-
default=20,
|
852
|
-
description="""
|
853
|
-
The cancellation cleanup service will look non-terminal tasks and subflows this often. Defaults to `20`.
|
854
|
-
""",
|
855
|
-
)
|
856
|
-
|
857
|
-
api_services_foreman_enabled: bool = Field(
|
858
|
-
default=True,
|
859
|
-
description="Whether or not to start the Foreman service in the server application.",
|
860
|
-
)
|
861
|
-
|
862
|
-
api_services_foreman_loop_seconds: float = Field(
|
863
|
-
default=15,
|
864
|
-
description="""
|
865
|
-
The number of seconds to wait between each iteration of the Foreman loop which checks
|
866
|
-
for offline workers and updates work pool status. Defaults to `15`.
|
867
|
-
""",
|
868
|
-
)
|
869
|
-
|
870
|
-
api_services_foreman_inactivity_heartbeat_multiple: int = Field(
|
871
|
-
default=3,
|
872
|
-
description="""
|
873
|
-
The number of heartbeats that must be missed before a worker is marked as offline. Defaults to `3`.
|
874
|
-
""",
|
875
|
-
)
|
876
|
-
|
877
|
-
api_services_foreman_fallback_heartbeat_interval_seconds: int = Field(
|
878
|
-
default=30,
|
879
|
-
description="""
|
880
|
-
The number of seconds to use for online/offline evaluation if a worker's heartbeat
|
881
|
-
interval is not set. Defaults to `30`.
|
882
|
-
""",
|
883
|
-
)
|
884
|
-
|
885
|
-
api_services_foreman_deployment_last_polled_timeout_seconds: int = Field(
|
886
|
-
default=60,
|
887
|
-
description="""
|
888
|
-
The number of seconds before a deployment is marked as not ready if it has not been
|
889
|
-
polled. Defaults to `60`.
|
890
|
-
""",
|
891
|
-
)
|
892
|
-
|
893
|
-
api_services_foreman_work_queue_last_polled_timeout_seconds: int = Field(
|
894
|
-
default=60,
|
895
|
-
description="""
|
896
|
-
The number of seconds before a work queue is marked as not ready if it has not been
|
897
|
-
polled. Defaults to `60`.
|
898
|
-
""",
|
899
|
-
)
|
900
|
-
|
901
|
-
api_services_task_run_recorder_enabled: bool = Field(
|
902
|
-
default=True,
|
903
|
-
description="Whether or not to start the task run recorder service in the server application.",
|
904
|
-
)
|
905
|
-
|
906
|
-
api_services_flow_run_notifications_enabled: bool = Field(
|
907
|
-
default=True,
|
908
|
-
description="""
|
909
|
-
Whether or not to start the flow run notifications service in the server application.
|
910
|
-
If disabled, you will need to run this service separately to send flow run notifications.
|
911
|
-
""",
|
912
|
-
)
|
913
|
-
|
914
|
-
api_services_pause_expirations_enabled: bool = Field(
|
915
|
-
default=True,
|
916
|
-
description="""
|
917
|
-
Whether or not to start the paused flow run expiration service in the server
|
918
|
-
application. If disabled, paused flows that have timed out will remain in a Paused state
|
919
|
-
until a resume attempt.
|
920
|
-
""",
|
921
|
-
)
|
922
|
-
|
923
|
-
###########################################################################
|
924
|
-
# Cloud settings
|
925
|
-
|
926
|
-
cloud_api_url: str = Field(
|
927
|
-
default="https://api.prefect.cloud/api",
|
928
|
-
description="API URL for Prefect Cloud. Used for authentication.",
|
929
|
-
)
|
930
|
-
|
931
|
-
cloud_ui_url: Optional[str] = Field(
|
932
|
-
default=None,
|
933
|
-
description="The URL of the Prefect Cloud UI. If not set, the client will attempt to infer it.",
|
934
|
-
)
|
935
|
-
|
936
|
-
###########################################################################
|
937
|
-
# Logging settings
|
938
|
-
|
939
|
-
logging_level: LogLevel = Field(
|
940
|
-
default="INFO",
|
941
|
-
description="The default logging level for Prefect loggers.",
|
942
|
-
)
|
943
|
-
|
944
|
-
logging_internal_level: LogLevel = Field(
|
945
|
-
default="ERROR",
|
946
|
-
description="The default logging level for Prefect's internal machinery loggers.",
|
947
|
-
)
|
948
|
-
|
949
|
-
logging_server_level: LogLevel = Field(
|
950
|
-
default="WARNING",
|
951
|
-
description="The default logging level for the Prefect API server.",
|
952
|
-
)
|
953
|
-
|
954
|
-
logging_settings_path: Optional[Path] = Field(
|
955
|
-
default=None,
|
956
|
-
description="The path to a custom YAML logging configuration file.",
|
957
|
-
)
|
958
|
-
|
959
|
-
logging_extra_loggers: Annotated[
|
960
|
-
Union[str, list[str], None],
|
961
|
-
AfterValidator(lambda v: [n.strip() for n in v.split(",")] if v else []),
|
962
|
-
] = Field(
|
963
|
-
default=None,
|
964
|
-
description="Additional loggers to attach to Prefect logging at runtime.",
|
965
|
-
)
|
966
|
-
|
967
|
-
logging_log_prints: bool = Field(
|
968
|
-
default=False,
|
969
|
-
description="If `True`, `print` statements in flows and tasks will be redirected to the Prefect logger for the given run.",
|
970
|
-
)
|
971
|
-
|
972
|
-
logging_to_api_enabled: bool = Field(
|
973
|
-
default=True,
|
974
|
-
description="If `True`, logs will be sent to the API.",
|
975
|
-
)
|
976
|
-
|
977
|
-
logging_to_api_batch_interval: float = Field(
|
978
|
-
default=2.0,
|
979
|
-
description="The number of seconds between batched writes of logs to the API.",
|
980
|
-
)
|
981
|
-
|
982
|
-
logging_to_api_batch_size: int = Field(
|
983
|
-
default=4_000_000,
|
984
|
-
description="The number of logs to batch before sending to the API.",
|
985
|
-
)
|
986
|
-
|
987
|
-
logging_to_api_max_log_size: int = Field(
|
988
|
-
default=1_000_000,
|
989
|
-
description="The maximum size in bytes for a single log.",
|
990
|
-
)
|
991
|
-
|
992
|
-
logging_to_api_when_missing_flow: Literal["warn", "error", "ignore"] = Field(
|
993
|
-
default="warn",
|
994
|
-
description="""
|
995
|
-
Controls the behavior when loggers attempt to send logs to the API handler from outside of a flow.
|
996
|
-
|
997
|
-
All logs sent to the API must be associated with a flow run. The API log handler can
|
998
|
-
only be used outside of a flow by manually providing a flow run identifier. Logs
|
999
|
-
that are not associated with a flow run will not be sent to the API. This setting can
|
1000
|
-
be used to determine if a warning or error is displayed when the identifier is missing.
|
1001
|
-
|
1002
|
-
The following options are available:
|
1003
|
-
|
1004
|
-
- "warn": Log a warning message.
|
1005
|
-
- "error": Raise an error.
|
1006
|
-
- "ignore": Do not log a warning message or raise an error.
|
1007
|
-
""",
|
1008
|
-
)
|
1009
|
-
|
1010
|
-
logging_colors: bool = Field(
|
1011
|
-
default=True,
|
1012
|
-
description="If `True`, use colors in CLI output. If `False`, output will not include colors codes.",
|
1013
|
-
)
|
1014
|
-
|
1015
|
-
logging_markup: bool = Field(
|
1016
|
-
default=False,
|
1017
|
-
description="""
|
1018
|
-
Whether to interpret strings wrapped in square brackets as a style.
|
1019
|
-
This allows styles to be conveniently added to log messages, e.g.
|
1020
|
-
`[red]This is a red message.[/red]`. However, the downside is, if enabled,
|
1021
|
-
strings that contain square brackets may be inaccurately interpreted and
|
1022
|
-
lead to incomplete output, e.g.
|
1023
|
-
`[red]This is a red message.[/red]` may be interpreted as
|
1024
|
-
`[red]This is a red message.[/red]`.
|
1025
|
-
""",
|
1026
|
-
)
|
1027
|
-
|
1028
|
-
###########################################################################
|
1029
|
-
# Server settings
|
1030
|
-
|
1031
|
-
server_api_host: str = Field(
|
1032
|
-
default="127.0.0.1",
|
1033
|
-
description="The API's host address (defaults to `127.0.0.1`).",
|
1034
|
-
)
|
1035
|
-
|
1036
|
-
server_api_port: int = Field(
|
1037
|
-
default=4200,
|
1038
|
-
description="The API's port address (defaults to `4200`).",
|
1039
|
-
)
|
1040
|
-
|
1041
|
-
server_api_keepalive_timeout: int = Field(
|
1042
|
-
default=5,
|
1043
|
-
description="""
|
1044
|
-
The API's keep alive timeout (defaults to `5`).
|
1045
|
-
Refer to https://www.uvicorn.org/settings/#timeouts for details.
|
1046
|
-
|
1047
|
-
When the API is hosted behind a load balancer, you may want to set this to a value
|
1048
|
-
greater than the load balancer's idle timeout.
|
1049
|
-
|
1050
|
-
Note this setting only applies when calling `prefect server start`; if hosting the
|
1051
|
-
API with another tool you will need to configure this there instead.
|
1052
|
-
""",
|
1053
|
-
)
|
1054
|
-
|
1055
|
-
server_csrf_protection_enabled: bool = Field(
|
1056
|
-
default=False,
|
1057
|
-
description="""
|
1058
|
-
Controls the activation of CSRF protection for the Prefect server API.
|
1059
|
-
|
1060
|
-
When enabled (`True`), the server enforces CSRF validation checks on incoming
|
1061
|
-
state-changing requests (POST, PUT, PATCH, DELETE), requiring a valid CSRF
|
1062
|
-
token to be included in the request headers or body. This adds a layer of
|
1063
|
-
security by preventing unauthorized or malicious sites from making requests on
|
1064
|
-
behalf of authenticated users.
|
1065
|
-
|
1066
|
-
It is recommended to enable this setting in production environments where the
|
1067
|
-
API is exposed to web clients to safeguard against CSRF attacks.
|
1068
|
-
|
1069
|
-
Note: Enabling this setting requires corresponding support in the client for
|
1070
|
-
CSRF token management. See PREFECT_CLIENT_CSRF_SUPPORT_ENABLED for more.
|
1071
|
-
""",
|
1072
|
-
)
|
1073
|
-
|
1074
|
-
server_csrf_token_expiration: timedelta = Field(
|
1075
|
-
default=timedelta(hours=1),
|
1076
|
-
description="""
|
1077
|
-
Specifies the duration for which a CSRF token remains valid after being issued
|
1078
|
-
by the server.
|
1079
|
-
|
1080
|
-
The default expiration time is set to 1 hour, which offers a reasonable
|
1081
|
-
compromise. Adjust this setting based on your specific security requirements
|
1082
|
-
and usage patterns.
|
1083
|
-
""",
|
1084
|
-
)
|
1085
|
-
|
1086
|
-
server_cors_allowed_origins: str = Field(
|
1087
|
-
default="*",
|
1088
|
-
description="""
|
1089
|
-
A comma-separated list of origins that are authorized to make cross-origin requests to the API.
|
1090
|
-
|
1091
|
-
By default, this is set to `*`, which allows requests from all origins.
|
1092
|
-
""",
|
1093
|
-
)
|
1094
|
-
|
1095
|
-
server_cors_allowed_methods: str = Field(
|
1096
|
-
default="*",
|
1097
|
-
description="""
|
1098
|
-
A comma-separated list of methods that are authorized to make cross-origin requests to the API.
|
1099
|
-
|
1100
|
-
By default, this is set to `*`, which allows requests from all methods.
|
1101
|
-
""",
|
1102
|
-
)
|
1103
|
-
|
1104
|
-
server_cors_allowed_headers: str = Field(
|
1105
|
-
default="*",
|
1106
|
-
description="""
|
1107
|
-
A comma-separated list of headers that are authorized to make cross-origin requests to the API.
|
1108
|
-
|
1109
|
-
By default, this is set to `*`, which allows requests from all headers.
|
1110
|
-
""",
|
1111
|
-
)
|
1112
|
-
|
1113
|
-
server_allow_ephemeral_mode: bool = Field(
|
1114
|
-
default=False,
|
1115
|
-
description="""
|
1116
|
-
Controls whether or not a subprocess server can be started when no API URL is provided.
|
1117
|
-
""",
|
1118
|
-
)
|
1119
|
-
|
1120
|
-
server_ephemeral_startup_timeout_seconds: int = Field(
|
1121
|
-
default=10,
|
1122
|
-
description="""
|
1123
|
-
The number of seconds to wait for the server to start when ephemeral mode is enabled.
|
1124
|
-
Defaults to `10`.
|
1125
|
-
""",
|
1126
|
-
)
|
1127
|
-
|
1128
|
-
server_analytics_enabled: bool = Field(
|
1129
|
-
default=True,
|
1130
|
-
description="""
|
1131
|
-
When enabled, Prefect sends anonymous data (e.g. count of flow runs, package version)
|
1132
|
-
on server startup to help us improve our product.
|
1133
|
-
""",
|
1134
|
-
)
|
1135
|
-
|
1136
|
-
###########################################################################
|
1137
|
-
# UI settings
|
1138
|
-
|
1139
|
-
ui_enabled: bool = Field(
|
1140
|
-
default=True,
|
1141
|
-
description="Whether or not to serve the Prefect UI.",
|
1142
|
-
)
|
1143
|
-
|
1144
|
-
ui_url: Optional[str] = Field(
|
1145
|
-
default=None,
|
1146
|
-
description="The URL of the Prefect UI. If not set, the client will attempt to infer it.",
|
1147
|
-
)
|
1148
|
-
|
1149
|
-
ui_api_url: Optional[str] = Field(
|
1150
|
-
default=None,
|
1151
|
-
description="The connection url for communication from the UI to the API. Defaults to `PREFECT_API_URL` if set. Otherwise, the default URL is generated from `PREFECT_SERVER_API_HOST` and `PREFECT_SERVER_API_PORT`.",
|
1152
|
-
)
|
1153
|
-
|
1154
|
-
ui_serve_base: str = Field(
|
1155
|
-
default="/",
|
1156
|
-
description="The base URL path to serve the Prefect UI from.",
|
1157
|
-
)
|
1158
|
-
|
1159
|
-
ui_static_directory: Optional[str] = Field(
|
1160
|
-
default=None,
|
1161
|
-
description="The directory to serve static files from. This should be used when running into permissions issues when attempting to serve the UI from the default directory (for example when running in a Docker container).",
|
1162
|
-
)
|
1163
|
-
|
1164
|
-
###########################################################################
|
1165
|
-
# Events settings
|
1166
|
-
|
1167
|
-
api_services_triggers_enabled: bool = Field(
|
1168
|
-
default=True,
|
1169
|
-
description="Whether or not to start the triggers service in the server application.",
|
1170
|
-
)
|
1171
|
-
|
1172
|
-
api_services_event_persister_enabled: bool = Field(
|
1173
|
-
default=True,
|
1174
|
-
description="Whether or not to start the event persister service in the server application.",
|
1175
|
-
)
|
1176
|
-
|
1177
|
-
api_services_event_persister_batch_size: int = Field(
|
1178
|
-
default=20,
|
1179
|
-
gt=0,
|
1180
|
-
description="The number of events the event persister will attempt to insert in one batch.",
|
1181
|
-
)
|
1182
|
-
|
1183
|
-
api_services_event_persister_flush_interval: float = Field(
|
1184
|
-
default=5,
|
1185
|
-
gt=0.0,
|
1186
|
-
description="The maximum number of seconds between flushes of the event persister.",
|
1187
|
-
)
|
1188
|
-
|
1189
|
-
api_events_stream_out_enabled: bool = Field(
|
1190
|
-
default=True,
|
1191
|
-
description="Whether or not to stream events out to the API via websockets.",
|
1192
|
-
)
|
1193
|
-
|
1194
|
-
api_enable_metrics: bool = Field(
|
1195
|
-
default=False,
|
1196
|
-
description="Whether or not to enable Prometheus metrics in the API.",
|
1197
|
-
)
|
1198
|
-
|
1199
|
-
api_events_related_resource_cache_ttl: timedelta = Field(
|
1200
|
-
default=timedelta(minutes=5),
|
1201
|
-
description="The number of seconds to cache related resources for in the API.",
|
1202
|
-
)
|
1203
|
-
|
1204
|
-
client_enable_metrics: bool = Field(
|
1205
|
-
default=False,
|
1206
|
-
description="Whether or not to enable Prometheus metrics in the client.",
|
1207
|
-
)
|
1208
|
-
|
1209
|
-
client_metrics_port: int = Field(
|
1210
|
-
default=4201, description="The port to expose the client Prometheus metrics on."
|
1211
|
-
)
|
1212
|
-
|
1213
|
-
events_maximum_labels_per_resource: int = Field(
|
1214
|
-
default=500,
|
1215
|
-
description="The maximum number of labels a resource may have.",
|
1216
|
-
)
|
1217
|
-
|
1218
|
-
events_maximum_related_resources: int = Field(
|
1219
|
-
default=500,
|
1220
|
-
description="The maximum number of related resources an Event may have.",
|
1221
|
-
)
|
1222
|
-
|
1223
|
-
events_maximum_size_bytes: int = Field(
|
1224
|
-
default=1_500_000,
|
1225
|
-
description="The maximum size of an Event when serialized to JSON",
|
1226
|
-
)
|
1227
|
-
|
1228
|
-
events_expired_bucket_buffer: timedelta = Field(
|
1229
|
-
default=timedelta(seconds=60),
|
1230
|
-
description="The amount of time to retain expired automation buckets",
|
1231
|
-
)
|
1232
|
-
|
1233
|
-
events_proactive_granularity: timedelta = Field(
|
1234
|
-
default=timedelta(seconds=5),
|
1235
|
-
description="How frequently proactive automations are evaluated",
|
1236
|
-
)
|
1237
|
-
|
1238
|
-
events_retention_period: timedelta = Field(
|
1239
|
-
default=timedelta(days=7),
|
1240
|
-
description="The amount of time to retain events in the database.",
|
1241
|
-
)
|
1242
|
-
|
1243
|
-
events_maximum_websocket_backfill: timedelta = Field(
|
1244
|
-
default=timedelta(minutes=15),
|
1245
|
-
description="The maximum range to look back for backfilling events for a websocket subscriber.",
|
1246
|
-
)
|
1247
|
-
|
1248
|
-
events_websocket_backfill_page_size: int = Field(
|
1249
|
-
default=250,
|
1250
|
-
gt=0,
|
1251
|
-
description="The page size for the queries to backfill events for websocket subscribers.",
|
1252
|
-
)
|
1253
|
-
|
1254
|
-
###########################################################################
|
1255
|
-
# uncategorized
|
1256
|
-
|
1257
|
-
home: Annotated[Path, BeforeValidator(lambda x: Path(x).expanduser())] = Field(
|
1258
|
-
default=Path("~") / ".prefect",
|
1259
|
-
description="The path to the Prefect home directory. Defaults to ~/.prefect",
|
1260
|
-
)
|
1261
|
-
debug_mode: bool = Field(
|
1262
|
-
default=False,
|
1263
|
-
description="If True, enables debug mode which may provide additional logging and debugging features.",
|
1264
|
-
)
|
1265
|
-
|
1266
|
-
silence_api_url_misconfiguration: bool = Field(
|
1267
|
-
default=False,
|
1268
|
-
description="""
|
1269
|
-
If `True`, disable the warning when a user accidentally misconfigure its `PREFECT_API_URL`
|
1270
|
-
Sometimes when a user manually set `PREFECT_API_URL` to a custom url,reverse-proxy for example,
|
1271
|
-
we would like to silence this warning so we will set it to `FALSE`.
|
1272
|
-
""",
|
1273
|
-
)
|
1274
|
-
|
1275
|
-
client_max_retries: int = Field(
|
1276
|
-
default=5,
|
1277
|
-
ge=0,
|
1278
|
-
description="""
|
1279
|
-
The maximum number of retries to perform on failed HTTP requests.
|
1280
|
-
Defaults to 5. Set to 0 to disable retries.
|
1281
|
-
See `PREFECT_CLIENT_RETRY_EXTRA_CODES` for details on which HTTP status codes are
|
1282
|
-
retried.
|
1283
|
-
""",
|
1284
|
-
)
|
1285
|
-
|
1286
|
-
client_retry_jitter_factor: float = Field(
|
1287
|
-
default=0.2,
|
1288
|
-
ge=0.0,
|
1289
|
-
description="""
|
1290
|
-
A value greater than or equal to zero to control the amount of jitter added to retried
|
1291
|
-
client requests. Higher values introduce larger amounts of jitter.
|
1292
|
-
Set to 0 to disable jitter. See `clamped_poisson_interval` for details on the how jitter
|
1293
|
-
can affect retry lengths.
|
1294
|
-
""",
|
1295
|
-
)
|
1296
|
-
|
1297
|
-
client_retry_extra_codes: ClientRetryExtraCodes = Field(
|
1298
|
-
default_factory=set,
|
1299
|
-
description="""
|
1300
|
-
A list of extra HTTP status codes to retry on. Defaults to an empty list.
|
1301
|
-
429, 502 and 503 are always retried. Please note that not all routes are idempotent and retrying
|
1302
|
-
may result in unexpected behavior.
|
1303
|
-
""",
|
1304
|
-
examples=["404,429,503", "429", {404, 429, 503}],
|
1305
|
-
)
|
1306
|
-
|
1307
|
-
client_csrf_support_enabled: bool = Field(
|
1308
|
-
default=True,
|
1309
|
-
description="""
|
1310
|
-
Determines if CSRF token handling is active in the Prefect client for API
|
1311
|
-
requests.
|
1312
|
-
|
1313
|
-
When enabled (`True`), the client automatically manages CSRF tokens by
|
1314
|
-
retrieving, storing, and including them in applicable state-changing requests
|
1315
|
-
""",
|
1316
|
-
)
|
1317
|
-
|
1318
|
-
    experimental_warn: bool = Field(
        default=True,
        description="If `True`, warn on usage of experimental features.",
    )

    profiles_path: Optional[Path] = Field(
        default=None,
        description="The path to a profiles configuration file.",
    )

    tasks_refresh_cache: bool = Field(
        default=False,
        description="If `True`, enables a refresh of cached results: re-executing the task will refresh the cached results.",
    )

    task_default_retries: int = Field(
        default=0,
        ge=0,
        description="This value sets the default number of retries for all tasks.",
    )

    task_default_retry_delay_seconds: Union[int, float, list[float]] = Field(
        default=0,
        description="This value sets the default retry delay seconds for all tasks.",
    )

    task_run_tag_concurrency_slot_wait_seconds: int = Field(
        default=30,
        ge=0,
        description="The number of seconds to wait before retrying when a task run cannot secure a concurrency slot from the server.",
    )

    flow_default_retries: int = Field(
        default=0,
        ge=0,
        description="This value sets the default number of retries for all flows.",
    )

    flow_default_retry_delay_seconds: Union[int, float] = Field(
        default=0,
        description="This value sets the retry delay seconds for all flows.",
    )

    local_storage_path: Optional[Path] = Field(
        default=None,
        description="The path to a block storage directory to store things in.",
    )

    memo_store_path: Optional[Path] = Field(
        default=None,
        description="The path to the memo store file.",
    )

    memoize_block_auto_registration: bool = Field(
        default=True,
        description="Controls whether or not block auto-registration on start is memoized.",
    )

    sqlalchemy_pool_size: Optional[int] = Field(
        default=None,
        description="Controls connection pool size when using a PostgreSQL database with the Prefect API. If not set, the default SQLAlchemy pool size will be used.",
    )

    sqlalchemy_max_overflow: Optional[int] = Field(
        default=None,
        description="Controls maximum overflow of the connection pool when using a PostgreSQL database with the Prefect API. If not set, the default SQLAlchemy maximum overflow value will be used.",
    )

    async_fetch_state_result: bool = Field(
        default=False,
        description="""
        Determines whether `State.result()` fetches results automatically or not.
        In Prefect 2.6.0, the `State.result()` method was updated to be async
        to facilitate automatic retrieval of results from storage which means when
        writing async code you must `await` the call. For backwards compatibility,
        the result is not retrieved by default for async users. You may opt into this
        per call by passing `fetch=True` or toggle this setting to change the behavior
        globally.
        """,
    )

    runner_process_limit: int = Field(
        default=5,
        description="Maximum number of processes a runner will execute in parallel.",
    )

    runner_poll_frequency: int = Field(
        default=10,
        description="Number of seconds a runner should wait between queries for scheduled work.",
    )

    runner_server_missed_polls_tolerance: int = Field(
        default=2,
        description="Number of missed polls before a runner is considered unhealthy by its webserver.",
    )

    runner_server_host: str = Field(
        default="localhost",
        description="The host address the runner's webserver should bind to.",
    )

    runner_server_port: int = Field(
        default=8080,
        description="The port the runner's webserver should bind to.",
    )

    runner_server_log_level: LogLevel = Field(
        default="error",
        description="The log level of the runner's webserver.",
    )

    runner_server_enable: bool = Field(
        default=False,
        description="Whether or not to enable the runner's webserver.",
    )

    deployment_concurrency_slot_wait_seconds: float = Field(
        default=30.0,
        ge=0.0,
        description=(
            "The number of seconds to wait before retrying when a deployment flow run"
            " cannot secure a concurrency slot from the server."
        ),
    )

    deployment_schedule_max_scheduled_runs: int = Field(
        default=50,
        description="The maximum number of scheduled runs to create for a deployment.",
    )

    worker_heartbeat_seconds: float = Field(
        default=30,
        description="Number of seconds a worker should wait between sending a heartbeat.",
    )

    worker_query_seconds: float = Field(
        default=10,
        description="Number of seconds a worker should wait between queries for scheduled work.",
    )

    worker_prefetch_seconds: float = Field(
        default=10,
        description="The number of seconds into the future a worker should query for scheduled work.",
    )

    worker_webserver_host: str = Field(
        default="0.0.0.0",
        description="The host address the worker's webserver should bind to.",
    )

    worker_webserver_port: int = Field(
        default=8080,
        description="The port the worker's webserver should bind to.",
    )

    task_scheduling_default_storage_block: Optional[str] = Field(
        default=None,
        description="The `block-type/block-document` slug of a block to use as the default storage for autonomous tasks.",
    )

    task_scheduling_delete_failed_submissions: bool = Field(
        default=True,
        description="Whether or not to delete failed task submissions from the database.",
    )

    task_scheduling_max_scheduled_queue_size: int = Field(
        default=1000,
        description="The maximum number of scheduled tasks to queue for submission.",
    )

    task_scheduling_max_retry_queue_size: int = Field(
        default=100,
        description="The maximum number of retries to queue for submission.",
    )

    task_scheduling_pending_task_timeout: timedelta = Field(
        default=timedelta(0),
        description="How long before a PENDING task is made available to another task worker.",
    )

    experimental_enable_schedule_concurrency: bool = Field(
        default=False,
        description="Whether or not to enable concurrency for scheduled tasks.",
    )

    default_result_storage_block: Optional[str] = Field(
        default=None,
        description="The `block-type/block-document` slug of a block to use as the default result storage.",
    )

    default_work_pool_name: Optional[str] = Field(
        default=None,
        description="The default work pool to deploy to.",
    )

    default_docker_build_namespace: Optional[str] = Field(
        default=None,
        description="The default Docker namespace to use when building images.",
    )

    messaging_broker: str = Field(
        default="prefect.server.utilities.messaging.memory",
        description="Which message broker implementation to use for the messaging system, should point to a module that exports a Publisher and Consumer class.",
    )

    messaging_cache: str = Field(
        default="prefect.server.utilities.messaging.memory",
        description="Which cache implementation to use for the events system. Should point to a module that exports a Cache class.",
    )

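Note: individual defaults above can also be overridden for a bounded scope with the `temporary_settings` context manager defined later in this module. A sketch, assuming `PREFECT_TASK_DEFAULT_RETRIES` and `PREFECT_RUNNER_SERVER_PORT` resolve to `Setting` objects through the module-level `__getattr__`:

    # Illustrative sketch only: scoped overrides of the defaults defined above.
    from prefect.settings import (
        PREFECT_RUNNER_SERVER_PORT,
        PREFECT_TASK_DEFAULT_RETRIES,
        get_current_settings,
        temporary_settings,
    )

    with temporary_settings(
        updates={PREFECT_TASK_DEFAULT_RETRIES: 3, PREFECT_RUNNER_SERVER_PORT: 9090}
    ):
        # Inside the block, the current settings reflect the overrides.
        assert get_current_settings().task_default_retries == 3
    # Outside the block, the previous values are restored.
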
    ###########################################################################
    # allow deprecated access to PREFECT_SOME_SETTING_NAME

    def __getattribute__(self, name: str) -> Any:
        if name.startswith("PREFECT_"):
            field_name = env_var_to_accessor(name)
            warnings.warn(
                f"Accessing `Settings().{name}` is deprecated. Use `Settings().{field_name}` instead.",
                DeprecationWarning,
                stacklevel=2,
            )
            return super().__getattribute__(field_name)
        return super().__getattribute__(name)

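Note: a sketch of what the shim above does for callers, assuming `env_var_to_accessor` maps `PREFECT_TASKS_REFRESH_CACHE` onto the `tasks_refresh_cache` field:

    # Illustrative sketch only: the deprecated upper-case access still works but warns.
    import warnings

    from prefect.settings import Settings

    settings = Settings()

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        value = settings.PREFECT_TASKS_REFRESH_CACHE  # deprecated spelling
        assert any(issubclass(w.category, DeprecationWarning) for w in caught)

    # The supported spelling is the plain field name.
    assert value == settings.tasks_refresh_cache
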
    ###########################################################################

    @model_validator(mode="after")
    def post_hoc_settings(self) -> Self:
        """refactor on resolution of https://github.com/pydantic/pydantic/issues/9789

        we should not be modifying __pydantic_fields_set__ directly, but until we can
        define dependencies between defaults in a first-class way, we need to clean up
        post-hoc default assignments to keep set/unset fields correct after instantiation.
        """
        if self.cloud_ui_url is None:
            self.cloud_ui_url = default_cloud_ui_url(self)
            self.__pydantic_fields_set__.remove("cloud_ui_url")

        if self.ui_url is None:
            self.ui_url = default_ui_url(self)
            self.__pydantic_fields_set__.remove("ui_url")
        if self.ui_api_url is None:
            if self.api.url:
                self.ui_api_url = self.api.url
                self.__pydantic_fields_set__.remove("ui_api_url")
            else:
                self.ui_api_url = (
                    f"http://{self.server_api_host}:{self.server_api_port}"
                )
                self.__pydantic_fields_set__.remove("ui_api_url")
        if self.profiles_path is None or "PREFECT_HOME" in str(self.profiles_path):
            self.profiles_path = Path(f"{self.home}/profiles.toml")
            self.__pydantic_fields_set__.remove("profiles_path")
        if self.local_storage_path is None:
            self.local_storage_path = Path(f"{self.home}/storage")
            self.__pydantic_fields_set__.remove("local_storage_path")
        if self.memo_store_path is None:
            self.memo_store_path = Path(f"{self.home}/memo_store.toml")
            self.__pydantic_fields_set__.remove("memo_store_path")
        if self.debug_mode or self.test_mode:
            self.logging_level = "DEBUG"
            self.logging_internal_level = "DEBUG"
            self.__pydantic_fields_set__.remove("logging_level")
            self.__pydantic_fields_set__.remove("logging_internal_level")

        if self.logging_settings_path is None:
            self.logging_settings_path = Path(f"{self.home}/logging.yml")
            self.__pydantic_fields_set__.remove("logging_settings_path")
        # Set default database connection URL if not provided
        if self.api_database_connection_url is None:
            self.api_database_connection_url = default_database_connection_url(self)
            self.__pydantic_fields_set__.remove("api_database_connection_url")
        if "PREFECT_API_DATABASE_PASSWORD" in (
            db_url := (
                self.api_database_connection_url.get_secret_value()
                if isinstance(self.api_database_connection_url, SecretStr)
                else self.api_database_connection_url
            )
        ):
            if self.api_database_password is None:
                raise ValueError(
                    "database password is None - please set PREFECT_API_DATABASE_PASSWORD"
                )
            self.api_database_connection_url = SecretStr(
                db_url.replace(
                    "${PREFECT_API_DATABASE_PASSWORD}",
                    self.api_database_password.get_secret_value(),
                )
                if self.api_database_password
                else ""
            )
            self.__pydantic_fields_set__.remove("api_database_connection_url")
        return self

    @model_validator(mode="after")
    def emit_warnings(self) -> Self:
        """More post-hoc validation of settings, including warnings for misconfigurations."""
        values = self.model_dump()
        values = max_log_size_smaller_than_batch_size(values)
        values = warn_on_database_password_value_without_usage(values)
        if not self.silence_api_url_misconfiguration:
            values = warn_on_misconfigured_api_url(values)
        return self

    ##########################################################################
    # Settings methods

    def copy_with_update(
        self: Self,
        updates: Optional[Mapping[Setting, Any]] = None,
        set_defaults: Optional[Mapping[Setting, Any]] = None,
        restore_defaults: Optional[Iterable[Setting]] = None,
    ) -> Self:
        """
        Create a new Settings object with validation.

        Arguments:
            updates: A mapping of settings to new values. Existing values for the
                given settings will be overridden.
            set_defaults: A mapping of settings to new default values. Existing values for
                the given settings will only be overridden if they were not set.
            restore_defaults: An iterable of settings to restore to their default values.

        Returns:
            A new Settings object.
        """
        restore_defaults_obj = {}
        for r in restore_defaults or []:
            set_in_dict(restore_defaults_obj, r.accessor, True)
        updates = updates or {}
        set_defaults = set_defaults or {}

        set_defaults_obj = {}
        for setting, value in set_defaults.items():
            set_in_dict(set_defaults_obj, setting.accessor, value)

        updates_obj = {}
        for setting, value in updates.items():
            set_in_dict(updates_obj, setting.accessor, value)

        new_settings = self.__class__(
            **deep_merge_dicts(
                set_defaults_obj,
                self.model_dump(exclude_unset=True, exclude=restore_defaults_obj),
                updates_obj,
            )
        )
        return new_settings

    def hash_key(self) -> str:
        """
        Return a hash key for the settings object. This is needed since some
        settings may be unhashable, like lists.
        """
        env_variables = self.to_environment_variables()
        return str(hash(tuple((key, value) for key, value in env_variables.items())))


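Note: a sketch of how `copy_with_update` layers defaults, existing values, and explicit updates, assuming `PREFECT_API_URL` maps to the nested `api.url` field and `PREFECT_CLIENT_MAX_RETRIES` to `client_max_retries` in this version:

    # Illustrative sketch only: layered construction of a derived Settings object.
    from prefect.settings import (
        PREFECT_API_URL,
        PREFECT_CLIENT_MAX_RETRIES,
        get_current_settings,
    )

    base = get_current_settings()

    derived = base.copy_with_update(
        updates={PREFECT_API_URL: "http://localhost:4200/api"},   # always wins
        set_defaults={PREFECT_CLIENT_MAX_RETRIES: 2},             # used only if the field was unset
    )

    # Assumes the PREFECT_API_URL setting writes through the api.url accessor.
    assert derived.api.url == "http://localhost:4200/api"
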
############################################################################
# Settings utils

# Functions to instantiate `Settings` instances


def _cast_settings(
    settings: Union[Dict[Union[str, Setting], Any], Any],
) -> Dict[Setting, Any]:
    """For backwards compatibility, allow either Settings objects as keys or string references to settings."""
    if not isinstance(settings, dict):
        raise ValueError("Settings must be a dictionary.")
    casted_settings = {}
    for k, value in settings.items():
        try:
            if isinstance(k, str):
                setting = SETTING_VARIABLES[k]
            else:
                setting = k
            casted_settings[setting] = value
        except KeyError as e:
            warnings.warn(f"Setting {e} is not recognized")
            continue
    return casted_settings


def get_current_settings() -> Settings:
    """
    Returns a settings object populated with values from the current settings context
    or, if no settings context is active, the environment.
    """
    from prefect.context import SettingsContext

    settings_context = SettingsContext.get()
    if settings_context is not None:
        return settings_context.settings

    return Settings()


@contextmanager
def temporary_settings(
    updates: Optional[Mapping[Setting, Any]] = None,
    set_defaults: Optional[Mapping[Setting, Any]] = None,
    restore_defaults: Optional[Iterable[Setting]] = None,
) -> Generator[Settings, None, None]:
    """
    Temporarily override the current settings by entering a new profile.

    See `Settings.copy_with_update` for details on different argument behavior.

    Examples:
        >>> from prefect.settings import PREFECT_API_URL
        >>>
        >>> with temporary_settings(updates={PREFECT_API_URL: "foo"}):
        >>>     assert PREFECT_API_URL.value() == "foo"
        >>>
        >>>     with temporary_settings(set_defaults={PREFECT_API_URL: "bar"}):
        >>>         assert PREFECT_API_URL.value() == "foo"
        >>>
        >>>     with temporary_settings(restore_defaults={PREFECT_API_URL}):
        >>>         assert PREFECT_API_URL.value() is None
        >>>
        >>>         with temporary_settings(set_defaults={PREFECT_API_URL: "bar"}):
        >>>             assert PREFECT_API_URL.value() == "bar"
        >>> assert PREFECT_API_URL.value() is None
    """
    import prefect.context

    context = prefect.context.get_settings_context()

    if not restore_defaults:
        restore_defaults = []

    new_settings = context.settings.copy_with_update(
        updates=updates, set_defaults=set_defaults, restore_defaults=restore_defaults
    )

    with prefect.context.SettingsContext(
        profile=context.profile, settings=new_settings
    ):
        yield new_settings


############################################################################
# Profiles


class Profile(BaseModel):
    """A user profile containing settings."""

    model_config = ConfigDict(extra="ignore", arbitrary_types_allowed=True)

    name: str
    settings: Annotated[Dict[Setting, Any], BeforeValidator(_cast_settings)] = Field(
        default_factory=dict
    )
    source: Optional[Path] = None

    def to_environment_variables(self) -> Dict[str, str]:
        """Convert the profile settings to a dictionary of environment variables."""
        return {
            setting.name: str(value)
            for setting, value in self.settings.items()
            if value is not None
        }

    def validate_settings(self):
        errors: List[Tuple[Setting, ValidationError]] = []
        for setting, value in self.settings.items():
            try:
                model_fields = Settings.model_fields
                annotation = None
                for section in setting.accessor.split("."):
                    annotation = model_fields[section].annotation
                    if inspect.isclass(annotation) and issubclass(
                        annotation, BaseSettings
                    ):
                        model_fields = annotation.model_fields

                TypeAdapter(annotation).validate_python(value)
            except ValidationError as e:
                errors.append((setting, e))
        if errors:
            raise ProfileSettingsValidationError(errors)


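Note: a sketch of constructing a `Profile` directly; string keys are cast to `Setting` objects by the `_cast_settings` validator above, and `to_environment_variables()` re-keys the values by environment variable name:

    # Illustrative sketch only: building and serializing a Profile.
    from prefect.settings import Profile

    profile = Profile(
        name="staging",
        settings={"PREFECT_API_URL": "https://staging.example.com/api"},  # str keys are cast
    )

    profile.validate_settings()  # raises ProfileSettingsValidationError on invalid values
    print(profile.to_environment_variables())
    # expected: {"PREFECT_API_URL": "https://staging.example.com/api"}
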
class ProfilesCollection:
    """
    A utility class for working with a collection of profiles.

    Profiles in the collection must have unique names.

    The collection may store the name of the active profile.
    """

    def __init__(
        self, profiles: Iterable[Profile], active: Optional[str] = None
    ) -> None:
        self.profiles_by_name = {profile.name: profile for profile in profiles}
        self.active_name = active

    @property
    def names(self) -> Set[str]:
        """
        Return a set of profile names in this collection.
        """
        return set(self.profiles_by_name.keys())

    @property
    def active_profile(self) -> Optional[Profile]:
        """
        Retrieve the active profile in this collection.
        """
        if self.active_name is None:
            return None
        return self[self.active_name]

    def set_active(self, name: Optional[str], check: bool = True):
        """
        Set the active profile name in the collection.

        A null value may be passed to indicate that this collection does not determine
        the active profile.
        """
        if check and name is not None and name not in self.names:
            raise ValueError(f"Unknown profile name {name!r}.")
        self.active_name = name

    def update_profile(
        self,
        name: str,
        settings: Dict[Setting, Any],
        source: Optional[Path] = None,
    ) -> Profile:
        """
        Add a profile to the collection or update the existing one if the name is already
        present in this collection.

        If updating an existing profile, the settings will be merged. Settings can
        be dropped from the existing profile by setting them to `None` in the new
        profile.

        Returns the new profile object.
        """
        existing = self.profiles_by_name.get(name)

        # Convert the input to a `Profile` to cast settings to the correct type
        profile = Profile(name=name, settings=settings, source=source)

        if existing:
            new_settings = {**existing.settings, **profile.settings}

            # Drop null keys to restore to default
            for key, value in tuple(new_settings.items()):
                if value is None:
                    new_settings.pop(key)

            new_profile = Profile(
                name=profile.name,
                settings=new_settings,
                source=source or profile.source,
            )
        else:
            new_profile = profile

        self.profiles_by_name[new_profile.name] = new_profile

        return new_profile

    def add_profile(self, profile: Profile) -> None:
        """
        Add a profile to the collection.

        If the profile name already exists, an exception will be raised.
        """
        if profile.name in self.profiles_by_name:
            raise ValueError(
                f"Profile name {profile.name!r} already exists in collection."
            )

        self.profiles_by_name[profile.name] = profile

    def remove_profile(self, name: str) -> None:
        """
        Remove a profile from the collection.
        """
        self.profiles_by_name.pop(name)

    def without_profile_source(self, path: Optional[Path]) -> "ProfilesCollection":
        """
        Remove profiles that were loaded from a given path.

        Returns a new collection.
        """
        return ProfilesCollection(
            [
                profile
                for profile in self.profiles_by_name.values()
                if profile.source != path
            ],
            active=self.active_name,
        )

    def to_dict(self):
        """
        Convert to a dictionary suitable for writing to disk.
        """
        return {
            "active": self.active_name,
            "profiles": {
                profile.name: profile.to_environment_variables()
                for profile in self.profiles_by_name.values()
            },
        }

    def __getitem__(self, name: str) -> Profile:
        return self.profiles_by_name[name]

    def __iter__(self):
        return self.profiles_by_name.__iter__()

    def items(self):
        return self.profiles_by_name.items()

    def __eq__(self, __o: object) -> bool:
        if not isinstance(__o, ProfilesCollection):
            return False

        return (
            self.profiles_by_name == __o.profiles_by_name
            and self.active_name == __o.active_name
        )

    def __repr__(self) -> str:
        return (
            f"ProfilesCollection(profiles={list(self.profiles_by_name.values())!r},"
            f" active={self.active_name!r})"
        )


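Note: a sketch of `update_profile` merge behavior: values are merged by setting name, and passing `None` for a setting drops it back to its default (string keys are cast via the `Profile` validator):

    # Illustrative sketch only: merge semantics of ProfilesCollection.update_profile.
    from prefect.settings import Profile, ProfilesCollection

    collection = ProfilesCollection(
        [Profile(name="dev", settings={"PREFECT_API_URL": "http://localhost:4200/api"})],
        active="dev",
    )

    # Merge in a new key and drop an existing one by passing None.
    collection.update_profile(
        "dev",
        {"PREFECT_CLIENT_MAX_RETRIES": 2, "PREFECT_API_URL": None},
    )

    updated = collection["dev"]
    assert "PREFECT_API_URL" not in updated.to_environment_variables()
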
def _read_profiles_from(path: Path) -> ProfilesCollection:
    """
    Read profiles from a path into a new `ProfilesCollection`.

    Profiles are expected to be written in TOML with the following schema:
        ```
        active = <name: Optional[str]>

        [profiles.<name: str>]
        <SETTING: str> = <value: Any>
        ```
    """
    contents = toml.loads(path.read_text())
    active_profile = contents.get("active")
    raw_profiles = contents.get("profiles", {})

    profiles = []
    for name, settings in raw_profiles.items():
        profiles.append(Profile(name=name, settings=settings, source=path))

    return ProfilesCollection(profiles, active=active_profile)


def _write_profiles_to(path: Path, profiles: ProfilesCollection) -> None:
    """
    Write profiles in the given collection to a path as TOML.

    Any existing data not present in the given `profiles` will be deleted.
    """
    if not path.exists():
        path.parent.mkdir(parents=True, exist_ok=True)
        path.touch(mode=0o600)
    path.write_text(toml.dumps(profiles.to_dict()))


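Note: a sketch of the on-disk layout these helpers read and write, parsed with the same `toml` module; the profile names and values below are illustrative only:

    # Illustrative sketch only: the profiles.toml schema handled by
    # _read_profiles_from / _write_profiles_to above.
    import toml

    PROFILES_TOML = """
    active = "staging"

    [profiles.staging]
    PREFECT_API_URL = "https://staging.example.com/api"

    [profiles.local]
    PREFECT_API_URL = "http://127.0.0.1:4200/api"
    """

    contents = toml.loads(PROFILES_TOML)
    assert contents["active"] == "staging"
    assert set(contents["profiles"]) == {"staging", "local"}
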
def load_profiles(include_defaults: bool = True) -> ProfilesCollection:
    """
    Load profiles from the current profile path. Optionally include profiles from the
    default profile path.
    """
    current_settings = get_current_settings()
    default_profiles = _read_profiles_from(DEFAULT_PROFILES_PATH)

    if current_settings.profiles_path is None:
        raise RuntimeError(
            "No profiles path set; please ensure `PREFECT_PROFILES_PATH` is set."
        )

    if not include_defaults:
        if not current_settings.profiles_path.exists():
            return ProfilesCollection([])
        return _read_profiles_from(current_settings.profiles_path)

    user_profiles_path = current_settings.profiles_path
    profiles = default_profiles
    if user_profiles_path.exists():
        user_profiles = _read_profiles_from(user_profiles_path)

        # Merge all of the user profiles with the defaults
        for name in user_profiles:
            if not (source := user_profiles[name].source):
                raise ValueError(f"Profile {name!r} has no source.")
            profiles.update_profile(
                name,
                settings=user_profiles[name].settings,
                source=source,
            )

        if user_profiles.active_name:
            profiles.set_active(user_profiles.active_name, check=False)

    return profiles


def load_current_profile():
    """
    Load the current profile from the default and current profile paths.

    This will _not_ include settings from the current settings context. Only settings
    that have been persisted to the profiles file will be loaded.
    """
    import prefect.context

    profiles = load_profiles()
    context = prefect.context.get_settings_context()

    if context:
        profiles.set_active(context.profile.name)

    return profiles.active_profile


def save_profiles(profiles: ProfilesCollection) -> None:
    """
    Writes all non-default profiles to the current profiles path.
    """
    profiles_path = get_current_settings().profiles_path
    assert profiles_path is not None, "Profiles path is not set."
    profiles = profiles.without_profile_source(DEFAULT_PROFILES_PATH)
    return _write_profiles_to(profiles_path, profiles)


def load_profile(name: str) -> Profile:
    """
    Load a single profile by name.
    """
    profiles = load_profiles()
    try:
        return profiles[name]
    except KeyError:
        raise ValueError(f"Profile {name!r} not found.")


def update_current_profile(
    settings: Dict[Union[str, Setting], Any],
) -> Profile:
    """
    Update the persisted data for the profile currently in-use.

    If the profile does not exist in the profiles file, it will be created.

    Given settings will be merged with the existing settings as described in
    `ProfilesCollection.update_profile`.

    Returns:
        The new profile.
    """
    import prefect.context

    current_profile = prefect.context.get_settings_context().profile

    if not current_profile:
        from prefect.exceptions import MissingProfileError

        raise MissingProfileError("No profile is currently in use.")

    profiles = load_profiles()

    # Ensure the current profile's settings are present
    profiles.update_profile(current_profile.name, current_profile.settings)
    # Then merge the new settings in
    new_profile = profiles.update_profile(
        current_profile.name, _cast_settings(settings)
    )

    new_profile.validate_settings()

    save_profiles(profiles)

    return profiles[current_profile.name]


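Note: a sketch of persisting a value to the active profile; string keys are accepted and cast through `_cast_settings`, and unrecognized names emit a warning and are skipped:

    # Illustrative sketch only: persist a setting to the currently active profile.
    from prefect.settings import update_current_profile

    new_profile = update_current_profile({"PREFECT_API_URL": "http://127.0.0.1:4200/api"})
    print(new_profile.settings)  # the merged, validated settings for the active profile
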
############################################################################
# Allow traditional env var access


def _collect_settings_fields(
    settings_cls: Type[BaseSettings], accessor_prefix: Optional[str] = None
) -> Dict[str, Setting]:
    settings_fields: Dict[str, Setting] = {}
    for field_name, field in settings_cls.model_fields.items():
        if inspect.isclass(field.annotation) and issubclass(
            field.annotation, BaseSettings
        ):
            accessor = (
                field_name
                if accessor_prefix is None
                else f"{accessor_prefix}.{field_name}"
            )
            settings_fields.update(_collect_settings_fields(field.annotation, accessor))
        else:
            accessor = (
                field_name
                if accessor_prefix is None
                else f"{accessor_prefix}.{field_name}"
            )
            setting = Setting(
                name=f"{settings_cls.model_config.get('env_prefix')}{field_name.upper()}",
                default=field.default,
                type_=field.annotation,
                accessor=accessor,
            )
            settings_fields[setting.name] = setting
            settings_fields[setting.accessor] = setting
    return settings_fields


SETTING_VARIABLES: dict[str, Setting] = _collect_settings_fields(Settings)


def __getattr__(name: str) -> Setting:
    if name in Settings.valid_setting_names():
        return SETTING_VARIABLES[name]
    raise AttributeError(f"{name} is not a Prefect setting.")


__all__ = [  # noqa: F822
    "Profile",
    "ProfilesCollection",
    "Setting",
    "Settings",
    "load_current_profile",
    "update_current_profile",
    "load_profile",
    "save_profiles",
    "load_profiles",
    "get_current_settings",
    "temporary_settings",
    "DEFAULT_PROFILES_PATH",
    # add public settings here for auto-completion
    "PREFECT_API_KEY",  # type: ignore
    "PREFECT_API_URL",  # type: ignore
    "PREFECT_UI_URL",  # type: ignore
]
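Note: a sketch of the dynamic attribute access the block above enables: importing an upper-case setting name resolves through the module-level `__getattr__` and the `SETTING_VARIABLES` registry built by `_collect_settings_fields`:

    # Illustrative sketch only: dynamic setting lookup via the module __getattr__ above.
    import prefect.settings
    from prefect.settings import PREFECT_API_URL, SETTING_VARIABLES

    # Both spellings resolve to the same Setting object.
    assert PREFECT_API_URL is SETTING_VARIABLES["PREFECT_API_URL"]
    assert prefect.settings.PREFECT_API_URL is PREFECT_API_URL

    # Unknown names raise AttributeError rather than silently returning None.
    try:
        prefect.settings.PREFECT_NOT_A_REAL_SETTING
    except AttributeError as exc:
        print(exc)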