prefect-client 2.16.6__py3-none-any.whl → 2.16.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. prefect/_internal/pydantic/__init__.py +21 -1
  2. prefect/_internal/pydantic/_base_model.py +16 -0
  3. prefect/_internal/pydantic/_compat.py +325 -74
  4. prefect/_internal/pydantic/_flags.py +15 -0
  5. prefect/_internal/schemas/validators.py +582 -9
  6. prefect/artifacts.py +179 -70
  7. prefect/client/orchestration.py +1 -1
  8. prefect/client/schemas/actions.py +2 -2
  9. prefect/client/schemas/objects.py +13 -24
  10. prefect/client/schemas/schedules.py +18 -80
  11. prefect/deployments/deployments.py +22 -86
  12. prefect/deployments/runner.py +8 -11
  13. prefect/events/__init__.py +40 -1
  14. prefect/events/clients.py +17 -20
  15. prefect/events/filters.py +5 -6
  16. prefect/events/related.py +1 -1
  17. prefect/events/schemas/__init__.py +5 -0
  18. prefect/events/schemas/automations.py +303 -0
  19. prefect/events/{schemas.py → schemas/deployment_triggers.py} +146 -270
  20. prefect/events/schemas/events.py +285 -0
  21. prefect/events/schemas/labelling.py +106 -0
  22. prefect/events/utilities.py +2 -2
  23. prefect/events/worker.py +1 -1
  24. prefect/filesystems.py +8 -37
  25. prefect/flows.py +4 -4
  26. prefect/infrastructure/kubernetes.py +12 -56
  27. prefect/infrastructure/provisioners/__init__.py +1 -0
  28. prefect/pydantic/__init__.py +4 -0
  29. prefect/pydantic/main.py +15 -0
  30. prefect/runner/runner.py +2 -2
  31. prefect/runner/server.py +1 -1
  32. prefect/serializers.py +13 -61
  33. prefect/settings.py +35 -13
  34. prefect/task_server.py +21 -7
  35. prefect/utilities/asyncutils.py +1 -1
  36. prefect/utilities/context.py +33 -1
  37. prefect/workers/base.py +1 -2
  38. prefect/workers/block.py +3 -7
  39. {prefect_client-2.16.6.dist-info → prefect_client-2.16.8.dist-info}/METADATA +2 -2
  40. {prefect_client-2.16.6.dist-info → prefect_client-2.16.8.dist-info}/RECORD +43 -36
  41. prefect/utilities/validation.py +0 -63
  42. {prefect_client-2.16.6.dist-info → prefect_client-2.16.8.dist-info}/LICENSE +0 -0
  43. {prefect_client-2.16.6.dist-info → prefect_client-2.16.8.dist-info}/WHEEL +0 -0
  44. {prefect_client-2.16.6.dist-info → prefect_client-2.16.8.dist-info}/top_level.txt +0 -0
@@ -1,26 +1,57 @@
1
+ import datetime
2
+ import json
3
+ import logging
1
4
  import re
5
+ import urllib.parse
6
+ from pathlib import Path
7
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
2
8
 
3
- from prefect.exceptions import InvalidNameError
9
+ import jsonschema
10
+ import pendulum
11
+
12
+ from prefect._internal.pydantic import HAS_PYDANTIC_V2
13
+ from prefect._internal.schemas.fields import DateTimeTZ
14
+ from prefect.exceptions import InvalidNameError, InvalidRepositoryURLError
15
+ from prefect.utilities.annotations import NotSet
16
+ from prefect.utilities.importtools import from_qualified_name
17
+ from prefect.utilities.names import generate_slug
18
+ from prefect.utilities.pydantic import JsonPatch
4
19
 
5
20
  BANNED_CHARACTERS = ["/", "%", "&", ">", "<"]
6
21
  LOWERCASE_LETTERS_NUMBERS_AND_DASHES_ONLY_REGEX = "^[a-z0-9-]*$"
7
22
  LOWERCASE_LETTERS_NUMBERS_AND_UNDERSCORES_REGEX = "^[a-z0-9_]*$"
8
23
 
24
+ if TYPE_CHECKING:
25
+ from prefect.blocks.core import Block
26
+ from prefect.events.schemas import DeploymentTrigger
27
+ from prefect.utilities.callables import ParameterSchema
28
+
29
+ if HAS_PYDANTIC_V2:
30
+ from pydantic.v1.fields import ModelField
31
+ else:
32
+ from pydantic.fields import ModelField
9
33
 
10
def raise_on_name_with_banned_characters(name: str) -> str:
    """
    Raise an InvalidNameError if the given name contains any invalid
    characters; otherwise return the name unchanged.

    A ``None`` name is passed through without validation.
    """
    if name is None:
        return name
    for character in BANNED_CHARACTERS:
        if character in name:
            raise InvalidNameError(
                f"Name {name!r} contains an invalid character. "
                f"Must not contain any of: {BANNED_CHARACTERS}."
            )
    return name
20
47
 
21
48
 
22
def raise_on_name_alphanumeric_dashes_only(
    value: Optional[str], field_name: str = "value"
):
    """
    Validate that ``value`` contains only lowercase letters, numbers, and dashes.

    Args:
        value: The value to validate; ``None`` and empty strings pass through.
        field_name: The field name used in the error message.

    Returns:
        The validated value (returned for consistency with
        ``raise_on_name_alphanumeric_underscores_only``).

    Raises:
        ValueError: If ``value`` contains any disallowed character.
    """
    if value and not bool(
        re.match(LOWERCASE_LETTERS_NUMBERS_AND_DASHES_ONLY_REGEX, value)
    ):
        raise ValueError(
            f"{field_name} must only contain lowercase letters, numbers, and dashes."
        )
    return value
@@ -34,3 +65,545 @@ def raise_on_name_alphanumeric_underscores_only(value, field_name: str = "value"
34
65
  " underscores."
35
66
  )
36
67
  return value
68
+
69
+
70
def validate_schema(schema: dict):
    """
    Validate that the provided schema is a valid json schema.

    Args:
        schema: The schema to validate; ``None`` is accepted and skipped.

    Raises:
        ValueError: If the provided schema is not a valid json schema.

    """
    if schema is None:
        return
    try:
        # Draft 4 most closely matches the schemas generated by pydantic
        jsonschema.Draft4Validator.check_schema(schema)
    except jsonschema.SchemaError as exc:
        raise ValueError(
            "The provided schema is not a valid json schema. Schema error:"
            f" {exc.message}"
        ) from exc
90
+
91
+
92
def validate_values_conform_to_schema(
    values: dict, schema: dict, ignore_required: bool = False
):
    """
    Validate that the provided values conform to the provided json schema.

    Args:
        values: The values to validate.
        schema: The schema to validate against.
        ignore_required: Whether to ignore the required fields in the schema. Should be
            used when a partial set of values is acceptable.

    Raises:
        ValueError: If the parameters do not conform to the schema, or if the
            schema itself is invalid.

    """
    from prefect.utilities.collections import remove_nested_keys

    if ignore_required:
        # Strip all "required" constraints so partial value sets validate
        schema = remove_nested_keys(["required"], schema)

    try:
        if schema is not None and values is not None:
            jsonschema.validate(values, schema)
    except jsonschema.ValidationError as exc:
        if exc.json_path == "$":
            message = "Validation failed."
        else:
            field = exc.json_path.replace("$.", "")
            message = f"Validation failed for field {field!r}."
        raise ValueError(f"{message} Failure reason: {exc.message}") from exc
    except jsonschema.SchemaError as exc:
        raise ValueError(
            "The provided schema is not a valid json schema. Schema error:"
            f" {exc.message}"
        ) from exc
130
+
131
+
132
+ ### DEPLOYMENT SCHEMA VALIDATORS ###
133
+
134
+
135
def infrastructure_must_have_capabilities(
    value: Union[Dict[str, Any], "Block", None],
) -> Optional["Block"]:
    """
    Ensure that the provided value is an infrastructure block with the required capabilities.
    """
    from prefect.blocks.core import Block

    if value is None:
        return value

    if isinstance(value, dict):
        if "_block_type_slug" in value:
            # Replace private attribute with public for dispatch
            value["block_type_slug"] = value.pop("_block_type_slug")
        block = Block(**value)
    else:
        block = value

    if "run-infrastructure" not in block.get_block_capabilities():
        raise ValueError(
            "Infrastructure block must have 'run-infrastructure' capabilities."
        )
    return block
159
+
160
+
161
def storage_must_have_capabilities(
    value: Union[Dict[str, Any], "Block", None],
) -> Optional["Block"]:
    """
    Ensure that the provided value is a storage block with the required capabilities.
    """
    from prefect.blocks.core import Block

    if value is None:
        return value

    if isinstance(value, dict):
        # Dispatch on the private slug to find the concrete block class
        block_class = Block.get_block_class_from_key(value.pop("_block_type_slug"))
        block = block_class(**value)
    else:
        block = value

    if "get-directory" not in block.get_block_capabilities():
        raise ValueError("Remote Storage block must have 'get-directory' capabilities.")
    return block
181
+
182
+
183
def handle_openapi_schema(value: Optional["ParameterSchema"]) -> "ParameterSchema":
    """
    This method ensures setting a value of `None` is handled gracefully.
    """
    from prefect.utilities.callables import ParameterSchema

    # Substitute an empty schema so downstream code never sees None
    return ParameterSchema() if value is None else value
192
+
193
+
194
+ ### SCHEDULE SCHEMA VALIDATORS ###
195
+
196
+
197
def validate_deprecated_schedule_fields(values: dict, logger: logging.Logger) -> dict:
    """
    Validate and log deprecation warnings for deprecated schedule fields.

    Warnings fire only when the deprecated field is set and no `schedules`
    list is present; the values are returned unchanged.
    """
    has_schedules = bool(values.get("schedules"))
    if values.get("schedule") and not has_schedules:
        logger.warning(
            "The field 'schedule' in 'Deployment' has been deprecated. It will not be "
            "available after Sep 2024. Define schedules in the `schedules` list instead."
        )
    elif values.get("is_schedule_active") and not has_schedules:
        logger.warning(
            "The field 'is_schedule_active' in 'Deployment' has been deprecated. It will "
            "not be available after Sep 2024. Use the `active` flag within a schedule in "
            "the `schedules` list instead and the `pause` flag in 'Deployment' to pause "
            "all schedules."
        )
    return values
214
+
215
+
216
def reconcile_schedules(cls, values: dict) -> dict:
    """
    Reconcile the `schedule` and `schedules` fields in a deployment.

    Precedence: an explicitly-provided `schedules` list wins and is
    normalized; otherwise a legacy `schedule` (if provided) is nulled out
    and converted into a single-entry `schedules` list. Every resulting
    schedule is then validated via `cls._validate_schedule`.
    """

    from prefect.deployments.schedules import (
        create_minimal_deployment_schedule,
        normalize_to_minimal_deployment_schedules,
    )

    # NotSet distinguishes "field absent" from "field explicitly None"
    schedule = values.get("schedule", NotSet)
    schedules = values.get("schedules", NotSet)

    if schedules is not NotSet:
        values["schedules"] = normalize_to_minimal_deployment_schedules(schedules)
    elif schedule is not NotSet:
        # Legacy single-schedule path: clear the deprecated field and
        # migrate its value into the `schedules` list.
        values["schedule"] = None

        if schedule is None:
            values["schedules"] = []
        else:
            values["schedules"] = [
                create_minimal_deployment_schedule(
                    schedule=schedule, active=values.get("is_schedule_active")
                )
            ]

    for schedule in values.get("schedules", []):
        cls._validate_schedule(schedule.schedule)

    return values
247
+
248
+
249
def interval_schedule_must_be_positive(v: datetime.timedelta) -> datetime.timedelta:
    """Reject zero or negative schedule intervals."""
    if not v.total_seconds() > 0:
        raise ValueError("The interval must be positive")
    return v
253
+
254
+
255
def default_anchor_date(v: DateTimeTZ) -> DateTimeTZ:
    """Default a missing anchor date to now (UTC); otherwise coerce to pendulum."""
    return pendulum.now("UTC") if v is None else pendulum.instance(v)
259
+
260
+
261
def get_valid_timezones(v: str) -> Tuple[str, ...]:
    """Return the tuple of IANA timezones pendulum knows about."""
    # pendulum.tz.timezones is a callable in 3.0 and above
    # https://github.com/PrefectHQ/prefect/issues/11619
    timezones = pendulum.tz.timezones
    return timezones() if callable(timezones) else timezones
268
+
269
+
270
def validate_rrule_timezone(v: str) -> str:
    """
    Validate that the provided timezone is a valid IANA timezone.

    Unfortunately this list is slightly different from the list of valid
    timezones in pendulum that we use for cron and interval timezone validation.
    """
    from prefect._internal.pytz import HAS_PYTZ

    if HAS_PYTZ:
        import pytz
    else:
        # Fall back to the vendored pytz timezone data
        from prefect._internal import pytz

    if v is None:
        return "UTC"
    if v and v not in pytz.all_timezones_set:
        raise ValueError(f'Invalid timezone: "{v}"')
    return v
289
+
290
+
291
def validate_timezone(v: str, timezones: Tuple[str, ...]) -> str:
    """Ensure ``v``, when truthy, is one of the given IANA timezones."""
    if not v or v in timezones:
        return v
    raise ValueError(
        f'Invalid timezone: "{v}" (specify in IANA tzdata format, for example,'
        " America/New_York)"
    )
298
+
299
+
300
def default_timezone(v: str, values: Optional[dict] = None) -> str:
    """
    Validate an explicit timezone, or infer one from an anchor date.

    Args:
        v: The timezone to validate; may be ``None``.
        values: Other already-validated fields; only ``anchor_date`` is read.
            (Default changed from a shared mutable ``{}`` to ``None`` — the
            function only reads from it, so behavior is unchanged.)

    Returns:
        The validated timezone, an inferred one, or ``v`` unchanged.
    """
    timezones = get_valid_timezones(v)

    if v is not None:
        return validate_timezone(v, timezones)

    # anchor schedules: infer the timezone from the anchor date
    if values and values.get("anchor_date"):
        tz = values["anchor_date"].tz.name
        if tz in timezones:
            return tz
        # sometimes anchor dates have "timezones" that are UTC offsets
        # like "-04:00". This happens when parsing ISO8601 strings.
        # In this case the correct inferred localization is "UTC".
        return "UTC"

    # cron schedules: leave the timezone unset
    return v
319
+
320
+
321
def validate_cron_string(v: str) -> str:
    """Validate a cron expression, rejecting croniter's random/hashed extensions."""
    from croniter import croniter

    # croniter allows "random" and "hashed" expressions
    # which we do not support https://github.com/kiorky/croniter
    if not croniter.is_valid(v):
        raise ValueError(f'Invalid cron string: "{v}"')
    if any(field.casefold() in ("r", "h") for field in v.split()):
        raise ValueError(
            f'Random and Hashed expressions are unsupported, received: "{v}"'
        )
    return v
333
+
334
+
335
# approx. 1 years worth of RDATEs + buffer
MAX_RRULE_LENGTH = 6500


def validate_rrule_string(v: str) -> str:
    """Validate that ``v`` parses as an RRule and is not excessively long."""
    import dateutil.rrule

    # attempt to parse the rrule string as an rrule object
    # this will error if the string is invalid
    try:
        dateutil.rrule.rrulestr(v, cache=True)
    except ValueError as exc:
        # rrules errors are a mix of cryptic and informative
        # so reraise to be clear that the string was invalid
        raise ValueError(f'Invalid RRule string "{v}": {exc}')

    if len(v) > MAX_RRULE_LENGTH:
        preview = v[:40]
        raise ValueError(
            f'Invalid RRule string "{preview}..."\n'
            f"Max length is {MAX_RRULE_LENGTH}, got {len(v)}"
        )
    return v
356
+
357
+
358
+ ### AUTOMATION SCHEMA VALIDATORS ###
359
+
360
+
361
def validate_trigger_within(
    value: datetime.timedelta, field: "ModelField"
) -> datetime.timedelta:
    """
    Validate that the `within` field is greater than the minimum value.
    """
    # The floor comes from the pydantic field's `minimum` extra
    threshold = field.field_info.extra["minimum"]
    if value.total_seconds() < threshold:
        raise ValueError("The minimum `within` is 0 seconds")
    return value
371
+
372
+
373
def validate_automation_names(
    field_value: List["DeploymentTrigger"], values: dict
) -> List["DeploymentTrigger"]:
    """
    Ensure that each trigger has a name for its automation if none is provided.

    Generated names are `<deployment name>__automation_<1-based index>`.
    """
    for index, trigger in enumerate(field_value, start=1):
        if trigger.name is None:
            trigger.name = f"{values['name']}__automation_{index}"
    return field_value
384
+
385
+
386
+ ### INFRASTRUCTURE SCHEMA VALIDATORS ###
387
+
388
+
389
def validate_k8s_job_required_components(cls, value: Dict[str, Any]):
    """
    Validate that a Kubernetes job manifest has all required components.
    """
    from prefect.utilities.pydantic import JsonPatch

    # Paths the base manifest would have to "add" are missing from `value`
    diff = JsonPatch.from_diff(value, cls.base_job_manifest())
    missing_paths = sorted(op["path"] for op in diff if op["op"] == "add")
    if missing_paths:
        raise ValueError(
            "Job is missing required attributes at the following paths: "
            f"{', '.join(missing_paths)}"
        )
    return value
403
+
404
+
405
def validate_k8s_job_compatible_values(cls, value: Dict[str, Any]):
    """
    Validate that the provided job values are compatible with the job type.
    """
    from prefect.utilities.pydantic import JsonPatch

    # "replace" ops mean `value` disagrees with a required base-manifest value
    diff = JsonPatch.from_diff(value, cls.base_job_manifest())
    incompatible = sorted(
        f"{op['path']} must have value {op['value']!r}"
        for op in diff
        if op["op"] == "replace"
    )
    if incompatible:
        raise ValueError(
            "Job has incompatible values for the following attributes: "
            f"{', '.join(incompatible)}"
        )
    return value
425
+
426
+
427
+ def cast_k8s_job_customizations(
428
+ cls, value: Union[JsonPatch, str, List[Dict[str, Any]]]
429
+ ):
430
+ if isinstance(value, list):
431
+ return JsonPatch(value)
432
+ elif isinstance(value, str):
433
+ try:
434
+ return JsonPatch(json.loads(value))
435
+ except json.JSONDecodeError as exc:
436
+ raise ValueError(
437
+ f"Unable to parse customizations as JSON: {value}. Please make sure"
438
+ " that the provided value is a valid JSON string."
439
+ ) from exc
440
+ return value
441
+
442
+
443
def set_default_namespace(values: dict) -> dict:
    """
    Set the default namespace for a Kubernetes job if not provided.

    The namespace is defaulted to "default" only when neither the top-level
    `namespace` field nor the job manifest's metadata specifies one.
    """
    job = values.get("job")
    job_namespace = job["metadata"].get("namespace") if job else None

    if not (values.get("namespace") or job_namespace):
        values["namespace"] = "default"

    return values
456
+
457
+
458
def set_default_image(values: dict) -> dict:
    """
    Set the default image for a Kubernetes job if not provided.

    The default Prefect image is used only when neither the top-level `image`
    field nor the job manifest's first container specifies one.
    """
    from prefect.utilities.dockerutils import get_prefect_image_name

    job = values.get("job")
    job_image = None
    if job:
        job_image = job["spec"]["template"]["spec"]["containers"][0].get("image")

    if not (values.get("image") or job_image):
        values["image"] = get_prefect_image_name()

    return values
474
+
475
+
476
def get_or_create_state_name(v: str, values: dict) -> str:
    """If a name is not provided, use the type"""

    # if `type` is not in `values` it means the `type` didn't pass its own
    # validation check and an error will be raised after this function is called
    state_type = values.get("type")
    if v is None and state_type:
        # e.g. a type value of "completed_run" becomes "Completed Run"
        return " ".join(word.capitalize() for word in state_type.value.split("_"))
    return v
484
+
485
+
486
def get_or_create_run_name(name):
    """Return the given run name, or a generated two-word slug when absent."""
    if name:
        return name
    return generate_slug(2)
488
+
489
+
490
+ ### FILESYSTEM SCHEMA VALIDATORS ###
491
+
492
+
493
def stringify_path(value: Union[str, Path]) -> str:
    """Convert a ``Path`` to its string form; other values pass through."""
    return str(value) if isinstance(value, Path) else value
497
+
498
+
499
def validate_basepath(value: str) -> str:
    """
    Validate a remote filesystem base path.

    The path must carry a URL scheme other than 'file' and a network location.

    Raises:
        ValueError: On a missing scheme, missing location, or 'file' scheme.
    """
    parts = urllib.parse.urlsplit(value)

    if not parts.scheme:
        raise ValueError(f"Base path must start with a scheme. Got {value!r}.")

    if not parts.netloc:
        raise ValueError(
            f"Base path must include a location after the scheme. Got {value!r}."
        )

    if parts.scheme == "file":
        raise ValueError(
            "Base path scheme cannot be 'file'. Use `LocalFileSystem` instead for"
            " local file access."
        )

    return value
517
+
518
+
519
def validate_github_access_token(v: str, values: dict) -> str:
    """Ensure that credentials are not provided with 'SSH' formatted GitHub URLs.

    Note: validates `access_token` specifically so that it only fires when
    private repositories are used.

    Raises:
        InvalidRepositoryURLError: If a token is supplied for a non-HTTPS
            repository URL.
    """
    if v is not None:
        if urllib.parse.urlparse(values["repository"]).scheme != "https":
            # Fixed typo in the original message: "Crendentials" -> "Credentials"
            raise InvalidRepositoryURLError(
                "Credentials can only be used with GitHub repositories "
                "using the 'HTTPS' format. You must either remove the "
                "credential if you wish to use the 'SSH' format and are not "
                "using a private repository, or you must change the repository "
                "URL to the 'HTTPS' format. "
            )

    return v
536
+
537
+
538
+ ### SERIALIZER SCHEMA VALIDATORS ###
539
+
540
+
541
def validate_picklelib(value: str) -> str:
    """
    Check that the given pickle library is importable and has dumps/loads methods.
    """
    try:
        pickler = from_qualified_name(value)
    except (ImportError, AttributeError) as exc:
        raise ValueError(
            f"Failed to import requested pickle library: {value!r}."
        ) from exc

    # A pickle-compatible module must expose callable dumps/loads
    for method in ("dumps", "loads"):
        if not callable(getattr(pickler, method, None)):
            raise ValueError(
                f"Pickle library at {value!r} does not have a '{method}' method."
            )

    return value
559
+
560
+
561
def validate_dump_kwargs(value: dict) -> dict:
    """Reject a user-supplied `default` in JSON dump kwargs."""
    # `default` is set by `object_encoder`. A user provided callable would make
    # this class unserializable anyway.
    if "default" in value:
        raise ValueError("`default` cannot be provided. Use `object_encoder` instead.")
    return value
567
+
568
+
569
def validate_load_kwargs(value: dict) -> dict:
    """Reject a user-supplied `object_hook` in JSON load kwargs."""
    # `object_hook` is set by `object_decoder`. A user provided callable would
    # make this class unserializable anyway.
    if "object_hook" in value:
        raise ValueError(
            "`object_hook` cannot be provided. Use `object_decoder` instead."
        )
    return value
577
+
578
+
579
def cast_type_names_to_serializers(value):
    """Allow a serializer to be specified by its registered type name."""
    from prefect.serializers import Serializer

    if not isinstance(value, str):
        return value
    return Serializer(type=value)
585
+
586
+
587
def validate_compressionlib(value: str) -> str:
    """
    Check that the given pickle library is importable and has compress/decompress
    methods.
    """
    try:
        compressor = from_qualified_name(value)
    except (ImportError, AttributeError) as exc:
        raise ValueError(
            f"Failed to import requested compression library: {value!r}."
        ) from exc

    # A compression-compatible module must expose callable compress/decompress
    for method in ("compress", "decompress"):
        if not callable(getattr(compressor, method, None)):
            raise ValueError(
                f"Compression library at {value!r} does not have a '{method}' method."
            )

    return value