dycw-utilities 0.131.17__py3-none-any.whl → 0.131.19__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions exactly as they appear in the public registry.
utilities/iterables.py CHANGED
@@ -1,7 +1,6 @@
  from __future__ import annotations
 
  import builtins
- import datetime as dt
  from collections import Counter
  from collections.abc import (
      Callable,
@@ -34,7 +33,7 @@ from typing import (
  )
 
  from utilities.errors import ImpossibleCaseError
- from utilities.functions import ensure_hashable, ensure_not_none, ensure_str
+ from utilities.functions import ensure_hashable, ensure_str
  from utilities.math import (
      _CheckIntegerEqualError,
      _CheckIntegerEqualOrApproxError,
@@ -45,7 +44,6 @@ from utilities.math import (
  from utilities.reprlib import get_repr
  from utilities.sentinel import Sentinel, sentinel
  from utilities.types import Sign, THashable, TSupportsAdd, TSupportsLT
- from utilities.zoneinfo import UTC
 
  if TYPE_CHECKING:
      from types import NoneType
@@ -1326,9 +1324,6 @@ def _sort_iterable_cmp(x: Any, y: Any, /) -> Sign:
      if x is None:
          y = cast("NoneType", y)
          return 0
-     if isinstance(x, dt.datetime):
-         y = cast("dt.datetime", y)
-         return _sort_iterable_cmp_datetimes(x, y)
      if isinstance(x, float):
          y = cast("float", y)
          return _sort_iterable_cmp_floats(x, y)
@@ -1371,30 +1366,6 @@ class SortIterableError(Exception):
          return f"Unable to sort {get_repr(self.x)} and {get_repr(self.y)}"
 
 
- def _sort_iterable_cmp_datetimes(x: dt.datetime, y: dt.datetime, /) -> Sign:
-     """Compare two datetimes."""
-     match x.tzinfo, y.tzinfo:
-         case None, None:
-             return cast("Sign", (x > y) - (x < y))
-         case dt.tzinfo(), None:
-             return 1
-         case None, dt.tzinfo():
-             return -1
-         case dt.tzinfo(), dt.tzinfo():
-             x_utc = x.astimezone(tz=UTC)
-             y_utc = y.astimezone(tz=UTC)
-             result = cast("Sign", (x_utc > y_utc) - (x_utc < y_utc))
-             if result != 0:
-                 return result
-             x_time_zone = ensure_not_none(ensure_not_none(x.tzinfo).tzname(x))
-             y_time_zone = ensure_not_none(ensure_not_none(y.tzinfo).tzname(y))
-             return cast(
-                 "Sign", (x_time_zone > y_time_zone) - (x_time_zone < y_time_zone)
-             )
-         case _ as never:
-             assert_never(never)
-
-
  def _sort_iterable_cmp_floats(x: float, y: float, /) -> Sign:
      """Compare two floats."""
      x_nan, y_nan = map(isnan, [x, y])
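
The datetime branch is gone from `_sort_iterable_cmp`, leaving the generic sign-comparison idiom used by the remaining `_sort_iterable_cmp_*` helpers. A minimal standalone illustration of that idiom (not package code):

    def cmp_sign(x: float, y: float) -> int:
        # mirrors the (x > y) - (x < y) idiom seen in the diff: -1, 0 or 1
        return (x > y) - (x < y)

    assert cmp_sign(1.0, 2.0) == -1
    assert cmp_sign(2.0, 2.0) == 0
    assert cmp_sign(3.0, 2.0) == 1
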
utilities/operator.py CHANGED
@@ -1,23 +1,17 @@
  from __future__ import annotations
 
- import datetime as dt
  from collections.abc import Callable, Mapping, Sequence
  from collections.abc import Set as AbstractSet
  from dataclasses import asdict, dataclass
  from typing import TYPE_CHECKING, Any, TypeVar, cast, override
 
  import utilities.math
- from utilities.datetime import (
-     AreEqualDatesOrDateTimesError,
-     AreEqualDateTimesError,
-     are_equal_dates_or_datetimes,
- )
  from utilities.functions import is_dataclass_instance
  from utilities.iterables import SortIterableError, sort_iterable
  from utilities.reprlib import get_repr
 
  if TYPE_CHECKING:
-     from utilities.types import Dataclass, DateOrDateTime, Number
+     from utilities.types import Dataclass, Number
 
  _T = TypeVar("_T")
 
@@ -51,12 +45,6 @@ def is_equal(
      if isinstance(x, str):  # else Sequence
          y = cast("str", y)
          return x == y
-     if isinstance(x, dt.date | dt.datetime):
-         y = cast("DateOrDateTime", y)
-         try:
-             return are_equal_dates_or_datetimes(x, y)
-         except (AreEqualDateTimesError, AreEqualDatesOrDateTimesError):
-             return False
      if is_dataclass_instance(x):
          y = cast("Dataclass", y)
          x_values = asdict(x)
@@ -80,8 +68,26 @@ def is_equal(
          try:
              x_sorted = sort_iterable(x)
              y_sorted = sort_iterable(y)
-         except SortIterableError as error:
-             raise IsEqualError(x=error.x, y=error.y) from None
+         except SortIterableError:
+             x_in_y = all(
+                 any(
+                     is_equal(
+                         x_i, y_i, rel_tol=rel_tol, abs_tol=abs_tol, extra=extra
+                     )
+                     for y_i in y
+                 )
+                 for x_i in x
+             )
+             y_in_x = all(
+                 any(
+                     is_equal(
+                         x_i, y_i, rel_tol=rel_tol, abs_tol=abs_tol, extra=extra
+                     )
+                     for x_i in x
+                 )
+                 for y_i in y
+             )
+             return x_in_y and y_in_x
          return is_equal(x_sorted, y_sorted, rel_tol=rel_tol, abs_tol=abs_tol)
      if isinstance(x, Sequence):
          y = cast("Sequence[Any]", y)
utilities/orjson.py CHANGED
@@ -1,6 +1,5 @@
  from __future__ import annotations
 
- import datetime as dt
  import re
  from collections.abc import Callable, Iterable, Mapping, Sequence
  from contextlib import suppress
@@ -23,7 +22,15 @@ from orjson import (
      dumps,
      loads,
  )
- from whenever import ZonedDateTime
+ from whenever import (
+     Date,
+     DateDelta,
+     DateTimeDelta,
+     PlainDateTime,
+     Time,
+     TimeDelta,
+     ZonedDateTime,
+ )
 
  from utilities.concurrent import concurrent_map
  from utilities.dataclasses import dataclass_to_dict
@@ -39,27 +46,13 @@ from utilities.logging import get_logging_level_number
  from utilities.math import MAX_INT64, MIN_INT64
  from utilities.types import Dataclass, LogLevel, MaybeIterable, PathLike, StrMapping
  from utilities.tzlocal import LOCAL_TIME_ZONE
- from utilities.uuid import UUID_PATTERN
  from utilities.version import Version, parse_version
- from utilities.whenever import (
-     parse_date,
-     parse_plain_datetime,
-     parse_time,
-     parse_timedelta,
-     parse_zoned_datetime,
-     serialize_date,
-     serialize_datetime,
-     serialize_time,
-     serialize_timedelta,
- )
  from utilities.whenever2 import from_timestamp
 
  if TYPE_CHECKING:
      from collections.abc import Set as AbstractSet
      from logging import _FormatStyle
 
-     from whenever import Date
-
      from utilities.types import Parallelism
 
 
@@ -70,26 +63,25 @@ if TYPE_CHECKING:
  class _Prefixes(Enum):
      dataclass = "dc"
      date = "d"
-     datetime = "dt"
+     date_delta = "dd"
+     date_time_delta = "D"
      enum = "e"
-     exception_class = "exc"
-     exception_instance = "exi"
+     exception_class = "Ex"
+     exception_instance = "ex"
      float_ = "fl"
      frozenset_ = "fr"
      list_ = "l"
-     nan = "nan"
      none = "none"
      path = "p"
-     pos_inf = "pos_inf"
-     neg_inf = "neg_inf"
+     plain_date_time = "pd"
      set_ = "s"
-     timedelta = "td"
-     time = "tm"
+     time = "ti"
+     time_delta = "td"
      tuple_ = "tu"
      unserializable = "un"
      uuid = "uu"
      version = "v"
-     zoned_datetime = "zd"
+     zoned_date_time = "zd"
 
 
  type _DataclassHook = Callable[[type[Dataclass], StrMapping], StrMapping]
@@ -160,14 +152,12 @@ def _pre_process(
          # singletons
          case None:
              return f"[{_Prefixes.none.value}]"
-         case dt.datetime() as datetime:
-             return f"[{_Prefixes.datetime.value}]{serialize_datetime(datetime)}"
-         case dt.date() as date:  # after datetime
-             return f"[{_Prefixes.date.value}]{serialize_date(date)}"
-         case dt.time() as time:
-             return f"[{_Prefixes.time.value}]{serialize_time(time)}"
-         case dt.timedelta() as timedelta:
-             return f"[{_Prefixes.timedelta.value}]{serialize_timedelta(timedelta)}"
+         case Date() as date:
+             return f"[{_Prefixes.date.value}]{date}"
+         case DateDelta() as date:
+             return f"[{_Prefixes.date_delta.value}]{date}"
+         case DateTimeDelta() as date:
+             return f"[{_Prefixes.date_time_delta.value}]{date}"
          case Exception() as error_:
              return {
                  f"[{_Prefixes.exception_instance.value}|{type(error_).__qualname__}]": pre(
@@ -182,18 +172,24 @@
              if MIN_INT64 <= int_ <= MAX_INT64:
                  return int_
              raise _SerializeIntegerError(obj=int_)
-         case UUID() as uuid:
-             return f"[{_Prefixes.uuid.value}]{uuid}"
          case Path() as path:
              return f"[{_Prefixes.path.value}]{path!s}"
+         case PlainDateTime() as datetime:
+             return f"[{_Prefixes.plain_date_time.value}]{datetime}"
          case str() as str_:
              return str_
+         case Time() as time:
+             return f"[{_Prefixes.time.value}]{time}"
+         case TimeDelta() as time_delta:
+             return f"[{_Prefixes.time_delta.value}]{time_delta}"
          case type() as error_cls if issubclass(error_cls, Exception):
              return f"[{_Prefixes.exception_class.value}|{error_cls.__qualname__}]"
+         case UUID() as uuid:
+             return f"[{_Prefixes.uuid.value}]{uuid}"
          case Version() as version:
              return f"[{_Prefixes.version.value}]{version!s}"
          case ZonedDateTime() as datetime:
-             return f"[{_Prefixes.zoned_datetime.value}]{datetime}"
+             return f"[{_Prefixes.zoned_date_time.value}]{datetime}"
          # contains
          case Dataclass() as dataclass:
              asdict = dataclass_to_dict(
@@ -338,51 +334,36 @@ def deserialize(
      )
 
 
- _NONE_PATTERN = re.compile(r"^\[" + _Prefixes.none.value + r"\]$")
- _LOCAL_DATETIME_PATTERN = re.compile(
-     r"^\["
-     + _Prefixes.datetime.value
-     + r"\](\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,6})?)$"
- )
- _UUID_PATTERN = re.compile(r"^\[" + _Prefixes.uuid.value + r"\](" + UUID_PATTERN + ")$")
- _ZONED_DATETIME_PATTERN = re.compile(
-     r"^\["
-     + _Prefixes.datetime.value
-     + r"\](\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,6})?[\+\-]\d{2}:\d{2}(?::\d{2})?\[(?!(?:dt\.)).+?\])$"
- )
- _ZONED_DATETIME_PATTERN2 = re.compile(
-     r"^\["
-     + _Prefixes.zoned_datetime.value
-     + r"\](\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{1,9})?[\+\-]\d{2}:\d{2}(?::\d{2})?\[(?!(?:dt\.)).+?\])$"
- )
-
-
- def _make_unit_pattern(prefix: _Prefixes, /) -> Pattern[str]:
-     return re.compile(r"^\[" + prefix.value + r"\](.+)$")
-
-
  (
      _DATE_PATTERN,
+     _DATE_DELTA_PATTERN,
+     _DATE_TIME_DELTA_PATTERN,
      _FLOAT_PATTERN,
+     _NONE_PATTERN,
      _PATH_PATTERN,
+     _PLAIN_DATE_TIME_PATTERN,
      _TIME_PATTERN,
-     _TIMEDELTA_PATTERN,
+     _TIME_DELTA_PATTERN,
+     _UUID_PATTERN,
      _VERSION_PATTERN,
- ) = map(
-     _make_unit_pattern,
-     [
+     _ZONED_DATE_TIME_PATTERN,
+ ) = [
+     re.compile(r"^\[" + p.value + r"\](" + ".*" + ")$")
+     for p in [
          _Prefixes.date,
+         _Prefixes.date_delta,
+         _Prefixes.date_time_delta,
          _Prefixes.float_,
+         _Prefixes.none,
          _Prefixes.path,
+         _Prefixes.plain_date_time,
          _Prefixes.time,
-         _Prefixes.timedelta,
+         _Prefixes.time_delta,
+         _Prefixes.uuid,
          _Prefixes.version,
-     ],
- )
-
-
- def _make_container_pattern(prefix: _Prefixes, /) -> Pattern[str]:
-     return re.compile(r"^\[" + prefix.value + r"(?:\|(.+))?\]$")
+         _Prefixes.zoned_date_time,
+     ]
+ ]
 
 
  (
@@ -394,9 +375,9 @@ def _make_container_pattern(prefix: _Prefixes, /) -> Pattern[str]:
      _LIST_PATTERN,
      _SET_PATTERN,
      _TUPLE_PATTERN,
- ) = map(
-     _make_container_pattern,
-     [
+ ) = [
+     re.compile(r"^\[" + p.value + r"(?:\|(.+))?\]$")
+     for p in [
          _Prefixes.dataclass,
          _Prefixes.enum,
          _Prefixes.exception_class,
@@ -405,8 +386,8 @@ def _make_container_pattern(prefix: _Prefixes, /) -> Pattern[str]:
          _Prefixes.list_,
          _Prefixes.set_,
          _Prefixes.tuple_,
-     ],
- )
+     ]
+ ]
 
 
  def _object_hook(
@@ -425,24 +406,26 @@ def _object_hook(
      if match := _NONE_PATTERN.search(text):
          return None
      if match := _DATE_PATTERN.search(text):
-         return parse_date(match.group(1))
+         return Date.parse_common_iso(match.group(1))
+     if match := _DATE_DELTA_PATTERN.search(text):
+         return DateDelta.parse_common_iso(match.group(1))
+     if match := _DATE_TIME_DELTA_PATTERN.search(text):
+         return DateTimeDelta.parse_common_iso(match.group(1))
      if match := _FLOAT_PATTERN.search(text):
          return float(match.group(1))
-     if match := _LOCAL_DATETIME_PATTERN.search(text):
-         return parse_plain_datetime(match.group(1))
      if match := _PATH_PATTERN.search(text):
          return Path(match.group(1))
+     if match := _PLAIN_DATE_TIME_PATTERN.search(text):
+         return PlainDateTime.parse_common_iso(match.group(1))
      if match := _TIME_PATTERN.search(text):
-         return parse_time(match.group(1))
-     if match := _TIMEDELTA_PATTERN.search(text):
-         return parse_timedelta(match.group(1))
+         return Time.parse_common_iso(match.group(1))
+     if match := _TIME_DELTA_PATTERN.search(text):
+         return TimeDelta.parse_common_iso(match.group(1))
      if match := _UUID_PATTERN.search(text):
          return UUID(match.group(1))
      if match := _VERSION_PATTERN.search(text):
          return parse_version(match.group(1))
-     if match := _ZONED_DATETIME_PATTERN.search(text):
-         return parse_zoned_datetime(match.group(1))
-     if match := _ZONED_DATETIME_PATTERN2.search(text):
+     if match := _ZONED_DATE_TIME_PATTERN.search(text):
          return ZonedDateTime.parse_common_iso(match.group(1))
      if (
          exc_class := _object_hook_exception_class(
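
The serializer now tags whenever objects with the new `_Prefixes` values, and `_object_hook` parses them back with the classes' `parse_common_iso` constructors. A rough round-trip sketch under those assumptions (the `_tag`/`_untag` helpers below are illustrative, not part of the module):

    import re

    from whenever import Date, TimeDelta, ZonedDateTime

    def _tag(prefix: str, text: str) -> str:
        # mirror of the f"[{prefix}]{obj}" formatting used in _pre_process
        return f"[{prefix}]{text}"

    def _untag(prefix: str, text: str) -> str:
        match = re.fullmatch(r"\[" + re.escape(prefix) + r"\](.*)", text)
        assert match is not None
        return match.group(1)

    date = Date(2024, 1, 1)
    tagged = _tag("d", date.format_common_iso())  # "[d]2024-01-01"
    assert Date.parse_common_iso(_untag("d", tagged)) == date
    assert TimeDelta.parse_common_iso("PT1H30M") == TimeDelta(hours=1, minutes=30)
    assert ZonedDateTime.parse_common_iso(
        "2024-01-01T12:00:00+00:00[UTC]"
    ) == ZonedDateTime(2024, 1, 1, 12, tz="UTC")
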
utilities/parse.py CHANGED
@@ -1,6 +1,5 @@
  from __future__ import annotations
 
- import datetime as dt
  from contextlib import suppress
  from dataclasses import dataclass
  from enum import Enum
@@ -9,6 +8,16 @@ from re import DOTALL
  from types import NoneType
  from typing import TYPE_CHECKING, Any, override
 
+ from whenever import (
+     Date,
+     DateDelta,
+     DateTimeDelta,
+     PlainDateTime,
+     Time,
+     TimeDelta,
+     ZonedDateTime,
+ )
+
  from utilities.enum import ParseEnumError, parse_enum
  from utilities.iterables import OneEmptyError, OneNonUniqueError, one, one_str
  from utilities.math import ParseNumberError, parse_number
@@ -26,7 +35,7 @@ from utilities.text import (
      split_key_value_pairs,
      split_str,
  )
- from utilities.types import Duration, Number, ParseObjectExtra, SerializeObjectExtra
+ from utilities.types import Number, ParseObjectExtra, SerializeObjectExtra
  from utilities.typing import (
      get_args,
      is_dict_type,
@@ -182,6 +191,14 @@ def _parse_object_type(
          return parse_enum(text, cls, case_sensitive=case_sensitive)
      except ParseEnumError:
          raise _ParseObjectParseError(type_=cls, text=text) from None
+     if issubclass(
+         cls,
+         (Date, DateDelta, DateTimeDelta, PlainDateTime, Time, TimeDelta, ZonedDateTime),
+     ):
+         try:
+             return cls.parse_common_iso(text)
+         except ValueError:
+             raise _ParseObjectParseError(type_=cls, text=text) from None
      if issubclass(cls, Path):
          return Path(text).expanduser()
      if issubclass(cls, Sentinel):
@@ -194,34 +211,6 @@
          return parse_version(text)
      except ParseVersionError:
          raise _ParseObjectParseError(type_=cls, text=text) from None
-     if is_subclass_gen(cls, dt.date):
-         from utilities.whenever import ParseDateError, parse_date
-
-         try:
-             return parse_date(text)
-         except ParseDateError:
-             raise _ParseObjectParseError(type_=cls, text=text) from None
-     if is_subclass_gen(cls, dt.datetime):
-         from utilities.whenever import ParseDateTimeError, parse_datetime
-
-         try:
-             return parse_datetime(text)
-         except ParseDateTimeError:
-             raise _ParseObjectParseError(type_=cls, text=text) from None
-     if issubclass(cls, dt.time):
-         from utilities.whenever import ParseTimeError, parse_time
-
-         try:
-             return parse_time(text)
-         except ParseTimeError:
-             raise _ParseObjectParseError(type_=cls, text=text) from None
-     if issubclass(cls, dt.timedelta):
-         from utilities.whenever import ParseTimedeltaError, parse_timedelta
-
-         try:
-             return parse_timedelta(text)
-         except ParseTimedeltaError:
-             raise _ParseObjectParseError(type_=cls, text=text) from None
      raise _ParseObjectParseError(type_=cls, text=text)
 
 
@@ -374,13 +363,6 @@ def _parse_object_union_type(type_: Any, text: str, /) -> Any:
          return parse_number(text)
      except ParseNumberError:
          raise _ParseObjectParseError(type_=type_, text=text) from None
-     if type_ is Duration:
-         from utilities.whenever import ParseDurationError, parse_duration
-
-         try:
-             return parse_duration(text)
-         except ParseDurationError:
-             raise _ParseObjectParseError(type_=type_, text=text) from None
      raise _ParseObjectParseError(type_=type_, text=text) from None
 
 
@@ -464,22 +446,11 @@ def serialize_object(
          obj, bool | int | float | str | Path | Sentinel | Version
      ):
          return str(obj)
-     if is_instance_gen(obj, dt.date):
-         from utilities.whenever import serialize_date
-
-         return serialize_date(obj)
-     if is_instance_gen(obj, dt.datetime):
-         from utilities.whenever import serialize_datetime
-
-         return serialize_datetime(obj)
-     if isinstance(obj, dt.time):
-         from utilities.whenever import serialize_time
-
-         return serialize_time(obj)
-     if isinstance(obj, dt.timedelta):
-         from utilities.whenever import serialize_timedelta
-
-         return serialize_timedelta(obj)
+     if isinstance(
+         obj,
+         (Date, DateDelta, DateTimeDelta, PlainDateTime, Time, TimeDelta, ZonedDateTime),
+     ):
+         return obj.format_common_iso()
      if isinstance(obj, Enum):
          return obj.name
      if isinstance(obj, dict):
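
Both `_parse_object_type` and `serialize_object` now defer to whenever's own ISO round trip (`parse_common_iso` / `format_common_iso`) instead of the removed `utilities.whenever` helpers. A small sketch of that round trip, using an arbitrary PlainDateTime value:

    from whenever import PlainDateTime

    value = PlainDateTime(2024, 1, 1, 12, 30)
    text = value.format_common_iso()  # "2024-01-01T12:30:00"
    assert PlainDateTime.parse_common_iso(text) == value
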
utilities/polars.py CHANGED
@@ -1005,6 +1005,7 @@ def dataclass_to_dataframe(
      *,
      globalns: StrMapping | None = None,
      localns: StrMapping | None = None,
+     warn_name_errors: bool = False,
  ) -> DataFrame:
      """Convert a dataclass/es into a DataFrame."""
      objs = list(always_iterable(objs))
@@ -1018,12 +1019,14 @@
          ) from None
      data = list(map(asdict, objs))
      first, *_ = objs
-     schema = dataclass_to_schema(first, globalns=globalns, localns=localns)
+     schema = dataclass_to_schema(
+         first, globalns=globalns, localns=localns, warn_name_errors=warn_name_errors
+     )
      df = DataFrame(data, schema=schema, orient="row")
-     return map_over_columns(_dataclass_to_dataframe_uuid, df)
+     return map_over_columns(_dataclass_to_dataframe_cast, df)
 
 
- def _dataclass_to_dataframe_uuid(series: Series, /) -> Series:
+ def _dataclass_to_dataframe_cast(series: Series, /) -> Series:
      if series.dtype == Object:
          is_path = series.map_elements(make_isinstance(Path), return_dtype=Boolean).all()
          is_uuid = series.map_elements(make_isinstance(UUID), return_dtype=Boolean).all()
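
Callers of `dataclass_to_dataframe` can now forward the new keyword-only `warn_name_errors` flag to `dataclass_to_schema`. A hypothetical usage sketch (the `Row` dataclass below is made up for illustration):

    from dataclasses import dataclass

    from utilities.polars import dataclass_to_dataframe

    @dataclass
    class Row:
        name: str
        value: int

    df = dataclass_to_dataframe(
        [Row(name="a", value=1), Row(name="b", value=2)],
        warn_name_errors=True,  # new parameter added in this diff
    )
    print(df)
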