lionagi 0.15.13__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. lionagi/config.py +1 -0
  2. lionagi/libs/validate/fuzzy_match_keys.py +5 -182
  3. lionagi/libs/validate/string_similarity.py +6 -331
  4. lionagi/ln/__init__.py +56 -66
  5. lionagi/ln/_async_call.py +13 -10
  6. lionagi/ln/_hash.py +33 -8
  7. lionagi/ln/_list_call.py +2 -35
  8. lionagi/ln/_to_list.py +51 -28
  9. lionagi/ln/_utils.py +156 -0
  10. lionagi/ln/concurrency/__init__.py +39 -31
  11. lionagi/ln/concurrency/_compat.py +65 -0
  12. lionagi/ln/concurrency/cancel.py +92 -109
  13. lionagi/ln/concurrency/errors.py +17 -17
  14. lionagi/ln/concurrency/patterns.py +249 -206
  15. lionagi/ln/concurrency/primitives.py +257 -216
  16. lionagi/ln/concurrency/resource_tracker.py +42 -155
  17. lionagi/ln/concurrency/task.py +55 -73
  18. lionagi/ln/concurrency/throttle.py +3 -0
  19. lionagi/ln/concurrency/utils.py +1 -0
  20. lionagi/ln/fuzzy/__init__.py +15 -0
  21. lionagi/ln/{_extract_json.py → fuzzy/_extract_json.py} +22 -9
  22. lionagi/ln/{_fuzzy_json.py → fuzzy/_fuzzy_json.py} +14 -8
  23. lionagi/ln/fuzzy/_fuzzy_match.py +172 -0
  24. lionagi/ln/fuzzy/_fuzzy_validate.py +46 -0
  25. lionagi/ln/fuzzy/_string_similarity.py +332 -0
  26. lionagi/ln/{_models.py → types.py} +153 -4
  27. lionagi/operations/flow.py +2 -1
  28. lionagi/operations/operate/operate.py +26 -16
  29. lionagi/protocols/contracts.py +46 -0
  30. lionagi/protocols/generic/event.py +6 -6
  31. lionagi/protocols/generic/processor.py +9 -5
  32. lionagi/protocols/ids.py +82 -0
  33. lionagi/protocols/types.py +10 -12
  34. lionagi/service/connections/match_endpoint.py +9 -0
  35. lionagi/service/connections/providers/nvidia_nim_.py +100 -0
  36. lionagi/utils.py +34 -64
  37. lionagi/version.py +1 -1
  38. {lionagi-0.15.13.dist-info → lionagi-0.16.0.dist-info}/METADATA +4 -2
  39. {lionagi-0.15.13.dist-info → lionagi-0.16.0.dist-info}/RECORD +41 -33
  40. lionagi/ln/_types.py +0 -146
  41. {lionagi-0.15.13.dist-info → lionagi-0.16.0.dist-info}/WHEEL +0 -0
  42. {lionagi-0.15.13.dist-info → lionagi-0.16.0.dist-info}/licenses/LICENSE +0 -0
lionagi/ln/_async_call.py CHANGED
@@ -7,11 +7,16 @@ import anyio
7
7
  import anyio.to_thread
8
8
  from pydantic import BaseModel
9
9
 
10
- from ._models import Params
11
10
  from ._to_list import to_list
12
- from ._types import T, Unset, not_sentinel
13
11
  from .concurrency import Lock as ConcurrencyLock
14
- from .concurrency import Semaphore, create_task_group, is_coro_func
12
+ from .concurrency import (
13
+ Semaphore,
14
+ create_task_group,
15
+ get_cancelled_exc_class,
16
+ is_coro_func,
17
+ move_on_after,
18
+ )
19
+ from .types import Params, T, Unset, not_sentinel
15
20
 
16
21
  __all__ = (
17
22
  "alcall",
@@ -101,7 +106,7 @@ async def alcall(
101
106
  if coro_func:
102
107
  # Async function
103
108
  if retry_timeout is not None:
104
- with anyio.move_on_after(retry_timeout) as cancel_scope:
109
+ with move_on_after(retry_timeout) as cancel_scope:
105
110
  result = await func(item, **kwargs)
106
111
  if cancel_scope.cancelled_caught:
107
112
  raise asyncio.TimeoutError(
@@ -113,7 +118,7 @@ async def alcall(
113
118
  else:
114
119
  # Sync function
115
120
  if retry_timeout is not None:
116
- with anyio.move_on_after(retry_timeout) as cancel_scope:
121
+ with move_on_after(retry_timeout) as cancel_scope:
117
122
  result = await anyio.to_thread.run_sync(
118
123
  func, item, **kwargs
119
124
  )
@@ -134,7 +139,7 @@ async def alcall(
134
139
  return index, result
135
140
 
136
141
  # if cancelled, re-raise
137
- except anyio.get_cancelled_exc_class():
142
+ except get_cancelled_exc_class():
138
143
  raise
139
144
 
140
145
  # handle other exceptions
@@ -173,7 +178,7 @@ async def alcall(
173
178
  # Execute all tasks using task group
174
179
  async with create_task_group() as tg:
175
180
  for idx, item in enumerate(input_):
176
- await tg.start_soon(run_and_store, item, idx)
181
+ tg.start_soon(run_and_store, item, idx)
177
182
  # Apply throttle delay between starting tasks
178
183
  if throttle_delay and idx < len(input_) - 1:
179
184
  await anyio.sleep(throttle_delay)
@@ -292,6 +297,4 @@ class BcallParams(AlcallParams):
292
297
  self, input_: list[Any], func: Callable[..., T], **kw
293
298
  ) -> list[T]:
294
299
  kwargs = {**self.default_kw(), **kw}
295
- func = self._func
296
-
297
- return await func(input_, func, self.batch_size, **kwargs)
300
+ return await bcall(input_, func, self.batch_size, **kwargs)
lionagi/ln/_hash.py CHANGED
@@ -1,5 +1,8 @@
1
+ from __future__ import annotations
2
+
1
3
  import copy
2
4
 
5
+ import msgspec
3
6
  from pydantic import BaseModel as PydanticBaseModel
4
7
 
5
8
  __all__ = ("hash_dict",)
@@ -12,6 +15,7 @@ _TYPE_MARKER_TUPLE = 2
12
15
  _TYPE_MARKER_SET = 3
13
16
  _TYPE_MARKER_FROZENSET = 4
14
17
  _TYPE_MARKER_PYDANTIC = 5 # Distinguishes dumped Pydantic models
18
+ _TYPE_MARKER_MSGSPEC = 6 # Distinguishes msgspec Structs
15
19
 
16
20
 
17
21
  def _generate_hashable_representation(item: any) -> any:
@@ -23,6 +27,14 @@ def _generate_hashable_representation(item: any) -> any:
23
27
  if isinstance(item, _PRIMITIVE_TYPES):
24
28
  return item
25
29
 
30
+ # Handle msgspec Structs
31
+ if isinstance(item, msgspec.Struct):
32
+ # Use msgspec.to_builtins for efficient conversion to built-in types
33
+ return (
34
+ _TYPE_MARKER_MSGSPEC,
35
+ _generate_hashable_representation(msgspec.to_builtins(item)),
36
+ )
37
+
26
38
  if isinstance(item, PydanticBaseModel):
27
39
  # Process the Pydantic model by first dumping it to a dict, then processing that dict.
28
40
  # The type marker distinguishes this from a regular dictionary.
@@ -58,9 +70,15 @@ def _generate_hashable_representation(item: any) -> any:
58
70
  try: # Attempt direct sort for comparable elements
59
71
  sorted_elements = sorted(list(item))
60
72
  except TypeError: # Fallback for unorderable mixed types
61
- sorted_elements = sorted(
62
- list(item), key=lambda x: (str(type(x)), str(x))
63
- )
73
+
74
+ def sort_key(x):
75
+ # Deterministic ordering across mixed, unorderable types
76
+ # Sort strictly by textual type then textual value.
77
+ # This also naturally places bool before int because
78
+ # "<class 'bool'>" < "<class 'int'>" lexicographically.
79
+ return (str(type(x)), str(x))
80
+
81
+ sorted_elements = sorted(list(item), key=sort_key)
64
82
  return (
65
83
  _TYPE_MARKER_FROZENSET,
66
84
  tuple(
@@ -73,9 +91,12 @@ def _generate_hashable_representation(item: any) -> any:
73
91
  try:
74
92
  sorted_elements = sorted(list(item))
75
93
  except TypeError:
76
- sorted_elements = sorted(
77
- list(item), key=lambda x: (str(type(x)), str(x))
78
- )
94
+ # For mixed types, use a deterministic, portable sort key
95
+ def sort_key(x):
96
+ # Sort by textual type then textual value for stability.
97
+ return (str(type(x)), str(x))
98
+
99
+ sorted_elements = sorted(list(item), key=sort_key)
79
100
  return (
80
101
  _TYPE_MARKER_SET,
81
102
  tuple(
@@ -87,8 +108,12 @@ def _generate_hashable_representation(item: any) -> any:
87
108
  # Fallback for other types (e.g., custom objects not derived from the above)
88
109
  try:
89
110
  return str(item)
90
- except Exception: # If str() fails for some reason
91
- return repr(item)
111
+ except Exception:
112
+ try:
113
+ return repr(item)
114
+ except Exception:
115
+ # If both str() and repr() fail, return a stable fallback based on type and id
116
+ return f"<unhashable:{type(item).__name__}:{id(item)}>"
92
117
 
93
118
 
94
119
  def hash_dict(data: any, strict: bool = False) -> int:
lionagi/ln/_list_call.py CHANGED
@@ -1,14 +1,12 @@
1
1
  from collections.abc import Callable, Iterable
2
- from dataclasses import dataclass
3
- from typing import Any, ClassVar, TypeVar
2
+ from typing import Any, TypeVar
4
3
 
5
- from ._models import Params
6
4
  from ._to_list import to_list
7
5
 
8
6
  R = TypeVar("R")
9
7
  T = TypeVar("T")
10
8
 
11
- __all__ = ("lcall", "LcallParams")
9
+ __all__ = ("lcall",)
12
10
 
13
11
 
14
12
  def lcall(
@@ -97,34 +95,3 @@ def lcall(
97
95
  )
98
96
 
99
97
  return out
100
-
101
-
102
- @dataclass(slots=True, frozen=True, init=False)
103
- class LcallParams(Params):
104
- _func: ClassVar[Any] = lcall
105
-
106
- # input processing
107
- input_flatten: bool
108
- """If True, recursively flatten input to a flat list"""
109
- input_dropna: bool
110
- """If True, remove None and undefined values from input."""
111
- input_unique: bool
112
- input_use_values: bool
113
- input_flatten_tuple_set: bool
114
-
115
- # output processing
116
- output_flatten: bool
117
- """If True, recursively flatten output to a flat list."""
118
- output_dropna: bool
119
- """If True, remove None and undefined values."""
120
- output_unique: bool
121
- """If True, remove duplicates (requires output_flatten=True)."""
122
- output_use_values: bool
123
- """If True, extract values from enums/mappings."""
124
- output_flatten_tuple_set: bool
125
- """If True, include tuples and sets in flattening."""
126
-
127
- def __call__(self, input_: Any, *args, **kw) -> list:
128
- """Convert parameters to a list."""
129
- f = self.as_partial()
130
- return f(input_, *args, **kw)
lionagi/ln/_to_list.py CHANGED
@@ -3,21 +3,24 @@ from dataclasses import dataclass
3
3
  from enum import Enum as _Enum
4
4
  from typing import Any, ClassVar
5
5
 
6
+ from msgspec import Struct
6
7
  from pydantic import BaseModel
7
8
  from pydantic_core import PydanticUndefinedType
8
9
 
9
10
  from ._hash import hash_dict
10
- from ._models import Params
11
- from ._types import UndefinedType, UnsetType
11
+ from .types import Params, UndefinedType, UnsetType
12
12
 
13
13
  __all__ = ("to_list", "ToListParams")
14
14
 
15
15
 
16
- _SKIP_TYPE = (str, bytes, bytearray, Mapping, BaseModel, _Enum)
17
- _TUPLE_SET_TYPES = (tuple, set, frozenset)
18
- _SKIP_TUPLE_SET = (*_SKIP_TYPE, *_TUPLE_SET_TYPES)
16
+ _BYTE_LIKE = (str, bytes, bytearray)
17
+ _MODEL_LIKE = (BaseModel, Struct)
18
+ _MAP_LIKE = (Mapping, *_MODEL_LIKE)
19
+ _TUPLE_SET = (tuple, set, frozenset)
19
20
  _SINGLETONE_TYPES = (UndefinedType, UnsetType, PydanticUndefinedType)
20
- _BYTE_LIKE_TYPES = (str, bytes, bytearray)
21
+
22
+ _SKIP_TYPE = (*_BYTE_LIKE, *_MAP_LIKE, _Enum)
23
+ _SKIP_TUPLE_SET = (*_SKIP_TYPE, *_TUPLE_SET)
21
24
 
22
25
 
23
26
  def to_list(
@@ -104,7 +107,7 @@ def to_list(
104
107
  else list(members)
105
108
  )
106
109
 
107
- if isinstance(input_, _BYTE_LIKE_TYPES):
110
+ if isinstance(input_, _BYTE_LIKE):
108
111
  return list(input_) if use_values else [input_]
109
112
 
110
113
  if isinstance(input_, Mapping):
@@ -117,9 +120,7 @@ def to_list(
117
120
  if isinstance(input_, BaseModel):
118
121
  return [input_]
119
122
 
120
- if isinstance(input_, Iterable) and not isinstance(
121
- input_, _BYTE_LIKE_TYPES
122
- ):
123
+ if isinstance(input_, Iterable) and not isinstance(input_, _BYTE_LIKE):
123
124
  return list(input_)
124
125
 
125
126
  return [input_]
@@ -133,24 +134,46 @@ def to_list(
133
134
  if unique:
134
135
  seen = set()
135
136
  out = []
136
- try:
137
- return [x for x in processed if not (x in seen or seen.add(x))]
138
- except TypeError:
139
- for i in processed:
140
- hash_value = None
141
- try:
142
- hash_value = hash(i)
143
- except TypeError:
144
- if isinstance(i, (BaseModel, Mapping)):
145
- hash_value = hash_dict(i)
146
- else:
147
- raise ValueError(
148
- "Unhashable type encountered in list unique value processing."
149
- )
150
- if hash_value not in seen:
151
- seen.add(hash_value)
152
- out.append(i)
153
- return out
137
+ use_hash_fallback = False
138
+ for i in processed:
139
+ try:
140
+ if not use_hash_fallback:
141
+ # Direct approach - try to use the item as hash key
142
+ if i not in seen:
143
+ seen.add(i)
144
+ out.append(i)
145
+ else:
146
+ # Hash-based approach for unhashable items
147
+ hash_value = (
148
+ hash(i)
149
+ if hasattr(i, "__hash__") and i.__hash__ is not None
150
+ else hash_dict(i)
151
+ )
152
+ if hash_value not in seen:
153
+ seen.add(hash_value)
154
+ out.append(i)
155
+ except TypeError:
156
+ # Switch to hash-based approach and restart
157
+ if not use_hash_fallback:
158
+ use_hash_fallback = True
159
+ seen = set()
160
+ out = []
161
+ # Restart from beginning with hash-based approach
162
+ for j in processed:
163
+ try:
164
+ hash_value = hash(j)
165
+ except TypeError:
166
+ if isinstance(j, _MAP_LIKE):
167
+ hash_value = hash_dict(j)
168
+ else:
169
+ raise ValueError(
170
+ "Unhashable type encountered in list unique value processing."
171
+ )
172
+ if hash_value not in seen:
173
+ seen.add(hash_value)
174
+ out.append(j)
175
+ break
176
+ return out
154
177
 
155
178
  return processed
156
179
 
lionagi/ln/_utils.py ADDED
@@ -0,0 +1,156 @@
1
+ import importlib.util
2
+ import uuid
3
+ from datetime import datetime, timezone
4
+ from pathlib import Path as StdPath
5
+ from typing import Any
6
+
7
+ from anyio import Path as AsyncPath
8
+
9
+ __all__ = (
10
+ "now_utc",
11
+ "acreate_path",
12
+ "get_bins",
13
+ "import_module",
14
+ "is_import_installed",
15
+ )
16
+
17
+
18
def now_utc() -> datetime:
    """Return the current moment as a timezone-aware datetime in UTC."""
    return datetime.now(tz=timezone.utc)
20
+
21
+
22
async def acreate_path(
    directory: StdPath | AsyncPath | str,
    filename: str,
    extension: str = None,
    timestamp: bool = False,
    dir_exist_ok: bool = True,
    file_exist_ok: bool = False,
    time_prefix: bool = False,
    timestamp_format: str | None = None,
    random_hash_digits: int = 0,
) -> AsyncPath:
    """Build a new file path asynchronously, with optional timestamp and random suffix.

    All filesystem operations use AnyIO's non-blocking Path.

    Raises:
        ValueError: If the filename contains a backslash.
        FileExistsError: If the target exists and ``file_exist_ok`` is False.
    """
    # Forward slashes inside `filename` denote subdirectories under `directory`.
    if "/" in filename:
        *subdirs, filename = filename.split("/")
        directory = AsyncPath(directory) / "/".join(subdirs)

    if "\\" in filename:
        raise ValueError("Filename cannot contain directory separators.")

    # Normalize to AsyncPath regardless of the input type.
    directory = AsyncPath(directory)

    # An extension embedded in the filename wins over the `extension` argument.
    if "." in filename:
        name, ext = filename.rsplit(".", 1)
    else:
        name, ext = filename, extension
    ext = f".{ext.lstrip('.')}" if ext else ""

    if timestamp:
        # datetime.now() is cheap and non-blocking; no thread offload needed.
        ts_str = datetime.now().strftime(timestamp_format or "%Y%m%d%H%M%S")
        name = f"{ts_str}_{name}" if time_prefix else f"{name}_{ts_str}"

    if random_hash_digits > 0:
        name = f"{name}-{uuid.uuid4().hex[:random_hash_digits]}"

    full_path = directory / f"{name}{ext}"

    # Async I/O: create parents, then guard against clobbering existing files.
    await full_path.parent.mkdir(parents=True, exist_ok=dir_exist_ok)
    if await full_path.exists() and not file_exist_ok:
        raise FileExistsError(
            f"File {full_path} already exists and file_exist_ok is False."
        )

    return full_path
74
+
75
+
76
def get_bins(input_: list[str], upper: int) -> list[list[int]]:
    """Organizes indices of strings into bins based on a cumulative upper limit.

    Args:
        input_ (List[str]): The list of strings to be binned.
        upper (int): The cumulative length upper limit for each bin.

    Returns:
        List[List[int]]: A list of bins, each bin is a list of indices from the input list.
    """
    bins: list[list[int]] = []
    bin_indices: list[int] = []
    running_total = 0
    for position, text in enumerate(input_):
        # A bin's running total stays strictly below `upper`.
        if running_total + len(text) < upper:
            bin_indices.append(position)
            running_total += len(text)
        else:
            # Close the current bin and start a fresh one with this item.
            bins.append(bin_indices)
            bin_indices = [position]
            running_total = len(text)
    if bin_indices:
        bins.append(bin_indices)
    return bins
100
+
101
+
102
+ def import_module(
103
+ package_name: str,
104
+ module_name: str = None,
105
+ import_name: str | list = None,
106
+ ) -> Any:
107
+ """
108
+ Import a module by its path.
109
+
110
+ Args:
111
+ module_path: The path of the module to import.
112
+
113
+ Returns:
114
+ The imported module.
115
+
116
+ Raises:
117
+ ImportError: If the module cannot be imported.
118
+ """
119
+ try:
120
+ full_import_path = (
121
+ f"{package_name}.{module_name}" if module_name else package_name
122
+ )
123
+
124
+ if import_name:
125
+ import_name = (
126
+ [import_name]
127
+ if not isinstance(import_name, list)
128
+ else import_name
129
+ )
130
+ a = __import__(
131
+ full_import_path,
132
+ fromlist=import_name,
133
+ )
134
+ if len(import_name) == 1:
135
+ return getattr(a, import_name[0])
136
+ return [getattr(a, name) for name in import_name]
137
+ else:
138
+ return __import__(full_import_path)
139
+
140
+ except ImportError as e:
141
+ raise ImportError(
142
+ f"Failed to import module {full_import_path}: {e}"
143
+ ) from e
144
+
145
+
146
def is_import_installed(package_name: str) -> bool:
    """
    Check if a package is installed.

    Args:
        package_name: The name of the package to check.

    Returns:
        bool: True if the package is installed, False otherwise.
    """
    # find_spec returns None when no importable distribution is found.
    spec = importlib.util.find_spec(package_name)
    return spec is not None
@@ -1,23 +1,25 @@
1
- """Structured concurrency primitives for pynector.
2
-
3
- This module provides structured concurrency primitives using AnyIO,
4
- which allows for consistent behavior across asyncio and trio backends.
5
- """
6
-
7
- from .cancel import CancelScope, fail_after, move_on_after
8
- from .errors import get_cancelled_exc_class, shield
9
- from .patterns import (
10
- ConnectionPool,
11
- WorkerPool,
12
- parallel_requests,
13
- retry_with_timeout,
1
+ from ._compat import ExceptionGroup
2
+ from .cancel import (
3
+ CancelScope,
4
+ effective_deadline,
5
+ fail_after,
6
+ fail_at,
7
+ move_on_after,
8
+ move_on_at,
9
+ )
10
+ from .errors import get_cancelled_exc_class, is_cancelled, shield
11
+ from .patterns import CompletionStream, bounded_map, gather, race, retry
12
+ from .primitives import (
13
+ CapacityLimiter,
14
+ Condition,
15
+ Event,
16
+ Lock,
17
+ Queue,
18
+ Semaphore,
14
19
  )
15
- from .primitives import CapacityLimiter, Condition, Event, Lock, Semaphore
16
20
  from .resource_tracker import (
17
- ResourceTracker,
18
- cleanup_check,
19
- get_global_tracker,
20
- resource_leak_detector,
21
+ LeakInfo,
22
+ LeakTracker,
21
23
  track_resource,
22
24
  untrack_resource,
23
25
  )
@@ -25,29 +27,35 @@ from .task import TaskGroup, create_task_group
25
27
  from .utils import is_coro_func
26
28
 
27
29
  ConcurrencyEvent = Event
30
+
28
31
  __all__ = (
29
- "TaskGroup",
30
- "create_task_group",
31
32
  "CancelScope",
32
- "move_on_after",
33
33
  "fail_after",
34
- "ConnectionPool",
35
- "WorkerPool",
36
- "parallel_requests",
37
- "retry_with_timeout",
34
+ "move_on_after",
35
+ "fail_at",
36
+ "move_on_at",
37
+ "effective_deadline",
38
+ "get_cancelled_exc_class",
39
+ "is_cancelled",
40
+ "shield",
41
+ "TaskGroup",
42
+ "create_task_group",
38
43
  "Lock",
39
44
  "Semaphore",
40
45
  "CapacityLimiter",
46
+ "Queue",
41
47
  "Event",
42
48
  "Condition",
43
- "get_cancelled_exc_class",
44
- "shield",
45
- "ResourceTracker",
46
- "resource_leak_detector",
49
+ "gather",
50
+ "race",
51
+ "bounded_map",
52
+ "retry",
53
+ "CompletionStream",
47
54
  "track_resource",
48
55
  "untrack_resource",
49
- "cleanup_check",
50
- "get_global_tracker",
56
+ "LeakInfo",
57
+ "LeakTracker",
51
58
  "is_coro_func",
52
59
  "ConcurrencyEvent",
60
+ "ExceptionGroup",
53
61
  )
@@ -0,0 +1,65 @@
1
+ """Compatibility layer for Python 3.10+ support.
2
+
3
+ Provides ExceptionGroup backport and other compatibility utilities.
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ import sys
9
+ from collections.abc import Sequence
10
+
11
# ExceptionGroup compatibility for Python 3.10
if sys.version_info >= (3, 11):
    # Python 3.11+ has built-in exception groups. Re-export both names.
    # NOTE: BaseExceptionGroup must be the real builtin, NOT an alias of
    # ExceptionGroup: ExceptionGroup only accepts Exception members, so
    # aliasing breaks construction/isinstance for groups that wrap
    # BaseException subclasses (e.g. KeyboardInterrupt, cancellation).
    from builtins import BaseExceptionGroup, ExceptionGroup

else:
    # Python 3.10: Use exceptiongroup backport
    try:
        from exceptiongroup import BaseExceptionGroup, ExceptionGroup
    except ImportError:
        # Fallback implementation for environments without exceptiongroup
        class BaseExceptionGroup(BaseException):  # type: ignore
            """Minimal BaseExceptionGroup implementation for Python 3.10 without exceptiongroup."""

            def __init__(
                self, message: str, exceptions: Sequence[BaseException]
            ) -> None:
                super().__init__(message)
                self.message = message
                self.exceptions = tuple(exceptions)

            def __str__(self) -> str:
                return (
                    f"{self.message} ({len(self.exceptions)} sub-exceptions)"
                )

        class ExceptionGroup(BaseExceptionGroup, Exception):  # type: ignore
            """Minimal ExceptionGroup implementation for Python 3.10 without exceptiongroup."""

            pass
44
+
45
+
46
+ def is_exception_group(exc: BaseException) -> bool:
47
+ """Check if exception is an ExceptionGroup (compatible across Python versions)."""
48
+ return isinstance(exc, BaseExceptionGroup)
49
+
50
+
51
+ def get_exception_group_exceptions(
52
+ exc: BaseException,
53
+ ) -> Sequence[BaseException]:
54
+ """Get exceptions from ExceptionGroup, or return single exception in list."""
55
+ if is_exception_group(exc):
56
+ return getattr(exc, "exceptions", (exc,))
57
+ return (exc,)
58
+
59
+
60
# Public API of this compatibility module.
__all__ = [
    "BaseExceptionGroup",
    "ExceptionGroup",
    "is_exception_group",
    "get_exception_group_exceptions",
]
65
+ ]