lionagi 0.14.11__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff shows the changes between two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (34)
  1. lionagi/libs/concurrency.py +1 -0
  2. lionagi/libs/token_transform/perplexity.py +2 -1
  3. lionagi/libs/token_transform/symbolic_compress_context.py +8 -7
  4. lionagi/ln/__init__.py +49 -0
  5. lionagi/ln/_async_call.py +293 -0
  6. lionagi/ln/_list_call.py +129 -0
  7. lionagi/ln/_models.py +126 -0
  8. lionagi/ln/_to_list.py +175 -0
  9. lionagi/ln/_types.py +146 -0
  10. lionagi/{libs → ln}/concurrency/__init__.py +4 -2
  11. lionagi/ln/concurrency/utils.py +14 -0
  12. lionagi/models/hashable_model.py +1 -2
  13. lionagi/operations/brainstorm/brainstorm.py +2 -1
  14. lionagi/operations/flow.py +3 -3
  15. lionagi/operations/plan/plan.py +3 -3
  16. lionagi/protocols/generic/pile.py +1 -1
  17. lionagi/service/hooks/_types.py +2 -2
  18. lionagi/session/branch.py +4 -2
  19. lionagi/utils.py +90 -510
  20. lionagi/version.py +1 -1
  21. {lionagi-0.14.11.dist-info → lionagi-0.15.0.dist-info}/METADATA +4 -4
  22. {lionagi-0.14.11.dist-info → lionagi-0.15.0.dist-info}/RECORD +32 -26
  23. lionagi/libs/hash/__init__.py +0 -3
  24. lionagi/libs/hash/manager.py +0 -26
  25. /lionagi/{libs/hash/hash_dict.py → ln/_hash.py} +0 -0
  26. /lionagi/{libs → ln}/concurrency/cancel.py +0 -0
  27. /lionagi/{libs → ln}/concurrency/errors.py +0 -0
  28. /lionagi/{libs → ln}/concurrency/patterns.py +0 -0
  29. /lionagi/{libs → ln}/concurrency/primitives.py +0 -0
  30. /lionagi/{libs → ln}/concurrency/resource_tracker.py +0 -0
  31. /lionagi/{libs → ln}/concurrency/task.py +0 -0
  32. /lionagi/{libs → ln}/concurrency/throttle.py +0 -0
  33. {lionagi-0.14.11.dist-info → lionagi-0.15.0.dist-info}/WHEEL +0 -0
  34. {lionagi-0.14.11.dist-info → lionagi-0.15.0.dist-info}/licenses/LICENSE +0 -0
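
Read together, the renames in this list indicate that the helpers under lionagi.libs.concurrency and lionagi.libs.hash now live in the new lionagi.ln package, and that lionagi/utils.py re-exports several of its former names from there (see the utils.py diff below). A minimal sketch of the import migration as it can be read from this diff; the exact public surface of lionagi.ln is not shown here, so the 0.15.0 import lines are assumptions:

    # Sketch only: inferred from the file renames and the utils.py diff below.
    # 0.14.11 style (these utils-level imports appear to keep working in 0.15.0,
    # since utils.py re-exports to_list/hash_dict and aliases UNDEFINED = Undefined):
    from lionagi.utils import UNDEFINED, to_list, hash_dict

    # 0.15.0 style, importing from the new package directly (assumed re-exports):
    from lionagi.ln import Undefined, UndefinedType, to_list, hash_dict
    from lionagi.ln import DataClass, KeysDict, Params
    from lionagi.ln.concurrency import is_coro_func  # moved from lionagi.libs.concurrency
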
lionagi/utils.py CHANGED
@@ -15,7 +15,6 @@ import shutil
  import subprocess
  import sys
  import uuid
- from abc import ABC
  from collections.abc import (
      AsyncGenerator,
      Callable,
@@ -25,27 +24,28 @@ from collections.abc import (
  )
  from concurrent.futures import ThreadPoolExecutor
  from datetime import datetime, timezone
- from enum import Enum
- from functools import lru_cache, partial
+ from enum import Enum as _Enum
+ from functools import partial
  from inspect import isclass
  from pathlib import Path
- from typing import (
-     Any,
-     Literal,
-     TypedDict,
-     TypeVar,
-     get_args,
-     get_origin,
-     overload,
- )
+ from typing import Any, Literal, TypeVar, get_args, get_origin

- import anyio
  from pydantic import BaseModel
  from pydantic_core import PydanticUndefinedType
+ from typing_extensions import deprecated

- from .libs.concurrency import Lock as ConcurrencyLock
- from .libs.concurrency import Semaphore, create_task_group
  from .libs.validate.xml_parser import xml_to_dict
+ from .ln import (
+     DataClass,
+     Enum,
+     KeysDict,
+     Params,
+     Undefined,
+     UndefinedType,
+     hash_dict,
+     to_list,
+ )
+ from .ln.concurrency import is_coro_func
  from .settings import Settings

  R = TypeVar("R")
@@ -54,6 +54,7 @@ B = TypeVar("B", bound=BaseModel)

  logger = logging.getLogger(__name__)

+ UNDEFINED = Undefined

  __all__ = (
      "UndefinedType",
@@ -76,11 +77,6 @@ __all__ = (
      "time",
      "fuzzy_parse_json",
      "fix_json_string",
-     "ToListParams",
-     "LCallParams",
-     "ALCallParams",
-     "BCallParams",
-     "CreatePathParams",
      "get_bins",
      "EventStatus",
      "logger",
@@ -90,77 +86,18 @@ __all__ = (
      "breakdown_pydantic_annotation",
      "run_package_manager_command",
      "StringEnum",
+     "Enum",
+     "hash_dict",
  )


  # --- General Global Utilities Types ---
-
-
+ @deprecated("String Enum is deprecated, use `Enum` instead.")
  class StringEnum(str, Enum):
-     @classmethod
-     def allowed(cls) -> tuple[str, ...]:
-         return tuple(e.value for e in cls)
-
-
- class UndefinedType:
-     def __init__(self) -> None:
-         self.undefined = True
-
-     def __bool__(self) -> Literal[False]:
-         return False
-
-     def __deepcopy__(self, memo):
-         # Ensure UNDEFINED is universal
-         return self
-
-     def __repr__(self) -> Literal["UNDEFINED"]:
-         return "UNDEFINED"
-
-     __slots__ = ["undefined"]
-
-
- class KeysDict(TypedDict, total=False):
-     """TypedDict for keys dictionary."""
-
-     key: Any  # Represents any key-type pair
-
-
- def hash_dict(data) -> int:
-     hashable_items = []
-     if isinstance(data, BaseModel):
-         data = data.model_dump()
-     for k, v in data.items():
-         if isinstance(v, (list, dict)):
-             # Convert unhashable types to JSON string for hashing
-             v = json.dumps(v, sort_keys=True)
-         elif not isinstance(v, (str, int, float, bool, type(None))):
-             # Convert other unhashable types to string representation
-             v = str(v)
-         hashable_items.append((k, v))
-     return hash(frozenset(hashable_items))
-
-
- class Params(BaseModel):
-     def keys(self):
-         return type(self).model_fields.keys()
-
-     def __call__(self, *args, **kwargs):
-         raise NotImplementedError(
-             "This method should be implemented in a subclass"
-         )
-
-
- class DataClass(ABC):
      pass


- # --- Create a global UNDEFINED object ---
- UNDEFINED = UndefinedType()
-
-
  # --- General Global Utilities Functions ---
-
-
  def time(
      *,
      tz: timezone = Settings.Config.TIMEZONE,
@@ -254,11 +191,6 @@ def is_same_dtype(
      return (result, dtype) if return_dtype else result


- @lru_cache(maxsize=None)
- def is_coro_func(func: Callable[..., Any]) -> bool:
-     return asyncio.iscoroutinefunction(func)
-
-
  async def custom_error_handler(
      error: Exception, error_map: dict[type, Callable[[Exception], None]]
  ) -> None:
@@ -270,206 +202,9 @@ async def custom_error_handler(
      raise error


- @overload
- def to_list(
-     input_: None | UndefinedType | PydanticUndefinedType,
-     /,
- ) -> list: ...
-
-
- @overload
- def to_list(
-     input_: str | bytes | bytearray,
-     /,
-     use_values: bool = False,
- ) -> list[str | bytes | bytearray]: ...
-
-
- @overload
- def to_list(
-     input_: Mapping,
-     /,
-     use_values: bool = False,
- ) -> list[Any]: ...
-
-
- @overload
- def to_list(
-     input_: Any,
-     /,
-     *,
-     flatten: bool = False,
-     dropna: bool = False,
-     unique: bool = False,
-     use_values: bool = False,
-     flatten_tuple_set: bool = False,
- ) -> list: ...
-
-
- def to_list(
-     input_: Any,
-     /,
-     *,
-     flatten: bool = False,
-     dropna: bool = False,
-     unique: bool = False,
-     use_values: bool = False,
-     flatten_tuple_set: bool = False,
- ) -> list:
-     """Convert input to a list with optional transformations.
-
-     Transforms various input types into a list with configurable processing
-     options for flattening, filtering, and value extraction.
-
-     Args:
-         input_: Value to convert to list.
-         flatten: If True, recursively flatten nested iterables.
-         dropna: If True, remove None and undefined values.
-         unique: If True, remove duplicates (requires flatten=True).
-         use_values: If True, extract values from enums/mappings.
-         flatten_tuple_items: If True, include tuples in flattening.
-         flatten_set_items: If True, include sets in flattening.
-
-     Returns:
-         list: Processed list based on input and specified options.
-
-     Raises:
-         ValueError: If unique=True is used without flatten=True.
-
-     Examples:
-         >>> to_list([1, [2, 3], 4], flatten=True)
-         [1, 2, 3, 4]
-         >>> to_list([1, None, 2], dropna=True)
-         [1, 2]
-     """
-
-     def _process_list(
-         lst: list[Any],
-         flatten: bool,
-         dropna: bool,
-     ) -> list[Any]:
-         """Process list according to flatten and dropna options.
-
-         Args:
-             lst: Input list to process.
-             flatten: Whether to flatten nested iterables.
-             dropna: Whether to remove None/undefined values.
-
-         Returns:
-             list: Processed list based on specified options.
-         """
-         result = []
-         skip_types = (str, bytes, bytearray, Mapping, BaseModel, Enum)
-
-         if not flatten_tuple_set:
-             skip_types += (tuple, set, frozenset)
-
-         for item in lst:
-             if dropna and (
-                 item is None
-                 or isinstance(item, (UndefinedType, PydanticUndefinedType))
-             ):
-                 continue
-
-             is_iterable = isinstance(item, Iterable)
-             should_skip = isinstance(item, skip_types)
-
-             if is_iterable and not should_skip:
-                 item_list = list(item)
-                 if flatten:
-                     result.extend(
-                         _process_list(
-                             item_list, flatten=flatten, dropna=dropna
-                         )
-                     )
-                 else:
-                     result.append(
-                         _process_list(
-                             item_list, flatten=flatten, dropna=dropna
-                         )
-                     )
-             else:
-                 result.append(item)
-
-         return result
-
-     def _to_list_type(input_: Any, use_values: bool) -> list[Any]:
-         """Convert input to initial list based on type.
-
-         Args:
-             input_: Value to convert to list.
-             use_values: Whether to extract values from containers.
-
-         Returns:
-             list: Initial list conversion of input.
-         """
-         if input_ is None or isinstance(
-             input_, (UndefinedType, PydanticUndefinedType)
-         ):
-             return []
-
-         if isinstance(input_, list):
-             return input_
-
-         if isinstance(input_, type) and issubclass(input_, Enum):
-             members = input_.__members__.values()
-             return (
-                 [member.value for member in members]
-                 if use_values
-                 else list(members)
-             )
-
-         if isinstance(input_, (str, bytes, bytearray)):
-             return list(input_) if use_values else [input_]
-
-         if isinstance(input_, Mapping):
-             return (
-                 list(input_.values())
-                 if use_values and hasattr(input_, "values")
-                 else [input_]
-             )
-
-         if isinstance(input_, BaseModel):
-             return [input_]
-
-         if isinstance(input_, Iterable) and not isinstance(
-             input_, (str, bytes, bytearray)
-         ):
-             return list(input_)
-
-         return [input_]
-
-     if unique and not flatten:
-         raise ValueError("unique=True requires flatten=True")
-
-     initial_list = _to_list_type(input_, use_values=use_values)
-     processed = _process_list(initial_list, flatten=flatten, dropna=dropna)
-
-     if unique:
-         seen = set()
-         out = []
-         try:
-             return [x for x in processed if not (x in seen or seen.add(x))]
-         except TypeError:
-             for i in processed:
-                 hash_value = None
-                 try:
-                     hash_value = hash(i)
-                 except TypeError:
-                     if isinstance(i, (BaseModel, Mapping)):
-                         hash_value = hash_dict(i)
-                     else:
-                         raise ValueError(
-                             "Unhashable type encountered in list unique value processing."
-                         )
-                 if hash_value not in seen:
-                     seen.add(hash_value)
-                     out.append(i)
-             return out
-
-     return processed
-
-
+ @deprecated(
+     "Use `lionagi.ln.lcall` instead, function signature has changed, this will be removed in future versions."
+ )
  def lcall(
      input_: Iterable[T] | T,
      func: Callable[[T], R] | Iterable[Callable[[T], R]],
@@ -514,67 +249,23 @@ def lcall(
          >>> lcall([1, [2, 3]], str, flatten=True)
          ['1', '2', '3']
      """
-     # Validate and extract callable function
-     if not callable(func):
-         try:
-             func_list = list(func)
-             if len(func_list) != 1 or not callable(func_list[0]):
-                 raise ValueError(
-                     "func must contain exactly one callable function."
-                 )
-             func = func_list[0]
-         except TypeError as e:
-             raise ValueError(
-                 "func must be callable or iterable with one callable."
-             ) from e
-
-     # Process input based on sanitization flag
-     if sanitize_input:
-         input_ = to_list(
-             input_,
-             flatten=True,
-             dropna=True,
-             unique=unique_input,
-             flatten_tuple_set=flatten_tuple_set,
-             use_values=use_input_values,
-         )
-     else:
-         if not isinstance(input_, list):
-             try:
-                 input_ = list(input_)
-             except TypeError:
-                 input_ = [input_]
-
-     # Validate output processing options
-     if unique_output and not (flatten or dropna):
-         raise ValueError(
-             "unique_output requires flatten or dropna for post-processing."
-         )
-
-     # Process elements and collect results
-     out = []
-     append = out.append
-
-     for item in input_:
-         try:
-             result = func(item, *args, **kwargs)
-             append(result)
-         except InterruptedError:
-             return out
-         except Exception:
-             raise
-
-     # Apply output processing if requested
-     if flatten or dropna:
-         out = to_list(
-             out,
-             flatten=flatten,
-             dropna=dropna,
-             unique=unique_output,
-             flatten_tuple_set=flatten_tuple_set,
-         )
+     from lionagi.ln import lcall as _lcall

-     return out
+     return _lcall(
+         input_,
+         func,
+         *args,
+         input_flatten=sanitize_input,
+         input_dropna=sanitize_input,
+         input_flatten_tuple_set=flatten_tuple_set,
+         input_unique=unique_input,
+         input_use_values=use_input_values,
+         output_flatten=flatten,
+         output_dropna=dropna,
+         output_flatten_tuple_set=flatten_tuple_set,
+         output_unique=unique_output,
+         **kwargs,
+     )


  async def alcall(
@@ -628,147 +319,34 @@ async def alcall(
          asyncio.TimeoutError: If a call times out and no default is provided.
          Exception: If retries are exhausted and no default is provided.
      """
+     from .ln._async_call import alcall as _alcall

-     # Validate func is a single callable
-     if not callable(func):
-         # If func is not callable, maybe it's an iterable. Extract one callable if possible.
-         try:
-             func_list = list(func)  # Convert iterable to list
-         except TypeError:
-             raise ValueError(
-                 "func must be callable or an iterable containing one callable."
-             )
-
-         # Ensure exactly one callable is present
-         if len(func_list) != 1 or not callable(func_list[0]):
-             raise ValueError("Only one callable function is allowed.")
-
-         func = func_list[0]
-
-     # Process input if requested
-     if sanitize_input:
-         input_ = to_list(
-             input_,
-             flatten=True,
-             dropna=True,
-             unique=unique_input,
-             flatten_tuple_set=flatten_tuple_set,
-         )
-     else:
-         if not isinstance(input_, list):
-             # Attempt to iterate
-             if isinstance(input_, BaseModel):
-                 # Pydantic model, convert to list
-                 input_ = [input_]
-             else:
-                 try:
-                     iter(input_)
-                     # It's iterable (tuple), convert to list of its contents
-                     input_ = list(input_)
-                 except TypeError:
-                     # Not iterable, just wrap in a list
-                     input_ = [input_]
-
-     # Optional initial delay before processing
-     if initial_delay:
-         await anyio.sleep(initial_delay)
-
-     semaphore = Semaphore(max_concurrent) if max_concurrent else None
-     throttle_delay = throttle_period or 0
-     coro_func = is_coro_func(func)
-
-     async def call_func(item: Any) -> T:
-         if coro_func:
-             # Async function
-             if retry_timeout is not None:
-                 with anyio.move_on_after(retry_timeout) as cancel_scope:
-                     result = await func(item, **kwargs)
-                 if cancel_scope.cancelled_caught:
-                     raise asyncio.TimeoutError(
-                         f"Function call timed out after {retry_timeout}s"
-                     )
-                 return result
-             else:
-                 return await func(item, **kwargs)
-         else:
-             # Sync function
-             if retry_timeout is not None:
-                 with anyio.move_on_after(retry_timeout) as cancel_scope:
-                     result = await anyio.to_thread.run_sync(
-                         func, item, **kwargs
-                     )
-                 if cancel_scope.cancelled_caught:
-                     raise asyncio.TimeoutError(
-                         f"Function call timed out after {retry_timeout}s"
-                     )
-                 return result
-             else:
-                 return await anyio.to_thread.run_sync(func, item, **kwargs)
-
-     async def execute_task(i: Any, index: int) -> Any:
-         attempts = 0
-         current_delay = retry_delay
-         while True:
-             try:
-                 result = await call_func(i)
-                 return index, result
-             except anyio.get_cancelled_exc_class():
-                 raise
-
-             except Exception:
-                 attempts += 1
-                 if attempts <= num_retries:
-                     if current_delay:
-                         await anyio.sleep(current_delay)
-                         current_delay *= backoff_factor
-                     # Retry loop continues
-                 else:
-                     # Exhausted retries
-                     if retry_default is not UNDEFINED:
-                         return index, retry_default
-                     # No default, re-raise
-                     raise
-
-     async def task_wrapper(item: Any, idx: int) -> Any:
-         if semaphore:
-             async with semaphore:
-                 result = await execute_task(item, idx)
-         else:
-             result = await execute_task(item, idx)
-
-         return result
-
-     # Use task group for structured concurrency
-     results = []
-     results_lock = ConcurrencyLock()  # Protect results list
-
-     async def run_and_store(item: Any, idx: int):
-         result = await task_wrapper(item, idx)
-         async with results_lock:
-             results.append(result)
-
-     # Execute all tasks using task group
-     async with create_task_group() as tg:
-         for idx, item in enumerate(input_):
-             await tg.start_soon(run_and_store, item, idx)
-             # Apply throttle delay between starting tasks
-             if throttle_delay and idx < len(input_) - 1:
-                 await anyio.sleep(throttle_delay)
-
-     # Sort by original index
-     results.sort(key=lambda x: x[0])
-
-     # (index, result)
-     output_list = [r[1] for r in results]
-     return to_list(
-         output_list,
-         flatten=flatten,
-         dropna=dropna,
-         unique=unique_output,
-         flatten_tuple_set=flatten_tuple_set,
+     return await _alcall(
+         input_,
+         func,
+         input_flatten=sanitize_input,
+         input_dropna=sanitize_input,
+         input_unique=unique_input,
+         input_flatten_tuple_set=flatten_tuple_set,
+         output_flatten=flatten,
+         output_dropna=dropna,
+         output_unique=unique_output,
+         output_flatten_tuple_set=flatten_tuple_set,
+         delay_before_start=initial_delay,
+         retry_initial_deplay=retry_delay,
+         retry_backoff=backoff_factor,
+         retry_default=retry_default,
+         retry_timeout=retry_timeout,
+         retry_attempts=num_retries,
+         max_concurrent=max_concurrent,
+         throttle_period=throttle_period,
+         **kwargs,
      )


+ @deprecated(
+     "Use `lionagi.ln.alcall` instead, function signature has changed, this will be removed in future versions."
+ )
  async def bcall(
      input_: Any,
      func: Callable[..., T],
@@ -791,29 +369,31 @@ async def bcall(
      flatten_tuple_set: bool = False,
      **kwargs: Any,
  ) -> AsyncGenerator[list[T | tuple[T, float]], None]:
-     input_ = to_list(input_, flatten=True, dropna=True)
-
-     for i in range(0, len(input_), batch_size):
-         batch = input_[i : i + batch_size]  # noqa: E203
-         yield await alcall(
-             batch,
-             func,
-             sanitize_input=sanitize_input,
-             unique_input=unique_input,
-             num_retries=num_retries,
-             initial_delay=initial_delay,
-             retry_delay=retry_delay,
-             backoff_factor=backoff_factor,
-             retry_default=retry_default,
-             retry_timeout=retry_timeout,
-             max_concurrent=max_concurrent,
-             throttle_period=throttle_period,
-             flatten=flatten,
-             dropna=dropna,
-             unique_output=unique_output,
-             flatten_tuple_set=flatten_tuple_set,
-             **kwargs,
-         )
+     from .ln._async_call import bcall as _bcall
+
+     async for i in _bcall(
+         input_,
+         func,
+         batch_size,
+         input_flatten=sanitize_input,
+         input_dropna=sanitize_input,
+         input_unique=unique_input,
+         input_flatten_tuple_set=flatten_tuple_set,
+         output_flatten=flatten,
+         output_dropna=dropna,
+         output_unique=unique_output,
+         output_flatten_tuple_set=flatten_tuple_set,
+         delay_before_start=initial_delay,
+         retry_initial_deplay=retry_delay,
+         retry_backoff=backoff_factor,
+         retry_default=retry_default,
+         retry_timeout=retry_timeout,
+         retry_attempts=num_retries,
+         max_concurrent=max_concurrent,
+         throttle_period=throttle_period,
+         **kwargs,
+     ):
+         yield i


  def create_path(
@@ -1163,7 +743,7 @@ def _recur_to_dict(
          ]
          return type(input_)(processed)

-     elif isinstance(input_, type) and issubclass(input_, Enum):
+     elif isinstance(input_, type) and issubclass(input_, _Enum):
          try:
              obj_dict = _to_dict(input_, **kwargs)
              return _recur_to_dict(
@@ -1290,7 +870,7 @@ def _to_dict(
      if isinstance(input_, set):
          return _set_to_dict(input_)

-     if isinstance(input_, type) and issubclass(input_, Enum):
+     if isinstance(input_, type) and issubclass(input_, _Enum):
          return _enum_to_dict(input_, use_enum_values=use_enum_values)

      if isinstance(input_, Mapping):
@@ -1439,7 +1019,7 @@ def throttle(
      Returns:
          The throttled function.
      """
-     from lionagi.libs.concurrency.throttle import Throttle
+     from .ln.concurrency.throttle import Throttle

      if not is_coro_func(func):
          func = force_async(func)
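
For call sites, the deprecated lcall/alcall/bcall wrappers above spell out the keyword renames: sanitize_input fans out to input_flatten and input_dropna, unique_input becomes input_unique, flatten/dropna/unique_output become output_flatten/output_dropna/output_unique, initial_delay becomes delay_before_start, num_retries becomes retry_attempts, backoff_factor becomes retry_backoff, and retry_delay is forwarded as retry_initial_deplay (spelling taken verbatim from the wrapper code). A small before/after sketch using lcall, assuming lionagi.ln.lcall exists as the deprecation message states; illustrative only:

    # Sketch only: keyword mapping read off the deprecated wrapper in this diff.
    from lionagi.utils import lcall            # 0.14.x-style wrapper, now deprecated
    from lionagi.ln import lcall as ln_lcall   # new home named by the deprecation notice

    data = [1, [2, None, 3]]

    # Old keyword names (0.14.11):
    old = lcall(data, str, sanitize_input=True, flatten=True, dropna=True)

    # New keyword names that the wrapper forwards to (0.15.0):
    new = ln_lcall(
        data,
        str,
        input_flatten=True,
        input_dropna=True,
        output_flatten=True,
        output_dropna=True,
    )
    print(old, new)  # both expected to print ['1', '2', '3']
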
lionagi/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.14.11"
+ __version__ = "0.15.0"