symbolicai 0.21.0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. symai/__init__.py +96 -64
  2. symai/backend/base.py +93 -80
  3. symai/backend/engines/drawing/engine_bfl.py +12 -11
  4. symai/backend/engines/drawing/engine_gpt_image.py +108 -87
  5. symai/backend/engines/embedding/engine_llama_cpp.py +20 -24
  6. symai/backend/engines/embedding/engine_openai.py +3 -5
  7. symai/backend/engines/execute/engine_python.py +6 -5
  8. symai/backend/engines/files/engine_io.py +74 -67
  9. symai/backend/engines/imagecaptioning/engine_blip2.py +3 -3
  10. symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +54 -38
  11. symai/backend/engines/index/engine_pinecone.py +23 -24
  12. symai/backend/engines/index/engine_vectordb.py +16 -14
  13. symai/backend/engines/lean/engine_lean4.py +38 -34
  14. symai/backend/engines/neurosymbolic/__init__.py +41 -13
  15. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +262 -182
  16. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +263 -191
  17. symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +53 -49
  18. symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +212 -211
  19. symai/backend/engines/neurosymbolic/engine_groq.py +87 -63
  20. symai/backend/engines/neurosymbolic/engine_huggingface.py +21 -24
  21. symai/backend/engines/neurosymbolic/engine_llama_cpp.py +44 -46
  22. symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +256 -229
  23. symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +270 -150
  24. symai/backend/engines/ocr/engine_apilayer.py +6 -8
  25. symai/backend/engines/output/engine_stdout.py +1 -4
  26. symai/backend/engines/search/engine_openai.py +7 -7
  27. symai/backend/engines/search/engine_perplexity.py +5 -5
  28. symai/backend/engines/search/engine_serpapi.py +12 -14
  29. symai/backend/engines/speech_to_text/engine_local_whisper.py +20 -27
  30. symai/backend/engines/symbolic/engine_wolframalpha.py +3 -3
  31. symai/backend/engines/text_to_speech/engine_openai.py +5 -7
  32. symai/backend/engines/text_vision/engine_clip.py +7 -11
  33. symai/backend/engines/userinput/engine_console.py +3 -3
  34. symai/backend/engines/webscraping/engine_requests.py +81 -48
  35. symai/backend/mixin/__init__.py +13 -0
  36. symai/backend/mixin/anthropic.py +4 -2
  37. symai/backend/mixin/deepseek.py +2 -0
  38. symai/backend/mixin/google.py +2 -0
  39. symai/backend/mixin/openai.py +11 -3
  40. symai/backend/settings.py +83 -16
  41. symai/chat.py +101 -78
  42. symai/collect/__init__.py +7 -1
  43. symai/collect/dynamic.py +77 -69
  44. symai/collect/pipeline.py +35 -27
  45. symai/collect/stats.py +75 -63
  46. symai/components.py +198 -169
  47. symai/constraints.py +15 -12
  48. symai/core.py +698 -359
  49. symai/core_ext.py +32 -34
  50. symai/endpoints/api.py +80 -73
  51. symai/extended/.DS_Store +0 -0
  52. symai/extended/__init__.py +46 -12
  53. symai/extended/api_builder.py +11 -8
  54. symai/extended/arxiv_pdf_parser.py +13 -12
  55. symai/extended/bibtex_parser.py +2 -3
  56. symai/extended/conversation.py +101 -90
  57. symai/extended/document.py +17 -10
  58. symai/extended/file_merger.py +18 -13
  59. symai/extended/graph.py +18 -13
  60. symai/extended/html_style_template.py +2 -4
  61. symai/extended/interfaces/blip_2.py +1 -2
  62. symai/extended/interfaces/clip.py +1 -2
  63. symai/extended/interfaces/console.py +7 -1
  64. symai/extended/interfaces/dall_e.py +1 -1
  65. symai/extended/interfaces/flux.py +1 -1
  66. symai/extended/interfaces/gpt_image.py +1 -1
  67. symai/extended/interfaces/input.py +1 -1
  68. symai/extended/interfaces/llava.py +0 -1
  69. symai/extended/interfaces/naive_vectordb.py +7 -8
  70. symai/extended/interfaces/naive_webscraping.py +1 -1
  71. symai/extended/interfaces/ocr.py +1 -1
  72. symai/extended/interfaces/pinecone.py +6 -5
  73. symai/extended/interfaces/serpapi.py +1 -1
  74. symai/extended/interfaces/terminal.py +2 -3
  75. symai/extended/interfaces/tts.py +1 -1
  76. symai/extended/interfaces/whisper.py +1 -1
  77. symai/extended/interfaces/wolframalpha.py +1 -1
  78. symai/extended/metrics/__init__.py +11 -1
  79. symai/extended/metrics/similarity.py +11 -13
  80. symai/extended/os_command.py +17 -16
  81. symai/extended/packages/__init__.py +29 -3
  82. symai/extended/packages/symdev.py +19 -16
  83. symai/extended/packages/sympkg.py +12 -9
  84. symai/extended/packages/symrun.py +21 -19
  85. symai/extended/repo_cloner.py +11 -10
  86. symai/extended/seo_query_optimizer.py +1 -2
  87. symai/extended/solver.py +20 -23
  88. symai/extended/summarizer.py +4 -3
  89. symai/extended/taypan_interpreter.py +10 -12
  90. symai/extended/vectordb.py +99 -82
  91. symai/formatter/__init__.py +9 -1
  92. symai/formatter/formatter.py +12 -16
  93. symai/formatter/regex.py +62 -63
  94. symai/functional.py +173 -122
  95. symai/imports.py +136 -127
  96. symai/interfaces.py +56 -27
  97. symai/memory.py +14 -13
  98. symai/misc/console.py +49 -39
  99. symai/misc/loader.py +5 -3
  100. symai/models/__init__.py +17 -1
  101. symai/models/base.py +269 -181
  102. symai/models/errors.py +0 -1
  103. symai/ops/__init__.py +32 -22
  104. symai/ops/measures.py +11 -15
  105. symai/ops/primitives.py +348 -228
  106. symai/post_processors.py +32 -28
  107. symai/pre_processors.py +39 -41
  108. symai/processor.py +6 -4
  109. symai/prompts.py +59 -45
  110. symai/server/huggingface_server.py +23 -20
  111. symai/server/llama_cpp_server.py +7 -5
  112. symai/shell.py +3 -4
  113. symai/shellsv.py +499 -375
  114. symai/strategy.py +517 -287
  115. symai/symbol.py +111 -116
  116. symai/utils.py +42 -36
  117. {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/METADATA +4 -2
  118. symbolicai-1.0.0.dist-info/RECORD +163 -0
  119. symbolicai-0.21.0.dist-info/RECORD +0 -162
  120. {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/WHEEL +0 -0
  121. {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/entry_points.txt +0 -0
  122. {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/licenses/LICENSE +0 -0
  123. {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/top_level.txt +0 -0
symai/functional.py CHANGED
@@ -8,19 +8,32 @@ import sys
8
8
  import traceback
9
9
  import warnings
10
10
  from enum import Enum
11
- from types import ModuleType
12
- from typing import Any, Callable, Dict, List, Optional, Tuple, Type
11
+ from typing import TYPE_CHECKING, Any
13
12
 
14
- from box import Box
15
13
  from loguru import logger
16
14
  from pydantic import BaseModel
17
15
 
18
16
  from .backend import engines
19
17
  from .backend.base import ENGINE_UNREGISTERED, Engine
20
- from .post_processors import PostProcessor
21
- from .pre_processors import PreProcessor
22
- from .utils import CustomUserWarning
23
18
  from .context import CURRENT_ENGINE_VAR
19
+ from .prompts import (
20
+ ProbabilisticBooleanModeMedium,
21
+ ProbabilisticBooleanModeStrict,
22
+ ProbabilisticBooleanModeTolerant,
23
+ )
24
+ from .utils import UserMessage
25
+
26
+ if TYPE_CHECKING:
27
+ from collections.abc import Callable
28
+ from types import ModuleType
29
+
30
+ from .core import Argument
31
+ from .post_processors import PostProcessor
32
+ from .pre_processors import PreProcessor
33
+ else:
34
+ Callable = Any
35
+ ModuleType = type(importlib)
36
+ PostProcessor = PreProcessor = Any
24
37
 
25
38
 
26
39
  class ConstraintViolationException(Exception):
@@ -34,9 +47,7 @@ class ProbabilisticBooleanMode(Enum):
34
47
 
35
48
 
36
49
  ENGINE_PROBABILISTIC_BOOLEAN_MODE = ProbabilisticBooleanMode.MEDIUM
37
- from .prompts import (ProbabilisticBooleanModeMedium,
38
- ProbabilisticBooleanModeStrict,
39
- ProbabilisticBooleanModeTolerant)
50
+
40
51
 
41
52
 
42
53
  def _probabilistic_bool(rsp: str, mode=ProbabilisticBooleanMode.TOLERANT) -> bool:
@@ -46,48 +57,56 @@ def _probabilistic_bool(rsp: str, mode=ProbabilisticBooleanMode.TOLERANT) -> boo
46
57
  val = str(rsp).lower()
47
58
  if mode == ProbabilisticBooleanMode.STRICT:
48
59
  return val == ProbabilisticBooleanModeStrict
49
- elif mode == ProbabilisticBooleanMode.MEDIUM:
60
+ if mode == ProbabilisticBooleanMode.MEDIUM:
50
61
  return val in ProbabilisticBooleanModeMedium
51
- elif mode == ProbabilisticBooleanMode.TOLERANT:
62
+ if mode == ProbabilisticBooleanMode.TOLERANT:
52
63
  # allow for probabilistic boolean / fault tolerance
53
64
  return val in ProbabilisticBooleanModeTolerant
54
- else:
55
- raise ValueError(f"Invalid mode {mode} for probabilistic boolean!")
65
+ UserMessage(f"Invalid mode {mode} for probabilistic boolean!", raise_with=ValueError)
66
+ return False
56
67
 
57
68
 
58
- def _cast_return_type(rsp: Any, return_constraint: Type, engine_probabilistic_boolean_mode: ProbabilisticBooleanMode) -> Any:
59
- if return_constraint == inspect._empty:
60
- # do not cast if return type is not specified
61
- pass
62
- elif issubclass(return_constraint, BaseModel):
63
- # pydantic model
64
- rsp = return_constraint(data=rsp)
69
+ def _cast_collection_response(rsp: Any, return_constraint: type) -> Any:
70
+ try:
71
+ res = ast.literal_eval(rsp)
72
+ except Exception:
73
+ logger.warning(f"Failed to cast return type to {return_constraint} for {rsp!s}")
74
+ warnings.warn(f"Failed to cast return type to {return_constraint}", stacklevel=2)
75
+ res = rsp
76
+ assert res is not None, f"Return type cast failed! Check if the return type is correct or post_processors output matches desired format: {rsp!s}"
77
+ return res
78
+
79
+
80
+ def _cast_boolean_response(rsp: Any, mode: ProbabilisticBooleanMode) -> bool:
81
+ if len(rsp) <= 0:
82
+ return False
83
+ return _probabilistic_bool(rsp, mode=mode)
84
+
85
+
86
+ def _cast_with_fallback(rsp: Any, return_constraint: type) -> Any:
87
+ try:
88
+ return return_constraint(rsp)
89
+ except (ValueError, TypeError):
90
+ if return_constraint is int:
91
+ UserMessage(f"Cannot convert {rsp} to int", raise_with=ConstraintViolationException)
92
+ warnings.warn(f"Failed to cast {rsp} to {return_constraint}", stacklevel=2)
65
93
  return rsp
66
- elif str(return_constraint) == str(type(rsp)):
94
+
95
+
96
+ def _cast_return_type(rsp: Any, return_constraint: type, engine_probabilistic_boolean_mode: ProbabilisticBooleanMode) -> Any:
97
+ if return_constraint is inspect._empty:
67
98
  return rsp
68
- elif return_constraint in (list, tuple, set, dict):
69
- try:
70
- res = ast.literal_eval(rsp)
71
- except Exception as e:
72
- logger.warning(f"Failed to cast return type to {return_constraint} for {str(rsp)}")
73
- warnings.warn(f"Failed to cast return type to {return_constraint}") # Add warning for test
74
- res = rsp
75
- assert res is not None, f"Return type cast failed! Check if the return type is correct or post_processors output matches desired format: {str(rsp)}"
76
- return res
77
- elif return_constraint == bool:
78
- if len(rsp) <= 0:
79
- return False
80
- else:
81
- return _probabilistic_bool(rsp, mode=engine_probabilistic_boolean_mode)
82
- elif not isinstance(rsp, return_constraint):
83
- try:
84
- # hard cast to return type fallback
85
- rsp = return_constraint(rsp)
86
- except (ValueError, TypeError) as e:
87
- if return_constraint == int:
88
- raise ConstraintViolationException(f"Cannot convert {rsp} to int")
89
- warnings.warn(f"Failed to cast {rsp} to {return_constraint}")
90
- return rsp
99
+ if issubclass(return_constraint, BaseModel):
100
+ # pydantic model
101
+ return return_constraint(data=rsp)
102
+ if str(return_constraint) == str(type(rsp)):
103
+ return rsp
104
+ if return_constraint in (list, tuple, set, dict):
105
+ return _cast_collection_response(rsp, return_constraint)
106
+ if return_constraint is bool:
107
+ return _cast_boolean_response(rsp, mode=engine_probabilistic_boolean_mode)
108
+ if not isinstance(rsp, return_constraint):
109
+ return _cast_with_fallback(rsp, return_constraint)
91
110
  return rsp
92
111
 
93
112
  def _apply_postprocessors(outputs, return_constraint, post_processors, argument, mode=ENGINE_PROBABILISTIC_BOOLEAN_MODE):
@@ -108,11 +127,11 @@ def _apply_postprocessors(outputs, return_constraint, post_processors, argument,
108
127
 
109
128
  for constraint in argument.prop.constraints:
110
129
  if not constraint(rsp):
111
- raise ConstraintViolationException("Constraint not satisfied:", rsp, constraint)
130
+ UserMessage(f"Constraint not satisfied for value {rsp!r} with constraint {constraint}", raise_with=ConstraintViolationException)
112
131
  return rsp, metadata
113
132
 
114
133
 
115
- def _apply_preprocessors(argument, instance: Any, pre_processors: Optional[List[PreProcessor]]) -> str:
134
+ def _apply_preprocessors(argument, instance: Any, pre_processors: list[PreProcessor] | None) -> str:
116
135
  processed_input = ''
117
136
  if pre_processors and not argument.prop.raw_input:
118
137
  argument.prop.instance = instance
@@ -129,21 +148,21 @@ def _limit_number_results(rsp: Any, argument, return_type):
129
148
  limit_ = argument.prop.limit if argument.prop.limit else (len(rsp) if hasattr(rsp, '__len__') else None)
130
149
  # the following line is different from original code to make it work for iterable return types when the limit is 1
131
150
  if limit_ is not None:
132
- if return_type == str and isinstance(rsp, list):
151
+ if return_type is str and isinstance(rsp, list):
133
152
  return '\n'.join(rsp[:limit_])
134
- elif return_type == list:
153
+ if return_type is list:
135
154
  return rsp[:limit_]
136
- elif return_type == dict:
155
+ if return_type is dict:
137
156
  keys = list(rsp.keys())
138
157
  return {k: rsp[k] for k in keys[:limit_]}
139
- elif return_type == set:
158
+ if return_type is set:
140
159
  return set(list(rsp)[:limit_])
141
- elif return_type == tuple:
160
+ if return_type is tuple:
142
161
  return tuple(list(rsp)[:limit_])
143
162
  return rsp
144
163
 
145
164
 
146
- def _prepare_argument(argument: Any, engine: Any, instance: Any, func: Callable, constraints: List[Callable], default: Any, limit: int, trials: int, pre_processors: Optional[List[PreProcessor]], post_processors: Optional[List[PostProcessor]]) -> Any:
165
+ def _prepare_argument(argument: Any, engine: Any, instance: Any, func: Callable, constraints: list[Callable], default: Any, limit: int, trials: int, pre_processors: list[PreProcessor] | None, post_processors: list[PostProcessor] | None) -> Any:
147
166
  # check signature for return type
148
167
  sig = inspect.signature(func)
149
168
  return_constraint = sig._return_annotation
@@ -172,31 +191,36 @@ def _execute_query_fallback(func, instance, argument, error=None, stack_trace=No
172
191
  providing error context to the fallback function, and maintaining the same return format.
173
192
  """
174
193
  try:
175
- rsp = func(instance, error=error, stack_trace=stack_trace, *argument.args, **argument.signature_kwargs)
194
+ rsp = func(
195
+ instance,
196
+ *argument.args,
197
+ error=error,
198
+ stack_trace=stack_trace,
199
+ **argument.signature_kwargs,
200
+ )
176
201
  except Exception:
177
- raise error # re-raise the original error
202
+ raise error from None # Re-raise the original error without chaining fallback failure.
178
203
  if rsp is not None:
179
204
  # fallback was implemented
180
- rsp = dict(data=rsp, error=error, stack_trace=stack_trace)
181
- return rsp
182
- elif argument.prop.default is not None:
205
+ return {"data": rsp, "error": error, "stack_trace": stack_trace}
206
+ if argument.prop.default is not None:
183
207
  # no fallback implementation, but default value is set
184
- rsp = dict(data=argument.prop.default, error=error, stack_trace=stack_trace)
185
- return rsp
186
- else:
187
- raise error
208
+ return {"data": argument.prop.default, "error": error, "stack_trace": stack_trace}
209
+ raise error from None
188
210
 
189
211
 
190
212
  def _process_query_single(engine,
191
213
  instance,
192
214
  func: Callable,
193
- constraints: List[Callable] = [],
194
- default: Optional[object] = None,
215
+ constraints: list[Callable] | None = None,
216
+ default: object | None = None,
195
217
  limit: int = 1,
196
218
  trials: int = 1,
197
- pre_processors: Optional[List[PreProcessor]] = None,
198
- post_processors: Optional[List[PostProcessor]] = None,
219
+ pre_processors: list[PreProcessor] | None = None,
220
+ post_processors: list[PostProcessor] | None = None,
199
221
  argument=None):
222
+ if constraints is None:
223
+ constraints = []
200
224
  if pre_processors and not isinstance(pre_processors, list):
201
225
  pre_processors = [pre_processors]
202
226
  if post_processors and not isinstance(post_processors, list):
@@ -217,7 +241,7 @@ def _process_query_single(engine,
217
241
  break
218
242
  except Exception as e:
219
243
  stack_trace = traceback.format_exc()
220
- logger.error(f"Failed to execute query: {str(e)}")
244
+ logger.error(f"Failed to execute query: {e!s}")
221
245
  logger.error(f"Stack trace: {stack_trace}")
222
246
  if _ == trials - 1:
223
247
  result = _execute_query_fallback(func, instance, argument, error=e, stack_trace=stack_trace)
@@ -230,32 +254,68 @@ def _process_query_single(engine,
230
254
  return limited_result
231
255
 
232
256
 
233
- def _execute_query(engine, argument) -> List[object]:
257
+ def _normalize_processors(pre_processors: list[PreProcessor] | PreProcessor | None,
258
+ post_processors: list[PostProcessor] | PostProcessor | None) -> tuple[list[PreProcessor] | None, list[PostProcessor] | None]:
259
+ if pre_processors and not isinstance(pre_processors, list):
260
+ pre_processors = [pre_processors]
261
+ if post_processors and not isinstance(post_processors, list):
262
+ post_processors = [post_processors]
263
+ return pre_processors, post_processors
264
+
265
+
266
+ def _run_query_with_retries(
267
+ engine: Engine,
268
+ argument: Any,
269
+ func: Callable,
270
+ instance: Any,
271
+ trials: int,
272
+ return_constraint: type,
273
+ post_processors: list[PostProcessor] | None,
274
+ ) -> tuple[Any, Any]:
275
+ try_cnt = 0
276
+ rsp = None
277
+ metadata = None
278
+ while try_cnt < trials:
279
+ try_cnt += 1
280
+ try:
281
+ outputs = _execute_query(engine, argument)
282
+ rsp, metadata = _apply_postprocessors(outputs, return_constraint, post_processors, argument)
283
+ break
284
+ except Exception as error:
285
+ stack_trace = traceback.format_exc()
286
+ logger.error(f"Failed to execute query: {error!s}")
287
+ logger.error(f"Stack trace: {stack_trace}")
288
+ if try_cnt < trials:
289
+ continue
290
+ rsp = _execute_query_fallback(func, instance, argument, error=error, stack_trace=stack_trace)
291
+ metadata = None
292
+ return rsp, metadata
293
+
294
+
295
+ def _execute_query(engine, argument) -> list[object]:
234
296
  # build prompt and query engine
235
297
  engine.prepare(argument)
236
298
  if argument.prop.preview:
237
299
  return engine.preview(argument)
238
- outputs = engine(argument) # currently only supports single query
239
- return outputs
300
+ return engine(argument) # currently only supports single query
240
301
 
241
302
 
242
303
  def _process_query(
243
304
  engine: Engine,
244
305
  instance: Any,
245
306
  func: Callable,
246
- constraints: List[Callable] = [],
247
- default: Optional[object] = None,
307
+ constraints: list[Callable] | None = None,
308
+ default: object | None = None,
248
309
  limit: int | None = None,
249
310
  trials: int = 1,
250
- pre_processors: Optional[List[PreProcessor]] = None,
251
- post_processors: Optional[List[PostProcessor]] = None,
311
+ pre_processors: list[PreProcessor] | None = None,
312
+ post_processors: list[PostProcessor] | None = None,
252
313
  argument: Argument = None,
253
314
  ) -> Any:
254
315
 
255
- if pre_processors and not isinstance(pre_processors, list):
256
- pre_processors = [pre_processors]
257
- if post_processors and not isinstance(post_processors, list):
258
- post_processors = [post_processors]
316
+ if constraints is None:
317
+ constraints = []
318
+ pre_processors, post_processors = _normalize_processors(pre_processors, post_processors)
259
319
 
260
320
  argument = _prepare_argument(argument, engine, instance, func, constraints, default, limit, trials, pre_processors, post_processors)
261
321
  return_constraint = argument.prop.return_constraint
@@ -264,24 +324,11 @@ def _process_query(
264
324
  if not argument.prop.raw_input:
265
325
  argument.prop.processed_input = processed_input
266
326
 
267
- try_cnt = 0
268
- while try_cnt < trials:
269
- try_cnt += 1
270
- try:
271
- outputs = _execute_query(engine, argument)
272
- rsp, metadata = _apply_postprocessors(outputs, return_constraint, post_processors, argument)
273
- if argument.prop.preview:
274
- if argument.prop.return_metadata:
275
- return rsp, metadata
276
- return rsp
277
-
278
- except Exception as e:
279
- stack_trace = traceback.format_exc()
280
- logger.error(f"Failed to execute query: {str(e)}")
281
- logger.error(f"Stack trace: {stack_trace}")
282
- if try_cnt < trials:
283
- continue
284
- rsp = _execute_query_fallback(func, instance, argument, error=e, stack_trace=stack_trace)
327
+ rsp, metadata = _run_query_with_retries(engine, argument, func, instance, trials, return_constraint, post_processors)
328
+ if argument.prop.preview:
329
+ if argument.prop.return_metadata:
330
+ return rsp, metadata
331
+ return rsp
285
332
 
286
333
  if not argument.prop.raw_output:
287
334
  rsp = _limit_number_results(rsp, argument, return_constraint)
@@ -290,16 +337,16 @@ def _process_query(
290
337
  return rsp
291
338
 
292
339
 
293
- class EngineRepository(object):
340
+ class EngineRepository:
294
341
  _instance = None
295
342
 
296
343
  def __init__(self):
297
344
  if '_engines' not in self.__dict__: # ensures _engines is only set once
298
- self._engines: Dict[str, Engine] = {}
345
+ self._engines: dict[str, Engine] = {}
299
346
 
300
- def __new__(cls, *args, **kwargs):
347
+ def __new__(cls, *_args, **_kwargs):
301
348
  if cls._instance is None:
302
- cls._instance = super(EngineRepository, cls).__new__(cls, *args, **kwargs)
349
+ cls._instance = super().__new__(cls)
303
350
  cls._instance.__init__() # Explicitly call __init__
304
351
  return cls._instance
305
352
 
@@ -308,22 +355,23 @@ class EngineRepository(object):
308
355
  self = EngineRepository()
309
356
  # Check if the engine is already registered
310
357
  if id in self._engines and not allow_engine_override:
311
- raise ValueError(f"Engine {id} is already registered. Set allow_engine_override to True to override.")
358
+ UserMessage(f"Engine {id} is already registered. Set allow_engine_override to True to override.", raise_with=ValueError)
312
359
 
313
360
  self._engines[id] = engine_instance
314
361
 
315
362
  @staticmethod
316
- def register_from_plugin(id: str, plugin: str, selected_engine: Optional[str] = None, allow_engine_override: bool = False, *args, **kwargs) -> None:
317
- from .imports import Import
363
+ def register_from_plugin(id: str, plugin: str, selected_engine: str | None = None, allow_engine_override: bool = False, *args, **kwargs) -> None:
364
+ # Lazy import keeps functional -> imports -> symbol -> core -> functional cycle broken.
365
+ from .imports import Import # noqa
318
366
  types = Import.load_module_class(plugin)
319
367
  # filter out engine class type
320
368
  engines = [t for t in types if issubclass(t, Engine) and t is not Engine]
321
369
  if len(engines) > 1 and selected_engine is None:
322
- raise ValueError(f"Multiple engines found in plugin {plugin}. Please specify the engine to use.")
323
- elif len(engines) > 1 and selected_engine is not None:
370
+ UserMessage(f"Multiple engines found in plugin {plugin}. Please specify the engine to use.", raise_with=ValueError)
371
+ if len(engines) > 1 and selected_engine is not None:
324
372
  engine = [e for e in engines if selected_engine in str(e)]
325
373
  if len(engine) <= 0:
326
- raise ValueError(f"No engine named {selected_engine} found in plugin {plugin}.")
374
+ UserMessage(f"No engine named {selected_engine} found in plugin {plugin}.", raise_with=ValueError)
327
375
  engine = engines[0](*args, **kwargs)
328
376
  EngineRepository.register(id, engine, allow_engine_override=allow_engine_override)
329
377
 
@@ -345,7 +393,7 @@ class EngineRepository(object):
345
393
  # Assume the class has an 'init' static method to initialize it
346
394
  engine_id_func_ = getattr(instance, 'id', None)
347
395
  if engine_id_func_ is None:
348
- raise ValueError(f"Engine {str(instance)} does not have an id. Please add a method id() to the class.")
396
+ UserMessage(f"Engine {instance!s} does not have an id. Please add a method id() to the class.", raise_with=ValueError)
349
397
  # call engine_() to get the id of the engine
350
398
  id_ = engine_id_func_()
351
399
  # only registered configured engine
@@ -353,10 +401,10 @@ class EngineRepository(object):
353
401
  # register new engine
354
402
  self.register(id_, instance, allow_engine_override=allow_engine_override)
355
403
  except Exception as e:
356
- logger.error(f"Failed to register engine {str(attribute)}: {str(e)}")
404
+ logger.error(f"Failed to register engine {attribute!s}: {e!s}")
357
405
 
358
406
  @staticmethod
359
- def get(engine_name: str, *args, **kwargs):
407
+ def get(engine_name: str, *_args, **_kwargs):
360
408
 
361
409
  self = EngineRepository()
362
410
  # First check if we're in the context manager that dynamically changes models
@@ -366,24 +414,24 @@ class EngineRepository(object):
366
414
  return engine
367
415
 
368
416
  # Otherwise, fallback to normal lookup:
369
- if engine_name not in self._engines.keys():
417
+ if engine_name not in self._engines:
370
418
  subpackage_name = engine_name.replace('-', '_')
371
419
  subpackage = importlib.import_module(f"{engines.__package__}.{subpackage_name}", None)
372
420
  if subpackage is None:
373
- raise ValueError(f"The symbolicai library does not contain the engine named {engine_name}.")
421
+ UserMessage(f"The symbolicai library does not contain the engine named {engine_name}.", raise_with=ValueError)
374
422
  self.register_from_package(subpackage)
375
423
  engine = self._engines.get(engine_name, None)
376
424
  if engine is None:
377
- raise ValueError(f"No engine named {engine_name} is registered.")
425
+ UserMessage(f"No engine named {engine_name} is registered.", raise_with=ValueError)
378
426
  return engine
379
427
 
380
428
  @staticmethod
381
- def list() -> List[str]:
429
+ def list() -> list[str]:
382
430
  self = EngineRepository()
383
431
  return dict(self._engines.items())
384
432
 
385
433
  @staticmethod
386
- def command(engines: List[str], *args, **kwargs) -> Any:
434
+ def command(engines: list[str], *args, **kwargs) -> Any:
387
435
  self = EngineRepository()
388
436
  if isinstance(engines, str):
389
437
  engines = [engines]
@@ -396,28 +444,30 @@ class EngineRepository(object):
396
444
  if engine:
397
445
  # Call the command function for the engine with provided arguments
398
446
  return engine.command(*args, **kwargs)
399
- raise ValueError(f"No engine named <{engine_name}> is registered.")
447
+ UserMessage(f"No engine named <{engine_name}> is registered.", raise_with=ValueError)
448
+ return None
400
449
 
401
450
  @staticmethod
402
- def query(engine: str, *args, **kwargs) -> Tuple:
451
+ def query(engine: str, *args, **kwargs) -> tuple:
403
452
  self = EngineRepository()
404
453
  engine = self.get(engine)
405
454
  if engine:
406
455
  engine_allows_batching = getattr(engine, 'allows_batching', False)
407
456
  if engine_allows_batching:
408
457
  return _process_query_single(engine, *args, **kwargs)
409
- else:
410
- return _process_query(engine, *args, **kwargs)
411
- raise ValueError(f"No engine named {engine} is registered.")
458
+ return _process_query(engine, *args, **kwargs)
459
+ UserMessage(f"No engine named {engine} is registered.", raise_with=ValueError)
460
+ return None
412
461
 
413
462
  @staticmethod
414
- def bind_property(engine: str, property: str, *args, **kwargs):
463
+ def bind_property(engine: str, property: str, *_args, **_kwargs):
415
464
  self = EngineRepository()
416
465
  """Bind a property to a specific engine."""
417
466
  engine = self.get(engine)
418
467
  if engine:
419
468
  return getattr(engine, property, None)
420
- raise ValueError(f"No engine named {engine} is registered.")
469
+ UserMessage(f"No engine named {engine} is registered.", raise_with=ValueError)
470
+ return None
421
471
 
422
472
  def get_dynamic_engine_instance(self):
423
473
  # 1) Primary: use ContextVar (fast, async-safe)
@@ -429,7 +479,8 @@ class EngineRepository(object):
429
479
  pass
430
480
 
431
481
  # 2) Fallback: walk ONLY current thread frames (legacy behavior)
432
- from .components import DynamicEngine
482
+ # Keeping DynamicEngine import lazy prevents functional importing components before it finishes loading.
483
+ from .components import DynamicEngine # noqa
433
484
  try:
434
485
  frame = sys._getframe()
435
486
  except Exception:
@@ -438,7 +489,7 @@ class EngineRepository(object):
438
489
  try:
439
490
  locals_copy = frame.f_locals.copy() if hasattr(frame.f_locals, 'copy') else dict(frame.f_locals)
440
491
  except Exception:
441
- CustomUserWarning(
492
+ UserMessage(
442
493
  "Unexpected failure copying frame locals while resolving DynamicEngine.",
443
494
  raise_with=None,
444
495
  )