symbolicai 0.20.2__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. symai/__init__.py +96 -64
  2. symai/backend/base.py +93 -80
  3. symai/backend/engines/drawing/engine_bfl.py +12 -11
  4. symai/backend/engines/drawing/engine_gpt_image.py +108 -87
  5. symai/backend/engines/embedding/engine_llama_cpp.py +25 -28
  6. symai/backend/engines/embedding/engine_openai.py +3 -5
  7. symai/backend/engines/execute/engine_python.py +6 -5
  8. symai/backend/engines/files/engine_io.py +74 -67
  9. symai/backend/engines/imagecaptioning/engine_blip2.py +3 -3
  10. symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +54 -38
  11. symai/backend/engines/index/engine_pinecone.py +23 -24
  12. symai/backend/engines/index/engine_vectordb.py +16 -14
  13. symai/backend/engines/lean/engine_lean4.py +38 -34
  14. symai/backend/engines/neurosymbolic/__init__.py +41 -13
  15. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +262 -182
  16. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +263 -191
  17. symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +53 -49
  18. symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +212 -211
  19. symai/backend/engines/neurosymbolic/engine_groq.py +87 -63
  20. symai/backend/engines/neurosymbolic/engine_huggingface.py +21 -24
  21. symai/backend/engines/neurosymbolic/engine_llama_cpp.py +117 -48
  22. symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +256 -229
  23. symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +270 -150
  24. symai/backend/engines/ocr/engine_apilayer.py +6 -8
  25. symai/backend/engines/output/engine_stdout.py +1 -4
  26. symai/backend/engines/search/engine_openai.py +7 -7
  27. symai/backend/engines/search/engine_perplexity.py +5 -5
  28. symai/backend/engines/search/engine_serpapi.py +12 -14
  29. symai/backend/engines/speech_to_text/engine_local_whisper.py +20 -27
  30. symai/backend/engines/symbolic/engine_wolframalpha.py +3 -3
  31. symai/backend/engines/text_to_speech/engine_openai.py +5 -7
  32. symai/backend/engines/text_vision/engine_clip.py +7 -11
  33. symai/backend/engines/userinput/engine_console.py +3 -3
  34. symai/backend/engines/webscraping/engine_requests.py +81 -48
  35. symai/backend/mixin/__init__.py +13 -0
  36. symai/backend/mixin/anthropic.py +4 -2
  37. symai/backend/mixin/deepseek.py +2 -0
  38. symai/backend/mixin/google.py +2 -0
  39. symai/backend/mixin/openai.py +11 -3
  40. symai/backend/settings.py +83 -16
  41. symai/chat.py +101 -78
  42. symai/collect/__init__.py +7 -1
  43. symai/collect/dynamic.py +77 -69
  44. symai/collect/pipeline.py +35 -27
  45. symai/collect/stats.py +75 -63
  46. symai/components.py +198 -169
  47. symai/constraints.py +15 -12
  48. symai/core.py +698 -359
  49. symai/core_ext.py +32 -34
  50. symai/endpoints/api.py +80 -73
  51. symai/extended/.DS_Store +0 -0
  52. symai/extended/__init__.py +46 -12
  53. symai/extended/api_builder.py +11 -8
  54. symai/extended/arxiv_pdf_parser.py +13 -12
  55. symai/extended/bibtex_parser.py +2 -3
  56. symai/extended/conversation.py +101 -90
  57. symai/extended/document.py +17 -10
  58. symai/extended/file_merger.py +18 -13
  59. symai/extended/graph.py +18 -13
  60. symai/extended/html_style_template.py +2 -4
  61. symai/extended/interfaces/blip_2.py +1 -2
  62. symai/extended/interfaces/clip.py +1 -2
  63. symai/extended/interfaces/console.py +7 -1
  64. symai/extended/interfaces/dall_e.py +1 -1
  65. symai/extended/interfaces/flux.py +1 -1
  66. symai/extended/interfaces/gpt_image.py +1 -1
  67. symai/extended/interfaces/input.py +1 -1
  68. symai/extended/interfaces/llava.py +0 -1
  69. symai/extended/interfaces/naive_vectordb.py +7 -8
  70. symai/extended/interfaces/naive_webscraping.py +1 -1
  71. symai/extended/interfaces/ocr.py +1 -1
  72. symai/extended/interfaces/pinecone.py +6 -5
  73. symai/extended/interfaces/serpapi.py +1 -1
  74. symai/extended/interfaces/terminal.py +2 -3
  75. symai/extended/interfaces/tts.py +1 -1
  76. symai/extended/interfaces/whisper.py +1 -1
  77. symai/extended/interfaces/wolframalpha.py +1 -1
  78. symai/extended/metrics/__init__.py +11 -1
  79. symai/extended/metrics/similarity.py +11 -13
  80. symai/extended/os_command.py +17 -16
  81. symai/extended/packages/__init__.py +29 -3
  82. symai/extended/packages/symdev.py +19 -16
  83. symai/extended/packages/sympkg.py +12 -9
  84. symai/extended/packages/symrun.py +21 -19
  85. symai/extended/repo_cloner.py +11 -10
  86. symai/extended/seo_query_optimizer.py +1 -2
  87. symai/extended/solver.py +20 -23
  88. symai/extended/summarizer.py +4 -3
  89. symai/extended/taypan_interpreter.py +10 -12
  90. symai/extended/vectordb.py +99 -82
  91. symai/formatter/__init__.py +9 -1
  92. symai/formatter/formatter.py +12 -16
  93. symai/formatter/regex.py +62 -63
  94. symai/functional.py +176 -122
  95. symai/imports.py +136 -127
  96. symai/interfaces.py +56 -27
  97. symai/memory.py +14 -13
  98. symai/misc/console.py +49 -39
  99. symai/misc/loader.py +5 -3
  100. symai/models/__init__.py +17 -1
  101. symai/models/base.py +269 -181
  102. symai/models/errors.py +0 -1
  103. symai/ops/__init__.py +32 -22
  104. symai/ops/measures.py +11 -15
  105. symai/ops/primitives.py +348 -228
  106. symai/post_processors.py +32 -28
  107. symai/pre_processors.py +39 -41
  108. symai/processor.py +6 -4
  109. symai/prompts.py +59 -45
  110. symai/server/huggingface_server.py +23 -20
  111. symai/server/llama_cpp_server.py +7 -5
  112. symai/shell.py +3 -4
  113. symai/shellsv.py +499 -375
  114. symai/strategy.py +517 -287
  115. symai/symbol.py +111 -116
  116. symai/utils.py +42 -36
  117. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/METADATA +4 -2
  118. symbolicai-1.0.0.dist-info/RECORD +163 -0
  119. symbolicai-0.20.2.dist-info/RECORD +0 -162
  120. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/WHEEL +0 -0
  121. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/entry_points.txt +0 -0
  122. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/licenses/LICENSE +0 -0
  123. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/top_level.txt +0 -0
symai/functional.py CHANGED
@@ -8,19 +8,32 @@ import sys
8
8
  import traceback
9
9
  import warnings
10
10
  from enum import Enum
11
- from types import ModuleType
12
- from typing import Any, Callable, Dict, List, Optional, Tuple, Type
11
+ from typing import TYPE_CHECKING, Any
13
12
 
14
- from box import Box
15
13
  from loguru import logger
16
14
  from pydantic import BaseModel
17
15
 
18
16
  from .backend import engines
19
17
  from .backend.base import ENGINE_UNREGISTERED, Engine
20
- from .post_processors import PostProcessor
21
- from .pre_processors import PreProcessor
22
- from .utils import CustomUserWarning
23
18
  from .context import CURRENT_ENGINE_VAR
19
+ from .prompts import (
20
+ ProbabilisticBooleanModeMedium,
21
+ ProbabilisticBooleanModeStrict,
22
+ ProbabilisticBooleanModeTolerant,
23
+ )
24
+ from .utils import UserMessage
25
+
26
+ if TYPE_CHECKING:
27
+ from collections.abc import Callable
28
+ from types import ModuleType
29
+
30
+ from .core import Argument
31
+ from .post_processors import PostProcessor
32
+ from .pre_processors import PreProcessor
33
+ else:
34
+ Callable = Any
35
+ ModuleType = type(importlib)
36
+ PostProcessor = PreProcessor = Any
24
37
 
25
38
 
26
39
  class ConstraintViolationException(Exception):
@@ -34,9 +47,7 @@ class ProbabilisticBooleanMode(Enum):
34
47
 
35
48
 
36
49
  ENGINE_PROBABILISTIC_BOOLEAN_MODE = ProbabilisticBooleanMode.MEDIUM
37
- from .prompts import (ProbabilisticBooleanModeMedium,
38
- ProbabilisticBooleanModeStrict,
39
- ProbabilisticBooleanModeTolerant)
50
+
40
51
 
41
52
 
42
53
  def _probabilistic_bool(rsp: str, mode=ProbabilisticBooleanMode.TOLERANT) -> bool:
@@ -46,48 +57,56 @@ def _probabilistic_bool(rsp: str, mode=ProbabilisticBooleanMode.TOLERANT) -> boo
46
57
  val = str(rsp).lower()
47
58
  if mode == ProbabilisticBooleanMode.STRICT:
48
59
  return val == ProbabilisticBooleanModeStrict
49
- elif mode == ProbabilisticBooleanMode.MEDIUM:
60
+ if mode == ProbabilisticBooleanMode.MEDIUM:
50
61
  return val in ProbabilisticBooleanModeMedium
51
- elif mode == ProbabilisticBooleanMode.TOLERANT:
62
+ if mode == ProbabilisticBooleanMode.TOLERANT:
52
63
  # allow for probabilistic boolean / fault tolerance
53
64
  return val in ProbabilisticBooleanModeTolerant
54
- else:
55
- raise ValueError(f"Invalid mode {mode} for probabilistic boolean!")
65
+ UserMessage(f"Invalid mode {mode} for probabilistic boolean!", raise_with=ValueError)
66
+ return False
56
67
 
57
68
 
58
- def _cast_return_type(rsp: Any, return_constraint: Type, engine_probabilistic_boolean_mode: ProbabilisticBooleanMode) -> Any:
59
- if return_constraint == inspect._empty:
60
- # do not cast if return type is not specified
61
- pass
62
- elif issubclass(return_constraint, BaseModel):
63
- # pydantic model
64
- rsp = return_constraint(data=rsp)
69
+ def _cast_collection_response(rsp: Any, return_constraint: type) -> Any:
70
+ try:
71
+ res = ast.literal_eval(rsp)
72
+ except Exception:
73
+ logger.warning(f"Failed to cast return type to {return_constraint} for {rsp!s}")
74
+ warnings.warn(f"Failed to cast return type to {return_constraint}", stacklevel=2)
75
+ res = rsp
76
+ assert res is not None, f"Return type cast failed! Check if the return type is correct or post_processors output matches desired format: {rsp!s}"
77
+ return res
78
+
79
+
80
+ def _cast_boolean_response(rsp: Any, mode: ProbabilisticBooleanMode) -> bool:
81
+ if len(rsp) <= 0:
82
+ return False
83
+ return _probabilistic_bool(rsp, mode=mode)
84
+
85
+
86
+ def _cast_with_fallback(rsp: Any, return_constraint: type) -> Any:
87
+ try:
88
+ return return_constraint(rsp)
89
+ except (ValueError, TypeError):
90
+ if return_constraint is int:
91
+ UserMessage(f"Cannot convert {rsp} to int", raise_with=ConstraintViolationException)
92
+ warnings.warn(f"Failed to cast {rsp} to {return_constraint}", stacklevel=2)
65
93
  return rsp
66
- elif str(return_constraint) == str(type(rsp)):
94
+
95
+
96
+ def _cast_return_type(rsp: Any, return_constraint: type, engine_probabilistic_boolean_mode: ProbabilisticBooleanMode) -> Any:
97
+ if return_constraint is inspect._empty:
67
98
  return rsp
68
- elif return_constraint in (list, tuple, set, dict):
69
- try:
70
- res = ast.literal_eval(rsp)
71
- except Exception as e:
72
- logger.warning(f"Failed to cast return type to {return_constraint} for {str(rsp)}")
73
- warnings.warn(f"Failed to cast return type to {return_constraint}") # Add warning for test
74
- res = rsp
75
- assert res is not None, f"Return type cast failed! Check if the return type is correct or post_processors output matches desired format: {str(rsp)}"
76
- return res
77
- elif return_constraint == bool:
78
- if len(rsp) <= 0:
79
- return False
80
- else:
81
- return _probabilistic_bool(rsp, mode=engine_probabilistic_boolean_mode)
82
- elif not isinstance(rsp, return_constraint):
83
- try:
84
- # hard cast to return type fallback
85
- rsp = return_constraint(rsp)
86
- except (ValueError, TypeError) as e:
87
- if return_constraint == int:
88
- raise ConstraintViolationException(f"Cannot convert {rsp} to int")
89
- warnings.warn(f"Failed to cast {rsp} to {return_constraint}")
90
- return rsp
99
+ if issubclass(return_constraint, BaseModel):
100
+ # pydantic model
101
+ return return_constraint(data=rsp)
102
+ if str(return_constraint) == str(type(rsp)):
103
+ return rsp
104
+ if return_constraint in (list, tuple, set, dict):
105
+ return _cast_collection_response(rsp, return_constraint)
106
+ if return_constraint is bool:
107
+ return _cast_boolean_response(rsp, mode=engine_probabilistic_boolean_mode)
108
+ if not isinstance(rsp, return_constraint):
109
+ return _cast_with_fallback(rsp, return_constraint)
91
110
  return rsp
92
111
 
93
112
  def _apply_postprocessors(outputs, return_constraint, post_processors, argument, mode=ENGINE_PROBABILISTIC_BOOLEAN_MODE):
@@ -108,11 +127,11 @@ def _apply_postprocessors(outputs, return_constraint, post_processors, argument,
108
127
 
109
128
  for constraint in argument.prop.constraints:
110
129
  if not constraint(rsp):
111
- raise ConstraintViolationException("Constraint not satisfied:", rsp, constraint)
130
+ UserMessage(f"Constraint not satisfied for value {rsp!r} with constraint {constraint}", raise_with=ConstraintViolationException)
112
131
  return rsp, metadata
113
132
 
114
133
 
115
- def _apply_preprocessors(argument, instance: Any, pre_processors: Optional[List[PreProcessor]]) -> str:
134
+ def _apply_preprocessors(argument, instance: Any, pre_processors: list[PreProcessor] | None) -> str:
116
135
  processed_input = ''
117
136
  if pre_processors and not argument.prop.raw_input:
118
137
  argument.prop.instance = instance
@@ -129,21 +148,21 @@ def _limit_number_results(rsp: Any, argument, return_type):
129
148
  limit_ = argument.prop.limit if argument.prop.limit else (len(rsp) if hasattr(rsp, '__len__') else None)
130
149
  # the following line is different from original code to make it work for iterable return types when the limit is 1
131
150
  if limit_ is not None:
132
- if return_type == str and isinstance(rsp, list):
151
+ if return_type is str and isinstance(rsp, list):
133
152
  return '\n'.join(rsp[:limit_])
134
- elif return_type == list:
153
+ if return_type is list:
135
154
  return rsp[:limit_]
136
- elif return_type == dict:
155
+ if return_type is dict:
137
156
  keys = list(rsp.keys())
138
157
  return {k: rsp[k] for k in keys[:limit_]}
139
- elif return_type == set:
158
+ if return_type is set:
140
159
  return set(list(rsp)[:limit_])
141
- elif return_type == tuple:
160
+ if return_type is tuple:
142
161
  return tuple(list(rsp)[:limit_])
143
162
  return rsp
144
163
 
145
164
 
146
- def _prepare_argument(argument: Any, engine: Any, instance: Any, func: Callable, constraints: List[Callable], default: Any, limit: int, trials: int, pre_processors: Optional[List[PreProcessor]], post_processors: Optional[List[PostProcessor]]) -> Any:
165
+ def _prepare_argument(argument: Any, engine: Any, instance: Any, func: Callable, constraints: list[Callable], default: Any, limit: int, trials: int, pre_processors: list[PreProcessor] | None, post_processors: list[PostProcessor] | None) -> Any:
147
166
  # check signature for return type
148
167
  sig = inspect.signature(func)
149
168
  return_constraint = sig._return_annotation
@@ -171,29 +190,37 @@ def _execute_query_fallback(func, instance, argument, error=None, stack_trace=No
171
190
  This matches the fallback logic used in _process_query by handling errors consistently,
172
191
  providing error context to the fallback function, and maintaining the same return format.
173
192
  """
174
- rsp = func(instance, error=error, stack_trace=stack_trace, *argument.args, **argument.signature_kwargs)
193
+ try:
194
+ rsp = func(
195
+ instance,
196
+ *argument.args,
197
+ error=error,
198
+ stack_trace=stack_trace,
199
+ **argument.signature_kwargs,
200
+ )
201
+ except Exception:
202
+ raise error from None # Re-raise the original error without chaining fallback failure.
175
203
  if rsp is not None:
176
204
  # fallback was implemented
177
- rsp = dict(data=rsp, error=error, stack_trace=stack_trace)
178
- return rsp
179
- elif argument.prop.default is not None:
205
+ return {"data": rsp, "error": error, "stack_trace": stack_trace}
206
+ if argument.prop.default is not None:
180
207
  # no fallback implementation, but default value is set
181
- rsp = dict(data=argument.prop.default, error=error, stack_trace=stack_trace)
182
- return rsp
183
- else:
184
- raise error
208
+ return {"data": argument.prop.default, "error": error, "stack_trace": stack_trace}
209
+ raise error from None
185
210
 
186
211
 
187
212
  def _process_query_single(engine,
188
213
  instance,
189
214
  func: Callable,
190
- constraints: List[Callable] = [],
191
- default: Optional[object] = None,
215
+ constraints: list[Callable] | None = None,
216
+ default: object | None = None,
192
217
  limit: int = 1,
193
218
  trials: int = 1,
194
- pre_processors: Optional[List[PreProcessor]] = None,
195
- post_processors: Optional[List[PostProcessor]] = None,
219
+ pre_processors: list[PreProcessor] | None = None,
220
+ post_processors: list[PostProcessor] | None = None,
196
221
  argument=None):
222
+ if constraints is None:
223
+ constraints = []
197
224
  if pre_processors and not isinstance(pre_processors, list):
198
225
  pre_processors = [pre_processors]
199
226
  if post_processors and not isinstance(post_processors, list):
@@ -214,7 +241,7 @@ def _process_query_single(engine,
214
241
  break
215
242
  except Exception as e:
216
243
  stack_trace = traceback.format_exc()
217
- logger.error(f"Failed to execute query: {str(e)}")
244
+ logger.error(f"Failed to execute query: {e!s}")
218
245
  logger.error(f"Stack trace: {stack_trace}")
219
246
  if _ == trials - 1:
220
247
  result = _execute_query_fallback(func, instance, argument, error=e, stack_trace=stack_trace)
@@ -227,32 +254,68 @@ def _process_query_single(engine,
227
254
  return limited_result
228
255
 
229
256
 
230
- def _execute_query(engine, argument) -> List[object]:
257
+ def _normalize_processors(pre_processors: list[PreProcessor] | PreProcessor | None,
258
+ post_processors: list[PostProcessor] | PostProcessor | None) -> tuple[list[PreProcessor] | None, list[PostProcessor] | None]:
259
+ if pre_processors and not isinstance(pre_processors, list):
260
+ pre_processors = [pre_processors]
261
+ if post_processors and not isinstance(post_processors, list):
262
+ post_processors = [post_processors]
263
+ return pre_processors, post_processors
264
+
265
+
266
+ def _run_query_with_retries(
267
+ engine: Engine,
268
+ argument: Any,
269
+ func: Callable,
270
+ instance: Any,
271
+ trials: int,
272
+ return_constraint: type,
273
+ post_processors: list[PostProcessor] | None,
274
+ ) -> tuple[Any, Any]:
275
+ try_cnt = 0
276
+ rsp = None
277
+ metadata = None
278
+ while try_cnt < trials:
279
+ try_cnt += 1
280
+ try:
281
+ outputs = _execute_query(engine, argument)
282
+ rsp, metadata = _apply_postprocessors(outputs, return_constraint, post_processors, argument)
283
+ break
284
+ except Exception as error:
285
+ stack_trace = traceback.format_exc()
286
+ logger.error(f"Failed to execute query: {error!s}")
287
+ logger.error(f"Stack trace: {stack_trace}")
288
+ if try_cnt < trials:
289
+ continue
290
+ rsp = _execute_query_fallback(func, instance, argument, error=error, stack_trace=stack_trace)
291
+ metadata = None
292
+ return rsp, metadata
293
+
294
+
295
+ def _execute_query(engine, argument) -> list[object]:
231
296
  # build prompt and query engine
232
297
  engine.prepare(argument)
233
298
  if argument.prop.preview:
234
299
  return engine.preview(argument)
235
- outputs = engine(argument) # currently only supports single query
236
- return outputs
300
+ return engine(argument) # currently only supports single query
237
301
 
238
302
 
239
303
  def _process_query(
240
304
  engine: Engine,
241
305
  instance: Any,
242
306
  func: Callable,
243
- constraints: List[Callable] = [],
244
- default: Optional[object] = None,
245
- limit: int = 1,
307
+ constraints: list[Callable] | None = None,
308
+ default: object | None = None,
309
+ limit: int | None = None,
246
310
  trials: int = 1,
247
- pre_processors: Optional[List[PreProcessor]] = None,
248
- post_processors: Optional[List[PostProcessor]] = None,
311
+ pre_processors: list[PreProcessor] | None = None,
312
+ post_processors: list[PostProcessor] | None = None,
249
313
  argument: Argument = None,
250
314
  ) -> Any:
251
315
 
252
- if pre_processors and not isinstance(pre_processors, list):
253
- pre_processors = [pre_processors]
254
- if post_processors and not isinstance(post_processors, list):
255
- post_processors = [post_processors]
316
+ if constraints is None:
317
+ constraints = []
318
+ pre_processors, post_processors = _normalize_processors(pre_processors, post_processors)
256
319
 
257
320
  argument = _prepare_argument(argument, engine, instance, func, constraints, default, limit, trials, pre_processors, post_processors)
258
321
  return_constraint = argument.prop.return_constraint
@@ -261,24 +324,11 @@ def _process_query(
261
324
  if not argument.prop.raw_input:
262
325
  argument.prop.processed_input = processed_input
263
326
 
264
- try_cnt = 0
265
- while try_cnt < trials:
266
- try_cnt += 1
267
- try:
268
- outputs = _execute_query(engine, argument)
269
- rsp, metadata = _apply_postprocessors(outputs, return_constraint, post_processors, argument)
270
- if argument.prop.preview:
271
- if argument.prop.return_metadata:
272
- return rsp, metadata
273
- return rsp
274
-
275
- except Exception as e:
276
- stack_trace = traceback.format_exc()
277
- logger.error(f"Failed to execute query: {str(e)}")
278
- logger.error(f"Stack trace: {stack_trace}")
279
- if try_cnt < trials:
280
- continue
281
- rsp = _execute_query_fallback(func, instance, argument, error=e, stack_trace=stack_trace)
327
+ rsp, metadata = _run_query_with_retries(engine, argument, func, instance, trials, return_constraint, post_processors)
328
+ if argument.prop.preview:
329
+ if argument.prop.return_metadata:
330
+ return rsp, metadata
331
+ return rsp
282
332
 
283
333
  if not argument.prop.raw_output:
284
334
  rsp = _limit_number_results(rsp, argument, return_constraint)
@@ -287,16 +337,16 @@ def _process_query(
287
337
  return rsp
288
338
 
289
339
 
290
- class EngineRepository(object):
340
+ class EngineRepository:
291
341
  _instance = None
292
342
 
293
343
  def __init__(self):
294
344
  if '_engines' not in self.__dict__: # ensures _engines is only set once
295
- self._engines: Dict[str, Engine] = {}
345
+ self._engines: dict[str, Engine] = {}
296
346
 
297
- def __new__(cls, *args, **kwargs):
347
+ def __new__(cls, *_args, **_kwargs):
298
348
  if cls._instance is None:
299
- cls._instance = super(EngineRepository, cls).__new__(cls, *args, **kwargs)
349
+ cls._instance = super().__new__(cls)
300
350
  cls._instance.__init__() # Explicitly call __init__
301
351
  return cls._instance
302
352
 
@@ -305,22 +355,23 @@ class EngineRepository(object):
305
355
  self = EngineRepository()
306
356
  # Check if the engine is already registered
307
357
  if id in self._engines and not allow_engine_override:
308
- raise ValueError(f"Engine {id} is already registered. Set allow_engine_override to True to override.")
358
+ UserMessage(f"Engine {id} is already registered. Set allow_engine_override to True to override.", raise_with=ValueError)
309
359
 
310
360
  self._engines[id] = engine_instance
311
361
 
312
362
  @staticmethod
313
- def register_from_plugin(id: str, plugin: str, selected_engine: Optional[str] = None, allow_engine_override: bool = False, *args, **kwargs) -> None:
314
- from .imports import Import
363
+ def register_from_plugin(id: str, plugin: str, selected_engine: str | None = None, allow_engine_override: bool = False, *args, **kwargs) -> None:
364
+ # Lazy import keeps functional -> imports -> symbol -> core -> functional cycle broken.
365
+ from .imports import Import # noqa
315
366
  types = Import.load_module_class(plugin)
316
367
  # filter out engine class type
317
368
  engines = [t for t in types if issubclass(t, Engine) and t is not Engine]
318
369
  if len(engines) > 1 and selected_engine is None:
319
- raise ValueError(f"Multiple engines found in plugin {plugin}. Please specify the engine to use.")
320
- elif len(engines) > 1 and selected_engine is not None:
370
+ UserMessage(f"Multiple engines found in plugin {plugin}. Please specify the engine to use.", raise_with=ValueError)
371
+ if len(engines) > 1 and selected_engine is not None:
321
372
  engine = [e for e in engines if selected_engine in str(e)]
322
373
  if len(engine) <= 0:
323
- raise ValueError(f"No engine named {selected_engine} found in plugin {plugin}.")
374
+ UserMessage(f"No engine named {selected_engine} found in plugin {plugin}.", raise_with=ValueError)
324
375
  engine = engines[0](*args, **kwargs)
325
376
  EngineRepository.register(id, engine, allow_engine_override=allow_engine_override)
326
377
 
@@ -342,7 +393,7 @@ class EngineRepository(object):
342
393
  # Assume the class has an 'init' static method to initialize it
343
394
  engine_id_func_ = getattr(instance, 'id', None)
344
395
  if engine_id_func_ is None:
345
- raise ValueError(f"Engine {str(instance)} does not have an id. Please add a method id() to the class.")
396
+ UserMessage(f"Engine {instance!s} does not have an id. Please add a method id() to the class.", raise_with=ValueError)
346
397
  # call engine_() to get the id of the engine
347
398
  id_ = engine_id_func_()
348
399
  # only registered configured engine
@@ -350,10 +401,10 @@ class EngineRepository(object):
350
401
  # register new engine
351
402
  self.register(id_, instance, allow_engine_override=allow_engine_override)
352
403
  except Exception as e:
353
- logger.error(f"Failed to register engine {str(attribute)}: {str(e)}")
404
+ logger.error(f"Failed to register engine {attribute!s}: {e!s}")
354
405
 
355
406
  @staticmethod
356
- def get(engine_name: str, *args, **kwargs):
407
+ def get(engine_name: str, *_args, **_kwargs):
357
408
 
358
409
  self = EngineRepository()
359
410
  # First check if we're in the context manager that dynamically changes models
@@ -363,24 +414,24 @@ class EngineRepository(object):
363
414
  return engine
364
415
 
365
416
  # Otherwise, fallback to normal lookup:
366
- if engine_name not in self._engines.keys():
417
+ if engine_name not in self._engines:
367
418
  subpackage_name = engine_name.replace('-', '_')
368
419
  subpackage = importlib.import_module(f"{engines.__package__}.{subpackage_name}", None)
369
420
  if subpackage is None:
370
- raise ValueError(f"The symbolicai library does not contain the engine named {engine_name}.")
421
+ UserMessage(f"The symbolicai library does not contain the engine named {engine_name}.", raise_with=ValueError)
371
422
  self.register_from_package(subpackage)
372
423
  engine = self._engines.get(engine_name, None)
373
424
  if engine is None:
374
- raise ValueError(f"No engine named {engine_name} is registered.")
425
+ UserMessage(f"No engine named {engine_name} is registered.", raise_with=ValueError)
375
426
  return engine
376
427
 
377
428
  @staticmethod
378
- def list() -> List[str]:
429
+ def list() -> list[str]:
379
430
  self = EngineRepository()
380
431
  return dict(self._engines.items())
381
432
 
382
433
  @staticmethod
383
- def command(engines: List[str], *args, **kwargs) -> Any:
434
+ def command(engines: list[str], *args, **kwargs) -> Any:
384
435
  self = EngineRepository()
385
436
  if isinstance(engines, str):
386
437
  engines = [engines]
@@ -393,28 +444,30 @@ class EngineRepository(object):
393
444
  if engine:
394
445
  # Call the command function for the engine with provided arguments
395
446
  return engine.command(*args, **kwargs)
396
- raise ValueError(f"No engine named <{engine_name}> is registered.")
447
+ UserMessage(f"No engine named <{engine_name}> is registered.", raise_with=ValueError)
448
+ return None
397
449
 
398
450
  @staticmethod
399
- def query(engine: str, *args, **kwargs) -> Tuple:
451
+ def query(engine: str, *args, **kwargs) -> tuple:
400
452
  self = EngineRepository()
401
453
  engine = self.get(engine)
402
454
  if engine:
403
455
  engine_allows_batching = getattr(engine, 'allows_batching', False)
404
456
  if engine_allows_batching:
405
457
  return _process_query_single(engine, *args, **kwargs)
406
- else:
407
- return _process_query(engine, *args, **kwargs)
408
- raise ValueError(f"No engine named {engine} is registered.")
458
+ return _process_query(engine, *args, **kwargs)
459
+ UserMessage(f"No engine named {engine} is registered.", raise_with=ValueError)
460
+ return None
409
461
 
410
462
  @staticmethod
411
- def bind_property(engine: str, property: str, *args, **kwargs):
463
+ def bind_property(engine: str, property: str, *_args, **_kwargs):
412
464
  self = EngineRepository()
413
465
  """Bind a property to a specific engine."""
414
466
  engine = self.get(engine)
415
467
  if engine:
416
468
  return getattr(engine, property, None)
417
- raise ValueError(f"No engine named {engine} is registered.")
469
+ UserMessage(f"No engine named {engine} is registered.", raise_with=ValueError)
470
+ return None
418
471
 
419
472
  def get_dynamic_engine_instance(self):
420
473
  # 1) Primary: use ContextVar (fast, async-safe)
@@ -426,7 +479,8 @@ class EngineRepository(object):
426
479
  pass
427
480
 
428
481
  # 2) Fallback: walk ONLY current thread frames (legacy behavior)
429
- from .components import DynamicEngine
482
+ # Keeping DynamicEngine import lazy prevents functional importing components before it finishes loading.
483
+ from .components import DynamicEngine # noqa
430
484
  try:
431
485
  frame = sys._getframe()
432
486
  except Exception:
@@ -435,7 +489,7 @@ class EngineRepository(object):
435
489
  try:
436
490
  locals_copy = frame.f_locals.copy() if hasattr(frame.f_locals, 'copy') else dict(frame.f_locals)
437
491
  except Exception:
438
- CustomUserWarning(
492
+ UserMessage(
439
493
  "Unexpected failure copying frame locals while resolving DynamicEngine.",
440
494
  raise_with=None,
441
495
  )