symbolicai 0.21.0__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. symai/__init__.py +269 -173
  2. symai/backend/base.py +123 -110
  3. symai/backend/engines/drawing/engine_bfl.py +45 -44
  4. symai/backend/engines/drawing/engine_gpt_image.py +112 -97
  5. symai/backend/engines/embedding/engine_llama_cpp.py +63 -52
  6. symai/backend/engines/embedding/engine_openai.py +25 -21
  7. symai/backend/engines/execute/engine_python.py +19 -18
  8. symai/backend/engines/files/engine_io.py +104 -95
  9. symai/backend/engines/imagecaptioning/engine_blip2.py +28 -24
  10. symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +102 -79
  11. symai/backend/engines/index/engine_pinecone.py +124 -97
  12. symai/backend/engines/index/engine_qdrant.py +1011 -0
  13. symai/backend/engines/index/engine_vectordb.py +84 -56
  14. symai/backend/engines/lean/engine_lean4.py +96 -52
  15. symai/backend/engines/neurosymbolic/__init__.py +41 -13
  16. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +330 -248
  17. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +329 -264
  18. symai/backend/engines/neurosymbolic/engine_cerebras.py +328 -0
  19. symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +118 -88
  20. symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +344 -299
  21. symai/backend/engines/neurosymbolic/engine_groq.py +173 -115
  22. symai/backend/engines/neurosymbolic/engine_huggingface.py +114 -84
  23. symai/backend/engines/neurosymbolic/engine_llama_cpp.py +144 -118
  24. symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +415 -307
  25. symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +394 -231
  26. symai/backend/engines/ocr/engine_apilayer.py +23 -27
  27. symai/backend/engines/output/engine_stdout.py +10 -13
  28. symai/backend/engines/{webscraping → scrape}/engine_requests.py +101 -54
  29. symai/backend/engines/search/engine_openai.py +100 -88
  30. symai/backend/engines/search/engine_parallel.py +665 -0
  31. symai/backend/engines/search/engine_perplexity.py +44 -45
  32. symai/backend/engines/search/engine_serpapi.py +37 -34
  33. symai/backend/engines/speech_to_text/engine_local_whisper.py +54 -51
  34. symai/backend/engines/symbolic/engine_wolframalpha.py +15 -9
  35. symai/backend/engines/text_to_speech/engine_openai.py +20 -26
  36. symai/backend/engines/text_vision/engine_clip.py +39 -37
  37. symai/backend/engines/userinput/engine_console.py +5 -6
  38. symai/backend/mixin/__init__.py +13 -0
  39. symai/backend/mixin/anthropic.py +48 -38
  40. symai/backend/mixin/deepseek.py +6 -5
  41. symai/backend/mixin/google.py +7 -4
  42. symai/backend/mixin/groq.py +2 -4
  43. symai/backend/mixin/openai.py +140 -110
  44. symai/backend/settings.py +87 -20
  45. symai/chat.py +216 -123
  46. symai/collect/__init__.py +7 -1
  47. symai/collect/dynamic.py +80 -70
  48. symai/collect/pipeline.py +67 -51
  49. symai/collect/stats.py +161 -109
  50. symai/components.py +707 -360
  51. symai/constraints.py +24 -12
  52. symai/core.py +1857 -1233
  53. symai/core_ext.py +83 -80
  54. symai/endpoints/api.py +166 -104
  55. symai/extended/.DS_Store +0 -0
  56. symai/extended/__init__.py +46 -12
  57. symai/extended/api_builder.py +29 -21
  58. symai/extended/arxiv_pdf_parser.py +23 -14
  59. symai/extended/bibtex_parser.py +9 -6
  60. symai/extended/conversation.py +156 -126
  61. symai/extended/document.py +50 -30
  62. symai/extended/file_merger.py +57 -14
  63. symai/extended/graph.py +51 -32
  64. symai/extended/html_style_template.py +18 -14
  65. symai/extended/interfaces/blip_2.py +2 -3
  66. symai/extended/interfaces/clip.py +4 -3
  67. symai/extended/interfaces/console.py +9 -1
  68. symai/extended/interfaces/dall_e.py +4 -2
  69. symai/extended/interfaces/file.py +2 -0
  70. symai/extended/interfaces/flux.py +4 -2
  71. symai/extended/interfaces/gpt_image.py +16 -7
  72. symai/extended/interfaces/input.py +2 -1
  73. symai/extended/interfaces/llava.py +1 -2
  74. symai/extended/interfaces/{naive_webscraping.py → naive_scrape.py} +4 -3
  75. symai/extended/interfaces/naive_vectordb.py +9 -10
  76. symai/extended/interfaces/ocr.py +5 -3
  77. symai/extended/interfaces/openai_search.py +2 -0
  78. symai/extended/interfaces/parallel.py +30 -0
  79. symai/extended/interfaces/perplexity.py +2 -0
  80. symai/extended/interfaces/pinecone.py +12 -9
  81. symai/extended/interfaces/python.py +2 -0
  82. symai/extended/interfaces/serpapi.py +3 -1
  83. symai/extended/interfaces/terminal.py +2 -4
  84. symai/extended/interfaces/tts.py +3 -2
  85. symai/extended/interfaces/whisper.py +3 -2
  86. symai/extended/interfaces/wolframalpha.py +2 -1
  87. symai/extended/metrics/__init__.py +11 -1
  88. symai/extended/metrics/similarity.py +14 -13
  89. symai/extended/os_command.py +39 -29
  90. symai/extended/packages/__init__.py +29 -3
  91. symai/extended/packages/symdev.py +51 -43
  92. symai/extended/packages/sympkg.py +41 -35
  93. symai/extended/packages/symrun.py +63 -50
  94. symai/extended/repo_cloner.py +14 -12
  95. symai/extended/seo_query_optimizer.py +15 -13
  96. symai/extended/solver.py +116 -91
  97. symai/extended/summarizer.py +12 -10
  98. symai/extended/taypan_interpreter.py +17 -18
  99. symai/extended/vectordb.py +122 -92
  100. symai/formatter/__init__.py +9 -1
  101. symai/formatter/formatter.py +51 -47
  102. symai/formatter/regex.py +70 -69
  103. symai/functional.py +325 -176
  104. symai/imports.py +190 -147
  105. symai/interfaces.py +57 -28
  106. symai/memory.py +45 -35
  107. symai/menu/screen.py +28 -19
  108. symai/misc/console.py +66 -56
  109. symai/misc/loader.py +8 -5
  110. symai/models/__init__.py +17 -1
  111. symai/models/base.py +395 -236
  112. symai/models/errors.py +1 -2
  113. symai/ops/__init__.py +32 -22
  114. symai/ops/measures.py +24 -25
  115. symai/ops/primitives.py +1149 -731
  116. symai/post_processors.py +58 -50
  117. symai/pre_processors.py +86 -82
  118. symai/processor.py +21 -13
  119. symai/prompts.py +764 -685
  120. symai/server/huggingface_server.py +135 -49
  121. symai/server/llama_cpp_server.py +21 -11
  122. symai/server/qdrant_server.py +206 -0
  123. symai/shell.py +100 -42
  124. symai/shellsv.py +700 -492
  125. symai/strategy.py +630 -346
  126. symai/symbol.py +368 -322
  127. symai/utils.py +100 -78
  128. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/METADATA +22 -10
  129. symbolicai-1.1.0.dist-info/RECORD +168 -0
  130. symbolicai-0.21.0.dist-info/RECORD +0 -162
  131. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/WHEEL +0 -0
  132. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/entry_points.txt +0 -0
  133. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/licenses/LICENSE +0 -0
  134. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/top_level.txt +0 -0
symai/functional.py CHANGED
@@ -8,19 +8,32 @@ import sys
 import traceback
 import warnings
 from enum import Enum
-from types import ModuleType
-from typing import Any, Callable, Dict, List, Optional, Tuple, Type
+from typing import TYPE_CHECKING, Any
 
-from box import Box
 from loguru import logger
 from pydantic import BaseModel
 
 from .backend import engines
 from .backend.base import ENGINE_UNREGISTERED, Engine
-from .post_processors import PostProcessor
-from .pre_processors import PreProcessor
-from .utils import CustomUserWarning
 from .context import CURRENT_ENGINE_VAR
+from .prompts import (
+    ProbabilisticBooleanModeMedium,
+    ProbabilisticBooleanModeStrict,
+    ProbabilisticBooleanModeTolerant,
+)
+from .utils import UserMessage
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from types import ModuleType
+
+    from .core import Argument
+    from .post_processors import PostProcessor
+    from .pre_processors import PreProcessor
+else:
+    Callable = Any
+    ModuleType = type(importlib)
+    PostProcessor = PreProcessor = Any
 
 
 class ProbabilisticBooleanMode(Enum):
@@ -34,9 +47,6 @@ class ProbabilisticBooleanMode(Enum):
 
 
 ENGINE_PROBABILISTIC_BOOLEAN_MODE = ProbabilisticBooleanMode.MEDIUM
-from .prompts import (ProbabilisticBooleanModeMedium,
-                      ProbabilisticBooleanModeStrict,
-                      ProbabilisticBooleanModeTolerant)
 
 
 def _probabilistic_bool(rsp: str, mode=ProbabilisticBooleanMode.TOLERANT) -> bool:
@@ -44,53 +54,68 @@ def _probabilistic_bool(rsp: str, mode=ProbabilisticBooleanMode.TOLERANT) -> boo
         return False
     # check if rsp is a string / hard match
     val = str(rsp).lower()
-    if mode == ProbabilisticBooleanMode.STRICT:
+    if mode == ProbabilisticBooleanMode.STRICT:
         return val == ProbabilisticBooleanModeStrict
-    elif mode == ProbabilisticBooleanMode.MEDIUM:
+    if mode == ProbabilisticBooleanMode.MEDIUM:
         return val in ProbabilisticBooleanModeMedium
-    elif mode == ProbabilisticBooleanMode.TOLERANT:
+    if mode == ProbabilisticBooleanMode.TOLERANT:
         # allow for probabilistic boolean / fault tolerance
         return val in ProbabilisticBooleanModeTolerant
-    else:
-        raise ValueError(f"Invalid mode {mode} for probabilistic boolean!")
+    UserMessage(f"Invalid mode {mode} for probabilistic boolean!", raise_with=ValueError)
+    return False
 
 
-def _cast_return_type(rsp: Any, return_constraint: Type, engine_probabilistic_boolean_mode: ProbabilisticBooleanMode) -> Any:
-    if return_constraint == inspect._empty:
-        # do not cast if return type is not specified
-        pass
-    elif issubclass(return_constraint, BaseModel):
-        # pydantic model
-        rsp = return_constraint(data=rsp)
+def _cast_collection_response(rsp: Any, return_constraint: type) -> Any:
+    try:
+        res = ast.literal_eval(rsp)
+    except Exception:
+        logger.warning(f"Failed to cast return type to {return_constraint} for {rsp!s}")
+        warnings.warn(f"Failed to cast return type to {return_constraint}", stacklevel=2)
+        res = rsp
+    assert res is not None, (
+        f"Return type cast failed! Check if the return type is correct or post_processors output matches desired format: {rsp!s}"
+    )
+    return res
+
+
+def _cast_boolean_response(rsp: Any, mode: ProbabilisticBooleanMode) -> bool:
+    if len(rsp) <= 0:
+        return False
+    return _probabilistic_bool(rsp, mode=mode)
+
+
+def _cast_with_fallback(rsp: Any, return_constraint: type) -> Any:
+    try:
+        return return_constraint(rsp)
+    except (ValueError, TypeError):
+        if return_constraint is int:
+            UserMessage(f"Cannot convert {rsp} to int", raise_with=ConstraintViolationException)
+        warnings.warn(f"Failed to cast {rsp} to {return_constraint}", stacklevel=2)
         return rsp
-    elif str(return_constraint) == str(type(rsp)):
+
+
+def _cast_return_type(
+    rsp: Any, return_constraint: type, engine_probabilistic_boolean_mode: ProbabilisticBooleanMode
+) -> Any:
+    if return_constraint is inspect._empty:
         return rsp
-    elif return_constraint in (list, tuple, set, dict):
-        try:
-            res = ast.literal_eval(rsp)
-        except Exception as e:
-            logger.warning(f"Failed to cast return type to {return_constraint} for {str(rsp)}")
-            warnings.warn(f"Failed to cast return type to {return_constraint}") # Add warning for test
-            res = rsp
-        assert res is not None, f"Return type cast failed! Check if the return type is correct or post_processors output matches desired format: {str(rsp)}"
-        return res
-    elif return_constraint == bool:
-        if len(rsp) <= 0:
-            return False
-        else:
-            return _probabilistic_bool(rsp, mode=engine_probabilistic_boolean_mode)
-    elif not isinstance(rsp, return_constraint):
-        try:
-            # hard cast to return type fallback
-            rsp = return_constraint(rsp)
-        except (ValueError, TypeError) as e:
-            if return_constraint == int:
-                raise ConstraintViolationException(f"Cannot convert {rsp} to int")
-            warnings.warn(f"Failed to cast {rsp} to {return_constraint}")
-            return rsp
+    if issubclass(return_constraint, BaseModel):
+        # pydantic model
+        return return_constraint(data=rsp)
+    if str(return_constraint) == str(type(rsp)):
+        return rsp
+    if return_constraint in (list, tuple, set, dict):
+        return _cast_collection_response(rsp, return_constraint)
+    if return_constraint is bool:
+        return _cast_boolean_response(rsp, mode=engine_probabilistic_boolean_mode)
+    if not isinstance(rsp, return_constraint):
+        return _cast_with_fallback(rsp, return_constraint)
     return rsp
 
-def _apply_postprocessors(outputs, return_constraint, post_processors, argument, mode=ENGINE_PROBABILISTIC_BOOLEAN_MODE):
+
+def _apply_postprocessors(
+    outputs, return_constraint, post_processors, argument, mode=ENGINE_PROBABILISTIC_BOOLEAN_MODE
+):
     if argument.prop.preview:
         return outputs
 
@@ -99,7 +124,7 @@ def _apply_postprocessors(outputs, return_constraint, post_processors, argument,
     argument.prop.metadata = metadata
 
     if argument.prop.raw_output:
-        return metadata.get('raw_output'), metadata
+        return metadata.get("raw_output"), metadata
 
     if post_processors:
         for pp in post_processors:
@@ -108,60 +133,80 @@ def _apply_postprocessors(outputs, return_constraint, post_processors, argument,
 
     for constraint in argument.prop.constraints:
         if not constraint(rsp):
-            raise ConstraintViolationException("Constraint not satisfied:", rsp, constraint)
+            UserMessage(
+                f"Constraint not satisfied for value {rsp!r} with constraint {constraint}",
+                raise_with=ConstraintViolationException,
+            )
     return rsp, metadata
 
 
-def _apply_preprocessors(argument, instance: Any, pre_processors: Optional[List[PreProcessor]]) -> str:
-    processed_input = ''
+def _apply_preprocessors(argument, instance: Any, pre_processors: list[PreProcessor] | None) -> str:
+    processed_input = ""
     if pre_processors and not argument.prop.raw_input:
         argument.prop.instance = instance
         for pp in pre_processors:
             t = pp(argument)
-            processed_input += t if t is not None else ''
+            processed_input += t if t is not None else ""
     else:
         if argument.args and len(argument.args) > 0:
-            processed_input += ' '.join([str(a) for a in argument.args])
+            processed_input += " ".join([str(a) for a in argument.args])
     return processed_input
 
 
 def _limit_number_results(rsp: Any, argument, return_type):
-    limit_ = argument.prop.limit if argument.prop.limit else (len(rsp) if hasattr(rsp, '__len__') else None)
+    limit_ = (
+        argument.prop.limit
+        if argument.prop.limit
+        else (len(rsp) if hasattr(rsp, "__len__") else None)
+    )
     # the following line is different from original code to make it work for iterable return types when the limit is 1
     if limit_ is not None:
-        if return_type == str and isinstance(rsp, list):
-            return '\n'.join(rsp[:limit_])
-        elif return_type == list:
+        if return_type is str and isinstance(rsp, list):
+            return "\n".join(rsp[:limit_])
+        if return_type is list:
             return rsp[:limit_]
-        elif return_type == dict:
+        if return_type is dict:
             keys = list(rsp.keys())
             return {k: rsp[k] for k in keys[:limit_]}
-        elif return_type == set:
+        if return_type is set:
             return set(list(rsp)[:limit_])
-        elif return_type == tuple:
+        if return_type is tuple:
             return tuple(list(rsp)[:limit_])
     return rsp
 
 
-def _prepare_argument(argument: Any, engine: Any, instance: Any, func: Callable, constraints: List[Callable], default: Any, limit: int, trials: int, pre_processors: Optional[List[PreProcessor]], post_processors: Optional[List[PostProcessor]]) -> Any:
+def _prepare_argument(
+    argument: Any,
+    engine: Any,
+    instance: Any,
+    func: Callable,
+    constraints: list[Callable],
+    default: Any,
+    limit: int,
+    trials: int,
+    pre_processors: list[PreProcessor] | None,
+    post_processors: list[PostProcessor] | None,
+) -> Any:
     # check signature for return type
     sig = inspect.signature(func)
     return_constraint = sig._return_annotation
-    assert 'typing' not in str(return_constraint), "Return type must be of base type not generic Typing object, e.g. int, str, list, etc."
+    assert "typing" not in str(return_constraint), (
+        "Return type must be of base type not generic Typing object, e.g. int, str, list, etc."
+    )
 
     # prepare argument container
-    argument.prop.engine = engine
-    argument.prop.instance = instance
-    argument.prop.instance_type = type(instance)
-    argument.prop.signature = sig
-    argument.prop.func = func
-    argument.prop.constraints = constraints
+    argument.prop.engine = engine
+    argument.prop.instance = instance
+    argument.prop.instance_type = type(instance)
+    argument.prop.signature = sig
+    argument.prop.func = func
+    argument.prop.constraints = constraints
     argument.prop.return_constraint = return_constraint
-    argument.prop.default = default
-    argument.prop.limit = limit
-    argument.prop.trials = trials
-    argument.prop.pre_processors = pre_processors
-    argument.prop.post_processors = post_processors
+    argument.prop.default = default
+    argument.prop.limit = limit
+    argument.prop.trials = trials
+    argument.prop.pre_processors = pre_processors
+    argument.prop.post_processors = post_processors
     return argument
 
 
@@ -172,37 +217,55 @@ def _execute_query_fallback(func, instance, argument, error=None, stack_trace=No
     providing error context to the fallback function, and maintaining the same return format.
     """
     try:
-        rsp = func(instance, error=error, stack_trace=stack_trace, *argument.args, **argument.signature_kwargs)
+        rsp = func(
+            instance,
+            *argument.args,
+            error=error,
+            stack_trace=stack_trace,
+            **argument.signature_kwargs,
+        )
     except Exception:
-        raise error # re-raise the original error
+        raise error from None  # Re-raise the original error without chaining fallback failure.
     if rsp is not None:
         # fallback was implemented
-        rsp = dict(data=rsp, error=error, stack_trace=stack_trace)
-        return rsp
-    elif argument.prop.default is not None:
+        return {"data": rsp, "error": error, "stack_trace": stack_trace}
+    if argument.prop.default is not None:
         # no fallback implementation, but default value is set
-        rsp = dict(data=argument.prop.default, error=error, stack_trace=stack_trace)
-        return rsp
-    else:
-        raise error
-
-
-def _process_query_single(engine,
-                          instance,
-                          func: Callable,
-                          constraints: List[Callable] = [],
-                          default: Optional[object] = None,
-                          limit: int = 1,
-                          trials: int = 1,
-                          pre_processors: Optional[List[PreProcessor]] = None,
-                          post_processors: Optional[List[PostProcessor]] = None,
-                          argument=None):
+        return {"data": argument.prop.default, "error": error, "stack_trace": stack_trace}
+    raise error from None
+
+
+def _process_query_single(
+    engine,
+    instance,
+    func: Callable,
+    constraints: list[Callable] | None = None,
+    default: object | None = None,
+    limit: int = 1,
+    trials: int = 1,
+    pre_processors: list[PreProcessor] | None = None,
+    post_processors: list[PostProcessor] | None = None,
+    argument=None,
+):
+    if constraints is None:
+        constraints = []
     if pre_processors and not isinstance(pre_processors, list):
         pre_processors = [pre_processors]
     if post_processors and not isinstance(post_processors, list):
         post_processors = [post_processors]
 
-    argument = _prepare_argument(argument, engine, instance, func, constraints, default, limit, trials, pre_processors, post_processors)
+    argument = _prepare_argument(
+        argument,
+        engine,
+        instance,
+        func,
+        constraints,
+        default,
+        limit,
+        trials,
+        pre_processors,
+        post_processors,
+    )
 
     preprocessed_input = _apply_preprocessors(argument, instance, pre_processors)
     argument.prop.processed_input = preprocessed_input
@@ -213,14 +276,18 @@ def _process_query_single(engine,
     for _ in range(trials):
         try:
             outputs = engine.executor_callback(argument)
-            result, metadata = _apply_postprocessors(outputs, argument.prop.return_constraint, post_processors, argument)
+            result, metadata = _apply_postprocessors(
+                outputs, argument.prop.return_constraint, post_processors, argument
+            )
             break
         except Exception as e:
             stack_trace = traceback.format_exc()
-            logger.error(f"Failed to execute query: {str(e)}")
+            logger.error(f"Failed to execute query: {e!s}")
             logger.error(f"Stack trace: {stack_trace}")
             if _ == trials - 1:
-                result = _execute_query_fallback(func, instance, argument, error=e, stack_trace=stack_trace)
+                result = _execute_query_fallback(
+                    func, instance, argument, error=e, stack_trace=stack_trace
+                )
                 if result is None:
                     raise e
 
@@ -230,58 +297,99 @@ def _process_query_single(engine,
     return limited_result
 
 
-def _execute_query(engine, argument) -> List[object]:
-    # build prompt and query engine
-    engine.prepare(argument)
-    if argument.prop.preview:
-        return engine.preview(argument)
-    outputs = engine(argument) # currently only supports single query
-    return outputs
-
-
-def _process_query(
-    engine: Engine,
-    instance: Any,
-    func: Callable,
-    constraints: List[Callable] = [],
-    default: Optional[object] = None,
-    limit: int | None = None,
-    trials: int = 1,
-    pre_processors: Optional[List[PreProcessor]] = None,
-    post_processors: Optional[List[PostProcessor]] = None,
-    argument: Argument = None,
-) -> Any:
-
+def _normalize_processors(
+    pre_processors: list[PreProcessor] | PreProcessor | None,
+    post_processors: list[PostProcessor] | PostProcessor | None,
+) -> tuple[list[PreProcessor] | None, list[PostProcessor] | None]:
     if pre_processors and not isinstance(pre_processors, list):
         pre_processors = [pre_processors]
     if post_processors and not isinstance(post_processors, list):
         post_processors = [post_processors]
-
-    argument = _prepare_argument(argument, engine, instance, func, constraints, default, limit, trials, pre_processors, post_processors)
-    return_constraint = argument.prop.return_constraint
-    # if prep_processors is empty or none this returns an empty string
-    processed_input = _apply_preprocessors(argument, instance, pre_processors)
-    if not argument.prop.raw_input:
-        argument.prop.processed_input = processed_input
-
+    return pre_processors, post_processors
+
+
+def _run_query_with_retries(
+    engine: Engine,
+    argument: Any,
+    func: Callable,
+    instance: Any,
+    trials: int,
+    return_constraint: type,
+    post_processors: list[PostProcessor] | None,
+) -> tuple[Any, Any]:
     try_cnt = 0
+    rsp = None
+    metadata = None
     while try_cnt < trials:
         try_cnt += 1
         try:
             outputs = _execute_query(engine, argument)
-            rsp, metadata = _apply_postprocessors(outputs, return_constraint, post_processors, argument)
-            if argument.prop.preview:
-                if argument.prop.return_metadata:
-                    return rsp, metadata
-                return rsp
-
-        except Exception as e:
+            rsp, metadata = _apply_postprocessors(
+                outputs, return_constraint, post_processors, argument
+            )
+            break
+        except Exception as error:
             stack_trace = traceback.format_exc()
-            logger.error(f"Failed to execute query: {str(e)}")
+            logger.error(f"Failed to execute query: {error!s}")
             logger.error(f"Stack trace: {stack_trace}")
             if try_cnt < trials:
                 continue
-            rsp = _execute_query_fallback(func, instance, argument, error=e, stack_trace=stack_trace)
+            rsp = _execute_query_fallback(
+                func, instance, argument, error=error, stack_trace=stack_trace
+            )
+            metadata = None
+    return rsp, metadata
+
+
+def _execute_query(engine, argument) -> list[object]:
+    # build prompt and query engine
+    engine.prepare(argument)
+    if argument.prop.preview:
+        return engine.preview(argument)
+    return engine(argument)  # currently only supports single query
+
+
+def _process_query(
+    engine: Engine,
+    instance: Any,
+    func: Callable,
+    constraints: list[Callable] | None = None,
+    default: object | None = None,
+    limit: int | None = None,
+    trials: int = 1,
+    pre_processors: list[PreProcessor] | None = None,
+    post_processors: list[PostProcessor] | None = None,
+    argument: Argument = None,
+) -> Any:
+    if constraints is None:
+        constraints = []
+    pre_processors, post_processors = _normalize_processors(pre_processors, post_processors)
+
+    argument = _prepare_argument(
+        argument,
+        engine,
+        instance,
+        func,
+        constraints,
+        default,
+        limit,
+        trials,
+        pre_processors,
+        post_processors,
+    )
+    return_constraint = argument.prop.return_constraint
+    # if prep_processors is empty or none this returns an empty string
+    processed_input = _apply_preprocessors(argument, instance, pre_processors)
+    if not argument.prop.raw_input:
+        argument.prop.processed_input = processed_input
+
+    rsp, metadata = _run_query_with_retries(
+        engine, argument, func, instance, trials, return_constraint, post_processors
+    )
+    if argument.prop.preview:
+        if argument.prop.return_metadata:
+            return rsp, metadata
+        return rsp
 
     if not argument.prop.raw_output:
@@ -290,16 +398,16 @@ def _process_query(
     return rsp
 
 
-class EngineRepository(object):
+class EngineRepository:
     _instance = None
 
     def __init__(self):
-        if '_engines' not in self.__dict__: # ensures _engines is only set once
-            self._engines: Dict[str, Engine] = {}
+        if "_engines" not in self.__dict__:  # ensures _engines is only set once
+            self._engines: dict[str, Engine] = {}
 
-    def __new__(cls, *args, **kwargs):
+    def __new__(cls, *_args, **_kwargs):
         if cls._instance is None:
-            cls._instance = super(EngineRepository, cls).__new__(cls, *args, **kwargs)
+            cls._instance = super().__new__(cls)
             cls._instance.__init__() # Explicitly call __init__
         return cls._instance
 
@@ -308,30 +416,50 @@ class EngineRepository(object):
         self = EngineRepository()
         # Check if the engine is already registered
         if id in self._engines and not allow_engine_override:
-            raise ValueError(f"Engine {id} is already registered. Set allow_engine_override to True to override.")
+            UserMessage(
+                f"Engine {id} is already registered. Set allow_engine_override to True to override.",
+                raise_with=ValueError,
+            )
 
         self._engines[id] = engine_instance
 
     @staticmethod
-    def register_from_plugin(id: str, plugin: str, selected_engine: Optional[str] = None, allow_engine_override: bool = False, *args, **kwargs) -> None:
-        from .imports import Import
+    def register_from_plugin(
+        id: str,
+        plugin: str,
+        selected_engine: str | None = None,
+        allow_engine_override: bool = False,
+        *args,
+        **kwargs,
+    ) -> None:
+        # Lazy import keeps functional -> imports -> symbol -> core -> functional cycle broken.
+        from .imports import Import  # noqa
+
         types = Import.load_module_class(plugin)
         # filter out engine class type
         engines = [t for t in types if issubclass(t, Engine) and t is not Engine]
         if len(engines) > 1 and selected_engine is None:
-            raise ValueError(f"Multiple engines found in plugin {plugin}. Please specify the engine to use.")
-        elif len(engines) > 1 and selected_engine is not None:
+            UserMessage(
+                f"Multiple engines found in plugin {plugin}. Please specify the engine to use.",
+                raise_with=ValueError,
+            )
+        if len(engines) > 1 and selected_engine is not None:
             engine = [e for e in engines if selected_engine in str(e)]
             if len(engine) <= 0:
-                raise ValueError(f"No engine named {selected_engine} found in plugin {plugin}.")
+                UserMessage(
+                    f"No engine named {selected_engine} found in plugin {plugin}.",
+                    raise_with=ValueError,
+                )
         engine = engines[0](*args, **kwargs)
         EngineRepository.register(id, engine, allow_engine_override=allow_engine_override)
 
     @staticmethod
-    def register_from_package(package: ModuleType, allow_engine_override: bool = False, *args, **kwargs) -> None:
+    def register_from_package(
+        package: ModuleType, allow_engine_override: bool = False, *args, **kwargs
+    ) -> None:
         self = EngineRepository()
         # Iterate over all modules in the given package and import them
-        for _, module_name, _ in pkgutil.iter_modules(package.__path__, package.__name__ + '.'):
+        for _, module_name, _ in pkgutil.iter_modules(package.__path__, package.__name__ + "."):
             module = importlib.import_module(module_name)
 
             # Check all classes defined in the module
@@ -339,25 +467,35 @@ class EngineRepository(object):
                 attribute = getattr(module, attribute_name)
 
                 # Register class if it is a subclass of Engine (but not Engine itself)
-                if inspect.isclass(attribute) and issubclass(attribute, Engine) and attribute is not Engine:
+                if (
+                    inspect.isclass(attribute)
+                    and issubclass(attribute, Engine)
+                    and attribute is not Engine
+                ):
                     try:
-                        instance = attribute(*args, **kwargs) # Create an instance of the engine class
+                        instance = attribute(
+                            *args, **kwargs
+                        )  # Create an instance of the engine class
                         # Assume the class has an 'init' static method to initialize it
-                        engine_id_func_ = getattr(instance, 'id', None)
+                        engine_id_func_ = getattr(instance, "id", None)
                         if engine_id_func_ is None:
-                            raise ValueError(f"Engine {str(instance)} does not have an id. Please add a method id() to the class.")
+                            UserMessage(
+                                f"Engine {instance!s} does not have an id. Please add a method id() to the class.",
+                                raise_with=ValueError,
+                            )
                         # call engine_() to get the id of the engine
                         id_ = engine_id_func_()
                         # only registered configured engine
                         if id_ != ENGINE_UNREGISTERED:
                             # register new engine
-                            self.register(id_, instance, allow_engine_override=allow_engine_override)
+                            self.register(
+                                id_, instance, allow_engine_override=allow_engine_override
+                            )
                     except Exception as e:
-                        logger.error(f"Failed to register engine {str(attribute)}: {str(e)}")
+                        logger.error(f"Failed to register engine {attribute!s}: {e!s}")
 
     @staticmethod
-    def get(engine_name: str, *args, **kwargs):
-
+    def get(engine_name: str, *_args, **_kwargs):
         self = EngineRepository()
         # First check if we're in the context manager that dynamically changes models
         if engine_name == "neurosymbolic":
@@ -366,28 +504,31 @@ class EngineRepository(object):
                 return engine
 
         # Otherwise, fallback to normal lookup:
-        if engine_name not in self._engines.keys():
-            subpackage_name = engine_name.replace('-', '_')
+        if engine_name not in self._engines:
+            subpackage_name = engine_name.replace("-", "_")
             subpackage = importlib.import_module(f"{engines.__package__}.{subpackage_name}", None)
             if subpackage is None:
-                raise ValueError(f"The symbolicai library does not contain the engine named {engine_name}.")
+                UserMessage(
+                    f"The symbolicai library does not contain the engine named {engine_name}.",
+                    raise_with=ValueError,
+                )
             self.register_from_package(subpackage)
         engine = self._engines.get(engine_name, None)
         if engine is None:
-            raise ValueError(f"No engine named {engine_name} is registered.")
+            UserMessage(f"No engine named {engine_name} is registered.", raise_with=ValueError)
        return engine
 
     @staticmethod
-    def list() -> List[str]:
+    def list() -> list[str]:
         self = EngineRepository()
         return dict(self._engines.items())
 
     @staticmethod
-    def command(engines: List[str], *args, **kwargs) -> Any:
+    def command(engines: list[str], *args, **kwargs) -> Any:
         self = EngineRepository()
         if isinstance(engines, str):
             engines = [engines]
-        if 'all' in engines:
+        if "all" in engines:
             # Call the command function for all registered engines with provided arguments
             return [engine.command(*args, **kwargs) for name, engine in self._engines.items()]
         # Call the command function for the engine with provided arguments
@@ -396,28 +537,30 @@ class EngineRepository(object):
         if engine:
             # Call the command function for the engine with provided arguments
             return engine.command(*args, **kwargs)
-        raise ValueError(f"No engine named <{engine_name}> is registered.")
+        UserMessage(f"No engine named <{engine_name}> is registered.", raise_with=ValueError)
+        return None
 
     @staticmethod
-    def query(engine: str, *args, **kwargs) -> Tuple:
+    def query(engine: str, *args, **kwargs) -> tuple:
         self = EngineRepository()
         engine = self.get(engine)
         if engine:
-            engine_allows_batching = getattr(engine, 'allows_batching', False)
+            engine_allows_batching = getattr(engine, "allows_batching", False)
             if engine_allows_batching:
                 return _process_query_single(engine, *args, **kwargs)
-            else:
-                return _process_query(engine, *args, **kwargs)
-        raise ValueError(f"No engine named {engine} is registered.")
+            return _process_query(engine, *args, **kwargs)
+        UserMessage(f"No engine named {engine} is registered.", raise_with=ValueError)
+        return None
 
     @staticmethod
-    def bind_property(engine: str, property: str, *args, **kwargs):
+    def bind_property(engine: str, property: str, *_args, **_kwargs):
         self = EngineRepository()
         """Bind a property to a specific engine."""
         engine = self.get(engine)
         if engine:
             return getattr(engine, property, None)
-        raise ValueError(f"No engine named {engine} is registered.")
+        UserMessage(f"No engine named {engine} is registered.", raise_with=ValueError)
+        return None
 
 
     def get_dynamic_engine_instance(self):
@@ -429,22 +572,28 @@ class EngineRepository(object):
                 pass
 
         # 2) Fallback: walk ONLY current thread frames (legacy behavior)
-        from .components import DynamicEngine
+        # Keeping DynamicEngine import lazy prevents functional importing components before it finishes loading.
+        from .components import DynamicEngine  # noqa
+
         try:
             frame = sys._getframe()
         except Exception:
             return None
         while frame:
             try:
-                locals_copy = frame.f_locals.copy() if hasattr(frame.f_locals, 'copy') else dict(frame.f_locals)
+                locals_copy = (
+                    frame.f_locals.copy()
+                    if hasattr(frame.f_locals, "copy")
+                    else dict(frame.f_locals)
+                )
             except Exception:
-                CustomUserWarning(
+                UserMessage(
                     "Unexpected failure copying frame locals while resolving DynamicEngine.",
                     raise_with=None,
                 )
                 locals_copy = {}
             for value in locals_copy.values():
-                if isinstance(value, DynamicEngine) and getattr(value, '_entered', False):
+                if isinstance(value, DynamicEngine) and getattr(value, "_entered", False):
                     return value.engine_instance
             frame = frame.f_back
         return None
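
Note for readers tracing the new error-handling convention: throughout the rewritten symai/functional.py, bare raise statements are replaced by calls to UserMessage(...) imported from .utils, with raise_with selecting the exception type (or None where only a non-fatal notice is wanted, as in the DynamicEngine frame-walk fallback). The helper itself is not part of this diff; the sketch below is a minimal, hypothetical stand-in inferred solely from the call sites visible above, included only to make those call sites easier to read, not the library's actual implementation.

import warnings


def UserMessage(message: str, raise_with: type[BaseException] | None = None) -> None:
    # Hypothetical stand-in for symai.utils.UserMessage, inferred from call sites in this
    # diff only; the real helper may format, log, or collect messages differently.
    if raise_with is not None:
        # e.g. UserMessage(f"No engine named {name} is registered.", raise_with=ValueError)
        raise raise_with(message)
    # raise_with=None appears where 0.21.0 used CustomUserWarning, so a plain warning is
    # assumed here.
    warnings.warn(message, stacklevel=2)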