symbolicai 0.20.2__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. symai/__init__.py +96 -64
  2. symai/backend/base.py +93 -80
  3. symai/backend/engines/drawing/engine_bfl.py +12 -11
  4. symai/backend/engines/drawing/engine_gpt_image.py +108 -87
  5. symai/backend/engines/embedding/engine_llama_cpp.py +25 -28
  6. symai/backend/engines/embedding/engine_openai.py +3 -5
  7. symai/backend/engines/execute/engine_python.py +6 -5
  8. symai/backend/engines/files/engine_io.py +74 -67
  9. symai/backend/engines/imagecaptioning/engine_blip2.py +3 -3
  10. symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +54 -38
  11. symai/backend/engines/index/engine_pinecone.py +23 -24
  12. symai/backend/engines/index/engine_vectordb.py +16 -14
  13. symai/backend/engines/lean/engine_lean4.py +38 -34
  14. symai/backend/engines/neurosymbolic/__init__.py +41 -13
  15. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +262 -182
  16. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +263 -191
  17. symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +53 -49
  18. symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +212 -211
  19. symai/backend/engines/neurosymbolic/engine_groq.py +87 -63
  20. symai/backend/engines/neurosymbolic/engine_huggingface.py +21 -24
  21. symai/backend/engines/neurosymbolic/engine_llama_cpp.py +117 -48
  22. symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +256 -229
  23. symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +270 -150
  24. symai/backend/engines/ocr/engine_apilayer.py +6 -8
  25. symai/backend/engines/output/engine_stdout.py +1 -4
  26. symai/backend/engines/search/engine_openai.py +7 -7
  27. symai/backend/engines/search/engine_perplexity.py +5 -5
  28. symai/backend/engines/search/engine_serpapi.py +12 -14
  29. symai/backend/engines/speech_to_text/engine_local_whisper.py +20 -27
  30. symai/backend/engines/symbolic/engine_wolframalpha.py +3 -3
  31. symai/backend/engines/text_to_speech/engine_openai.py +5 -7
  32. symai/backend/engines/text_vision/engine_clip.py +7 -11
  33. symai/backend/engines/userinput/engine_console.py +3 -3
  34. symai/backend/engines/webscraping/engine_requests.py +81 -48
  35. symai/backend/mixin/__init__.py +13 -0
  36. symai/backend/mixin/anthropic.py +4 -2
  37. symai/backend/mixin/deepseek.py +2 -0
  38. symai/backend/mixin/google.py +2 -0
  39. symai/backend/mixin/openai.py +11 -3
  40. symai/backend/settings.py +83 -16
  41. symai/chat.py +101 -78
  42. symai/collect/__init__.py +7 -1
  43. symai/collect/dynamic.py +77 -69
  44. symai/collect/pipeline.py +35 -27
  45. symai/collect/stats.py +75 -63
  46. symai/components.py +198 -169
  47. symai/constraints.py +15 -12
  48. symai/core.py +698 -359
  49. symai/core_ext.py +32 -34
  50. symai/endpoints/api.py +80 -73
  51. symai/extended/.DS_Store +0 -0
  52. symai/extended/__init__.py +46 -12
  53. symai/extended/api_builder.py +11 -8
  54. symai/extended/arxiv_pdf_parser.py +13 -12
  55. symai/extended/bibtex_parser.py +2 -3
  56. symai/extended/conversation.py +101 -90
  57. symai/extended/document.py +17 -10
  58. symai/extended/file_merger.py +18 -13
  59. symai/extended/graph.py +18 -13
  60. symai/extended/html_style_template.py +2 -4
  61. symai/extended/interfaces/blip_2.py +1 -2
  62. symai/extended/interfaces/clip.py +1 -2
  63. symai/extended/interfaces/console.py +7 -1
  64. symai/extended/interfaces/dall_e.py +1 -1
  65. symai/extended/interfaces/flux.py +1 -1
  66. symai/extended/interfaces/gpt_image.py +1 -1
  67. symai/extended/interfaces/input.py +1 -1
  68. symai/extended/interfaces/llava.py +0 -1
  69. symai/extended/interfaces/naive_vectordb.py +7 -8
  70. symai/extended/interfaces/naive_webscraping.py +1 -1
  71. symai/extended/interfaces/ocr.py +1 -1
  72. symai/extended/interfaces/pinecone.py +6 -5
  73. symai/extended/interfaces/serpapi.py +1 -1
  74. symai/extended/interfaces/terminal.py +2 -3
  75. symai/extended/interfaces/tts.py +1 -1
  76. symai/extended/interfaces/whisper.py +1 -1
  77. symai/extended/interfaces/wolframalpha.py +1 -1
  78. symai/extended/metrics/__init__.py +11 -1
  79. symai/extended/metrics/similarity.py +11 -13
  80. symai/extended/os_command.py +17 -16
  81. symai/extended/packages/__init__.py +29 -3
  82. symai/extended/packages/symdev.py +19 -16
  83. symai/extended/packages/sympkg.py +12 -9
  84. symai/extended/packages/symrun.py +21 -19
  85. symai/extended/repo_cloner.py +11 -10
  86. symai/extended/seo_query_optimizer.py +1 -2
  87. symai/extended/solver.py +20 -23
  88. symai/extended/summarizer.py +4 -3
  89. symai/extended/taypan_interpreter.py +10 -12
  90. symai/extended/vectordb.py +99 -82
  91. symai/formatter/__init__.py +9 -1
  92. symai/formatter/formatter.py +12 -16
  93. symai/formatter/regex.py +62 -63
  94. symai/functional.py +176 -122
  95. symai/imports.py +136 -127
  96. symai/interfaces.py +56 -27
  97. symai/memory.py +14 -13
  98. symai/misc/console.py +49 -39
  99. symai/misc/loader.py +5 -3
  100. symai/models/__init__.py +17 -1
  101. symai/models/base.py +269 -181
  102. symai/models/errors.py +0 -1
  103. symai/ops/__init__.py +32 -22
  104. symai/ops/measures.py +11 -15
  105. symai/ops/primitives.py +348 -228
  106. symai/post_processors.py +32 -28
  107. symai/pre_processors.py +39 -41
  108. symai/processor.py +6 -4
  109. symai/prompts.py +59 -45
  110. symai/server/huggingface_server.py +23 -20
  111. symai/server/llama_cpp_server.py +7 -5
  112. symai/shell.py +3 -4
  113. symai/shellsv.py +499 -375
  114. symai/strategy.py +517 -287
  115. symai/symbol.py +111 -116
  116. symai/utils.py +42 -36
  117. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/METADATA +4 -2
  118. symbolicai-1.0.0.dist-info/RECORD +163 -0
  119. symbolicai-0.20.2.dist-info/RECORD +0 -162
  120. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/WHEEL +0 -0
  121. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/entry_points.txt +0 -0
  122. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/licenses/LICENSE +0 -0
  123. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,10 @@
1
1
  import logging
2
- import re
3
2
  from copy import deepcopy
4
- from typing import List, Optional
5
3
 
6
- from annotated_types import Not
7
4
  from openai import OpenAI
8
5
 
9
6
  from ....components import SelfPrompt
10
- from ....misc.console import ConsoleStyle
11
- from ....symbol import Symbol
12
- from ....utils import CustomUserWarning, encode_media_frames
7
+ from ....utils import UserMessage
13
8
  from ...base import Engine
14
9
  from ...mixin.deepseek import DeepSeekMixin
15
10
  from ...settings import SYMAI_CONFIG
@@ -22,7 +17,7 @@ logging.getLogger("httpcore").setLevel(logging.ERROR)
22
17
 
23
18
 
24
19
  class DeepSeekXReasoningEngine(Engine, DeepSeekMixin):
25
- def __init__(self, api_key: Optional[str] = None, model: Optional[str] = None):
20
+ def __init__(self, api_key: str | None = None, model: str | None = None):
26
21
  super().__init__()
27
22
  self.config = deepcopy(SYMAI_CONFIG)
28
23
  # In case we use EngineRepository.register to inject the api_key and model => dynamically change the engine at runtime
@@ -42,7 +37,7 @@ class DeepSeekXReasoningEngine(Engine, DeepSeekMixin):
42
37
  try:
43
38
  self.client = OpenAI(api_key=self.api_key, base_url="https://api.deepseek.com")
44
39
  except Exception as e:
45
- CustomUserWarning(f'Failed to initialize the DeepSeek client. Please check your library version. Caused by: {e}', raise_with=RuntimeError)
40
+ UserMessage(f'Failed to initialize the DeepSeek client. Please check your library version. Caused by: {e}', raise_with=RuntimeError)
46
41
 
47
42
  def id(self) -> str:
48
43
  if self.config.get('NEUROSYMBOLIC_ENGINE_MODEL') and \
@@ -59,14 +54,14 @@ class DeepSeekXReasoningEngine(Engine, DeepSeekMixin):
59
54
  if 'seed' in kwargs:
60
55
  self.seed = kwargs['seed']
61
56
 
62
- def compute_required_tokens(self, messages):
63
- CustomUserWarning('Method "compute_required_tokens" not implemented for DeepSeekXReasoningEngine.', raise_with=NotImplementedError)
57
+ def compute_required_tokens(self, _messages):
58
+ UserMessage('Method "compute_required_tokens" not implemented for DeepSeekXReasoningEngine.', raise_with=NotImplementedError)
64
59
 
65
- def compute_remaining_tokens(self, prompts: list) -> int:
66
- CustomUserWarning('Method "compute_remaining_tokens" not implemented for DeepSeekXReasoningEngine.', raise_with=NotImplementedError)
60
+ def compute_remaining_tokens(self, _prompts: list) -> int:
61
+ UserMessage('Method "compute_remaining_tokens" not implemented for DeepSeekXReasoningEngine.', raise_with=NotImplementedError)
67
62
 
68
- def truncate(self, prompts: list[dict], truncation_percentage: float | None, truncation_type: str) -> list[dict]:
69
- CustomUserWarning('Method "truncate" not implemented for DeepSeekXReasoningEngine.', raise_with=NotImplementedError)
63
+ def truncate(self, _prompts: list[dict], _truncation_percentage: float | None, _truncation_type: str) -> list[dict]:
64
+ UserMessage('Method "truncate" not implemented for DeepSeekXReasoningEngine.', raise_with=NotImplementedError)
70
65
 
71
66
  def forward(self, argument):
72
67
  kwargs = argument.kwargs
@@ -80,18 +75,18 @@ class DeepSeekXReasoningEngine(Engine, DeepSeekMixin):
80
75
  except Exception as e:
81
76
  if self.api_key is None or self.api_key == '':
82
77
  msg = 'DeepSeek API key is not set. Please set it in the config file or pass it as an argument to the command method.'
83
- logging.error(msg)
78
+ UserMessage(msg)
84
79
  if self.config['NEUROSYMBOLIC_ENGINE_API_KEY'] is None or self.config['NEUROSYMBOLIC_ENGINE_API_KEY'] == '':
85
- CustomUserWarning(msg, raise_with=ValueError)
80
+ UserMessage(msg, raise_with=ValueError)
86
81
  self.api_key = self.config['NEUROSYMBOLIC_ENGINE_API_KEY']
87
82
 
88
83
  callback = self.client.chat.completions.create
89
- kwargs['model'] = kwargs['model'] if 'model' in kwargs else self.model
84
+ kwargs['model'] = kwargs.get('model', self.model)
90
85
 
91
86
  if except_remedy is not None:
92
87
  res = except_remedy(self, e, callback, argument)
93
88
  else:
94
- CustomUserWarning(f'Error during generation. Caused by: {e}', raise_with=ValueError)
89
+ UserMessage(f'Error during generation. Caused by: {e}', raise_with=ValueError)
95
90
 
96
91
  reasoning_content = res.choices[0].message.reasoning_content
97
92
  content = res.choices[0].message.content
@@ -101,36 +96,32 @@ class DeepSeekXReasoningEngine(Engine, DeepSeekMixin):
101
96
 
102
97
  def _prepare_raw_input(self, argument):
103
98
  if not argument.prop.processed_input:
104
- CustomUserWarning('A prompt instruction is required for DeepSeekXReasoningEngine when raw_input is enabled.', raise_with=ValueError)
99
+ UserMessage('A prompt instruction is required for DeepSeekXReasoningEngine when raw_input is enabled.', raise_with=ValueError)
105
100
  value = argument.prop.processed_input
106
101
  # convert to dict if not already
107
- if type(value) != list:
108
- if type(value) != dict:
102
+ if not isinstance(value, list):
103
+ if not isinstance(value, dict):
109
104
  value = {'role': 'user', 'content': str(value)}
110
105
  value = [value]
111
106
  return value
112
107
 
113
- def prepare(self, argument):
114
- if argument.prop.raw_input:
115
- argument.prop.prepared_input = self._prepare_raw_input(argument)
116
- return
117
-
108
+ def _build_system_prompt(self, argument):
118
109
  _non_verbose_output = """<META_INSTRUCTION/>\nYou do not output anything else, like verbose preambles or post explanation, such as "Sure, let me...", "Hope that was helpful...", "Yes, I can help you with that...", etc. Consider well formatted output, e.g. for sentences use punctuation, spaces etc. or for code use indentation, etc. Never add meta instructions information to your output!\n\n"""
119
- user: str = ""
120
110
  system: str = ""
111
+ prop = argument.prop
121
112
 
122
- if argument.prop.suppress_verbose_output:
113
+ if prop.suppress_verbose_output:
123
114
  system += _non_verbose_output
124
115
  system = f'{system}\n' if system and len(system) > 0 else ''
125
116
 
126
- if argument.prop.response_format:
127
- _rsp_fmt = argument.prop.response_format
117
+ if prop.response_format:
118
+ _rsp_fmt = prop.response_format
128
119
  if not (_rsp_fmt.get('type') is not None):
129
- CustomUserWarning('Response format type is required! Expected format `{"type": "json_object"}` or other supported types.', raise_with=AssertionError)
120
+ UserMessage('Response format type is required! Expected format `{"type": "json_object"}` or other supported types.', raise_with=AssertionError)
130
121
  system += _non_verbose_output
131
122
  system += f'<RESPONSE_FORMAT/>\n{_rsp_fmt["type"]}\n\n'
132
123
 
133
- ref = argument.prop.instance
124
+ ref = prop.instance
134
125
  static_ctxt, dyn_ctxt = ref.global_context
135
126
  if len(static_ctxt) > 0:
136
127
  system += f"<STATIC CONTEXT/>\n{static_ctxt}\n\n"
@@ -138,36 +129,49 @@ class DeepSeekXReasoningEngine(Engine, DeepSeekMixin):
138
129
  if len(dyn_ctxt) > 0:
139
130
  system += f"<DYNAMIC CONTEXT/>\n{dyn_ctxt}\n\n"
140
131
 
141
- payload = argument.prop.payload
142
- if argument.prop.payload:
143
- system += f"<ADDITIONAL CONTEXT/>\n{str(payload)}\n\n"
132
+ payload = prop.payload
133
+ if prop.payload:
134
+ system += f"<ADDITIONAL CONTEXT/>\n{payload!s}\n\n"
144
135
 
145
- examples: List[str] = argument.prop.examples
136
+ examples: list[str] = prop.examples
146
137
  if examples and len(examples) > 0:
147
- system += f"<EXAMPLES/>\n{str(examples)}\n\n"
138
+ system += f"<EXAMPLES/>\n{examples!s}\n\n"
148
139
 
149
- if argument.prop.prompt is not None and len(argument.prop.prompt) > 0:
150
- val = str(argument.prop.prompt)
140
+ if prop.prompt is not None and len(prop.prompt) > 0:
141
+ val = str(prop.prompt)
151
142
  system += f"<INSTRUCTION/>\n{val}\n\n"
152
143
 
153
- user += f"{str(argument.prop.processed_input)}"
144
+ if prop.template_suffix:
145
+ system += f' You will only generate content for the placeholder `{prop.template_suffix!s}` following the instructions and the provided context information.\n\n'
154
146
 
155
- if argument.prop.template_suffix:
156
- system += f' You will only generate content for the placeholder `{str(argument.prop.template_suffix)}` following the instructions and the provided context information.\n\n'
147
+ return system
157
148
 
158
- user_prompt = { "role": "user", "content": user }
149
+ def _build_user_prompt(self, argument):
150
+ return {"role": "user", "content": f"{argument.prop.processed_input!s}"}
159
151
 
160
- # First check if the `Symbol` instance has the flag set, otherwise check if it was passed as an argument to a method
161
- if argument.prop.instance._kwargs.get('self_prompt', False) or argument.prop.self_prompt:
152
+ def _apply_self_prompt(self, argument, system, user_prompt):
153
+ prop = argument.prop
154
+ if prop.instance._kwargs.get('self_prompt', False) or prop.self_prompt:
162
155
  self_prompter = SelfPrompt()
163
156
 
164
- res = self_prompter({'user': user, 'system': system})
157
+ res = self_prompter({'user': user_prompt['content'], 'system': system})
165
158
  if res is None:
166
- CustomUserWarning("Self-prompting failed for DeepSeekXReasoningEngine.", raise_with=ValueError)
159
+ UserMessage("Self-prompting failed for DeepSeekXReasoningEngine.", raise_with=ValueError)
167
160
 
168
161
  user_prompt = { "role": "user", "content": res['user'] }
169
162
  system = res['system']
170
163
 
164
+ return system, user_prompt
165
+
166
+ def prepare(self, argument):
167
+ if argument.prop.raw_input:
168
+ argument.prop.prepared_input = self._prepare_raw_input(argument)
169
+ return
170
+
171
+ system = self._build_system_prompt(argument)
172
+ user_prompt = self._build_user_prompt(argument)
173
+ system, user_prompt = self._apply_self_prompt(argument, system, user_prompt)
174
+
171
175
  argument.prop.prepared_input = [
172
176
  { "role": "system", "content": system },
173
177
  user_prompt,
@@ -177,8 +181,8 @@ class DeepSeekXReasoningEngine(Engine, DeepSeekMixin):
177
181
  """Prepares the request payload from the argument."""
178
182
  kwargs = argument.kwargs
179
183
  # 16/03/2025
180
- # Not Supported FeaturesFunction Call、Json Output、FIM (Beta)
181
- # Not Supported Parameterstemperature、top_p、presence_penalty、frequency_penalty、logprobs、top_logprobs
184
+ # Not Supported Features: Function Call、Json Output、FIM (Beta)
185
+ # Not Supported Parameters: temperature、top_p、presence_penalty、frequency_penalty、logprobs、top_logprobs
182
186
  return {
183
187
  "model": kwargs.get('model', self.model),
184
188
  "seed": kwargs.get('seed', self.seed),