symbolicai 0.20.2__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. symai/__init__.py +96 -64
  2. symai/backend/base.py +93 -80
  3. symai/backend/engines/drawing/engine_bfl.py +12 -11
  4. symai/backend/engines/drawing/engine_gpt_image.py +108 -87
  5. symai/backend/engines/embedding/engine_llama_cpp.py +25 -28
  6. symai/backend/engines/embedding/engine_openai.py +3 -5
  7. symai/backend/engines/execute/engine_python.py +6 -5
  8. symai/backend/engines/files/engine_io.py +74 -67
  9. symai/backend/engines/imagecaptioning/engine_blip2.py +3 -3
  10. symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +54 -38
  11. symai/backend/engines/index/engine_pinecone.py +23 -24
  12. symai/backend/engines/index/engine_vectordb.py +16 -14
  13. symai/backend/engines/lean/engine_lean4.py +38 -34
  14. symai/backend/engines/neurosymbolic/__init__.py +41 -13
  15. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +262 -182
  16. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +263 -191
  17. symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +53 -49
  18. symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +212 -211
  19. symai/backend/engines/neurosymbolic/engine_groq.py +87 -63
  20. symai/backend/engines/neurosymbolic/engine_huggingface.py +21 -24
  21. symai/backend/engines/neurosymbolic/engine_llama_cpp.py +117 -48
  22. symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +256 -229
  23. symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +270 -150
  24. symai/backend/engines/ocr/engine_apilayer.py +6 -8
  25. symai/backend/engines/output/engine_stdout.py +1 -4
  26. symai/backend/engines/search/engine_openai.py +7 -7
  27. symai/backend/engines/search/engine_perplexity.py +5 -5
  28. symai/backend/engines/search/engine_serpapi.py +12 -14
  29. symai/backend/engines/speech_to_text/engine_local_whisper.py +20 -27
  30. symai/backend/engines/symbolic/engine_wolframalpha.py +3 -3
  31. symai/backend/engines/text_to_speech/engine_openai.py +5 -7
  32. symai/backend/engines/text_vision/engine_clip.py +7 -11
  33. symai/backend/engines/userinput/engine_console.py +3 -3
  34. symai/backend/engines/webscraping/engine_requests.py +81 -48
  35. symai/backend/mixin/__init__.py +13 -0
  36. symai/backend/mixin/anthropic.py +4 -2
  37. symai/backend/mixin/deepseek.py +2 -0
  38. symai/backend/mixin/google.py +2 -0
  39. symai/backend/mixin/openai.py +11 -3
  40. symai/backend/settings.py +83 -16
  41. symai/chat.py +101 -78
  42. symai/collect/__init__.py +7 -1
  43. symai/collect/dynamic.py +77 -69
  44. symai/collect/pipeline.py +35 -27
  45. symai/collect/stats.py +75 -63
  46. symai/components.py +198 -169
  47. symai/constraints.py +15 -12
  48. symai/core.py +698 -359
  49. symai/core_ext.py +32 -34
  50. symai/endpoints/api.py +80 -73
  51. symai/extended/.DS_Store +0 -0
  52. symai/extended/__init__.py +46 -12
  53. symai/extended/api_builder.py +11 -8
  54. symai/extended/arxiv_pdf_parser.py +13 -12
  55. symai/extended/bibtex_parser.py +2 -3
  56. symai/extended/conversation.py +101 -90
  57. symai/extended/document.py +17 -10
  58. symai/extended/file_merger.py +18 -13
  59. symai/extended/graph.py +18 -13
  60. symai/extended/html_style_template.py +2 -4
  61. symai/extended/interfaces/blip_2.py +1 -2
  62. symai/extended/interfaces/clip.py +1 -2
  63. symai/extended/interfaces/console.py +7 -1
  64. symai/extended/interfaces/dall_e.py +1 -1
  65. symai/extended/interfaces/flux.py +1 -1
  66. symai/extended/interfaces/gpt_image.py +1 -1
  67. symai/extended/interfaces/input.py +1 -1
  68. symai/extended/interfaces/llava.py +0 -1
  69. symai/extended/interfaces/naive_vectordb.py +7 -8
  70. symai/extended/interfaces/naive_webscraping.py +1 -1
  71. symai/extended/interfaces/ocr.py +1 -1
  72. symai/extended/interfaces/pinecone.py +6 -5
  73. symai/extended/interfaces/serpapi.py +1 -1
  74. symai/extended/interfaces/terminal.py +2 -3
  75. symai/extended/interfaces/tts.py +1 -1
  76. symai/extended/interfaces/whisper.py +1 -1
  77. symai/extended/interfaces/wolframalpha.py +1 -1
  78. symai/extended/metrics/__init__.py +11 -1
  79. symai/extended/metrics/similarity.py +11 -13
  80. symai/extended/os_command.py +17 -16
  81. symai/extended/packages/__init__.py +29 -3
  82. symai/extended/packages/symdev.py +19 -16
  83. symai/extended/packages/sympkg.py +12 -9
  84. symai/extended/packages/symrun.py +21 -19
  85. symai/extended/repo_cloner.py +11 -10
  86. symai/extended/seo_query_optimizer.py +1 -2
  87. symai/extended/solver.py +20 -23
  88. symai/extended/summarizer.py +4 -3
  89. symai/extended/taypan_interpreter.py +10 -12
  90. symai/extended/vectordb.py +99 -82
  91. symai/formatter/__init__.py +9 -1
  92. symai/formatter/formatter.py +12 -16
  93. symai/formatter/regex.py +62 -63
  94. symai/functional.py +176 -122
  95. symai/imports.py +136 -127
  96. symai/interfaces.py +56 -27
  97. symai/memory.py +14 -13
  98. symai/misc/console.py +49 -39
  99. symai/misc/loader.py +5 -3
  100. symai/models/__init__.py +17 -1
  101. symai/models/base.py +269 -181
  102. symai/models/errors.py +0 -1
  103. symai/ops/__init__.py +32 -22
  104. symai/ops/measures.py +11 -15
  105. symai/ops/primitives.py +348 -228
  106. symai/post_processors.py +32 -28
  107. symai/pre_processors.py +39 -41
  108. symai/processor.py +6 -4
  109. symai/prompts.py +59 -45
  110. symai/server/huggingface_server.py +23 -20
  111. symai/server/llama_cpp_server.py +7 -5
  112. symai/shell.py +3 -4
  113. symai/shellsv.py +499 -375
  114. symai/strategy.py +517 -287
  115. symai/symbol.py +111 -116
  116. symai/utils.py +42 -36
  117. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/METADATA +4 -2
  118. symbolicai-1.0.0.dist-info/RECORD +163 -0
  119. symbolicai-0.20.2.dist-info/RECORD +0 -162
  120. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/WHEEL +0 -0
  121. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/entry_points.txt +0 -0
  122. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/licenses/LICENSE +0 -0
  123. {symbolicai-0.20.2.dist-info → symbolicai-1.0.0.dist-info}/top_level.txt +0 -0
symai/backend/mixin/anthropic.py CHANGED
@@ -33,7 +33,8 @@ class AnthropicMixin:
  self.model == 'claude-3-opus-20240229' or \
  self.model == 'claude-3-sonnet-20240229' or \
  self.model == 'claude-3-haiku-20240307':
- return 200_000
+ return 200_000
+ return None

  def api_max_response_tokens(self):
  if self.model == 'claude-sonnet-4-0' or \
@@ -53,4 +54,5 @@ class AnthropicMixin:
  self.model == 'clade-3-opus-20240229' or \
  self.model == 'claude-3-sonnet-20240229' or \
  self.model == 'claude-3-haiku-20240307':
- return 4_096
+ return 4_096
+ return None
symai/backend/mixin/deepseek.py CHANGED
@@ -8,7 +8,9 @@ class DeepSeekMixin:
  def api_max_context_tokens(self):
  if self.model == 'deepseek-reasoner':
  return 64_000
+ return None

  def api_max_response_tokens(self):
  if self.model == 'deepseek-reasoner':
  return 8_000
+ return None
symai/backend/mixin/google.py CHANGED
@@ -10,7 +10,9 @@ class GoogleMixin:
  def api_max_context_tokens(self):
  if self.model.startswith('gemini-2.5-'):
  return 1_048_576
+ return None

  def api_max_response_tokens(self):
  if self.model == 'gemini-2.5-':
  return 65_536
+ return None
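Across the Anthropic, DeepSeek and Google mixins the change is the same: the token-limit lookups now end with an explicit return None when the configured model is not recognized, instead of falling off the end of the function. A minimal sketch of the resulting behaviour, written as a standalone stand-in class (the model name 'deepseek-chat' is only an example and does not appear in the hunks above):

    # Standalone illustration of the post-1.0.0 mixin pattern; not the library class itself.
    class DeepSeekLimits:
        def __init__(self, model: str):
            self.model = model

        def api_max_context_tokens(self):
            if self.model == 'deepseek-reasoner':
                return 64_000
            return None  # unknown models now yield an explicit None

    print(DeepSeekLimits('deepseek-reasoner').api_max_context_tokens())  # 64000
    print(DeepSeekLimits('deepseek-chat').api_max_context_tokens())      # None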
symai/backend/mixin/openai.py CHANGED
@@ -1,3 +1,5 @@
+ from ...utils import UserMessage
+
  SUPPORTED_COMPLETION_MODELS = [
  'davinci-002',
  ]
@@ -86,7 +88,9 @@ class OpenAIMixin:
  self.model == 'gpt-4.1-mini' or \
  self.model == 'gpt-4.1-nano':
  return 1_047_576
- raise ValueError(f'Unsupported model: {self.model}')
+ msg = f'Unsupported model: {self.model}'
+ UserMessage(msg)
+ raise ValueError(msg)

  def api_max_response_tokens(self):
  if self.model == 'davinci-002':
@@ -122,7 +126,9 @@ class OpenAIMixin:
  self.model == 'gpt-5-nano' or \
  self.model == 'gpt-5-chat-latest':
  return 128_000
- raise ValueError(f'Unsupported model: {self.model}')
+ msg = f'Unsupported model: {self.model}'
+ UserMessage(msg)
+ raise ValueError(msg)

  def api_embedding_dims(self):
  if self.model == 'text-embedding-ada-002':
@@ -131,4 +137,6 @@ class OpenAIMixin:
  return 1_536
  if self.model == 'text-embedding-3-large':
  return 3_072
- raise ValueError(f'Unsupported model: {self.model}')
+ msg = f'Unsupported model: {self.model}'
+ UserMessage(msg)
+ raise ValueError(msg)
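The OpenAI mixin takes a different route for unknown models: it builds the message once, surfaces it through the newly imported UserMessage, and then raises. A rough sketch of that error path, rewritten as a standalone function (UserMessage is resolved from the relative import in the hunk; mapping ada-002 to 1,536 dimensions is inferred from the context lines and OpenAI's published embedding sizes, so treat the table as an assumption):

    from symai.utils import UserMessage  # the hunk imports this as 'from ...utils import UserMessage'

    # Sketch only; the real logic lives on OpenAIMixin.api_embedding_dims.
    _EMBEDDING_DIMS = {
        'text-embedding-ada-002': 1_536,   # inferred mapping
        'text-embedding-3-large': 3_072,
    }

    def api_embedding_dims(model: str) -> int:
        if model in _EMBEDDING_DIMS:
            return _EMBEDDING_DIMS[model]
        msg = f'Unsupported model: {model}'
        UserMessage(msg)       # surface the problem to the user first ...
        raise ValueError(msg)  # ... then raise, as in the hunks above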
symai/backend/settings.py CHANGED
@@ -1,5 +1,4 @@
  import json
- import os
  import sys
  from pathlib import Path

@@ -18,6 +17,26 @@ class SymAIConfig:
  self._env_config_dir = self._env_path / '.symai'
  self._home_config_dir = Path.home() / '.symai'
  self._debug_dir = Path.cwd() # Current working directory for debug mode
+ self._active_paths: dict[str, Path] = {}
+
+ def _canonical_key(self, filename: str | Path) -> str:
+ """Return a canonical identifier for config files regardless of input type."""
+ path = Path(filename)
+ if path.is_absolute() or path.parent != Path():
+ return str(path)
+ return path.name or str(path)
+
+ def _remove_legacy_path_keys(self, key: str) -> None:
+ """Drop legacy Path keys that collide with the canonical key."""
+ target_path = Path(key)
+ target_name = target_path.name or key
+ stale_keys: list[Path] = [
+ existing_key for existing_key in self._active_paths
+ if isinstance(existing_key, Path)
+ and (existing_key.name == target_name or str(existing_key) == key)
+ ]
+ for stale_key in stale_keys:
+ self._active_paths.pop(stale_key, None)

  @property
  def config_dir(self) -> Path:
@@ -31,41 +50,59 @@ class SymAIConfig:
  # Finally home directory
  return self._home_config_dir

- def get_config_path(self, filename: str, fallback_to_home: bool = False) -> Path:
+ def get_config_path(self, filename: str | Path, fallback_to_home: bool = False) -> Path:
  """Gets the config path using the priority system or forces fallback to home."""
- debug_config = self._debug_dir / filename
- env_config = self._env_config_dir / filename
- home_config = self._home_config_dir / filename
+ input_path = Path(filename)
+ if input_path.is_absolute() or input_path.parent != Path():
+ return input_path
+
+ normalized_filename = self._canonical_key(filename)
+ # Only use the basename for managed directories
+ normalized_filename = Path(normalized_filename).name
+ debug_config = self._debug_dir / normalized_filename
+ env_config = self._env_config_dir / normalized_filename
+ home_config = self._home_config_dir / normalized_filename

  # Check debug first (only valid for symai.config.json)
- if filename == 'symai.config.json' and debug_config.exists():
+ if normalized_filename == 'symai.config.json' and debug_config.exists():
  return debug_config

  # If forced to fallback, return home config if it exists, otherwise environment
  if fallback_to_home:
- return home_config if home_config.exists() else env_config
+ if home_config.exists():
+ return home_config
+ return env_config

  # Normal priority-based resolution
- # If environment config doesn't exist, return that path (for creation)
- if not env_config.exists():
+ if env_config.exists():
  return env_config
- # Otherwise use environment config
+ if home_config.exists():
+ return home_config
  return env_config

- def load_config(self, filename: str, fallback_to_home: bool = False) -> dict:
+ def load_config(self, filename: str | Path, fallback_to_home: bool = False) -> dict:
  """Loads JSON data from the determined config location."""
  config_path = self.get_config_path(filename, fallback_to_home=fallback_to_home)
+ key = self._canonical_key(filename)
  if not config_path.exists():
+ self._remove_legacy_path_keys(key)
+ self._active_paths.pop(key, None)
  return {}
- with open(config_path, 'r', encoding='utf-8') as f:
- return json.load(f)
+ with config_path.open(encoding='utf-8') as f:
+ config = json.load(f)
+ self._remove_legacy_path_keys(key)
+ self._active_paths[key] = config_path
+ return config

- def save_config(self, filename: str, data: dict, fallback_to_home: bool = False) -> None:
+ def save_config(self, filename: str | Path, data: dict, fallback_to_home: bool = False) -> None:
  """Saves JSON data to the determined config location."""
  config_path = self.get_config_path(filename, fallback_to_home=fallback_to_home)
- os.makedirs(config_path.parent, exist_ok=True)
- with open(config_path, 'w', encoding='utf-8') as f:
+ key = self._canonical_key(filename)
+ config_path.parent.mkdir(parents=True, exist_ok=True)
+ with config_path.open('w', encoding='utf-8') as f:
  json.dump(data, f, indent=4)
+ self._remove_legacy_path_keys(key)
+ self._active_paths[key] = config_path

  def migrate_config(self, filename: str, updates: dict) -> None:
  """Updates existing configuration with new fields."""
@@ -73,6 +110,36 @@ class SymAIConfig:
  config.update(updates)
  self.save_config(filename, config)

+ def get_active_path(self, filename: str | Path) -> Path:
+ """Returns the last path used to read or write the given config file."""
+ key = self._canonical_key(filename)
+ cached = self._active_paths.get(key)
+ if cached is not None:
+ return cached
+ for legacy_key, cached_path in list(self._active_paths.items()):
+ if isinstance(legacy_key, Path) and (
+ legacy_key.name == key or str(legacy_key) == key
+ ):
+ self._active_paths.pop(legacy_key, None)
+ self._active_paths[key] = cached_path
+ return cached_path
+ return self.get_config_path(filename)
+
+ def get_active_config_dir(self) -> Path:
+ """Returns the directory backing the active symai configuration."""
+ symai_key = self._canonical_key('symai.config.json')
+ cached = self._active_paths.get(symai_key)
+ if cached is not None:
+ return cached.parent
+ for legacy_key, cached_path in list(self._active_paths.items()):
+ if isinstance(legacy_key, Path) and (
+ legacy_key.name == symai_key or str(legacy_key) == symai_key
+ ):
+ self._active_paths.pop(legacy_key, None)
+ self._active_paths[symai_key] = cached_path
+ return cached_path.parent
+ return self.config_dir
+
  SYMAI_CONFIG = {}
  SYMSH_CONFIG = {}
  SYMSERVER_CONFIG = {}
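Taken together, the SymAIConfig changes route plain filenames through the debug → environment → home priority chain, pass absolute or directory-qualified paths through untouched, and remember which path was actually read or written so get_active_path and get_active_config_dir can report it later. A hedged usage sketch (the no-argument constructor and the 'DEBUG' key are assumptions; only the methods above appear in this diff):

    from pathlib import Path

    from symai.backend.settings import SymAIConfig

    cfg = SymAIConfig()  # assumed default constructor; the diff only shows methods

    # A bare filename is resolved via the debug -> environment -> home priority chain.
    resolved = cfg.get_config_path('symai.config.json')

    # An absolute (or directory-qualified) path is returned unchanged.
    explicit = cfg.get_config_path(Path('/tmp/symai.config.json'))

    data = cfg.load_config('symai.config.json')  # {} if the file does not exist yet
    cfg.save_config('symai.config.json', {**data, 'DEBUG': True})  # 'DEBUG' is just an example key

    # After a load or save, the active path/directory reflect what was actually used.
    print(cfg.get_active_path('symai.config.json'))
    print(cfg.get_active_config_dir())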
symai/chat.py CHANGED
@@ -1,6 +1,5 @@
  import logging
  import re
- from typing import Optional

  from loguru import logger

@@ -13,6 +12,7 @@ from .post_processors import ConsolePostProcessor, StripPostProcessor
  from .pre_processors import ConsoleInputPreProcessor
  from .prompts import MemoryCapabilities, SymbiaCapabilities
  from .symbol import Expression, Symbol
+ from .utils import UserMessage

  logging.getLogger('charset_normalizer').setLevel(logging.ERROR)

@@ -45,10 +45,10 @@ class ChatBot(Expression):
  self.detect_memory_usage = InContextClassification(MemoryCapabilities())
  self._last_user_input: str = ''

- def repeat(self, query, **kwargs):
+ def repeat(self, query, **_kwargs):
  return self.narrate('Symbia does not understand and asks to repeat and give more context.', prompt=query)

- def narrate(self, message: str, context: str = None, category: str = None, end: bool = False, **kwargs) -> Symbol:
+ def narrate(self, message: str, context: str | None = None, category: str | None = None, end: bool = False, **kwargs) -> Symbol:
  reflection = context if context is not None else ''
  ltmem_recall = 'No memories retrieved.'
  stmem_recall = '\n'.join(self.short_term_memory.recall())
@@ -73,7 +73,8 @@ class ChatBot(Expression):
  if do == 'SAVE':
  self.long_term_memory(f'{self.name}: {reflection}', operation='add', top_k=self.long_term_mem_top_k, index_name=self.index_name)
  self.long_term_memory('save', operation='config', index_name=self.index_name)
- if self.verbose: logger.debug(f'Store new long-term memory:\n{reflection}\n')
+ if self.verbose:
+ logger.debug(f'Store new long-term memory:\n{reflection}\n')
  message = f'{self.name} inform the user that the memory was stored.'
  elif do == 'DUPLICATE':
  message = f'{self.name} engages the user in a conversation about the duplicate topic, showing the user she remembered the past interaction.'
@@ -86,7 +87,8 @@ class ChatBot(Expression):

  reply = f'{self.name}: {self._narration(message, self._last_user_input, reflection, context, ltmem_recall, stmem_recall, **kwargs)}'

- if end: print('\n\n', reply)
+ if end:
+ UserMessage(f'\n\n{reply}', text="extensity")

  return Symbol(reply)

@@ -118,8 +120,8 @@ class ChatBot(Expression):
  @staticmethod
  def _init_custom_input_postprocessor(that):
  class CustomInputPostProcessor(ConsolePostProcessor):
- def __call__(self, rsp, argument):
- that.short_term_memory.store(f'User: {str(rsp)}')
+ def __call__(self, rsp, _argument):
+ that.short_term_memory.store(f'User: {rsp!s}')
  return rsp
  return CustomInputPostProcessor

@@ -157,108 +159,130 @@ The chatbot always reply in the following format
  @core.zero_shot(prompt=prompt, **kwargs)
  def _func(_) -> str:
  pass
- if self.verbose: logger.debug(f'Narration:\n{prompt}\n')
- res = _func(self)
- res = res.replace(f'{self.name}: ', '').strip()
- return res
+ if self.verbose:
+ logger.debug(f'Narration:\n{prompt}\n')
+ return _func(self).replace(f'{self.name}: ', '').strip()

  class SymbiaChat(ChatBot):
  def __init__(self, name: str = 'Symbia', verbose: bool = False, **kwargs):
  super().__init__(name=name, verbose=verbose, **kwargs)
  self.message = self.narrate(f'{self.name} introduces herself, writes a greeting message and asks how to help.', context=None)

- def forward(self, usr: Optional[str] = None) -> Symbol:
+ def forward(self, usr: str | None = None) -> Symbol:
  loop = True
  ask_input = True
  if usr:
  ask_input = False
  usr = self._to_symbol(usr)

- # added step-by-step interaction with the user if input is provided
  while loop:
- # if no input is provided, ask for input
- if ask_input:
- usr = self.input(self.message)
- else:
- loop = False # break the loop after the first iteration
-
+ usr, loop = self._resolve_user_input(usr, loop, ask_input)
  self._last_user_input = usr
- if self.verbose: logger.debug(f'User:\n{usr}\n')
-
- if len(str(usr)) > 0:
- ctxt = str(self.detect_capability(usr))
- else:
- ctxt = '[DK]'
+ self._log_verbose('User', usr)

- if self.verbose: logger.debug(f'In-context:\n{ctxt}\n')
+ ctxt = self._context_from_user(usr)
+ self._log_verbose('In-context', ctxt)

- if '[EXIT]' in ctxt:
- self.message = self.narrate(f'{self.name} writes friendly goodbye message.', context=None, end=True)
+ if self._handle_exit_context(ctxt):
  break
- elif '[HELP]' in ctxt:
- reflection = self._extract_reflection(ctxt)
- self.message = self.narrate(f'{self.name} ', context=reflection)
- elif '[RECALL]' in ctxt:
- reflection = self._extract_reflection(ctxt)
- category = self._extract_category(ctxt)
- self.message = self.narrate(f'{self.name} uses replies based on what has been recovered from the memory.', context=ctxt, category=category)
- elif '[DK]' in ctxt:
- reflection = self._extract_reflection(ctxt)
- self.message = self.narrate(f'{self.name} is not sure about the message and references and asks the user for more context.', context=reflection)

- else:
- try:
- if '[SYMBOLIC]' in ctxt:
- q = usr.extract("mathematical formula that WolframAlpha can solve")
- rsp = self.interfaces['symbolic'](q)
- self.message = self.narrate(f'{self.name} replies to the user and provides the solution of the math problem.', context=rsp)
- elif '[SEARCH]' in ctxt:
- q = usr.extract('user query request')
- rsp = self.interfaces['search'](q)
- self.message = self.narrate(f'{self.name} replies to the user based on the online search results.', context=rsp)
- elif '[SCRAPER]' in ctxt:
- q = usr.extract('URL from text')
- q = q.convert('proper URL, example: https://www.google.com')
- rsp = self.interfaces['scraper'](q)
- self.message = self.narrate(f'{self.name} replies to the user and narrates its findings.', context=rsp)
- elif '[SPEECH-TO-TEXT]' in ctxt:
- q = usr.extract('extract file path')
- rsp = self.interfaces['stt'](q)
- self.message = self.narrate(f'{self.name} replies to the user and transcribes the content of the audio file.', context=rsp)
- elif '[TEXT-TO-IMAGE]' in ctxt:
- q = usr.extract('text for image creation')
- rsp = self.interfaces['drawing'](q)
- self.message = self.narrate('Symbia replies to the user and provides the image URL.', context=rsp)
- elif '[FILE]' in ctxt:
- file_path = usr.extract('extract file path')
- q = usr.extract('user question')
- rsp = self.interfaces['file'](file_path)
- self.message = self.narrate(f'{self.name} replies to the user and outlines and relies to the user query.', context=rsp)
- else:
- q = usr.extract('user query request')
- reflection = self._extract_reflection(ctxt)
- self.message = self.narrate(f'{self.name} tries to interpret the response, and if unclear asks the user to restate the statement or add more context.', context=reflection)
-
- except Exception as e:
- reflection = self._extract_reflection(ctxt)
- self.message = self.narrate(f'{self.name} apologizes and explains the user what went wrong.', context=str(e))
+ if self._handle_reflection_context(ctxt):
+ continue
+
+ self._handle_interface_context(usr, ctxt)

  return self.message

+ def _resolve_user_input(self, usr: Symbol | None, loop: bool, ask_input: bool) -> tuple[Symbol, bool]:
+ if ask_input:
+ usr = self.input(self.message)
+ else:
+ loop = False
+ return usr, loop
+
+ def _log_verbose(self, title: str, content) -> None:
+ if self.verbose:
+ logger.debug(f'{title}:\n{content}\n')
+
+ def _context_from_user(self, usr: Symbol) -> str:
+ text = str(usr)
+ if len(text) == 0:
+ return '[DK]'
+ return str(self.detect_capability(usr))
+
+ def _handle_exit_context(self, ctxt: str) -> bool:
+ if '[EXIT]' in ctxt:
+ self.message = self.narrate(f'{self.name} writes friendly goodbye message.', context=None, end=True)
+ return True
+ return False
+
+ def _handle_reflection_context(self, ctxt: str) -> bool:
+ if '[HELP]' in ctxt:
+ reflection = self._extract_reflection(ctxt)
+ self.message = self.narrate(f'{self.name} ', context=reflection)
+ return True
+ if '[RECALL]' in ctxt:
+ reflection = self._extract_reflection(ctxt)
+ category = self._extract_category(ctxt)
+ self.message = self.narrate(f'{self.name} uses replies based on what has been recovered from the memory.', context=ctxt, category=category)
+ return True
+ if '[DK]' in ctxt:
+ reflection = self._extract_reflection(ctxt)
+ self.message = self.narrate(f'{self.name} is not sure about the message and references and asks the user for more context.', context=reflection)
+ return True
+ return False
+
+ def _handle_interface_context(self, usr: Symbol, ctxt: str) -> None:
+ try:
+ if '[SYMBOLIC]' in ctxt:
+ q = usr.extract("mathematical formula that WolframAlpha can solve")
+ rsp = self.interfaces['symbolic'](q)
+ self.message = self.narrate(f'{self.name} replies to the user and provides the solution of the math problem.', context=rsp)
+ elif '[SEARCH]' in ctxt:
+ q = usr.extract('user query request')
+ rsp = self.interfaces['search'](q)
+ self.message = self.narrate(f'{self.name} replies to the user based on the online search results.', context=rsp)
+ elif '[SCRAPER]' in ctxt:
+ q = usr.extract('URL from text')
+ q = q.convert('proper URL, example: https://www.google.com')
+ rsp = self.interfaces['scraper'](q)
+ self.message = self.narrate(f'{self.name} replies to the user and narrates its findings.', context=rsp)
+ elif '[SPEECH-TO-TEXT]' in ctxt:
+ q = usr.extract('extract file path')
+ rsp = self.interfaces['stt'](q)
+ self.message = self.narrate(f'{self.name} replies to the user and transcribes the content of the audio file.', context=rsp)
+ elif '[TEXT-TO-IMAGE]' in ctxt:
+ q = usr.extract('text for image creation')
+ rsp = self.interfaces['drawing'](q)
+ self.message = self.narrate('Symbia replies to the user and provides the image URL.', context=rsp)
+ elif '[FILE]' in ctxt:
+ file_path = usr.extract('extract file path')
+ q = usr.extract('user question')
+ rsp = self.interfaces['file'](file_path)
+ self.message = self.narrate(f'{self.name} replies to the user and outlines and relies to the user query.', context=rsp)
+ else:
+ q = usr.extract('user query request')
+ reflection = self._extract_reflection(ctxt)
+ self.message = self.narrate(f'{self.name} tries to interpret the response, and if unclear asks the user to restate the statement or add more context.', context=reflection)
+
+ except Exception as e:
+ reflection = self._extract_reflection(ctxt)
+ self.message = self.narrate(f'{self.name} apologizes and explains the user what went wrong.', context=str(e))
+
  def _extract_reflection(self, msg: str) -> str:
  res = re.findall(r'\(([^)]+)\)', msg)
  if len(res) > 0:
  return res.pop()
- return
+ return None

  def _extract_category(self, msg: str) -> str:
  res = re.findall(r'\[([^]]+)\]', msg)
  if len(res) > 0:
  return res.pop()
- return
+ return None

  def _memory_scratchpad(self, context, short_term_memory, long_term_memory):
- scratchpad = f'''
+ return f'''
  [REFLECT](
  Query: {self._last_user_input}
  Reflection: {self._extract_reflection(context)}
@@ -272,7 +296,6 @@ Reflection: {self._extract_reflection(context)}
  {long_term_memory}
  )
  '''
- return scratchpad

  def run() -> None:
  chat = SymbiaChat()
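SymbiaChat.forward keeps its behaviour but is now decomposed into _resolve_user_input, _log_verbose, _context_from_user and the three _handle_* methods. A condensed, self-contained sketch of the resulting control flow (the stand-in functions below are simplifications for illustration, not the library code):

    def detect_capability(text: str) -> str:
        # Stand-in for ChatBot.detect_capability: returns a bracketed capability tag.
        return '[EXIT]' if 'bye' in text.lower() else '[SEARCH]'

    def forward(usr: str | None = None) -> str:
        """Condensed mirror of the refactored SymbiaChat.forward loop."""
        ask_input = usr is None
        message = 'Symbia greets the user.'
        loop = True
        while loop:
            if ask_input:
                usr = input(message)          # _resolve_user_input: keep asking for input
            else:
                loop = False                  # single pass when input was supplied up front
            ctxt = detect_capability(usr) if usr else '[DK]'
            if '[EXIT]' in ctxt:              # _handle_exit_context
                return 'Symbia says goodbye.'
            if any(tag in ctxt for tag in ('[HELP]', '[RECALL]', '[DK]')):
                message = 'reflection reply'  # _handle_reflection_context
                continue
            message = 'interface reply'       # _handle_interface_context dispatches to a tool
        return message

    print(forward('find me a paper on neurosymbolic AI'))  # -> 'interface reply'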
symai/collect/__init__.py CHANGED
@@ -1,2 +1,8 @@
+ from .dynamic import create_object_from_string
  from .pipeline import CollectionRepository, rec_serialize
- from .dynamic import create_object_from_string
+
+ __all__ = [
+ "CollectionRepository",
+ "create_object_from_string",
+ "rec_serialize",
+ ]
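With __all__ in place, the public surface of symai.collect is explicit; the import below simply exercises the three exported names:

    # The three names match the __all__ list added above.
    from symai.collect import (CollectionRepository, create_object_from_string,
                               rec_serialize)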
symai/collect/dynamic.py CHANGED
@@ -1,5 +1,5 @@
- import re
  import ast
+ import re


  class DynamicClass:
@@ -20,7 +20,7 @@ def create_dynamic_class(class_name, **kwargs):

  def parse_custom_class_instances(s):
  pattern = r"(\w+)\((.*?)\)"
- if type(s) != str:
+ if not isinstance(s, str):
  return s
  matches = re.finditer(pattern, s)

@@ -29,79 +29,87 @@ def parse_custom_class_instances(s):
  class_args = match.group(2)
  try:
  parsed_args = ast.literal_eval(f'{{{class_args}}}')
- except:
+ except (ValueError, SyntaxError):
  parsed_args = create_object_from_string(class_args)
  class_instance = create_dynamic_class(class_name, **parsed_args)
  s = s.replace(match.group(0), repr(class_instance))

  return s

- # TODO: fix to properly parse nested lists and dicts
- def create_object_from_string(str_class):
- def updated_attributes_process(str_class):
- # Regular expression to extract key-value pairs
- attr_pattern = r"(\w+)=(\[.*?\]|\{.*?\}|'.*?'|None|\w+)"
- attributes = re.findall(attr_pattern, str_class)
-
- # Create an instance of the dynamic class with initial attributes
- updated_attributes = []
- # remove string up until 'content='
- content = str_class.split('ChatCompletionMessage(content=')[-1].split(", role=")[0][1:-1]
- updated_attributes.append(('content', content))
- for key, value in attributes:
- if key.startswith("'") and key.endswith("'"):
- key = key.strip("'")
- if key.startswith('"') and key.endswith('"'):
- key = key.strip('"')
- if value.startswith("'") and value.endswith("'"):
- value = value.strip("'")
- if value.startswith('"') and value.endswith('"'):
- value = value.strip('"')
-
- if value.startswith('[') and value.endswith(']'):
- value = parse_value(value)
- dir(value)
- if hasattr(value, '__dict__'):
- for k in value.__dict__.keys():
- v = getattr(value, k)
- if type(v) == str:
- value[k.strip("'")] = v.strip("'")
- elif value.startswith('{') and value.endswith('}'):
- value = parse_value(value)
- new_value = {}
- for k in value.keys():
- v = value[k]
- if type(v) == str:
- v = v.strip("'")
- new_value[k.strip("'")] = v
- value = new_value
- updated_attributes.append((key, value))
- return updated_attributes
-
- def parse_value(value):
- try:
- value = parse_custom_class_instances(value)
- if type(value) != str:
- return value
- if value.startswith('['):
- value = value[1:-1]
- values = value.split(',')
- return [parse_value(v.strip()) for v in values]
- elif value.startswith('{'):
- value = value[1:-1]
- values = value.split(',')
- return {k.strip(): parse_value(v.strip()) for k, v in [v.split(':', 1) for v in values]}
- res = ast.literal_eval(value)
- if isinstance(res, dict):
- return {k: parse_value(v) for k, v in res.items()}
- elif isinstance(res, list) or isinstance(res, tuple) or isinstance(res, set):
- return [parse_value(v) for v in res]
- else:
- return res
- except:
+
+ def _strip_quotes(text):
+ if not isinstance(text, str):
+ return text
+ if text.startswith("'") and text.endswith("'"):
+ return text.strip("'")
+ if text.startswith('"') and text.endswith('"'):
+ return text.strip('"')
+ return text
+
+
+ def _extract_content(str_class):
+ return str_class.split('ChatCompletionMessage(content=')[-1].split(", role=")[0][1:-1]
+
+
+ def _parse_value(value):
+ try:
+ value = parse_custom_class_instances(value)
+ if not isinstance(value, str):
  return value
+ if value.startswith('['):
+ inner_values = value[1:-1]
+ values = inner_values.split(',')
+ return [_parse_value(v.strip()) for v in values]
+ if value.startswith('{'):
+ inner_values = value[1:-1]
+ values = inner_values.split(',')
+ return {k.strip(): _parse_value(v.strip()) for k, v in [v.split(':', 1) for v in values]}
+ result = ast.literal_eval(value)
+ if isinstance(result, dict):
+ return {k: _parse_value(v) for k, v in result.items()}
+ if isinstance(result, (list, tuple, set)):
+ return [_parse_value(v) for v in result]
+ return result
+ except (ValueError, SyntaxError):
+ return value
+
+
+ def _process_list_value(raw_value):
+ parsed_value = _parse_value(raw_value)
+ dir(parsed_value)
+ if hasattr(parsed_value, '__dict__'):
+ for key in parsed_value.__dict__:
+ value = getattr(parsed_value, key)
+ if isinstance(value, str):
+ parsed_value[key.strip("'")] = value.strip("'")
+ return parsed_value
+
+
+ def _process_dict_value(raw_value):
+ parsed_value = _parse_value(raw_value)
+ new_value = {}
+ for key, value in parsed_value.items():
+ stripped_value = value.strip("'") if isinstance(value, str) else value
+ new_value[key.strip("'")] = stripped_value
+ return new_value
+
+
+ def _collect_attributes(str_class):
+ attr_pattern = r"(\w+)=(\[.*?\]|\{.*?\}|'.*?'|None|\w+)"
+ attributes = re.findall(attr_pattern, str_class)
+ updated_attributes = [('content', _extract_content(str_class))]
+ for key, raw_value in attributes:
+ attr_key = _strip_quotes(key)
+ attr_value = _strip_quotes(raw_value)
+ if attr_value.startswith('[') and attr_value.endswith(']'):
+ attr_value = _process_list_value(attr_value)
+ elif attr_value.startswith('{') and attr_value.endswith('}'):
+ attr_value = _process_dict_value(attr_value)
+ updated_attributes.append((attr_key, attr_value))
+ return updated_attributes

- updated_attributes = updated_attributes_process(str_class)
- obj = DynamicClass(**{key: parse_value(value) for key, value in updated_attributes})

- return obj
+ # TODO: fix to properly parse nested lists and dicts
+ def create_object_from_string(str_class):
+ updated_attributes = _collect_attributes(str_class)
+ return DynamicClass(**{key: _parse_value(value) for key, value in updated_attributes})
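create_object_from_string still targets repr-style strings such as a serialized ChatCompletionMessage; the refactor only splits the work into the _strip_quotes, _parse_value and _collect_attributes helpers. A hedged usage sketch (the input string below is invented to match the pattern the parser expects, and attribute access on DynamicClass is assumed since its body is not part of this diff):

    from symai.collect import create_object_from_string

    # Invented example shaped like the repr strings the parser targets.
    raw = "ChatCompletionMessage(content='Hello there', role='assistant', refusal=None)"

    obj = create_object_from_string(raw)
    print(obj.content)  # 'Hello there'  (assuming DynamicClass stores kwargs as attributes)
    print(obj.role)     # 'assistant'
    print(obj.refusal)  # None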