symbolicai 0.21.0__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. symai/__init__.py +269 -173
  2. symai/backend/base.py +123 -110
  3. symai/backend/engines/drawing/engine_bfl.py +45 -44
  4. symai/backend/engines/drawing/engine_gpt_image.py +112 -97
  5. symai/backend/engines/embedding/engine_llama_cpp.py +63 -52
  6. symai/backend/engines/embedding/engine_openai.py +25 -21
  7. symai/backend/engines/execute/engine_python.py +19 -18
  8. symai/backend/engines/files/engine_io.py +104 -95
  9. symai/backend/engines/imagecaptioning/engine_blip2.py +28 -24
  10. symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +102 -79
  11. symai/backend/engines/index/engine_pinecone.py +124 -97
  12. symai/backend/engines/index/engine_qdrant.py +1011 -0
  13. symai/backend/engines/index/engine_vectordb.py +84 -56
  14. symai/backend/engines/lean/engine_lean4.py +96 -52
  15. symai/backend/engines/neurosymbolic/__init__.py +41 -13
  16. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +330 -248
  17. symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +329 -264
  18. symai/backend/engines/neurosymbolic/engine_cerebras.py +328 -0
  19. symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +118 -88
  20. symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +344 -299
  21. symai/backend/engines/neurosymbolic/engine_groq.py +173 -115
  22. symai/backend/engines/neurosymbolic/engine_huggingface.py +114 -84
  23. symai/backend/engines/neurosymbolic/engine_llama_cpp.py +144 -118
  24. symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +415 -307
  25. symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +394 -231
  26. symai/backend/engines/ocr/engine_apilayer.py +23 -27
  27. symai/backend/engines/output/engine_stdout.py +10 -13
  28. symai/backend/engines/{webscraping → scrape}/engine_requests.py +101 -54
  29. symai/backend/engines/search/engine_openai.py +100 -88
  30. symai/backend/engines/search/engine_parallel.py +665 -0
  31. symai/backend/engines/search/engine_perplexity.py +44 -45
  32. symai/backend/engines/search/engine_serpapi.py +37 -34
  33. symai/backend/engines/speech_to_text/engine_local_whisper.py +54 -51
  34. symai/backend/engines/symbolic/engine_wolframalpha.py +15 -9
  35. symai/backend/engines/text_to_speech/engine_openai.py +20 -26
  36. symai/backend/engines/text_vision/engine_clip.py +39 -37
  37. symai/backend/engines/userinput/engine_console.py +5 -6
  38. symai/backend/mixin/__init__.py +13 -0
  39. symai/backend/mixin/anthropic.py +48 -38
  40. symai/backend/mixin/deepseek.py +6 -5
  41. symai/backend/mixin/google.py +7 -4
  42. symai/backend/mixin/groq.py +2 -4
  43. symai/backend/mixin/openai.py +140 -110
  44. symai/backend/settings.py +87 -20
  45. symai/chat.py +216 -123
  46. symai/collect/__init__.py +7 -1
  47. symai/collect/dynamic.py +80 -70
  48. symai/collect/pipeline.py +67 -51
  49. symai/collect/stats.py +161 -109
  50. symai/components.py +707 -360
  51. symai/constraints.py +24 -12
  52. symai/core.py +1857 -1233
  53. symai/core_ext.py +83 -80
  54. symai/endpoints/api.py +166 -104
  55. symai/extended/.DS_Store +0 -0
  56. symai/extended/__init__.py +46 -12
  57. symai/extended/api_builder.py +29 -21
  58. symai/extended/arxiv_pdf_parser.py +23 -14
  59. symai/extended/bibtex_parser.py +9 -6
  60. symai/extended/conversation.py +156 -126
  61. symai/extended/document.py +50 -30
  62. symai/extended/file_merger.py +57 -14
  63. symai/extended/graph.py +51 -32
  64. symai/extended/html_style_template.py +18 -14
  65. symai/extended/interfaces/blip_2.py +2 -3
  66. symai/extended/interfaces/clip.py +4 -3
  67. symai/extended/interfaces/console.py +9 -1
  68. symai/extended/interfaces/dall_e.py +4 -2
  69. symai/extended/interfaces/file.py +2 -0
  70. symai/extended/interfaces/flux.py +4 -2
  71. symai/extended/interfaces/gpt_image.py +16 -7
  72. symai/extended/interfaces/input.py +2 -1
  73. symai/extended/interfaces/llava.py +1 -2
  74. symai/extended/interfaces/{naive_webscraping.py → naive_scrape.py} +4 -3
  75. symai/extended/interfaces/naive_vectordb.py +9 -10
  76. symai/extended/interfaces/ocr.py +5 -3
  77. symai/extended/interfaces/openai_search.py +2 -0
  78. symai/extended/interfaces/parallel.py +30 -0
  79. symai/extended/interfaces/perplexity.py +2 -0
  80. symai/extended/interfaces/pinecone.py +12 -9
  81. symai/extended/interfaces/python.py +2 -0
  82. symai/extended/interfaces/serpapi.py +3 -1
  83. symai/extended/interfaces/terminal.py +2 -4
  84. symai/extended/interfaces/tts.py +3 -2
  85. symai/extended/interfaces/whisper.py +3 -2
  86. symai/extended/interfaces/wolframalpha.py +2 -1
  87. symai/extended/metrics/__init__.py +11 -1
  88. symai/extended/metrics/similarity.py +14 -13
  89. symai/extended/os_command.py +39 -29
  90. symai/extended/packages/__init__.py +29 -3
  91. symai/extended/packages/symdev.py +51 -43
  92. symai/extended/packages/sympkg.py +41 -35
  93. symai/extended/packages/symrun.py +63 -50
  94. symai/extended/repo_cloner.py +14 -12
  95. symai/extended/seo_query_optimizer.py +15 -13
  96. symai/extended/solver.py +116 -91
  97. symai/extended/summarizer.py +12 -10
  98. symai/extended/taypan_interpreter.py +17 -18
  99. symai/extended/vectordb.py +122 -92
  100. symai/formatter/__init__.py +9 -1
  101. symai/formatter/formatter.py +51 -47
  102. symai/formatter/regex.py +70 -69
  103. symai/functional.py +325 -176
  104. symai/imports.py +190 -147
  105. symai/interfaces.py +57 -28
  106. symai/memory.py +45 -35
  107. symai/menu/screen.py +28 -19
  108. symai/misc/console.py +66 -56
  109. symai/misc/loader.py +8 -5
  110. symai/models/__init__.py +17 -1
  111. symai/models/base.py +395 -236
  112. symai/models/errors.py +1 -2
  113. symai/ops/__init__.py +32 -22
  114. symai/ops/measures.py +24 -25
  115. symai/ops/primitives.py +1149 -731
  116. symai/post_processors.py +58 -50
  117. symai/pre_processors.py +86 -82
  118. symai/processor.py +21 -13
  119. symai/prompts.py +764 -685
  120. symai/server/huggingface_server.py +135 -49
  121. symai/server/llama_cpp_server.py +21 -11
  122. symai/server/qdrant_server.py +206 -0
  123. symai/shell.py +100 -42
  124. symai/shellsv.py +700 -492
  125. symai/strategy.py +630 -346
  126. symai/symbol.py +368 -322
  127. symai/utils.py +100 -78
  128. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/METADATA +22 -10
  129. symbolicai-1.1.0.dist-info/RECORD +168 -0
  130. symbolicai-0.21.0.dist-info/RECORD +0 -162
  131. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/WHEEL +0 -0
  132. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/entry_points.txt +0 -0
  133. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/licenses/LICENSE +0 -0
  134. {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/top_level.txt +0 -0
symai/backend/settings.py CHANGED
@@ -1,5 +1,4 @@
1
1
  import json
2
- import os
3
2
  import sys
4
3
  from pathlib import Path
5
4
 
@@ -15,57 +14,96 @@ class SymAIConfig:
15
14
  def __init__(self):
16
15
  """Initialize configuration paths based on current Python environment."""
17
16
  self._env_path = Path(sys.prefix)
18
- self._env_config_dir = self._env_path / '.symai'
19
- self._home_config_dir = Path.home() / '.symai'
17
+ self._env_config_dir = self._env_path / ".symai"
18
+ self._home_config_dir = Path.home() / ".symai"
20
19
  self._debug_dir = Path.cwd() # Current working directory for debug mode
20
+ self._active_paths: dict[str, Path] = {}
21
+
22
+ def _canonical_key(self, filename: str | Path) -> str:
23
+ """Return a canonical identifier for config files regardless of input type."""
24
+ path = Path(filename)
25
+ if path.is_absolute() or path.parent != Path():
26
+ return str(path)
27
+ return path.name or str(path)
28
+
29
+ def _remove_legacy_path_keys(self, key: str) -> None:
30
+ """Drop legacy Path keys that collide with the canonical key."""
31
+ target_path = Path(key)
32
+ target_name = target_path.name or key
33
+ stale_keys: list[Path] = [
34
+ existing_key
35
+ for existing_key in self._active_paths
36
+ if isinstance(existing_key, Path)
37
+ and (existing_key.name == target_name or str(existing_key) == key)
38
+ ]
39
+ for stale_key in stale_keys:
40
+ self._active_paths.pop(stale_key, None)
21
41
 
22
42
  @property
23
43
  def config_dir(self) -> Path:
24
44
  """Returns the active configuration directory based on priority system."""
25
45
  # Debug mode takes precedence
26
- if (self._debug_dir / 'symai.config.json').exists():
27
- return self._debug_dir / '.symai'
46
+ if (self._debug_dir / "symai.config.json").exists():
47
+ return self._debug_dir / ".symai"
28
48
  # Then environment config
29
49
  if self._env_config_dir.exists():
30
50
  return self._env_config_dir
31
51
  # Finally home directory
32
52
  return self._home_config_dir
33
53
 
34
- def get_config_path(self, filename: str, fallback_to_home: bool = False) -> Path:
54
+ def get_config_path(self, filename: str | Path, fallback_to_home: bool = False) -> Path:
35
55
  """Gets the config path using the priority system or forces fallback to home."""
36
- debug_config = self._debug_dir / filename
37
- env_config = self._env_config_dir / filename
38
- home_config = self._home_config_dir / filename
56
+ input_path = Path(filename)
57
+ if input_path.is_absolute() or input_path.parent != Path():
58
+ return input_path
59
+
60
+ normalized_filename = self._canonical_key(filename)
61
+ # Only use the basename for managed directories
62
+ normalized_filename = Path(normalized_filename).name
63
+ debug_config = self._debug_dir / normalized_filename
64
+ env_config = self._env_config_dir / normalized_filename
65
+ home_config = self._home_config_dir / normalized_filename
39
66
 
40
67
  # Check debug first (only valid for symai.config.json)
41
- if filename == 'symai.config.json' and debug_config.exists():
68
+ if normalized_filename == "symai.config.json" and debug_config.exists():
42
69
  return debug_config
43
70
 
44
71
  # If forced to fallback, return home config if it exists, otherwise environment
45
72
  if fallback_to_home:
46
- return home_config if home_config.exists() else env_config
73
+ if home_config.exists():
74
+ return home_config
75
+ return env_config
47
76
 
48
77
  # Normal priority-based resolution
49
- # If environment config doesn't exist, return that path (for creation)
50
- if not env_config.exists():
78
+ if env_config.exists():
51
79
  return env_config
52
- # Otherwise use environment config
80
+ if home_config.exists():
81
+ return home_config
53
82
  return env_config
54
83
 
55
- def load_config(self, filename: str, fallback_to_home: bool = False) -> dict:
84
+ def load_config(self, filename: str | Path, fallback_to_home: bool = False) -> dict:
56
85
  """Loads JSON data from the determined config location."""
57
86
  config_path = self.get_config_path(filename, fallback_to_home=fallback_to_home)
87
+ key = self._canonical_key(filename)
58
88
  if not config_path.exists():
89
+ self._remove_legacy_path_keys(key)
90
+ self._active_paths.pop(key, None)
59
91
  return {}
60
- with open(config_path, 'r', encoding='utf-8') as f:
61
- return json.load(f)
92
+ with config_path.open(encoding="utf-8") as f:
93
+ config = json.load(f)
94
+ self._remove_legacy_path_keys(key)
95
+ self._active_paths[key] = config_path
96
+ return config
62
97
 
63
- def save_config(self, filename: str, data: dict, fallback_to_home: bool = False) -> None:
98
+ def save_config(self, filename: str | Path, data: dict, fallback_to_home: bool = False) -> None:
64
99
  """Saves JSON data to the determined config location."""
65
100
  config_path = self.get_config_path(filename, fallback_to_home=fallback_to_home)
66
- os.makedirs(config_path.parent, exist_ok=True)
67
- with open(config_path, 'w', encoding='utf-8') as f:
101
+ key = self._canonical_key(filename)
102
+ config_path.parent.mkdir(parents=True, exist_ok=True)
103
+ with config_path.open("w", encoding="utf-8") as f:
68
104
  json.dump(data, f, indent=4)
105
+ self._remove_legacy_path_keys(key)
106
+ self._active_paths[key] = config_path
69
107
 
70
108
  def migrate_config(self, filename: str, updates: dict) -> None:
71
109
  """Updates existing configuration with new fields."""
@@ -73,6 +111,35 @@ class SymAIConfig:
73
111
  config.update(updates)
74
112
  self.save_config(filename, config)
75
113
 
114
+ def get_active_path(self, filename: str | Path) -> Path:
115
+ """Returns the last path used to read or write the given config file."""
116
+ key = self._canonical_key(filename)
117
+ cached = self._active_paths.get(key)
118
+ if cached is not None:
119
+ return cached
120
+ for legacy_key, cached_path in list(self._active_paths.items()):
121
+ if isinstance(legacy_key, Path) and (legacy_key.name == key or str(legacy_key) == key):
122
+ self._active_paths.pop(legacy_key, None)
123
+ self._active_paths[key] = cached_path
124
+ return cached_path
125
+ return self.get_config_path(filename)
126
+
127
+ def get_active_config_dir(self) -> Path:
128
+ """Returns the directory backing the active symai configuration."""
129
+ symai_key = self._canonical_key("symai.config.json")
130
+ cached = self._active_paths.get(symai_key)
131
+ if cached is not None:
132
+ return cached.parent
133
+ for legacy_key, cached_path in list(self._active_paths.items()):
134
+ if isinstance(legacy_key, Path) and (
135
+ legacy_key.name == symai_key or str(legacy_key) == symai_key
136
+ ):
137
+ self._active_paths.pop(legacy_key, None)
138
+ self._active_paths[symai_key] = cached_path
139
+ return cached_path.parent
140
+ return self.config_dir
141
+
142
+
76
143
  SYMAI_CONFIG = {}
77
144
  SYMSH_CONFIG = {}
78
145
  SYMSERVER_CONFIG = {}
symai/chat.py CHANGED
@@ -1,6 +1,5 @@
1
1
  import logging
2
2
  import re
3
- from typing import Optional
4
3
 
5
4
  from loguru import logger
6
5
 
@@ -13,22 +12,23 @@ from .post_processors import ConsolePostProcessor, StripPostProcessor
13
12
  from .pre_processors import ConsoleInputPreProcessor
14
13
  from .prompts import MemoryCapabilities, SymbiaCapabilities
15
14
  from .symbol import Expression, Symbol
15
+ from .utils import UserMessage
16
16
 
17
- logging.getLogger('charset_normalizer').setLevel(logging.ERROR)
17
+ logging.getLogger("charset_normalizer").setLevel(logging.ERROR)
18
18
 
19
19
 
20
20
  class ChatBot(Expression):
21
- _symai_chat: str = '''This is a conversation between a chatbot (Symbia:) and a human (User:). The chatbot follows a narrative structure, primarily relying on the provided instructions. It uses the user's input as a conditioning factor to generate its responses. Whenever Symbia retrieves any long-term memories, it checks the user's query and incorporates information from the long-term memory buffer into its response. If the long-term memories cannot provide a suitable answer, Symbia then checks its short-term memory to be aware of the topics discussed in the recent conversation rounds. Your primary task is to reply to the user's question or statement by generating a relevant and contextually appropriate response. Do not focus on filling the scratchpad with narration, long-term memory recall, short-term memory recall, or reflections. Always consider any follow-up questions or relevant information from the user to generate a response that is contextually relevant. Endeavor to reply to the greatest possible effort.'''
21
+ _symai_chat: str = """This is a conversation between a chatbot (Symbia:) and a human (User:). The chatbot follows a narrative structure, primarily relying on the provided instructions. It uses the user's input as a conditioning factor to generate its responses. Whenever Symbia retrieves any long-term memories, it checks the user's query and incorporates information from the long-term memory buffer into its response. If the long-term memories cannot provide a suitable answer, Symbia then checks its short-term memory to be aware of the topics discussed in the recent conversation rounds. Your primary task is to reply to the user's question or statement by generating a relevant and contextually appropriate response. Do not focus on filling the scratchpad with narration, long-term memory recall, short-term memory recall, or reflections. Always consider any follow-up questions or relevant information from the user to generate a response that is contextually relevant. Endeavor to reply to the greatest possible effort."""
22
22
 
23
23
  def __init__(
24
24
  self,
25
25
  value: str | None = None,
26
- name: str = 'Symbia',
26
+ name: str = "Symbia",
27
27
  verbose: bool = False,
28
28
  short_term_mem_window_size: int = 10,
29
29
  long_term_mem_top_k: int = 10,
30
- index_name: str = 'symbia_index',
31
- **kwargs
30
+ index_name: str = "symbia_index",
31
+ **kwargs,
32
32
  ):
33
33
  super().__init__(value, **kwargs)
34
34
  self.sym_return_type = ChatBot
@@ -38,55 +38,75 @@ class ChatBot(Expression):
38
38
  self.interfaces = cfg_to_interface()
39
39
  self.short_term_memory = SlidingWindowListMemory(window_size=short_term_mem_window_size)
40
40
  self.long_term_mem_top_k = long_term_mem_top_k
41
- self.long_term_memory = self.interfaces['indexing']
41
+ self.long_term_memory = self.interfaces["indexing"]
42
42
  self._preprocessor = ChatBot._init_custom_input_preprocessor(name=name, that=self)
43
43
  self._postprocessor = ChatBot._init_custom_input_postprocessor(that=self)
44
44
  self.detect_capability = InContextClassification(SymbiaCapabilities())
45
45
  self.detect_memory_usage = InContextClassification(MemoryCapabilities())
46
- self._last_user_input: str = ''
46
+ self._last_user_input: str = ""
47
47
 
48
- def repeat(self, query, **kwargs):
49
- return self.narrate('Symbia does not understand and asks to repeat and give more context.', prompt=query)
50
-
51
- def narrate(self, message: str, context: str = None, category: str = None, end: bool = False, **kwargs) -> Symbol:
52
- reflection = context if context is not None else ''
53
- ltmem_recall = 'No memories retrieved.'
54
- stmem_recall = '\n'.join(self.short_term_memory.recall())
55
- stmem_recall = stmem_recall if len(stmem_recall) > 0 else 'No memories retrieved.'
56
- ltmem_recall = 'No memories retrieved.'
48
+ def repeat(self, query, **_kwargs):
49
+ return self.narrate(
50
+ "Symbia does not understand and asks to repeat and give more context.", prompt=query
51
+ )
57
52
 
58
- if category == 'RECALL':
59
- if (HOME_PATH / 'localdb' / f'{self.index_name}.pkl').exists():
60
- ltmem_recall = '\n'.join(self.long_term_memory(reflection, operation='search', index_name=self.index_name))
53
+ def narrate(
54
+ self,
55
+ message: str,
56
+ context: str | None = None,
57
+ category: str | None = None,
58
+ end: bool = False,
59
+ **kwargs,
60
+ ) -> Symbol:
61
+ reflection = context if context is not None else ""
62
+ ltmem_recall = "No memories retrieved."
63
+ stmem_recall = "\n".join(self.short_term_memory.recall())
64
+ stmem_recall = stmem_recall if len(stmem_recall) > 0 else "No memories retrieved."
65
+ ltmem_recall = "No memories retrieved."
66
+
67
+ if category == "RECALL":
68
+ if (HOME_PATH / "localdb" / f"{self.index_name}.pkl").exists():
69
+ ltmem_recall = "\n".join(
70
+ self.long_term_memory(
71
+ reflection, operation="search", index_name=self.index_name
72
+ )
73
+ )
61
74
  scratchpad = self._memory_scratchpad(reflection, stmem_recall, ltmem_recall)
62
75
  memory_usage = str(self.detect_memory_usage(scratchpad))
63
76
 
64
77
  if self.verbose:
65
- logger.debug(f'Scratchpad:\n{scratchpad}\n')
66
- logger.debug(f'Memory usage:\n{memory_usage}\n')
67
- logger.debug(f'Retrieved from short-term memory:\n{stmem_recall}\n')
68
- logger.debug(f'Retrieved from long-term memory:\n{ltmem_recall}\n')
78
+ logger.debug(f"Scratchpad:\n{scratchpad}\n")
79
+ logger.debug(f"Memory usage:\n{memory_usage}\n")
80
+ logger.debug(f"Retrieved from short-term memory:\n{stmem_recall}\n")
81
+ logger.debug(f"Retrieved from long-term memory:\n{ltmem_recall}\n")
69
82
 
70
83
  do = self._extract_category(memory_usage)
71
84
  reflection = self._extract_reflection(memory_usage)
72
85
 
73
- if do == 'SAVE':
74
- self.long_term_memory(f'{self.name}: {reflection}', operation='add', top_k=self.long_term_mem_top_k, index_name=self.index_name)
75
- self.long_term_memory('save', operation='config', index_name=self.index_name)
76
- if self.verbose: logger.debug(f'Store new long-term memory:\n{reflection}\n')
77
- message = f'{self.name} inform the user that the memory was stored.'
78
- elif do == 'DUPLICATE':
79
- message = f'{self.name} engages the user in a conversation about the duplicate topic, showing the user she remembered the past interaction.'
80
- elif do == 'IRRELEVANT':
81
- message = f'{self.name} discusses the topic with the user.'
86
+ if do == "SAVE":
87
+ self.long_term_memory(
88
+ f"{self.name}: {reflection}",
89
+ operation="add",
90
+ top_k=self.long_term_mem_top_k,
91
+ index_name=self.index_name,
92
+ )
93
+ self.long_term_memory("save", operation="config", index_name=self.index_name)
94
+ if self.verbose:
95
+ logger.debug(f"Store new long-term memory:\n{reflection}\n")
96
+ message = f"{self.name} inform the user that the memory was stored."
97
+ elif do == "DUPLICATE":
98
+ message = f"{self.name} engages the user in a conversation about the duplicate topic, showing the user she remembered the past interaction."
99
+ elif do == "IRRELEVANT":
100
+ message = f"{self.name} discusses the topic with the user."
82
101
 
83
102
  if self.verbose:
84
- logger.debug(f'Storing new short-term memory:\nUser: {self._last_user_input}\n')
85
- logger.debug(f'Storing new short-term memory:\n{self.name}: {reflection}\n')
103
+ logger.debug(f"Storing new short-term memory:\nUser: {self._last_user_input}\n")
104
+ logger.debug(f"Storing new short-term memory:\n{self.name}: {reflection}\n")
86
105
 
87
- reply = f'{self.name}: {self._narration(message, self._last_user_input, reflection, context, ltmem_recall, stmem_recall, **kwargs)}'
106
+ reply = f"{self.name}: {self._narration(message, self._last_user_input, reflection, context, ltmem_recall, stmem_recall, **kwargs)}"
88
107
 
89
- if end: print('\n\n', reply)
108
+ if end:
109
+ UserMessage(f"\n\n{reply}", text="extensity")
90
110
 
91
111
  return Symbol(reply)
92
112
 
@@ -94,10 +114,11 @@ class ChatBot(Expression):
94
114
  @core.userinput(
95
115
  pre_processors=[self._preprocessor()],
96
116
  post_processors=[StripPostProcessor(), self._postprocessor()],
97
- **kwargs
117
+ **kwargs,
98
118
  )
99
119
  def _func(_, message) -> str:
100
120
  pass
121
+
101
122
  return Symbol(_func(self, message))
102
123
 
103
124
  @property
@@ -108,23 +129,34 @@ class ChatBot(Expression):
108
129
  def _init_custom_input_preprocessor(name, that):
109
130
  class CustomInputPreProcessor(ConsoleInputPreProcessor):
110
131
  def __call__(self, argument):
111
- msg = re.sub(f'{name}:\s*', '', str(argument.args[0]))
112
- console = f'\n{name}: {msg}\n$> '
132
+ msg = re.sub(f"{name}:\s*", "", str(argument.args[0]))
133
+ console = f"\n{name}: {msg}\n$> "
113
134
  if len(msg) > 0:
114
- that.short_term_memory.store(f'{name}: ' + msg)
135
+ that.short_term_memory.store(f"{name}: " + msg)
115
136
  return console
137
+
116
138
  return CustomInputPreProcessor
117
139
 
118
140
  @staticmethod
119
141
  def _init_custom_input_postprocessor(that):
120
142
  class CustomInputPostProcessor(ConsolePostProcessor):
121
- def __call__(self, rsp, argument):
122
- that.short_term_memory.store(f'User: {str(rsp)}')
143
+ def __call__(self, rsp, _argument):
144
+ that.short_term_memory.store(f"User: {rsp!s}")
123
145
  return rsp
146
+
124
147
  return CustomInputPostProcessor
125
148
 
126
- def _narration(self, msg: str, query: str, reflection: str, context: str, ltmem_recall: str, stmem_recall: str, **kwargs):
127
- prompt = f'''
149
+ def _narration(
150
+ self,
151
+ msg: str,
152
+ query: str,
153
+ reflection: str,
154
+ context: str,
155
+ ltmem_recall: str,
156
+ stmem_recall: str,
157
+ **kwargs,
158
+ ):
159
+ prompt = f"""
128
160
  {self._symai_chat.format(self.name)}
129
161
 
130
162
  [NARRATION](
@@ -153,112 +185,172 @@ class ChatBot(Expression):
153
185
 
154
186
  The chatbot always reply in the following format
155
187
  {self.name}: <reply>
156
- '''
188
+ """
189
+
157
190
  @core.zero_shot(prompt=prompt, **kwargs)
158
191
  def _func(_) -> str:
159
192
  pass
160
- if self.verbose: logger.debug(f'Narration:\n{prompt}\n')
161
- res = _func(self)
162
- res = res.replace(f'{self.name}: ', '').strip()
163
- return res
193
+
194
+ if self.verbose:
195
+ logger.debug(f"Narration:\n{prompt}\n")
196
+ return _func(self).replace(f"{self.name}: ", "").strip()
197
+
164
198
 
165
199
  class SymbiaChat(ChatBot):
166
- def __init__(self, name: str = 'Symbia', verbose: bool = False, **kwargs):
200
+ def __init__(self, name: str = "Symbia", verbose: bool = False, **kwargs):
167
201
  super().__init__(name=name, verbose=verbose, **kwargs)
168
- self.message = self.narrate(f'{self.name} introduces herself, writes a greeting message and asks how to help.', context=None)
202
+ self.message = self.narrate(
203
+ f"{self.name} introduces herself, writes a greeting message and asks how to help.",
204
+ context=None,
205
+ )
169
206
 
170
- def forward(self, usr: Optional[str] = None) -> Symbol:
207
+ def forward(self, usr: str | None = None) -> Symbol:
171
208
  loop = True
172
209
  ask_input = True
173
210
  if usr:
174
211
  ask_input = False
175
212
  usr = self._to_symbol(usr)
176
213
 
177
- # added step-by-step interaction with the user if input is provided
178
214
  while loop:
179
- # if no input is provided, ask for input
180
- if ask_input:
181
- usr = self.input(self.message)
182
- else:
183
- loop = False # break the loop after the first iteration
184
-
215
+ usr, loop = self._resolve_user_input(usr, loop, ask_input)
185
216
  self._last_user_input = usr
186
- if self.verbose: logger.debug(f'User:\n{usr}\n')
217
+ self._log_verbose("User", usr)
187
218
 
188
- if len(str(usr)) > 0:
189
- ctxt = str(self.detect_capability(usr))
190
- else:
191
- ctxt = '[DK]'
192
-
193
- if self.verbose: logger.debug(f'In-context:\n{ctxt}\n')
219
+ ctxt = self._context_from_user(usr)
220
+ self._log_verbose("In-context", ctxt)
194
221
 
195
- if '[EXIT]' in ctxt:
196
- self.message = self.narrate(f'{self.name} writes friendly goodbye message.', context=None, end=True)
222
+ if self._handle_exit_context(ctxt):
197
223
  break
198
- elif '[HELP]' in ctxt:
199
- reflection = self._extract_reflection(ctxt)
200
- self.message = self.narrate(f'{self.name} ', context=reflection)
201
- elif '[RECALL]' in ctxt:
202
- reflection = self._extract_reflection(ctxt)
203
- category = self._extract_category(ctxt)
204
- self.message = self.narrate(f'{self.name} uses replies based on what has been recovered from the memory.', context=ctxt, category=category)
205
- elif '[DK]' in ctxt:
206
- reflection = self._extract_reflection(ctxt)
207
- self.message = self.narrate(f'{self.name} is not sure about the message and references and asks the user for more context.', context=reflection)
208
224
 
209
- else:
210
- try:
211
- if '[SYMBOLIC]' in ctxt:
212
- q = usr.extract("mathematical formula that WolframAlpha can solve")
213
- rsp = self.interfaces['symbolic'](q)
214
- self.message = self.narrate(f'{self.name} replies to the user and provides the solution of the math problem.', context=rsp)
215
- elif '[SEARCH]' in ctxt:
216
- q = usr.extract('user query request')
217
- rsp = self.interfaces['search'](q)
218
- self.message = self.narrate(f'{self.name} replies to the user based on the online search results.', context=rsp)
219
- elif '[SCRAPER]' in ctxt:
220
- q = usr.extract('URL from text')
221
- q = q.convert('proper URL, example: https://www.google.com')
222
- rsp = self.interfaces['scraper'](q)
223
- self.message = self.narrate(f'{self.name} replies to the user and narrates its findings.', context=rsp)
224
- elif '[SPEECH-TO-TEXT]' in ctxt:
225
- q = usr.extract('extract file path')
226
- rsp = self.interfaces['stt'](q)
227
- self.message = self.narrate(f'{self.name} replies to the user and transcribes the content of the audio file.', context=rsp)
228
- elif '[TEXT-TO-IMAGE]' in ctxt:
229
- q = usr.extract('text for image creation')
230
- rsp = self.interfaces['drawing'](q)
231
- self.message = self.narrate('Symbia replies to the user and provides the image URL.', context=rsp)
232
- elif '[FILE]' in ctxt:
233
- file_path = usr.extract('extract file path')
234
- q = usr.extract('user question')
235
- rsp = self.interfaces['file'](file_path)
236
- self.message = self.narrate(f'{self.name} replies to the user and outlines and relies to the user query.', context=rsp)
237
- else:
238
- q = usr.extract('user query request')
239
- reflection = self._extract_reflection(ctxt)
240
- self.message = self.narrate(f'{self.name} tries to interpret the response, and if unclear asks the user to restate the statement or add more context.', context=reflection)
241
-
242
- except Exception as e:
243
- reflection = self._extract_reflection(ctxt)
244
- self.message = self.narrate(f'{self.name} apologizes and explains the user what went wrong.', context=str(e))
225
+ if self._handle_reflection_context(ctxt):
226
+ continue
227
+
228
+ self._handle_interface_context(usr, ctxt)
245
229
 
246
230
  return self.message
247
231
 
232
+ def _resolve_user_input(
233
+ self, usr: Symbol | None, loop: bool, ask_input: bool
234
+ ) -> tuple[Symbol, bool]:
235
+ if ask_input:
236
+ usr = self.input(self.message)
237
+ else:
238
+ loop = False
239
+ return usr, loop
240
+
241
+ def _log_verbose(self, title: str, content) -> None:
242
+ if self.verbose:
243
+ logger.debug(f"{title}:\n{content}\n")
244
+
245
+ def _context_from_user(self, usr: Symbol) -> str:
246
+ text = str(usr)
247
+ if len(text) == 0:
248
+ return "[DK]"
249
+ return str(self.detect_capability(usr))
250
+
251
+ def _handle_exit_context(self, ctxt: str) -> bool:
252
+ if "[EXIT]" in ctxt:
253
+ self.message = self.narrate(
254
+ f"{self.name} writes friendly goodbye message.", context=None, end=True
255
+ )
256
+ return True
257
+ return False
258
+
259
+ def _handle_reflection_context(self, ctxt: str) -> bool:
260
+ if "[HELP]" in ctxt:
261
+ reflection = self._extract_reflection(ctxt)
262
+ self.message = self.narrate(f"{self.name} ", context=reflection)
263
+ return True
264
+ if "[RECALL]" in ctxt:
265
+ reflection = self._extract_reflection(ctxt)
266
+ category = self._extract_category(ctxt)
267
+ self.message = self.narrate(
268
+ f"{self.name} uses replies based on what has been recovered from the memory.",
269
+ context=ctxt,
270
+ category=category,
271
+ )
272
+ return True
273
+ if "[DK]" in ctxt:
274
+ reflection = self._extract_reflection(ctxt)
275
+ self.message = self.narrate(
276
+ f"{self.name} is not sure about the message and references and asks the user for more context.",
277
+ context=reflection,
278
+ )
279
+ return True
280
+ return False
281
+
282
+ def _handle_interface_context(self, usr: Symbol, ctxt: str) -> None:
283
+ try:
284
+ if "[SYMBOLIC]" in ctxt:
285
+ q = usr.extract("mathematical formula that WolframAlpha can solve")
286
+ rsp = self.interfaces["symbolic"](q)
287
+ self.message = self.narrate(
288
+ f"{self.name} replies to the user and provides the solution of the math problem.",
289
+ context=rsp,
290
+ )
291
+ elif "[SEARCH]" in ctxt:
292
+ q = usr.extract("user query request")
293
+ rsp = self.interfaces["search"](q)
294
+ self.message = self.narrate(
295
+ f"{self.name} replies to the user based on the online search results.",
296
+ context=rsp,
297
+ )
298
+ elif "[SCRAPER]" in ctxt:
299
+ q = usr.extract("URL from text")
300
+ q = q.convert("proper URL, example: https://www.google.com")
301
+ rsp = self.interfaces["scraper"](q)
302
+ self.message = self.narrate(
303
+ f"{self.name} replies to the user and narrates its findings.", context=rsp
304
+ )
305
+ elif "[SPEECH-TO-TEXT]" in ctxt:
306
+ q = usr.extract("extract file path")
307
+ rsp = self.interfaces["stt"](q)
308
+ self.message = self.narrate(
309
+ f"{self.name} replies to the user and transcribes the content of the audio file.",
310
+ context=rsp,
311
+ )
312
+ elif "[TEXT-TO-IMAGE]" in ctxt:
313
+ q = usr.extract("text for image creation")
314
+ rsp = self.interfaces["drawing"](q)
315
+ self.message = self.narrate(
316
+ "Symbia replies to the user and provides the image URL.", context=rsp
317
+ )
318
+ elif "[FILE]" in ctxt:
319
+ file_path = usr.extract("extract file path")
320
+ q = usr.extract("user question")
321
+ rsp = self.interfaces["file"](file_path)
322
+ self.message = self.narrate(
323
+ f"{self.name} replies to the user and outlines and relies to the user query.",
324
+ context=rsp,
325
+ )
326
+ else:
327
+ q = usr.extract("user query request")
328
+ reflection = self._extract_reflection(ctxt)
329
+ self.message = self.narrate(
330
+ f"{self.name} tries to interpret the response, and if unclear asks the user to restate the statement or add more context.",
331
+ context=reflection,
332
+ )
333
+
334
+ except Exception as e:
335
+ reflection = self._extract_reflection(ctxt)
336
+ self.message = self.narrate(
337
+ f"{self.name} apologizes and explains the user what went wrong.", context=str(e)
338
+ )
339
+
248
340
  def _extract_reflection(self, msg: str) -> str:
249
- res = re.findall(r'\(([^)]+)\)', msg)
341
+ res = re.findall(r"\(([^)]+)\)", msg)
250
342
  if len(res) > 0:
251
343
  return res.pop()
252
- return
344
+ return None
253
345
 
254
346
  def _extract_category(self, msg: str) -> str:
255
- res = re.findall(r'\[([^]]+)\]', msg)
347
+ res = re.findall(r"\[([^]]+)\]", msg)
256
348
  if len(res) > 0:
257
349
  return res.pop()
258
- return
350
+ return None
259
351
 
260
352
  def _memory_scratchpad(self, context, short_term_memory, long_term_memory):
261
- scratchpad = f'''
353
+ return f"""
262
354
  [REFLECT](
263
355
  Query: {self._last_user_input}
264
356
  Reflection: {self._extract_reflection(context)}
@@ -271,12 +363,13 @@ Reflection: {self._extract_reflection(context)}
271
363
  [LONG-TERM MEMORY RECALL](
272
364
  {long_term_memory}
273
365
  )
274
- '''
275
- return scratchpad
366
+ """
367
+
276
368
 
277
369
  def run() -> None:
278
370
  chat = SymbiaChat()
279
371
  chat()
280
372
 
281
- if __name__ == '__main__':
373
+
374
+ if __name__ == "__main__":
282
375
  run()
symai/collect/__init__.py CHANGED
@@ -1,2 +1,8 @@
1
+ from .dynamic import create_object_from_string
1
2
  from .pipeline import CollectionRepository, rec_serialize
2
- from .dynamic import create_object_from_string
3
+
4
+ __all__ = [
5
+ "CollectionRepository",
6
+ "create_object_from_string",
7
+ "rec_serialize",
8
+ ]