symbolicai 1.4.0__tar.gz → 1.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (270)
  1. {symbolicai-1.4.0 → symbolicai-1.6.0}/AGENTS.md +1 -1
  2. {symbolicai-1.4.0 → symbolicai-1.6.0}/PKG-INFO +3 -9
  3. {symbolicai-1.4.0 → symbolicai-1.6.0}/README.md +1 -4
  4. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/drawing_engine.md +43 -1
  5. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/neurosymbolic_engine.md +25 -1
  6. symbolicai-1.6.0/docs/source/ENGINES/scrape_engine.md +143 -0
  7. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/search_engine.md +72 -0
  8. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/INSTALLATION.md +1 -4
  9. {symbolicai-1.4.0 → symbolicai-1.6.0}/pyproject.toml +1 -2
  10. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/__init__.py +21 -71
  11. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/base.py +0 -26
  12. symbolicai-1.6.0/symai/backend/engines/drawing/engine_gemini_image.py +101 -0
  13. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/embedding/engine_openai.py +11 -8
  14. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/__init__.py +8 -0
  15. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +14 -1
  16. symbolicai-1.6.0/symai/backend/engines/neurosymbolic/engine_openrouter.py +294 -0
  17. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/scrape/engine_requests.py +39 -10
  18. symbolicai-1.6.0/symai/backend/engines/search/__init__.py +13 -0
  19. symbolicai-1.6.0/symai/backend/engines/search/engine_firecrawl.py +333 -0
  20. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/search/engine_parallel.py +5 -5
  21. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/mixin/__init__.py +4 -0
  22. symbolicai-1.6.0/symai/backend/mixin/openrouter.py +2 -0
  23. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/components.py +212 -16
  24. symbolicai-1.6.0/symai/extended/interfaces/firecrawl.py +30 -0
  25. symbolicai-1.6.0/symai/extended/interfaces/nanobanana.py +23 -0
  26. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/parallel.py +5 -5
  27. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/functional.py +3 -4
  28. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/interfaces.py +2 -0
  29. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/ops/primitives.py +0 -18
  30. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/shellsv.py +2 -7
  31. {symbolicai-1.4.0 → symbolicai-1.6.0}/symbolicai.egg-info/PKG-INFO +3 -9
  32. {symbolicai-1.4.0 → symbolicai-1.6.0}/symbolicai.egg-info/SOURCES.txt +7 -4
  33. {symbolicai-1.4.0 → symbolicai-1.6.0}/symbolicai.egg-info/requires.txt +1 -5
  34. symbolicai-1.6.0/uv.lock +7659 -0
  35. symbolicai-1.4.0/docs/source/ENGINES/scrape_engine.md +0 -43
  36. symbolicai-1.4.0/symai/collect/__init__.py +0 -8
  37. symbolicai-1.4.0/symai/collect/dynamic.py +0 -117
  38. symbolicai-1.4.0/symai/collect/pipeline.py +0 -156
  39. symbolicai-1.4.0/symai/collect/stats.py +0 -434
  40. symbolicai-1.4.0/uv.lock +0 -7700
  41. {symbolicai-1.4.0 → symbolicai-1.6.0}/.gitbook.yaml +0 -0
  42. {symbolicai-1.4.0 → symbolicai-1.6.0}/.github/FUNDING.yml +0 -0
  43. {symbolicai-1.4.0 → symbolicai-1.6.0}/.gitignore +0 -0
  44. {symbolicai-1.4.0 → symbolicai-1.6.0}/.symai/symsh.config.json +0 -0
  45. {symbolicai-1.4.0 → symbolicai-1.6.0}/CITATION.cff +0 -0
  46. {symbolicai-1.4.0 → symbolicai-1.6.0}/Dockerfile +0 -0
  47. {symbolicai-1.4.0 → symbolicai-1.6.0}/LICENSE +0 -0
  48. {symbolicai-1.4.0 → symbolicai-1.6.0}/MANIFEST.in +0 -0
  49. {symbolicai-1.4.0 → symbolicai-1.6.0}/app.py +0 -0
  50. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/banner.png +0 -0
  51. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/cat.jpg +0 -0
  52. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/cat.png +0 -0
  53. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/contract_flow.png +0 -0
  54. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img1.png +0 -0
  55. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img10.png +0 -0
  56. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img2.png +0 -0
  57. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img3.png +0 -0
  58. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img4.png +0 -0
  59. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img5.png +0 -0
  60. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img6.png +0 -0
  61. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img7.png +0 -0
  62. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img8.png +0 -0
  63. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/img9.png +0 -0
  64. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/preview.gif +0 -0
  65. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/screen1.jpeg +0 -0
  66. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/symai_logo.png +0 -0
  67. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/symsh.png +0 -0
  68. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/vid1.png +0 -0
  69. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/vid2.png +0 -0
  70. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/vid3.png +0 -0
  71. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/vid4.png +0 -0
  72. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/vid5.png +0 -0
  73. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/images/vid6.png +0 -0
  74. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/results/news.html +0 -0
  75. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/results/news.png +0 -0
  76. {symbolicai-1.4.0 → symbolicai-1.6.0}/assets/results/news_prev.png +0 -0
  77. {symbolicai-1.4.0 → symbolicai-1.6.0}/bin/install.ps1 +0 -0
  78. {symbolicai-1.4.0 → symbolicai-1.6.0}/bin/install.sh +0 -0
  79. {symbolicai-1.4.0 → symbolicai-1.6.0}/build.py +0 -0
  80. {symbolicai-1.4.0 → symbolicai-1.6.0}/docker-compose.yml +0 -0
  81. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/clip_engine.md +0 -0
  82. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/custom_engine.md +0 -0
  83. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/file_engine.md +0 -0
  84. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/indexing_engine.md +0 -0
  85. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/local_engine.md +0 -0
  86. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/ocr_engine.md +0 -0
  87. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/speech_to_text_engine.md +0 -0
  88. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/ENGINES/symbolic_engine.md +0 -0
  89. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/FEATURES/contracts.md +0 -0
  90. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/FEATURES/error_handling.md +0 -0
  91. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/FEATURES/expressions.md +0 -0
  92. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/FEATURES/import.md +0 -0
  93. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/FEATURES/operations.md +0 -0
  94. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/FEATURES/primitives.md +0 -0
  95. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/INTRODUCTION.md +0 -0
  96. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/QUICKSTART.md +0 -0
  97. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/SUMMARY.md +0 -0
  98. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/TOOLS/chatbot.md +0 -0
  99. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/TOOLS/packages.md +0 -0
  100. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/TOOLS/shell.md +0 -0
  101. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/TUTORIALS/chatbot.md +0 -0
  102. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/TUTORIALS/context.md +0 -0
  103. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/TUTORIALS/data_query.md +0 -0
  104. {symbolicai-1.4.0 → symbolicai-1.6.0}/docs/source/TUTORIALS/video_tutorials.md +0 -0
  105. {symbolicai-1.4.0 → symbolicai-1.6.0}/environment.yml +0 -0
  106. {symbolicai-1.4.0 → symbolicai-1.6.0}/examples/contracts.ipynb +0 -0
  107. {symbolicai-1.4.0 → symbolicai-1.6.0}/examples/primitives.ipynb +0 -0
  108. {symbolicai-1.4.0 → symbolicai-1.6.0}/icon_converter.py +0 -0
  109. {symbolicai-1.4.0 → symbolicai-1.6.0}/installer.py +0 -0
  110. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/Basics.ipynb +0 -0
  111. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/ChatBot.ipynb +0 -0
  112. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/Conversation.ipynb +0 -0
  113. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/Indexer.ipynb +0 -0
  114. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/News.ipynb +0 -0
  115. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/Queries.ipynb +0 -0
  116. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/TTS_Persona.ipynb +0 -0
  117. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/Lean engine.png +0 -0
  118. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/a_star.txt +0 -0
  119. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/abstract.py +0 -0
  120. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/audio.mp3 +0 -0
  121. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/dbpedia_samples.jsonl +0 -0
  122. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/dbpedia_samples_prepared_train.jsonl +0 -0
  123. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/dbpedia_samples_prepared_valid.jsonl +0 -0
  124. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/demo.py +0 -0
  125. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/demo_strategy.py +0 -0
  126. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/docs.py +0 -0
  127. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/einsteins_puzzle.txt +0 -0
  128. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/file.json +0 -0
  129. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/lean.py +0 -0
  130. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/news.py +0 -0
  131. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/paper.pdf +0 -0
  132. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/paper.py +0 -0
  133. {symbolicai-1.4.0 → symbolicai-1.6.0}/legacy/notebooks/examples/sql.py +0 -0
  134. {symbolicai-1.4.0 → symbolicai-1.6.0}/public/eai.svg +0 -0
  135. {symbolicai-1.4.0 → symbolicai-1.6.0}/pytest.ini +0 -0
  136. {symbolicai-1.4.0 → symbolicai-1.6.0}/ruff.toml +0 -0
  137. {symbolicai-1.4.0 → symbolicai-1.6.0}/setup.cfg +0 -0
  138. {symbolicai-1.4.0 → symbolicai-1.6.0}/setup.py +0 -0
  139. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/TERMS_OF_SERVICE.md +0 -0
  140. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/__init__.py +0 -0
  141. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/__init__.py +0 -0
  142. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/drawing/engine_bfl.py +0 -0
  143. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/drawing/engine_gpt_image.py +0 -0
  144. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/embedding/engine_llama_cpp.py +0 -0
  145. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/execute/engine_python.py +0 -0
  146. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/files/engine_io.py +0 -0
  147. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/imagecaptioning/engine_blip2.py +0 -0
  148. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +0 -0
  149. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/index/engine_pinecone.py +0 -0
  150. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/index/engine_qdrant.py +0 -0
  151. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/index/engine_vectordb.py +0 -0
  152. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/lean/engine_lean4.py +0 -0
  153. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +0 -0
  154. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +0 -0
  155. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_cerebras.py +0 -0
  156. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +0 -0
  157. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_groq.py +0 -0
  158. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_huggingface.py +0 -0
  159. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_llama_cpp.py +0 -0
  160. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +0 -0
  161. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +0 -0
  162. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/neurosymbolic/engine_openai_responses.py +0 -0
  163. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/ocr/engine_apilayer.py +0 -0
  164. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/output/engine_stdout.py +0 -0
  165. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/search/engine_openai.py +0 -0
  166. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/search/engine_perplexity.py +0 -0
  167. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/search/engine_serpapi.py +0 -0
  168. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/speech_to_text/engine_local_whisper.py +0 -0
  169. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/symbolic/engine_wolframalpha.py +0 -0
  170. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/text_to_speech/engine_openai.py +0 -0
  171. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/text_vision/engine_clip.py +0 -0
  172. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/engines/userinput/engine_console.py +0 -0
  173. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/mixin/anthropic.py +0 -0
  174. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/mixin/cerebras.py +0 -0
  175. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/mixin/deepseek.py +0 -0
  176. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/mixin/google.py +0 -0
  177. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/mixin/groq.py +0 -0
  178. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/mixin/openai.py +0 -0
  179. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/backend/settings.py +0 -0
  180. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/chat.py +0 -0
  181. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/constraints.py +0 -0
  182. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/context.py +0 -0
  183. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/core.py +0 -0
  184. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/core_ext.py +0 -0
  185. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/endpoints/__init__py +0 -0
  186. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/endpoints/api.py +0 -0
  187. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/exceptions.py +0 -0
  188. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/.DS_Store +0 -0
  189. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/__init__.py +0 -0
  190. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/api_builder.py +0 -0
  191. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/arxiv_pdf_parser.py +0 -0
  192. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/bibtex_parser.py +0 -0
  193. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/conversation.py +0 -0
  194. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/document.py +0 -0
  195. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/file_merger.py +0 -0
  196. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/graph.py +0 -0
  197. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/html_style_template.py +0 -0
  198. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/__init__.py +0 -0
  199. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/blip_2.py +0 -0
  200. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/clip.py +0 -0
  201. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/console.py +0 -0
  202. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/dall_e.py +0 -0
  203. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/file.py +0 -0
  204. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/flux.py +0 -0
  205. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/gpt_image.py +0 -0
  206. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/input.py +0 -0
  207. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/llava.py +0 -0
  208. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/local_search.py +0 -0
  209. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/naive_scrape.py +0 -0
  210. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/naive_vectordb.py +0 -0
  211. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/ocr.py +0 -0
  212. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/openai_search.py +0 -0
  213. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/perplexity.py +0 -0
  214. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/pinecone.py +0 -0
  215. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/python.py +0 -0
  216. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/serpapi.py +0 -0
  217. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/terminal.py +0 -0
  218. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/tts.py +0 -0
  219. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/whisper.py +0 -0
  220. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/interfaces/wolframalpha.py +0 -0
  221. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/metrics/__init__.py +0 -0
  222. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/metrics/similarity.py +0 -0
  223. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/os_command.py +0 -0
  224. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/packages/__init__.py +0 -0
  225. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/packages/symdev.py +0 -0
  226. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/packages/sympkg.py +0 -0
  227. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/packages/symrun.py +0 -0
  228. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/repo_cloner.py +0 -0
  229. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/seo_query_optimizer.py +0 -0
  230. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/solver.py +0 -0
  231. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/summarizer.py +0 -0
  232. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/taypan_interpreter.py +0 -0
  233. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/extended/vectordb.py +0 -0
  234. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/formatter/__init__.py +0 -0
  235. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/formatter/emoji.pytxt +0 -0
  236. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/formatter/formatter.py +0 -0
  237. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/formatter/regex.py +0 -0
  238. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/imports.py +0 -0
  239. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/memory.py +0 -0
  240. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/menu/__init__.py +0 -0
  241. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/menu/screen.py +0 -0
  242. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/misc/__init__.py +0 -0
  243. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/misc/console.py +0 -0
  244. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/misc/loader.py +0 -0
  245. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/models/__init__.py +0 -0
  246. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/models/base.py +0 -0
  247. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/models/errors.py +0 -0
  248. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/ops/__init__.py +0 -0
  249. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/ops/measures.py +0 -0
  250. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/post_processors.py +0 -0
  251. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/pre_processors.py +0 -0
  252. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/processor.py +0 -0
  253. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/prompts.py +0 -0
  254. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/server/__init__.py +0 -0
  255. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/server/huggingface_server.py +0 -0
  256. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/server/llama_cpp_server.py +0 -0
  257. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/server/qdrant_server.py +0 -0
  258. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/shell.py +0 -0
  259. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/strategy.py +0 -0
  260. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/symbol.py +0 -0
  261. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/symsh.md +0 -0
  262. {symbolicai-1.4.0 → symbolicai-1.6.0}/symai/utils.py +0 -0
  263. {symbolicai-1.4.0 → symbolicai-1.6.0}/symbolicai.egg-info/dependency_links.txt +0 -0
  264. {symbolicai-1.4.0 → symbolicai-1.6.0}/symbolicai.egg-info/entry_points.txt +0 -0
  265. {symbolicai-1.4.0 → symbolicai-1.6.0}/symbolicai.egg-info/top_level.txt +0 -0
  266. {symbolicai-1.4.0 → symbolicai-1.6.0}/tests/README.md +0 -0
  267. {symbolicai-1.4.0 → symbolicai-1.6.0}/tests/data/audio.mp3 +0 -0
  268. {symbolicai-1.4.0 → symbolicai-1.6.0}/tests/data/pg1727.txt +0 -0
  269. {symbolicai-1.4.0 → symbolicai-1.6.0}/tests/data/symmetry_breaking.pdf +0 -0
  270. {symbolicai-1.4.0 → symbolicai-1.6.0}/trusted_repos.yml +0 -0
@@ -53,7 +53,7 @@ CLI entrypoints (after install): `symchat`, `symsh`, `symconfig`, `symserver`.
53
53
 
54
54
  ## Configuration & Secrets
55
55
  - Config precedence: `./symai.config.json`, `{venv}/.symai/symai.config.json`, then `~/.symai/symai.config.json`.
56
- - Common keys: `NEUROSYMBOLIC_ENGINE_MODEL`, `NEUROSYMBOLIC_ENGINE_API_KEY`, `SYMAI_WARNINGS=0`, `SUPPORT_COMMUNITY`.
56
+ - Common keys: `NEUROSYMBOLIC_ENGINE_MODEL`, `NEUROSYMBOLIC_ENGINE_API_KEY`, `SYMAI_WARNINGS=0`.
57
57
  - Inspect active config with `symconfig`.
58
58
  - Never commit API keys, tokens, or generated artifacts (`dist/`, caches, logs).
59
59
 
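For reference, a minimal project-local `./symai.config.json` illustrating the common keys listed above (the key names come from this file; the values are placeholders, and `symconfig` will show what is actually picked up):

```json
{
  "NEUROSYMBOLIC_ENGINE_MODEL": "gpt-4.1",
  "NEUROSYMBOLIC_ENGINE_API_KEY": "<YOUR_API_KEY>"
}
```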
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: symbolicai
3
- Version: 1.4.0
3
+ Version: 1.6.0
4
4
  Summary: A Neurosymbolic Perspective on Large Language Models
5
5
  Author-email: Marius-Constantin Dinu <marius@extensity.ai>, Leoveanu-Condrei Claudiu <leo@extensity.ai>
6
6
  License: BSD 3-Clause License
@@ -88,10 +88,6 @@ Requires-Dist: llvmlite>=0.45.1
88
88
  Requires-Dist: cerebras-cloud-sdk>=1.59.0
89
89
  Provides-Extra: bitsandbytes
90
90
  Requires-Dist: bitsandbytes>=0.43.1; extra == "bitsandbytes"
91
- Provides-Extra: blip2
92
- Requires-Dist: decord>=0.6.0; extra == "blip2"
93
- Requires-Dist: salesforce-lavis>=1.0.0; extra == "blip2"
94
- Requires-Dist: opencv-python-headless>=4.5.5.64; extra == "blip2"
95
91
  Provides-Extra: hf
96
92
  Requires-Dist: transformers>=4.45.2; extra == "hf"
97
93
  Requires-Dist: accelerate>=0.33.0; extra == "hf"
@@ -113,6 +109,7 @@ Requires-Dist: openai-whisper>=20240930; extra == "whisper"
113
109
  Requires-Dist: numba>=0.62.1; extra == "whisper"
114
110
  Requires-Dist: llvmlite>=0.45.1; extra == "whisper"
115
111
  Provides-Extra: search
112
+ Requires-Dist: firecrawl-py>=4.12.0; extra == "search"
116
113
  Requires-Dist: parallel-web>=0.3.3; extra == "search"
117
114
  Provides-Extra: serpapi
118
115
  Requires-Dist: google_search_results>=2.4.2; extra == "serpapi"
@@ -424,15 +421,12 @@ Example of a configuration file with all engines enabled:
424
421
  "VISION_ENGINE_MODEL": "openai/clip-vit-base-patch32",
425
422
  "OCR_ENGINE_API_KEY": "<APILAYER_API_KEY>",
426
423
  "SPEECH_TO_TEXT_ENGINE_MODEL": "turbo",
427
- "SPEECH_TO_TEXT_API_KEY": "",
428
- "SUPPORT_COMMUNITY": true
424
+ "SPEECH_TO_TEXT_API_KEY": ""
429
425
  }
430
426
  ```
431
427
 
432
428
  With these steps completed, you should be ready to start using SymbolicAI in your projects.
433
429
 
434
- > ❗️**NOTE**❗️Our framework allows you to support us train models for local usage by enabling the data collection feature. On application startup we show the terms of services and you can activate or disable this community feature. We do not share or sell your data to 3rd parties and only use the data for research purposes and to improve your user experience. To change this setting open the `symai.config.json` and turn it on/off by setting the `SUPPORT_COMMUNITY` property to `True/False` via the config file or the respective environment variable.
435
-
436
430
  > ❗️**NOTE**❗️By default, the user warnings are enabled. To disable them, export `SYMAI_WARNINGS=0` in your environment variables.
437
431
 
438
432
  ### Running tests
@@ -282,15 +282,12 @@ Example of a configuration file with all engines enabled:
282
282
  "VISION_ENGINE_MODEL": "openai/clip-vit-base-patch32",
283
283
  "OCR_ENGINE_API_KEY": "<APILAYER_API_KEY>",
284
284
  "SPEECH_TO_TEXT_ENGINE_MODEL": "turbo",
285
- "SPEECH_TO_TEXT_API_KEY": "",
286
- "SUPPORT_COMMUNITY": true
285
+ "SPEECH_TO_TEXT_API_KEY": ""
287
286
  }
288
287
  ```
289
288
 
290
289
  With these steps completed, you should be ready to start using SymbolicAI in your projects.
291
290
 
292
- > ❗️**NOTE**❗️Our framework allows you to support us train models for local usage by enabling the data collection feature. On application startup we show the terms of services and you can activate or disable this community feature. We do not share or sell your data to 3rd parties and only use the data for research purposes and to improve your user experience. To change this setting open the `symai.config.json` and turn it on/off by setting the `SUPPORT_COMMUNITY` property to `True/False` via the config file or the respective environment variable.
293
-
294
291
  > ❗️**NOTE**❗️By default, the user warnings are enabled. To disable them, export `SYMAI_WARNINGS=0` in your environment variables.
295
292
 
296
293
  ### Running tests
@@ -1,9 +1,10 @@
1
1
  # Drawing Engine
2
2
 
3
- We now expose two distinct high-level drawing interfaces:
3
+ We now expose three distinct high-level drawing interfaces:
4
4
 
5
5
  1. **`gpt_image`** – a unified wrapper around OpenAI’s Images API (DALL·E 2/3 and `gpt-image-*`).
6
6
  2. **`flux`** – Black Forest Labs’ Flux text-to-image models via api.us1.bfl.ai.
7
+ 3. **`nanobanana`** – Google Gemini image generation models via `google-genai`.
7
8
 
8
9
  Both return a list of local PNG file paths.
9
10
 
@@ -153,3 +154,44 @@ Under the hood Flux uses:
153
154
  - GET `https://api.us1.bfl.ai/v1/get_result?id={request_id}`
154
155
 
155
156
  and writes out local PNG file(s).
157
+
158
+ ---
159
+
160
+ ## 3. Google “nanobanana” (Gemini Image) Interface
161
+
162
+ Use `Interface("nanobanana")` to generate images with Gemini image models (via the `google-genai` SDK).
163
+ This interface currently supports **create-only** generation.
164
+
165
+ Supported models (as of this release):
166
+ - `gemini-2.5-flash-image`
167
+ - `gemini-3-pro-image-preview`
168
+
169
+ ```python
170
+ from symai.interfaces import Interface
171
+
172
+ nanobanana = Interface("nanobanana")
173
+
174
+ paths = nanobanana(
175
+ "a fluffy cat with a cowboy hat",
176
+ operation="create", # currently only 'create' is implemented
177
+ model="gemini-2.5-flash-image",
178
+ )
179
+
180
+ print(paths[0]) # → /tmp/tmpabcd.png
181
+ ```
182
+
183
+ ### Supported Parameters
184
+
185
+ - `prompt` (str)
186
+ - `operation` (`"create"`)
187
+ - `model` (str, default from `SYMAI_CONFIG["DRAWING_ENGINE_MODEL"]`)
188
+ - `response_modalities` (list[str], default `["IMAGE"]`)
189
+ - `config` (optional): a `google.genai.types.GenerateContentConfig` instance
190
+ - `except_remedy` (callable)
191
+
192
+ ### Configuration
193
+
194
+ Set these keys in `symai.config.json` (or via your preferred config location):
195
+
196
+ - `DRAWING_ENGINE_API_KEY`: your Gemini API key
197
+ - `DRAWING_ENGINE_MODEL`: one of the supported Gemini image model names, e.g. `gemini-2.5-flash-image`
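For reference, a minimal sketch of the corresponding `symai.config.json` entries (the key names are taken from the list above; the API key value is a placeholder):

```json
{
  "DRAWING_ENGINE_API_KEY": "<GEMINI_API_KEY>",
  "DRAWING_ENGINE_MODEL": "gemini-2.5-flash-image"
}
```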
@@ -8,6 +8,7 @@ Depending on which backend you configure (OpenAI/GPT, Claude, Gemini, Deepseek,
8
8
  * Local engines (llamacpp, HuggingFace) do *not* yet support token counting, JSON format enforcement, or vision inputs in the same way.
9
9
  * Groq engine requires a special format for the `NEUROSYMBOLIC_ENGINE_MODEL` key: `groq:model_id`. E.g., `groq:qwen/qwen3-32b`.
10
10
  * Cerebras engine requires a special format for the `NEUROSYMBOLIC_ENGINE_MODEL` key: `cerebras:model_id`. E.g., `cerebras:gpt-oss-120b`.
11
+ * OpenRouter engine requires a special format for the `NEUROSYMBOLIC_ENGINE_MODEL` key: `openrouter:model_id`. E.g., `openrouter:moonshotai/kimi-k2.5`.
11
12
  * OpenAI Responses API engine requires the `responses:` prefix: `responses:model_id`. E.g., `responses:gpt-4.1`, `responses:o3-mini`. This uses OpenAI's newer `/v1/responses` endpoint instead of `/v1/chat/completions`.
12
13
  * Token‐truncation and streaming are handled automatically but may vary in behavior by engine.
13
14
 
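A short sketch of how the prefixed model identifiers above end up in `symai.config.json`, using the OpenRouter example from the list (the API key value is a placeholder):

```json
{
  "NEUROSYMBOLIC_ENGINE_MODEL": "openrouter:moonshotai/kimi-k2.5",
  "NEUROSYMBOLIC_ENGINE_API_KEY": "<OPENROUTER_API_KEY>"
}
```

The same pattern applies to the other prefixes, e.g. `groq:qwen/qwen3-32b`, `cerebras:gpt-oss-120b`, or `responses:gpt-4.1`.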
@@ -209,6 +210,23 @@ print(metadata["thinking"])
209
210
 
210
211
  For OpenAI Responses API with reasoning models (e.g., `o3-mini`, `o3`, `o4-mini`, `gpt-5`, `gpt-5.1`), the thinking trace is extracted from the reasoning summary in the response output.
211
212
 
213
+ ### OpenRouter
214
+
215
+ ```python
216
+ from symai import Symbol
217
+
218
+ # openrouter:moonshotai/kimi-k2.5
219
+ res, metadata = Symbol("Topic: Disneyland") \
220
+ .query(
221
+ "Write a dystopic take on the topic.",
222
+ return_metadata=True
223
+ )
224
+ print(res)
225
+ print(metadata.get("thinking", "No thinking trace available"))
226
+ ```
227
+
228
+ OpenRouter backends provide access to multiple model providers through a unified API gateway. The engine automatically handles model routing and supports provider-specific features when available. Token counting is not implemented for OpenRouter models, similar to Groq and Cerebras engines.
229
+
212
230
  ---
213
231
 
214
232
  ## JSON‐Only Responses
@@ -264,7 +282,13 @@ print(Symbol(string).tokens)
264
282
  ### Tracking Usage and Estimating Costs with `MetadataTracker`
265
283
 
266
284
  For more detailed tracking of API calls, token usage, and estimating costs, you can use the `MetadataTracker` in conjunction with `RuntimeInfo`. This is particularly useful for monitoring multiple calls within a specific code block.
267
- > ❗️**NOTE**❗️we only track OpenAI models for now (chat and search).
285
+ > ❗️**NOTE**❗️`MetadataTracker` collects raw per-call metadata for any `Engine`, but **token usage extraction** (i.e. `tracker.usage` → `RuntimeInfo`) is currently implemented for:
286
+ >
287
+ > - **OpenAI**: `GPTXChatEngine`, `GPTXReasoningEngine`, `OpenAIResponsesEngine`, `GPTXSearchEngine` (e.g. `gpt-5-chat-latest`)
288
+ > - **Claude (Anthropic)**: `ClaudeXChatEngine`, `ClaudeXReasoningEngine` (e.g. `claude-sonnet-4-5`)
289
+ > - **Gemini (Google)**: `GeminiXReasoningEngine` (e.g. `gemini-2.5-pro`, `gemini-2.5-flash`)
290
+ >
291
+ > For other engines, `tracker.metadata` will still contain raw outputs, but `tracker.usage` may be empty or partial.
268
292
 
269
293
  `MetadataTracker` collects metadata from engine calls made within its context. `RuntimeInfo` then processes this raw metadata to provide a summary of token counts, number of API calls, elapsed time, and an estimated cost if pricing information is provided.
270
294
 
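A minimal sketch of the tracking pattern described above, assuming `MetadataTracker` is importable from `symai.components` (the import path is an assumption, not confirmed by this diff) and that a neurosymbolic engine is already configured:

```python
from symai import Symbol
from symai.components import MetadataTracker  # assumed import path

# Collect metadata for every engine call made inside the context
with MetadataTracker() as tracker:
    Symbol("Topic: Disneyland").query("Write a dystopic take on the topic.")

# Raw per-call metadata is collected for any engine ...
print(tracker.metadata)
# ... while token-usage extraction is only available for the engines listed above
print(tracker.usage)
```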
@@ -0,0 +1,143 @@
1
+ # Scrape Engine
2
+
3
+ ## Naive Scrape
4
+
5
+ To access data from the web, we can use the `naive_scrape` interface. The engine underneath is very lightweight and can be used to scrape data from websites. It is based on the `requests` library, as well as `trafilatura` for output formatting, and `bs4` for HTML parsing. `trafilatura` currently supports the following output formats: `json`, `csv`, `html`, `markdown`, `text`, `xml`.
6
+
7
+ ```python
8
+ from symai.interfaces import Interface
9
+
10
+ scraper = Interface("naive_scrape")
11
+ url = "https://docs.astral.sh/uv/guides/scripts/#next-steps"
12
+ res = scraper(url)
13
+ ```
14
+
15
+ ## Parallel (Parallel.ai)
16
+
17
+ The Parallel.ai integration routes scrape calls through the official `parallel-web` SDK and can handle PDFs, JavaScript-heavy feeds, and standard HTML pages in the same workflow. Instantiate the Parallel interface and call `.scrape(...)` with the target URL. The engine detects scrape requests automatically whenever a URL is supplied.
18
+
19
+ ```python
20
+ from symai.extended import Interface
21
+
22
+ scraper = Interface("parallel")
23
+ article = scraper.scrape(
24
+ "https://trafilatura.readthedocs.io/en/latest/crawls.html",
25
+ full_content=True, # optional: request full document text
26
+ excerpts=True, # optional: default True, retain excerpt snippets
27
+ objective="Summarize crawl guidance for internal notes."
28
+ )
29
+ print(str(article))
30
+ ```
31
+
32
+ Configuration requires a Parallel API key and the Parallel model token. Add the following to your settings:
33
+
34
+ ```bash
35
+ {
36
+
37
+ "SEARCH_ENGINE_API_KEY": "…",
38
+ "SEARCH_ENGINE_MODEL": "parallel"
39
+
40
+ }
41
+ ```
42
+
43
+ When invoked with a URL, the engine hits Parallel's Extract API and returns an `ExtractResult`. The result string joins excerpts or the full content if requested. Because processing is offloaded to Parallel's hosted infrastructure, the engine remains reliable on dynamic pages that the naive scraper cannot render. Install the dependency with `pip install parallel-web` before enabling this engine.
44
+
45
+ ## Firecrawl
46
+
47
+ Firecrawl.dev specializes in reliable web scraping with automatic handling of JavaScript-rendered content, proxies, and anti-bot mechanisms. It converts web pages into clean formats suitable for LLM consumption and supports advanced features like actions, caching, and location-based scraping.
48
+
49
+ ### Examples
50
+
51
+ ```python
52
+ from symai.extended import Interface
53
+
54
+ scraper = Interface("firecrawl")
55
+
56
+ # Example 1: Basic webpage scraping
57
+ content = scraper.scrape(
58
+ "https://docs.firecrawl.dev/introduction",
59
+ formats=["markdown"]
60
+ )
61
+ print(content)
62
+
63
+ # Example 2: PDF scraping with content extraction and trimming
64
+ pdf_full = scraper.scrape(
65
+ "https://pmc.ncbi.nlm.nih.gov/articles/PMC7231600",
66
+ only_main_content=True,
67
+ formats=["markdown"],
68
+ proxy="auto"
69
+ )
70
+ # Trim locally if needed
71
+ pdf_trimmed = str(pdf_full)[:100]
72
+
73
+ # Note: JS-heavy sites like Twitter/LinkedIn are currently not fully supported
74
+ # They typically return 403 Forbidden errors (may vary by subscription tier)
75
+ ```
76
+
77
+ ### Configuration
78
+
79
+ Enable the engine by configuring Firecrawl credentials:
80
+
81
+ ```bash
82
+ {
83
+ "SEARCH_ENGINE_API_KEY": "fc-your-api-key",
84
+ "SEARCH_ENGINE_MODEL": "firecrawl"
85
+ }
86
+ ```
87
+
88
+ ### JSON Schema Extraction
89
+
90
+ Firecrawl supports structured data extraction using JSON schemas. This is useful for extracting specific fields from web pages using LLM-powered extraction:
91
+
92
+ ```python
93
+ from pydantic import Field
94
+ from symai.extended import Interface
95
+ from symai.models import LLMDataModel
96
+
97
+ class MetadataModel(LLMDataModel):
98
+ """Bibliographic metadata extracted from a source document."""
99
+ title: str = Field(description="Title of the source.")
100
+ year: str = Field(description="Publication year (4 digits) or Unknown.")
101
+ authors: list[str] = Field(default_factory=list, description="List of authors.")
102
+ doi: str | None = Field(default=None, description="DOI if available.")
103
+
104
+ # Build JSON format config from Pydantic schema
105
+ schema = MetadataModel.model_json_schema()
106
+ json_format = {
107
+ "type": "json",
108
+ "prompt": "Extract bibliographic metadata from this academic paper.",
109
+ "schema": schema,
110
+ }
111
+
112
+ scraper = Interface("firecrawl")
113
+ result = scraper.scrape(
114
+ "https://journals.physiology.org/doi/full/10.1152/ajpregu.00051.2002",
115
+ formats=[json_format],
116
+ proxy="auto"
117
+ )
118
+
119
+ # Access extracted data as dict
120
+ extracted = result.raw["json"]
121
+ metadata = MetadataModel(**extracted)
122
+ print(metadata.model_dump())
123
+
124
+ # Or as JSON string
125
+ print(str(result))
126
+ ```
127
+
128
+ ### Supported Parameters
129
+
130
+ The engine supports many parameters (passed as kwargs). Common ones include:
131
+ - **formats**: Output formats (["markdown"], ["html"], ["rawHtml"])
132
+ - **only_main_content**: Extract main content only (boolean)
133
+ - **proxy**: Proxy mode ("basic", "stealth", "auto")
134
+ - **location**: Geographic location object with optional country and languages
135
+ - Example: `{"country": "US"}` or `{"country": "RO", "languages": ["ro"]}`
136
+ - **maxAge**: Cache duration in seconds (integer)
137
+ - **storeInCache**: Enable caching (boolean)
138
+ - **actions**: Page interactions before scraping (list of action objects)
139
+ - Example: `[{"type": "wait", "milliseconds": 2000}]`
140
+ - Example: `[{"type": "click", "selector": ".button"}]`
141
+ - Example: `[{"type": "scroll", "direction": "down", "amount": 500}]`
142
+
143
+ Check the Firecrawl v2 API documentation for the complete list of available parameters and action types.
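Drawing only on the parameters listed above, a hedged sketch of a scrape call that combines page actions, a location hint, and caching (the URL is reused from the earlier example; all values are illustrative):

```python
from symai.extended import Interface

scraper = Interface("firecrawl")
result = scraper.scrape(
    "https://docs.firecrawl.dev/introduction",
    formats=["markdown"],
    only_main_content=True,
    location={"country": "US"},                        # location object with optional country/languages
    actions=[{"type": "wait", "milliseconds": 2000}],  # wait for dynamic content before scraping
    maxAge=3600,                                       # accept a cached copy younger than an hour
    storeInCache=True,
)
print(str(result))
```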
@@ -152,3 +152,75 @@ Here's how to configure the OpenAI search engine:
152
152
  ```
153
153
 
154
154
  This engine calls the OpenAI Responses API under the hood. When you target a reasoning-capable model, pass a `reasoning` dictionary matching the Responses payload schema (for example `{"effort": "low", "summary": "auto"}`). If omitted, the engine falls back to the default effort/summary settings shown above.
155
+
156
+ ## Firecrawl
157
+ Firecrawl.dev provides web scraping and search capabilities with built-in handling of dynamic JavaScript content and anti-bot mechanisms. The engine converts web pages into clean markdown and can perform web searches across multiple sources with advanced filtering and content extraction.
158
+
159
+ ### Comprehensive Search Example
160
+
161
+ ```python
162
+ from symai.extended import Interface
163
+
164
+ engine = Interface("firecrawl")
165
+
166
+ # Example 1: Location-aware search with language, scraping, and citations
167
+ result = engine.search(
168
+ "who is nicusor dan",
169
+ limit=5,
170
+ location="Romania",
171
+ lang="ro",
172
+ sources=["web"],
173
+ formats=["markdown"],
174
+ only_main_content=True,
175
+ proxy="stealth"
176
+ )
177
+
178
+ # Access structured citations (similar to parallel.ai)
179
+ citations = result.get_citations()
180
+ for citation in citations:
181
+ print(f"[{citation.id}] {citation.title}: {citation.url}")
182
+
183
+ # Example 2: Domain-filtered search with character limits
184
+ domains = ["arxiv.org", "nature.com"]
185
+ filters = " OR ".join(f"site:{domain}" for domain in domains)
186
+ query = f"({filters}) what is thermodynamic computing"
187
+
188
+ result = engine.search(
189
+ query,
190
+ limit=10,
191
+ max_chars_per_result=500,
192
+ categories=["research"],
193
+ formats=["markdown"],
194
+ proxy="basic"
195
+ )
196
+ print(result)
197
+ ```
198
+
199
+ ### Configuration
200
+
201
+ Enable the engine by configuring Firecrawl credentials:
202
+
203
+ ```bash
204
+ {
205
+ "SEARCH_ENGINE_API_KEY": "fc-your-api-key",
206
+ "SEARCH_ENGINE_MODEL": "firecrawl"
207
+ }
208
+ ```
209
+
210
+ ### Supported Parameters
211
+
212
+ The engine supports many parameters (passed as kwargs). Common ones include:
213
+ - **limit**: Max number of results
214
+ - **location**: Country code string for search (e.g., "Romania", "Germany")
215
+ - **lang**: Language code string for search (e.g., "ro", "es") - hint, not enforcement
216
+ - **sources**: List of sources (["web"], ["news"], ["images"])
217
+ - **categories**: Content types (["research"], ["github"], ["pdf"])
218
+ - **tbs**: Time-based filter (e.g., "qdr:d" for past day)
219
+ - **formats**: Output formats for scraped content (["markdown"], ["html"])
220
+ - **only_main_content**: Extract main content only when scraping (boolean)
221
+ - **max_chars_per_result**: Truncate results locally (integer)
222
+ - **proxy**: Proxy mode for scraping ("basic", "stealth", "auto")
223
+ - **scrape_location**: Location object for scraping with optional country and languages
224
+ - Example: `{"country": "US"}` or `{"country": "RO", "languages": ["ro"]}`
225
+
226
+ Check the Firecrawl v2 API documentation for the complete list of available parameters.
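Using only the parameters documented above, a hedged sketch of a news search restricted to the past day (the query and values are illustrative):

```python
from symai.extended import Interface

engine = Interface("firecrawl")
result = engine.search(
    "thermodynamic computing",   # illustrative query
    limit=5,
    sources=["news"],            # restrict to news sources
    tbs="qdr:d",                 # time-based filter: past day
    formats=["markdown"],
    max_chars_per_result=500,    # truncate each result locally
)
print(result)

# Structured citations work the same way as in the first example
for citation in result.get_citations():
    print(f"[{citation.id}] {citation.title}: {citation.url}")
```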
@@ -144,15 +144,12 @@ Example of a configuration file with all engines enabled:
144
144
  "VISION_ENGINE_MODEL": "openai/clip-vit-base-patch32",
145
145
  "OCR_ENGINE_API_KEY": "<APILAYER_API_KEY>",
146
146
  "SPEECH_TO_TEXT_ENGINE_MODEL": "turbo",
147
- "SPEECH_TO_TEXT_API_KEY": "",
148
- "SUPPORT_COMMUNITY": true
147
+ "SPEECH_TO_TEXT_API_KEY": ""
149
148
  }
150
149
  ```
151
150
 
152
151
  With these steps completed, you should be ready to start using SymbolicAI in your projects.
153
152
 
154
- > ❗️**NOTE**❗️Our framework allows you to support us train models for local usage by enabling the data collection feature. On application startup we show the terms of services and you can activate or disable this community feature. We do not share or sell your data to 3rd parties and only use the data for research purposes and to improve your user experience. To change this setting open the `symai.config.json` and turn it on/off by setting the `SUPPORT_COMMUNITY` property to `True/False` via the config file or the respective environment variable.
155
-
156
153
  > ❗️**NOTE**❗️By default, the user warnings are enabled. To disable them, export `SYMAI_WARNINGS=0` in your environment variables.
157
154
 
158
155
  ### Running tests
@@ -72,13 +72,12 @@ dependencies = [
72
72
 
73
73
  [project.optional-dependencies]
74
74
  bitsandbytes = ["bitsandbytes>=0.43.1"] # handle separately because of Apple Silicon
75
- blip2 = ["decord>=0.6.0", "salesforce-lavis>=1.0.0", "opencv-python-headless>=4.5.5.64"]
76
75
  hf = ["transformers>=4.45.2", "accelerate>=0.33.0", "peft>=0.13.1", "datasets>=3.0.1", "trl>=0.11.3"]
77
76
  scrape = ["beautifulsoup4>=4.12.3", "trafilatura>=2.0.0", "pdfminer.six", "playwright>=1.45.0", "parallel-web>=0.3.3"]
78
77
  llama_cpp = ["llama-cpp-python[server]>=0.3.7"] # handle separately since this dependency may not compile and require special maintenance
79
78
  wolframalpha = ["wolframalpha>=5.0.0"]
80
79
  whisper = ["openai-whisper>=20240930", "numba>=0.62.1", "llvmlite>=0.45.1"]
81
- search = ["parallel-web>=0.3.3"]
80
+ search = ["firecrawl-py>=4.12.0", "parallel-web>=0.3.3"]
82
81
  serpapi = ["google_search_results>=2.4.2"]
83
82
  services = ["fastapi>=0.110.0", "redis>=5.0.2", "uvicorn>=0.27.1"]
84
83
  solver = ["z3-solver>=4.12.6.0"]
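The optional dependency groups above map to pip extras in the usual way; for instance, the updated `search` extra now pulls in both `firecrawl-py` and `parallel-web` (standard pip extras syntax, shown for illustration):

```bash
pip install "symbolicai[search]"          # firecrawl-py + parallel-web
pip install "symbolicai[scrape,serpapi]"  # several extras at once
```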
@@ -11,8 +11,6 @@ from rich.table import Table
11
11
  from rich.tree import Tree
12
12
 
13
13
  from .backend import settings
14
- from .menu.screen import show_intro_menu
15
- from .misc.console import ConsoleStyle
16
14
  from .utils import UserMessage
17
15
 
18
16
  # do not remove - hides the libraries' debug messages
@@ -33,7 +31,7 @@ os.environ["TOKENIZERS_PARALLELISM"] = "false"
33
31
  # Create singleton instance
34
32
  config_manager = settings.SymAIConfig()
35
33
 
36
- SYMAI_VERSION = "1.4.0"
34
+ SYMAI_VERSION = "1.6.0"
37
35
  __version__ = SYMAI_VERSION
38
36
  __root_dir__ = config_manager.config_dir
39
37
 
@@ -87,20 +85,6 @@ def _start_symai():
87
85
  # Load and manage configurations
88
86
  symai_config = config_manager.load_config("symai.config.json")
89
87
 
90
- # MIGRATE THE ENVIRONMENT VARIABLES
91
- # *==========================================================================================================*
92
- if "COLLECTION_URI" not in symai_config:
93
- updates = {
94
- "COLLECTION_URI": "mongodb+srv://User:vt3epocXitd6WlQ6@extensityai.c1ajxxy.mongodb.net/?retryWrites=true&w=majority",
95
- "COLLECTION_DB": "ExtensityAI",
96
- "COLLECTION_STORAGE": "SymbolicAI",
97
- "SUPPORT_COMMUNITY": False,
98
- }
99
- config_manager.migrate_config("symai.config.json", updates)
100
- with ConsoleStyle("info") as console:
101
- msg = "Currently you are sharing your user experience with us by uploading the data to our research server, and thereby helping us improve future models and the overall SymbolicAI experience. We thank you very much for supporting the research community! If you wish to disable the data collection option go to your .symai config situated in your home directory or set the environment variable `SUPPORT_COMMUNITY` to `False`."
102
- console.print(msg)
103
-
104
88
  # POST-MIGRATION CHECKS
105
89
  # *==============================================================================================================*
106
90
  if "TEXT_TO_SPEECH_ENGINE_API_KEY" not in symai_config:
@@ -114,11 +98,6 @@ def _start_symai():
114
98
  symsh_config = config_manager.load_config("symsh.config.json")
115
99
  symserver_config = config_manager.load_config("symserver.config.json")
116
100
 
117
- # MIGRATE THE SHELL SPLASH SCREEN CONFIGURATION
118
- # *==============================================================================================================*
119
- if "show-splash-screen" not in symsh_config:
120
- config_manager.migrate_config("symsh.config.json", {"show-splash-screen": True})
121
-
122
101
  # CHECK IF THE USER HAS A NEUROSYMBOLIC API KEY
123
102
  # *==============================================================================================================*
124
103
  if not (
@@ -130,7 +109,6 @@ def _start_symai():
130
109
  ):
131
110
  # Try to fallback to the global (home) config if environment is not home
132
111
  if config_manager.config_dir != config_manager._home_config_dir:
133
- show_intro_menu()
134
112
  UserMessage(
135
113
  f"You didn't configure your environment ({config_manager.config_dir})! Falling back to the global ({config_manager._home_config_dir}) configuration if it exists."
136
114
  )
@@ -345,57 +323,29 @@ def display_config():
345
323
 
346
324
 
347
325
  def setup_wizard(_symai_config_path_):
348
- show_intro_menu()
349
-
350
- _nesy_engine_api_key = ""
351
- _nesy_engine_model = ""
352
- _symbolic_engine_api_key = ""
353
- _symbolic_engine_model = ""
354
- _embedding_engine_api_key = ""
355
- _embedding_model = ""
356
- _drawing_engine_api_key = ""
357
- _drawing_engine_model = ""
358
- _vision_engine_model = ""
359
- _search_engine_api_key = ""
360
- _search_engine_model = ""
361
- _ocr_engine_api_key = ""
362
- _speech_to_text_engine_model = ""
363
- _speech_to_text_api_key = ""
364
- _text_to_speech_engine_api_key = ""
365
- _text_to_speech_engine_model = ""
366
- _text_to_speech_engine_voice = ""
367
- _indexing_engine_api_key = ""
368
- _indexing_engine_environment = ""
369
- _caption_engine_environment = ""
370
- _support_comminity = False
371
-
372
326
  config_manager.save_config(
373
327
  _symai_config_path_,
374
328
  {
375
- "NEUROSYMBOLIC_ENGINE_API_KEY": _nesy_engine_api_key,
376
- "NEUROSYMBOLIC_ENGINE_MODEL": _nesy_engine_model,
377
- "SYMBOLIC_ENGINE_API_KEY": _symbolic_engine_api_key,
378
- "SYMBOLIC_ENGINE": _symbolic_engine_model,
379
- "EMBEDDING_ENGINE_API_KEY": _embedding_engine_api_key,
380
- "EMBEDDING_ENGINE_MODEL": _embedding_model,
381
- "DRAWING_ENGINE_API_KEY": _drawing_engine_api_key,
382
- "DRAWING_ENGINE_MODEL": _drawing_engine_model,
383
- "VISION_ENGINE_MODEL": _vision_engine_model,
384
- "SEARCH_ENGINE_API_KEY": _search_engine_api_key,
385
- "SEARCH_ENGINE_MODEL": _search_engine_model,
386
- "OCR_ENGINE_API_KEY": _ocr_engine_api_key,
387
- "SPEECH_TO_TEXT_ENGINE_MODEL": _speech_to_text_engine_model,
388
- "SPEECH_TO_TEXT_API_KEY": _speech_to_text_api_key,
389
- "TEXT_TO_SPEECH_ENGINE_API_KEY": _text_to_speech_engine_api_key,
390
- "TEXT_TO_SPEECH_ENGINE_MODEL": _text_to_speech_engine_model,
391
- "TEXT_TO_SPEECH_ENGINE_VOICE": _text_to_speech_engine_voice,
392
- "INDEXING_ENGINE_API_KEY": _indexing_engine_api_key,
393
- "INDEXING_ENGINE_ENVIRONMENT": _indexing_engine_environment,
394
- "CAPTION_ENGINE_MODEL": _caption_engine_environment,
395
- "COLLECTION_URI": "mongodb+srv://User:vt3epocXitd6WlQ6@extensityai.c1ajxxy.mongodb.net/?retryWrites=true&w=majority",
396
- "COLLECTION_DB": "ExtensityAI",
397
- "COLLECTION_STORAGE": "SymbolicAI",
398
- "SUPPORT_COMMUNITY": _support_comminity,
329
+ "NEUROSYMBOLIC_ENGINE_API_KEY": "",
330
+ "NEUROSYMBOLIC_ENGINE_MODEL": "",
331
+ "SYMBOLIC_ENGINE_API_KEY": "",
332
+ "SYMBOLIC_ENGINE": "",
333
+ "EMBEDDING_ENGINE_API_KEY": "",
334
+ "EMBEDDING_ENGINE_MODEL": "",
335
+ "DRAWING_ENGINE_API_KEY": "",
336
+ "DRAWING_ENGINE_MODEL": "",
337
+ "VISION_ENGINE_MODEL": "",
338
+ "SEARCH_ENGINE_API_KEY": "",
339
+ "SEARCH_ENGINE_MODEL": "",
340
+ "OCR_ENGINE_API_KEY": "",
341
+ "SPEECH_TO_TEXT_ENGINE_MODEL": "",
342
+ "SPEECH_TO_TEXT_API_KEY": "",
343
+ "TEXT_TO_SPEECH_ENGINE_API_KEY": "",
344
+ "TEXT_TO_SPEECH_ENGINE_MODEL": "",
345
+ "TEXT_TO_SPEECH_ENGINE_VOICE": "",
346
+ "INDEXING_ENGINE_API_KEY": "",
347
+ "INDEXING_ENGINE_ENVIRONMENT": "",
348
+ "CAPTION_ENGINE_MODEL": "",
399
349
  },
400
350
  )
401
351
 
@@ -3,21 +3,11 @@ import time
3
3
  from abc import ABC, abstractmethod
4
4
  from typing import Any
5
5
 
6
- from ..collect import CollectionRepository, rec_serialize
7
6
  from ..utils import UserMessage
8
7
  from .settings import HOME_PATH
9
8
 
10
9
  ENGINE_UNREGISTERED = "<UNREGISTERED/>"
11
10
 
12
- COLLECTION_LOGGING_ENGINES = {
13
- "GPTXChatEngine",
14
- "GPTXCompletionEngine",
15
- "SerpApiEngine",
16
- "WolframAlphaEngine",
17
- "SeleniumEngine",
18
- "OCREngine",
19
- }
20
-
21
11
 
22
12
  class Engine(ABC):
23
13
  def __init__(self) -> None:
@@ -26,8 +16,6 @@ class Engine(ABC):
26
16
  self.logging = False
27
17
  self.log_level = logging.DEBUG
28
18
  self.time_clock = False
29
- self.collection = CollectionRepository()
30
- self.collection.connect()
31
19
  # create formatter
32
20
  __root_dir__ = HOME_PATH
33
21
  __root_dir__.mkdir(parents=True, exist_ok=True)
@@ -66,9 +54,6 @@ class Engine(ABC):
66
54
  if self.logging:
67
55
  self.logger.log(self.log_level, log)
68
56
 
69
- if str(self) in COLLECTION_LOGGING_ENGINES:
70
- self._record_collection_entry(argument, metadata, req_time)
71
-
72
57
  self._trigger_output_handlers(argument, res, metadata)
73
58
  return res, metadata
74
59
 
@@ -92,17 +77,6 @@ class Engine(ABC):
92
77
  if argument_handler:
93
78
  argument_handler((result, metadata))
94
79
 
95
- def _record_collection_entry(self, argument: Any, metadata: dict, req_time: float) -> None:
96
- self.collection.add(
97
- forward={"args": rec_serialize(argument.args), "kwds": rec_serialize(argument.kwargs)},
98
- engine=str(self),
99
- metadata={
100
- "time": req_time,
101
- "data": rec_serialize(metadata),
102
- "argument": rec_serialize(argument),
103
- },
104
- )
105
-
106
80
  def id(self) -> str:
107
81
  return ENGINE_UNREGISTERED
108
82