symbolicai 0.21.0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in that registry.
- symai/__init__.py +96 -64
- symai/backend/base.py +93 -80
- symai/backend/engines/drawing/engine_bfl.py +12 -11
- symai/backend/engines/drawing/engine_gpt_image.py +108 -87
- symai/backend/engines/embedding/engine_llama_cpp.py +20 -24
- symai/backend/engines/embedding/engine_openai.py +3 -5
- symai/backend/engines/execute/engine_python.py +6 -5
- symai/backend/engines/files/engine_io.py +74 -67
- symai/backend/engines/imagecaptioning/engine_blip2.py +3 -3
- symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +54 -38
- symai/backend/engines/index/engine_pinecone.py +23 -24
- symai/backend/engines/index/engine_vectordb.py +16 -14
- symai/backend/engines/lean/engine_lean4.py +38 -34
- symai/backend/engines/neurosymbolic/__init__.py +41 -13
- symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +262 -182
- symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +263 -191
- symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +53 -49
- symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +212 -211
- symai/backend/engines/neurosymbolic/engine_groq.py +87 -63
- symai/backend/engines/neurosymbolic/engine_huggingface.py +21 -24
- symai/backend/engines/neurosymbolic/engine_llama_cpp.py +44 -46
- symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +256 -229
- symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +270 -150
- symai/backend/engines/ocr/engine_apilayer.py +6 -8
- symai/backend/engines/output/engine_stdout.py +1 -4
- symai/backend/engines/search/engine_openai.py +7 -7
- symai/backend/engines/search/engine_perplexity.py +5 -5
- symai/backend/engines/search/engine_serpapi.py +12 -14
- symai/backend/engines/speech_to_text/engine_local_whisper.py +20 -27
- symai/backend/engines/symbolic/engine_wolframalpha.py +3 -3
- symai/backend/engines/text_to_speech/engine_openai.py +5 -7
- symai/backend/engines/text_vision/engine_clip.py +7 -11
- symai/backend/engines/userinput/engine_console.py +3 -3
- symai/backend/engines/webscraping/engine_requests.py +81 -48
- symai/backend/mixin/__init__.py +13 -0
- symai/backend/mixin/anthropic.py +4 -2
- symai/backend/mixin/deepseek.py +2 -0
- symai/backend/mixin/google.py +2 -0
- symai/backend/mixin/openai.py +11 -3
- symai/backend/settings.py +83 -16
- symai/chat.py +101 -78
- symai/collect/__init__.py +7 -1
- symai/collect/dynamic.py +77 -69
- symai/collect/pipeline.py +35 -27
- symai/collect/stats.py +75 -63
- symai/components.py +198 -169
- symai/constraints.py +15 -12
- symai/core.py +698 -359
- symai/core_ext.py +32 -34
- symai/endpoints/api.py +80 -73
- symai/extended/.DS_Store +0 -0
- symai/extended/__init__.py +46 -12
- symai/extended/api_builder.py +11 -8
- symai/extended/arxiv_pdf_parser.py +13 -12
- symai/extended/bibtex_parser.py +2 -3
- symai/extended/conversation.py +101 -90
- symai/extended/document.py +17 -10
- symai/extended/file_merger.py +18 -13
- symai/extended/graph.py +18 -13
- symai/extended/html_style_template.py +2 -4
- symai/extended/interfaces/blip_2.py +1 -2
- symai/extended/interfaces/clip.py +1 -2
- symai/extended/interfaces/console.py +7 -1
- symai/extended/interfaces/dall_e.py +1 -1
- symai/extended/interfaces/flux.py +1 -1
- symai/extended/interfaces/gpt_image.py +1 -1
- symai/extended/interfaces/input.py +1 -1
- symai/extended/interfaces/llava.py +0 -1
- symai/extended/interfaces/naive_vectordb.py +7 -8
- symai/extended/interfaces/naive_webscraping.py +1 -1
- symai/extended/interfaces/ocr.py +1 -1
- symai/extended/interfaces/pinecone.py +6 -5
- symai/extended/interfaces/serpapi.py +1 -1
- symai/extended/interfaces/terminal.py +2 -3
- symai/extended/interfaces/tts.py +1 -1
- symai/extended/interfaces/whisper.py +1 -1
- symai/extended/interfaces/wolframalpha.py +1 -1
- symai/extended/metrics/__init__.py +11 -1
- symai/extended/metrics/similarity.py +11 -13
- symai/extended/os_command.py +17 -16
- symai/extended/packages/__init__.py +29 -3
- symai/extended/packages/symdev.py +19 -16
- symai/extended/packages/sympkg.py +12 -9
- symai/extended/packages/symrun.py +21 -19
- symai/extended/repo_cloner.py +11 -10
- symai/extended/seo_query_optimizer.py +1 -2
- symai/extended/solver.py +20 -23
- symai/extended/summarizer.py +4 -3
- symai/extended/taypan_interpreter.py +10 -12
- symai/extended/vectordb.py +99 -82
- symai/formatter/__init__.py +9 -1
- symai/formatter/formatter.py +12 -16
- symai/formatter/regex.py +62 -63
- symai/functional.py +173 -122
- symai/imports.py +136 -127
- symai/interfaces.py +56 -27
- symai/memory.py +14 -13
- symai/misc/console.py +49 -39
- symai/misc/loader.py +5 -3
- symai/models/__init__.py +17 -1
- symai/models/base.py +269 -181
- symai/models/errors.py +0 -1
- symai/ops/__init__.py +32 -22
- symai/ops/measures.py +11 -15
- symai/ops/primitives.py +348 -228
- symai/post_processors.py +32 -28
- symai/pre_processors.py +39 -41
- symai/processor.py +6 -4
- symai/prompts.py +59 -45
- symai/server/huggingface_server.py +23 -20
- symai/server/llama_cpp_server.py +7 -5
- symai/shell.py +3 -4
- symai/shellsv.py +499 -375
- symai/strategy.py +517 -287
- symai/symbol.py +111 -116
- symai/utils.py +42 -36
- {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/METADATA +4 -2
- symbolicai-1.0.0.dist-info/RECORD +163 -0
- symbolicai-0.21.0.dist-info/RECORD +0 -162
- {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/WHEEL +0 -0
- {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/entry_points.txt +0 -0
- {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/licenses/LICENSE +0 -0
- {symbolicai-0.21.0.dist-info → symbolicai-1.0.0.dist-info}/top_level.txt +0 -0
symai/__init__.py
CHANGED
@@ -13,7 +13,7 @@ from rich.tree import Tree
 from .backend import settings
 from .menu.screen import show_intro_menu
 from .misc.console import ConsoleStyle
-from .utils import
+from .utils import UserMessage
 
 # do not remove - hides the libraries' debug messages
 logging.getLogger("urllib3").setLevel(logging.WARNING)
@@ -33,23 +33,19 @@ os.environ['TOKENIZERS_PARALLELISM'] = "false"
 # Create singleton instance
 config_manager = settings.SymAIConfig()
 
-SYMAI_VERSION = "0.21.0"
+SYMAI_VERSION = "1.0.0"
 __version__ = SYMAI_VERSION
 __root_dir__ = config_manager.config_dir
 
 def _start_symai():
-    global _symai_config_
-    global _symsh_config_
-    global _symserver_config_
-
     # Create config directories if they don't exist
-
-
+    config_manager._env_config_dir.mkdir(parents=True, exist_ok=True)
+    config_manager._home_config_dir.mkdir(parents=True, exist_ok=True)
 
     # CREATE THE SHELL CONFIGURATION FILE IF IT DOES NOT EXIST YET
     # *==============================================================================================================*
     _symsh_config_path_ = config_manager.get_config_path('symsh.config.json')
-    if not
+    if not _symsh_config_path_.exists():
         config_manager.save_config('symsh.config.json', {
             "colors": {
                 "completion-menu.completion.current": "bg:#323232 #212121",
@@ -71,23 +67,23 @@ def _start_symai():
     # CREATE A SERVER CONFIGURATION FILE IF IT DOES NOT EXIST YET
     # *==============================================================================================================*
     _symserver_config_path_ = config_manager.get_config_path('symserver.config.json')
-    if not
+    if not _symserver_config_path_.exists():
         config_manager.save_config('symserver.config.json', {})
 
     # Get appropriate config path (debug mode handling is now in config_manager)
     _symai_config_path_ = config_manager.get_config_path('symai.config.json')
 
-    if not
+    if not _symai_config_path_.exists():
         setup_wizard(_symai_config_path_)
-
+        UserMessage(f'No configuration file found for the environment. A new configuration file has been created at {_symai_config_path_}. Please configure your environment.')
         sys.exit(1)
 
     # Load and manage configurations
-
+    symai_config = config_manager.load_config('symai.config.json')
 
     # MIGRATE THE ENVIRONMENT VARIABLES
     # *==========================================================================================================*
-    if 'COLLECTION_URI' not in
+    if 'COLLECTION_URI' not in symai_config:
         updates = {
             'COLLECTION_URI': "mongodb+srv://User:vt3epocXitd6WlQ6@extensityai.c1ajxxy.mongodb.net/?retryWrites=true&w=majority",
             'COLLECTION_DB': "ExtensityAI",
@@ -101,57 +97,59 @@ def _start_symai():
 
     # POST-MIGRATION CHECKS
     # *==============================================================================================================*
-    if 'TEXT_TO_SPEECH_ENGINE_API_KEY' not in
+    if 'TEXT_TO_SPEECH_ENGINE_API_KEY' not in symai_config:
         updates = {
-            'TEXT_TO_SPEECH_ENGINE_API_KEY':
+            'TEXT_TO_SPEECH_ENGINE_API_KEY': symai_config.get('NEUROSYMBOLIC_ENGINE_API_KEY', '')
         }
         config_manager.migrate_config('symai.config.json', updates)
 
     # Load all configurations
-
-
-
+    symai_config = config_manager.load_config('symai.config.json')
+    symsh_config = config_manager.load_config('symsh.config.json')
+    symserver_config = config_manager.load_config('symserver.config.json')
 
     # MIGRATE THE SHELL SPLASH SCREEN CONFIGURATION
     # *==============================================================================================================*
-    if 'show-splash-screen' not in
+    if 'show-splash-screen' not in symsh_config:
         config_manager.migrate_config('symsh.config.json', {'show-splash-screen': True})
 
     # CHECK IF THE USER HAS A NEUROSYMBOLIC API KEY
     # *==============================================================================================================*
     if not (
-
-
+            symai_config['NEUROSYMBOLIC_ENGINE_MODEL'].lower().startswith('llama') or \
+            symai_config['NEUROSYMBOLIC_ENGINE_MODEL'].lower().startswith('huggingface')) \
         and \
         (
-
-        len(
+            symai_config['NEUROSYMBOLIC_ENGINE_API_KEY'] is None or \
+            len(symai_config['NEUROSYMBOLIC_ENGINE_API_KEY']) == 0):
         # Try to fallback to the global (home) config if environment is not home
         if config_manager.config_dir != config_manager._home_config_dir:
             show_intro_menu()
-
+            UserMessage(f"You didn't configure your environment ({config_manager.config_dir})! Falling back to the global ({config_manager._home_config_dir}) configuration if it exists.")
            # Force loading from home
-
-
-
+            symai_config = config_manager.load_config('symai.config.json', fallback_to_home=True)
+            symsh_config = config_manager.load_config('symsh.config.json', fallback_to_home=True)
+            symserver_config = config_manager.load_config('symserver.config.json', fallback_to_home=True)
 
         # If still not valid, warn and exit
-        if not
-
+        if not symai_config.get('NEUROSYMBOLIC_ENGINE_API_KEY'):
+            UserMessage('The mandatory neuro-symbolic engine is not initialized. Please set NEUROSYMBOLIC_ENGINE_MODEL and NEUROSYMBOLIC_ENGINE_API_KEY.')
            sys.exit(1)
 
-    settings.SYMAI_CONFIG =
-    settings.SYMSH_CONFIG =
-    settings.SYMSERVER_CONFIG =
+    settings.SYMAI_CONFIG = symai_config
+    settings.SYMSH_CONFIG = symsh_config
+    settings.SYMSERVER_CONFIG = symserver_config
+    return symai_config, symsh_config, symserver_config
 
 
 def run_server():
     _symserver_config_ = {}
     if settings.SYMAI_CONFIG.get("NEUROSYMBOLIC_ENGINE_MODEL").startswith("llama") or settings.SYMAI_CONFIG.get("EMBEDDING_ENGINE_MODEL").startswith("llama"):
-
+        # Keep optional llama_cpp dependencies lazy.
+        from .server.llama_cpp_server import llama_cpp_server  # noqa
 
         command, args = llama_cpp_server()
-        _symserver_config_.update(zip(args[::2], args[1::2]))
+        _symserver_config_.update(zip(args[::2], args[1::2], strict=False))
         _symserver_config_['online'] = True
 
         config_manager.save_config("symserver.config.json", _symserver_config_)
@@ -161,14 +159,15 @@ def run_server():
         try:
             subprocess.run(command, check=True)
         except KeyboardInterrupt:
-
+            UserMessage("Server stopped!")
         except Exception as e:
-
+            UserMessage(f"Error running server: {e}")
         finally:
             config_manager.save_config("symserver.config.json", {'online': False})
 
     elif settings.SYMAI_CONFIG.get("NEUROSYMBOLIC_ENGINE_MODEL").startswith("huggingface"):
-
+        # HuggingFace server stack is optional; import only when requested.
+        from .server.huggingface_server import huggingface_server  # noqa
 
         command, args = huggingface_server()
         _symserver_config_.update(vars(args))
@@ -179,13 +178,17 @@ def run_server():
         try:
             command(host=args.host, port=args.port)
         except KeyboardInterrupt:
-
+            UserMessage("Server stopped!")
         except Exception as e:
-
+            UserMessage(f"Error running server: {e}")
         finally:
             config_manager.save_config("symserver.config.json", {'online': False})
     else:
-
+        msg = (
+            "You're trying to run a local server without a valid neuro-symbolic engine model. "
+            "Please set a valid model in your configuration file. Current available options are 'llamacpp' and 'huggingface'."
+        )
+        UserMessage(msg, raise_with=ValueError)
 
 
 # *==============================================================================================================*
@@ -218,7 +221,7 @@ def display_config():
     debug_branch = tree.add("[yellow]Debug Mode Config (CWD)[/yellow]")
     debug_config = config_manager._debug_dir / 'symai.config.json'
     if debug_config.exists():
-        with open(
+        with debug_config.open() as f:
            content = json.load(f)
        debug_branch.add(f"📄 [green]{debug_config}[/green]\n{format_config_content(content)}")
    else:
@@ -235,7 +238,7 @@ def display_config():
     for config_file, icon in env_configs.items():
         config_path = config_manager._env_config_dir / config_file
         if config_path.exists():
-            with open(
+            with config_path.open() as f:
                content = json.load(f)
            env_branch.add(f"{icon} [green]{config_path}[/green]\n{format_config_content(content)}")
        else:
@@ -246,7 +249,7 @@ def display_config():
     for config_file, icon in env_configs.items():
         config_path = config_manager._home_config_dir / config_file
         if config_path.exists():
-            with open(
+            with config_path.open() as f:
                content = json.load(f)
            home_branch.add(f"{icon} [green]{config_path}[/green]\n{format_config_content(content)}")
        else:
@@ -258,10 +261,10 @@ def display_config():
     summary.add_column("Active Path")
 
     active_paths = {
-        "Primary Config Dir": config_manager.
-        "symai.config.json": config_manager.
-        "symsh.config.json": config_manager.
-        "symserver.config.json": config_manager.
+        "Primary Config Dir": config_manager.get_active_config_dir(),
+        "symai.config.json": config_manager.get_active_path('symai.config.json'),
+        "symsh.config.json": config_manager.get_active_path('symsh.config.json'),
+        "symserver.config.json": config_manager.get_active_path('symserver.config.json')
     }
 
     for config_type, path in active_paths.items():
@@ -333,19 +336,48 @@ def setup_wizard(_symai_config_path_):
         "SUPPORT_COMMUNITY": _support_comminity
     })
 
-
-
-
-from .
-from .
-from .
-from .
-from .
-from .
-from .
-from .
-from .
-from .
-from .
-from .
-
+_symai_config_, _symsh_config_, _symserver_config_ = _start_symai()
+
+from .backend.base import Engine  # noqa
+from .components import Function, PrimitiveDisabler  # noqa
+from .core import few_shot, zero_shot  # noqa
+from .extended import Conversation  # noqa
+from .functional import EngineRepository  # noqa
+from .imports import Import  # noqa
+from .interfaces import Interface  # noqa
+from .post_processors import PostProcessor  # noqa
+from .pre_processors import PreProcessor  # noqa
+from .prompts import Prompt, PromptLanguage, PromptRegistry  # noqa
+from .shell import Shell  # noqa
+from .strategy import Strategy  # noqa
+from .symbol import Call, Expression, GlobalSymbolPrimitive, Metadata, Symbol  # noqa
+
+__all__ = [
+    "SYMAI_VERSION",
+    "Call",
+    "Conversation",
+    "Engine",
+    "EngineRepository",
+    "Expression",
+    "Function",
+    "GlobalSymbolPrimitive",
+    "Import",
+    "Interface",
+    "Metadata",
+    "PostProcessor",
+    "PreProcessor",
+    "PrimitiveDisabler",
+    "Prompt",
+    "PromptLanguage",
+    "PromptRegistry",
+    "Shell",
+    "Strategy",
+    "Symbol",
+    "__root_dir__",
+    "__version__",
+    "config_manager",
+    "few_shot",
+    "run_server",
+    "setup_wizard",
+    "zero_shot",
+]
symai/backend/base.py
CHANGED
@@ -1,16 +1,23 @@
 import logging
-import os
 import time
-
-from
-from typing import Any, List, Tuple
-from .settings import HOME_PATH
+from abc import ABC, abstractmethod
+from typing import Any
 
 from ..collect import CollectionRepository, rec_serialize
-
+from ..utils import UserMessage
+from .settings import HOME_PATH
 
 ENGINE_UNREGISTERED = '<UNREGISTERED/>'
 
+COLLECTION_LOGGING_ENGINES = {
+    'GPTXChatEngine',
+    'GPTXCompletionEngine',
+    'SerpApiEngine',
+    'WolframAlphaEngine',
+    'SeleniumEngine',
+    'OCREngine',
+}
+
 class Engine(ABC):
     def __init__(self) -> None:
         super().__init__()
@@ -22,7 +29,7 @@ class Engine(ABC):
         self.collection.connect()
         # create formatter
         __root_dir__ = HOME_PATH
-
+        __root_dir__.mkdir(parents=True, exist_ok=True)
         __file_path__ = __root_dir__ / "engine.log"
         logging.basicConfig(filename=__file_path__, filemode="a", format='%(asctime)s %(name)s %(levelname)s %(message)s')
         self.logger = logging.getLogger()
@@ -34,7 +41,7 @@ class Engine(ABC):
         stream.setFormatter(streamformat)
         self.logger.addHandler(stream)
 
-    def __call__(self, argument: Any) ->
+    def __call__(self, argument: Any) -> tuple[list[str], dict]:
         log = {
             'Input': {
                 'self': self,
@@ -44,64 +51,67 @@ class Engine(ABC):
         }
         start_time = time.time()
 
-
-        if hasattr(argument.prop.instance, '_metadata') and hasattr(argument.prop.instance._metadata, 'input_handler'):
-            input_handler = argument.prop.instance._metadata.input_handler if hasattr(argument.prop.instance._metadata, 'input_handler') else None
-            if input_handler is not None:
-                input_handler((argument.prop.processed_input, argument))
-        # check for kwargs based input handler
-        if argument.prop.input_handler is not None:
-            argument.prop.input_handler((argument.prop.processed_input, argument))
+        self._trigger_input_handlers(argument)
 
-        # execute the engine
         res, metadata = self.forward(argument)
 
-        # compute time
         req_time = time.time() - start_time
         metadata['time'] = req_time
         if self.time_clock:
-
+            UserMessage(f"{argument.prop.func}: {req_time} sec")
         log['Output'] = res
         if self.verbose:
             view = {k: v for k, v in list(log['Input'].items()) if k != 'self'}
-            input_ = f"{str(log['Input']['self'])[:50]}, {
-
+            input_ = f"{str(log['Input']['self'])[:50]}, {argument.prop.func!s}, {view!s}"
+            UserMessage(f"{input_[:150]} {str(log['Output'])[:100]}")
         if self.logging:
             self.logger.log(self.log_level, log)
 
-
-
-
-
-           or str(self) == 'WolframAlphaEngine' \
-           or str(self) == 'SeleniumEngine' \
-           or str(self) == 'OCREngine':
-            self.collection.add(
-                forward={'args': rec_serialize(argument.args), 'kwds': rec_serialize(argument.kwargs)},
-                engine=str(self),
-                metadata={
-                    'time': req_time,
-                    'data': rec_serialize(metadata),
-                    'argument': rec_serialize(argument)
-                }
-            )
-
-        # check for global object based output handler
-        if hasattr(argument.prop.instance, '_metadata') and hasattr(argument.prop.instance._metadata, 'output_handler'):
-            output_handler = argument.prop.instance._metadata.output_handler if hasattr(argument.prop.instance._metadata, 'output_handler') else None
-            if output_handler:
-                output_handler(res)
-        # check for kwargs based output handler
-        if argument.prop.output_handler:
-            argument.prop.output_handler((res, metadata))
+        if str(self) in COLLECTION_LOGGING_ENGINES:
+            self._record_collection_entry(argument, metadata, req_time)
+
+        self._trigger_output_handlers(argument, res, metadata)
         return res, metadata
 
+    def _trigger_input_handlers(self, argument: Any) -> None:
+        instance_metadata = getattr(argument.prop.instance, '_metadata', None)
+        if instance_metadata is not None:
+            input_handler = getattr(instance_metadata, 'input_handler', None)
+            if input_handler is not None:
+                input_handler((argument.prop.processed_input, argument))
+        argument_handler = argument.prop.input_handler
+        if argument_handler is not None:
+            argument_handler((argument.prop.processed_input, argument))
+
+    def _trigger_output_handlers(self, argument: Any, result: Any, metadata: dict | None) -> None:
+        instance_metadata = getattr(argument.prop.instance, '_metadata', None)
+        if instance_metadata is not None:
+            output_handler = getattr(instance_metadata, 'output_handler', None)
+            if output_handler:
+                output_handler(result)
+        argument_handler = argument.prop.output_handler
+        if argument_handler:
+            argument_handler((result, metadata))
+
+    def _record_collection_entry(self, argument: Any, metadata: dict, req_time: float) -> None:
+        self.collection.add(
+            forward={'args': rec_serialize(argument.args), 'kwds': rec_serialize(argument.kwargs)},
+            engine=str(self),
+            metadata={
+                'time': req_time,
+                'data': rec_serialize(metadata),
+                'argument': rec_serialize(argument)
+            }
+        )
+
     def id(self) -> str:
         return ENGINE_UNREGISTERED
 
     def preview(self, argument):
-        #
-        from ..symbol import
+        # Used here to avoid backend.base <-> symbol circular import.
+        from ..symbol import (  # noqa
+            Symbol,
+        )
         class Preview(Symbol):
             def __repr__(self) -> str:
                 '''
@@ -117,20 +127,22 @@ class Engine(ABC):
 
         return Preview(argument), {}
 
-
-
+    @abstractmethod
+    def forward(self, *args: Any, **kwds: Any) -> list[str]:
+        raise NotADirectoryError
 
+    @abstractmethod
     def prepare(self, argument):
-        raise NotImplementedError
+        raise NotImplementedError
 
-    def command(self, *
-        if kwargs.get('verbose'
+    def command(self, *_args, **kwargs):
+        if kwargs.get('verbose'):
            self.verbose = kwargs['verbose']
-        if kwargs.get('logging'
+        if kwargs.get('logging'):
            self.logging = kwargs['logging']
-        if kwargs.get('log_level'
+        if kwargs.get('log_level'):
            self.log_level = kwargs['log_level']
-        if kwargs.get('time_clock'
+        if kwargs.get('time_clock'):
            self.time_clock = kwargs['time_clock']
 
     def __str__(self) -> str:
@@ -155,41 +167,42 @@ class BatchEngine(Engine):
         self.time_clock = True
         self.allows_batching = True
 
-    def __call__(self, arguments:
+    def __call__(self, arguments: list[Any]) -> list[tuple[Any, dict]]:
         start_time = time.time()
         for arg in arguments:
-
-            input_handler = getattr(arg.prop.instance._metadata, 'input_handler', None)
-            if input_handler is not None:
-                input_handler((arg.prop.processed_input, arg))
-            if arg.prop.input_handler is not None:
-                arg.prop.input_handler((arg.prop.processed_input, arg))
+            self._trigger_input_handlers(arg)
 
-
-            results, metadata_list = self.forward(arguments)
-        except Exception as e:
-            results = [e] * len(arguments)
-            metadata_list = [None] * len(arguments)
+        results, metadata_list = self._execute_batch(arguments)
 
         total_time = time.time() - start_time
         if self.time_clock:
-
+            UserMessage(f"Total execution time: {total_time} sec")
 
-
+        return self._prepare_batch_results(arguments, results, metadata_list, total_time)
 
-
+    def _execute_batch(self, arguments: list[Any]) -> tuple[list[Any], list[dict | None]]:
+        try:
+            return self.forward(arguments)
+        except Exception as error:
+            return [error] * len(arguments), [None] * len(arguments)
+
+    def _prepare_batch_results(
+        self,
+        arguments: list[Any],
+        results: list[Any],
+        metadata_list: list[dict | None],
+        total_time: float,
+    ) -> list[tuple[Any, dict | None]]:
+        return_list = []
+        for arg, result, metadata in zip(arguments, results, metadata_list, strict=False):
             if metadata is not None:
                 metadata['time'] = total_time / len(arguments)
 
-
-            output_handler = getattr(arg.prop.instance._metadata, 'output_handler', None)
-            if output_handler:
-                output_handler(result)
-            if arg.prop.output_handler:
-                arg.prop.output_handler((result, metadata))
-
+            self._trigger_output_handlers(arg, result, metadata)
             return_list.append((result, metadata))
         return return_list
 
-    def forward(self,
-
+    def forward(self, _arguments: list[Any]) -> tuple[list[Any], list[dict]]:
+        msg = "Subclasses must implement forward method"
+        UserMessage(msg)
+        raise NotImplementedError(msg)
symai/backend/engines/drawing/engine_bfl.py
CHANGED
@@ -1,11 +1,12 @@
 import logging
 import tempfile
 import time
-from
+from pathlib import Path
 
 import requests
 
 from ....symbol import Result
+from ....utils import UserMessage
 from ...base import Engine
 from ...settings import SYMAI_CONFIG
 
@@ -19,17 +20,18 @@ class FluxResult(Result):
     def __init__(self, value, **kwargs):
         super().__init__(value, **kwargs)
         # unpack the result
-
+        with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp_file:
+            path = tmp_file.name
         url = value.get('result').get('sample')
         request = requests.get(url, allow_redirects=True)
         request.raise_for_status()
-        with open(
+        with Path(path).open("wb") as f:
             f.write(request.content)
         self._value = [path]
 
 
 class DrawingEngine(Engine):
-    def __init__(self, api_key:
+    def __init__(self, api_key: str | None = None, model: str | None = None):
         super().__init__()
         self.config = SYMAI_CONFIG
         self.api_key = self.config['DRAWING_ENGINE_API_KEY'] if api_key is None else api_key
@@ -51,16 +53,12 @@ class DrawingEngine(Engine):
     def forward(self, argument):
         prompt = argument.prop.prepared_input
         kwargs = argument.kwargs
-        model = kwargs.get('model', self.model)
         width = kwargs.get('width', 1024)
         height = kwargs.get('height', 768)
         steps = kwargs.get('steps', 40)
-        prompt_upsampling = kwargs.get('prompt_upsampling', False)
         seed = kwargs.get('seed', None)
         guidance = kwargs.get('guidance', None)
         safety_tolerance = kwargs.get('safety_tolerance', 2)
-        interval = kwargs.get('interval', None)
-        output_format = kwargs.get('output_format', 'png')
         except_remedy = kwargs.get('except_remedy', None)
 
         headers = {
@@ -93,7 +91,10 @@ class DrawingEngine(Engine):
         data = response.json()
         request_id = data.get("id")
         if not request_id:
-
+            UserMessage(
+                f"Failed to get request ID! Response payload: {data}",
+                raise_with=Exception,
+            )
 
         while True:
             time.sleep(5)
@@ -118,8 +119,8 @@ class DrawingEngine(Engine):
 
             metadata = {}
             return [rsp], metadata
-
-
+        UserMessage(f"Unknown operation: {kwargs['operation']}", raise_with=Exception)
+        return [], {}
 
     def prepare(self, argument):
         argument.prop.prepared_input = str(argument.prop.processed_input)