symbolicai 0.21.0__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- symai/__init__.py +269 -173
- symai/backend/base.py +123 -110
- symai/backend/engines/drawing/engine_bfl.py +45 -44
- symai/backend/engines/drawing/engine_gpt_image.py +112 -97
- symai/backend/engines/embedding/engine_llama_cpp.py +63 -52
- symai/backend/engines/embedding/engine_openai.py +25 -21
- symai/backend/engines/execute/engine_python.py +19 -18
- symai/backend/engines/files/engine_io.py +104 -95
- symai/backend/engines/imagecaptioning/engine_blip2.py +28 -24
- symai/backend/engines/imagecaptioning/engine_llavacpp_client.py +102 -79
- symai/backend/engines/index/engine_pinecone.py +124 -97
- symai/backend/engines/index/engine_qdrant.py +1011 -0
- symai/backend/engines/index/engine_vectordb.py +84 -56
- symai/backend/engines/lean/engine_lean4.py +96 -52
- symai/backend/engines/neurosymbolic/__init__.py +41 -13
- symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_chat.py +330 -248
- symai/backend/engines/neurosymbolic/engine_anthropic_claudeX_reasoning.py +329 -264
- symai/backend/engines/neurosymbolic/engine_cerebras.py +328 -0
- symai/backend/engines/neurosymbolic/engine_deepseekX_reasoning.py +118 -88
- symai/backend/engines/neurosymbolic/engine_google_geminiX_reasoning.py +344 -299
- symai/backend/engines/neurosymbolic/engine_groq.py +173 -115
- symai/backend/engines/neurosymbolic/engine_huggingface.py +114 -84
- symai/backend/engines/neurosymbolic/engine_llama_cpp.py +144 -118
- symai/backend/engines/neurosymbolic/engine_openai_gptX_chat.py +415 -307
- symai/backend/engines/neurosymbolic/engine_openai_gptX_reasoning.py +394 -231
- symai/backend/engines/ocr/engine_apilayer.py +23 -27
- symai/backend/engines/output/engine_stdout.py +10 -13
- symai/backend/engines/{webscraping → scrape}/engine_requests.py +101 -54
- symai/backend/engines/search/engine_openai.py +100 -88
- symai/backend/engines/search/engine_parallel.py +665 -0
- symai/backend/engines/search/engine_perplexity.py +44 -45
- symai/backend/engines/search/engine_serpapi.py +37 -34
- symai/backend/engines/speech_to_text/engine_local_whisper.py +54 -51
- symai/backend/engines/symbolic/engine_wolframalpha.py +15 -9
- symai/backend/engines/text_to_speech/engine_openai.py +20 -26
- symai/backend/engines/text_vision/engine_clip.py +39 -37
- symai/backend/engines/userinput/engine_console.py +5 -6
- symai/backend/mixin/__init__.py +13 -0
- symai/backend/mixin/anthropic.py +48 -38
- symai/backend/mixin/deepseek.py +6 -5
- symai/backend/mixin/google.py +7 -4
- symai/backend/mixin/groq.py +2 -4
- symai/backend/mixin/openai.py +140 -110
- symai/backend/settings.py +87 -20
- symai/chat.py +216 -123
- symai/collect/__init__.py +7 -1
- symai/collect/dynamic.py +80 -70
- symai/collect/pipeline.py +67 -51
- symai/collect/stats.py +161 -109
- symai/components.py +707 -360
- symai/constraints.py +24 -12
- symai/core.py +1857 -1233
- symai/core_ext.py +83 -80
- symai/endpoints/api.py +166 -104
- symai/extended/.DS_Store +0 -0
- symai/extended/__init__.py +46 -12
- symai/extended/api_builder.py +29 -21
- symai/extended/arxiv_pdf_parser.py +23 -14
- symai/extended/bibtex_parser.py +9 -6
- symai/extended/conversation.py +156 -126
- symai/extended/document.py +50 -30
- symai/extended/file_merger.py +57 -14
- symai/extended/graph.py +51 -32
- symai/extended/html_style_template.py +18 -14
- symai/extended/interfaces/blip_2.py +2 -3
- symai/extended/interfaces/clip.py +4 -3
- symai/extended/interfaces/console.py +9 -1
- symai/extended/interfaces/dall_e.py +4 -2
- symai/extended/interfaces/file.py +2 -0
- symai/extended/interfaces/flux.py +4 -2
- symai/extended/interfaces/gpt_image.py +16 -7
- symai/extended/interfaces/input.py +2 -1
- symai/extended/interfaces/llava.py +1 -2
- symai/extended/interfaces/{naive_webscraping.py → naive_scrape.py} +4 -3
- symai/extended/interfaces/naive_vectordb.py +9 -10
- symai/extended/interfaces/ocr.py +5 -3
- symai/extended/interfaces/openai_search.py +2 -0
- symai/extended/interfaces/parallel.py +30 -0
- symai/extended/interfaces/perplexity.py +2 -0
- symai/extended/interfaces/pinecone.py +12 -9
- symai/extended/interfaces/python.py +2 -0
- symai/extended/interfaces/serpapi.py +3 -1
- symai/extended/interfaces/terminal.py +2 -4
- symai/extended/interfaces/tts.py +3 -2
- symai/extended/interfaces/whisper.py +3 -2
- symai/extended/interfaces/wolframalpha.py +2 -1
- symai/extended/metrics/__init__.py +11 -1
- symai/extended/metrics/similarity.py +14 -13
- symai/extended/os_command.py +39 -29
- symai/extended/packages/__init__.py +29 -3
- symai/extended/packages/symdev.py +51 -43
- symai/extended/packages/sympkg.py +41 -35
- symai/extended/packages/symrun.py +63 -50
- symai/extended/repo_cloner.py +14 -12
- symai/extended/seo_query_optimizer.py +15 -13
- symai/extended/solver.py +116 -91
- symai/extended/summarizer.py +12 -10
- symai/extended/taypan_interpreter.py +17 -18
- symai/extended/vectordb.py +122 -92
- symai/formatter/__init__.py +9 -1
- symai/formatter/formatter.py +51 -47
- symai/formatter/regex.py +70 -69
- symai/functional.py +325 -176
- symai/imports.py +190 -147
- symai/interfaces.py +57 -28
- symai/memory.py +45 -35
- symai/menu/screen.py +28 -19
- symai/misc/console.py +66 -56
- symai/misc/loader.py +8 -5
- symai/models/__init__.py +17 -1
- symai/models/base.py +395 -236
- symai/models/errors.py +1 -2
- symai/ops/__init__.py +32 -22
- symai/ops/measures.py +24 -25
- symai/ops/primitives.py +1149 -731
- symai/post_processors.py +58 -50
- symai/pre_processors.py +86 -82
- symai/processor.py +21 -13
- symai/prompts.py +764 -685
- symai/server/huggingface_server.py +135 -49
- symai/server/llama_cpp_server.py +21 -11
- symai/server/qdrant_server.py +206 -0
- symai/shell.py +100 -42
- symai/shellsv.py +700 -492
- symai/strategy.py +630 -346
- symai/symbol.py +368 -322
- symai/utils.py +100 -78
- {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/METADATA +22 -10
- symbolicai-1.1.0.dist-info/RECORD +168 -0
- symbolicai-0.21.0.dist-info/RECORD +0 -162
- {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/WHEEL +0 -0
- {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/entry_points.txt +0 -0
- {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/licenses/LICENSE +0 -0
- {symbolicai-0.21.0.dist-info → symbolicai-1.1.0.dist-info}/top_level.txt +0 -0
symai/extended/interfaces/ocr.py
CHANGED
@@ -1,6 +1,6 @@
 from ... import core
-from ...symbol import Expression
 from ...backend.engines.ocr.engine_apilayer import ApiLayerResult
+from ...symbol import Expression


 class ocr(Expression):
@@ -9,9 +9,11 @@ class ocr(Expression):
         self.name = self.__class__.__name__

     def __call__(self, image_url: str, **kwargs) -> ApiLayerResult:
-        if not image_url.startswith(
-            image_url = f
+        if not image_url.startswith("http"):
+            image_url = f"file://{image_url}"
+
         @core.ocr(image=image_url, **kwargs)
         def _func(_) -> ApiLayerResult:
             pass
+
         return _func(self)
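
Not part of the diff: a minimal usage sketch of the updated ocr interface (the file path is illustrative and it assumes the ApiLayer OCR engine and its API key are configured). As the hunk above shows, non-http paths are now rewritten to a file:// URL before the engine call.

    # Hypothetical usage sketch, not taken from the package sources.
    from symai.extended.interfaces.ocr import ocr

    reader = ocr()
    # "/tmp/invoice.png" is illustrative; it is passed to the engine as "file:///tmp/invoice.png".
    result = reader("/tmp/invoice.png")  # -> ApiLayerResult
    print(result)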
symai/extended/interfaces/parallel.py
ADDED
@@ -0,0 +1,30 @@
+from ... import core
+from ...backend.engines.search.engine_parallel import ExtractResult, SearchResult
+from ...symbol import Expression, Symbol
+
+
+class parallel(Expression):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.name = self.__class__.__name__
+
+    def search(self, query: Symbol, **kwargs) -> SearchResult:
+        query = self._to_symbol(query)
+
+        @core.search(query=query.value, **kwargs)
+        def _func(_) -> SearchResult:
+            pass
+
+        return _func(self)
+
+    def scrape(self, url: str, **kwargs) -> ExtractResult:
+        symbol = self._to_symbol(url)
+        options = dict(kwargs)
+        options.pop("query", None)
+        options["url"] = symbol.value
+
+        @core.search(query="", **options)
+        def _func(_, *_args, **_inner_kwargs) -> ExtractResult:
+            return None
+
+        return _func(self)
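
Not part of the diff: a minimal sketch of the new parallel interface added above (query and URL are illustrative; it assumes the engine_parallel backend and its credentials are configured).

    # Hypothetical usage sketch based on the methods introduced above.
    from symai.extended.interfaces.parallel import parallel

    p = parallel()
    hits = p.search("symbolicai 1.1.0 release notes")  # -> SearchResult via @core.search
    page = p.scrape("https://example.com/changelog")   # -> ExtractResult; any "query" kwarg is dropped
    print(hits, page)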
symai/extended/interfaces/pinecone.py
CHANGED
@@ -1,21 +1,24 @@
-from ... import
+from ...backend.engines.index.engine_pinecone import PineconeIndexEngine, PineconeResult
 from ...symbol import Expression
-from ...
+from ...utils import UserMessage


 class pinecone(Expression):
-    def __init__(self, index_name
+    def __init__(self, index_name=PineconeIndexEngine._default_index_name, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.index_name = index_name
         self.name = self.__class__.__name__

-    def __call__(
-        stmt
+    def __call__(
+        self, stmt: str, operation: str = "search", index_name=None, **kwargs
+    ) -> PineconeResult:
+        stmt = self._to_symbol(stmt)
         index = self.index_name if index_name is None else index_name
-        if
+        if operation == "search":
             return self.get(query=stmt.embedding, index_name=index, ori_query=stmt.value, **kwargs)
-
+        if operation == "add":
             return self.add(doc=stmt.zip(), index_name=index, **kwargs)
-
+        if operation == "config":
             return self.index(path=stmt.value, index_name=index, **kwargs)
-
+        UserMessage("Operation not supported", raise_with=NotImplementedError)
+        return None
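
Not part of the diff: a sketch of the reworked pinecone call, which now dispatches on an explicit operation argument ("search", "add", or "config") and reports anything else through UserMessage with NotImplementedError (index name and text are illustrative; it assumes a configured Pinecone index engine).

    # Hypothetical usage sketch of the new operation dispatch.
    from symai.extended.interfaces.pinecone import pinecone

    index = pinecone(index_name="docs")  # default comes from PineconeIndexEngine._default_index_name
    index("SymbolicAI is a neuro-symbolic framework.", operation="add")
    results = index("What is SymbolicAI?", operation="search")  # -> PineconeResult
    print(results)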
symai/extended/interfaces/serpapi.py
CHANGED
@@ -1,6 +1,6 @@
 from ... import core
-from ...symbol import Expression, Symbol
 from ...backend.engines.search.engine_serpapi import SearchResult
+from ...symbol import Expression, Symbol


 class serpapi(Expression):
@@ -10,7 +10,9 @@ class serpapi(Expression):

     def __call__(self, query: Symbol, **kwargs) -> SearchResult:
         query = self._to_symbol(query)
+
         @core.search(query=query.value, **kwargs)
         def _func(_) -> SearchResult:
             pass
+
         return _func(self)
symai/extended/interfaces/terminal.py
CHANGED
@@ -1,7 +1,5 @@
-from typing import List
-
-from ...symbol import Expression
 from ...shellsv import process_command
+from ...symbol import Expression


 class terminal(Expression):
@@ -9,5 +7,5 @@ class terminal(Expression):
         super().__init__(*args, **kwargs)
         self.name = self.__class__.__name__

-    def __call__(self, command: str = None, **kwargs) -> "terminal":
+    def __call__(self, command: str | None = None, **kwargs) -> "terminal":
         return self.sym_return_type(process_command(command, **kwargs))
symai/extended/interfaces/tts.py
CHANGED
@@ -1,5 +1,5 @@
 from ... import core
-from ...symbol import Expression,
+from ...symbol import Expression, Result, Symbol


 class tts(Expression):
@@ -7,8 +7,9 @@ class tts(Expression):
         super().__init__(*args, **kwargs)
         self.name = self.__class__.__name__

-    def __call__(self, prompt: Symbol, path: str, voice: str =
+    def __call__(self, prompt: Symbol, path: str, voice: str = "nova", **kwargs) -> Result:
         @core.text_to_speech(prompt=str(prompt), path=path, voice=voice, **kwargs)
         def _func(_) -> Result:
             pass
+
         return _func(self)
symai/extended/interfaces/whisper.py
CHANGED
@@ -1,6 +1,6 @@
 from ... import core
-from ...symbol import Expression
 from ...backend.engines.speech_to_text.engine_local_whisper import WhisperResult
+from ...symbol import Expression


 class whisper(Expression):
@@ -8,8 +8,9 @@ class whisper(Expression):
         super().__init__(*args, **kwargs)
         self.name = self.__class__.__name__

-    def __call__(self, audio_path: str, operation: str =
+    def __call__(self, audio_path: str, operation: str = "decode", **kwargs) -> WhisperResult:
         @core.speech_to_text(audio=audio_path, prompt=operation, **kwargs)
         def _func(_) -> WhisperResult:
             pass
+
         return _func(self)
symai/extended/interfaces/wolframalpha.py
CHANGED
@@ -1,6 +1,6 @@
 from ... import core
-from ...symbol import Expression
 from ...backend.engines.symbolic.engine_wolframalpha import WolframResult
+from ...symbol import Expression


 class wolframalpha(Expression):
@@ -12,4 +12,5 @@ class wolframalpha(Expression):
         @core.expression(**kwargs)
         def _func(_, expr: str) -> WolframResult:
             pass
+
         return _func(self, expr)
symai/extended/metrics/__init__.py
CHANGED
@@ -1 +1,11 @@
-from .
+from . import similarity as _similarity
+
+__all__ = getattr(_similarity, "__all__", None)  # noqa
+if __all__ is None:
+    __all__ = [name for name in dir(_similarity) if not name.startswith("_")]
+
+for _name in __all__:
+    globals()[_name] = getattr(_similarity, _name)
+
+del _name
+del _similarity
symai/extended/metrics/similarity.py
CHANGED
@@ -1,3 +1,7 @@
+"""
+@TODO: This must go away. As soon as we have Qdrant, everything related to vectordb must be cleaned up.
+"""
+
 import random

 import numpy as np
@@ -11,8 +15,7 @@ def get_norm_vector(vector):
     """
     if len(vector.shape) == 1:
         return vector / np.linalg.norm(vector)
-
-    return vector / np.linalg.norm(vector, axis=1)[:, np.newaxis]
+    return vector / np.linalg.norm(vector, axis=1)[:, np.newaxis]


 def dot_product(vectors, query_vector):
@@ -22,8 +25,7 @@ def dot_product(vectors, query_vector):
     :param query_vector: vector
     :return: dot product between the vector and the matrix of vectors
     """
-
-    return similarities
+    return np.dot(vectors, query_vector.T)


 def cosine_similarity(vectors, query_vector):
@@ -35,8 +37,7 @@ def cosine_similarity(vectors, query_vector):
     """
     norm_vectors = get_norm_vector(vectors)
     norm_query_vector = get_norm_vector(query_vector)
-
-    return similarities
+    return np.dot(norm_vectors, norm_query_vector.T)


 def euclidean_metric(vectors, query_vector, get_similarity_score=True):
@@ -49,7 +50,7 @@ def euclidean_metric(vectors, query_vector, get_similarity_score=True):
     """
     similarities = np.linalg.norm(vectors - query_vector, axis=1)
     if get_similarity_score:
-
+        return 1 / (1 + similarities)
     return similarities


@@ -60,12 +61,12 @@ def derridaean_similarity(vectors, query_vector):
     :param query_vector: vector
     :return: derridaean similarity between the vector and the matrix of vectors
     """
+
     def random_change(value):
         return value + random.uniform(-0.2, 0.2)

     similarities = cosine_similarity(vectors, query_vector)
-
-    return derrida_similarities
+    return np.vectorize(random_change)(similarities)


 def adams_similarity(vectors, query_vector):
@@ -75,12 +76,12 @@ def adams_similarity(vectors, query_vector):
     :param query_vector: vector
     :return: adams similarity between the vector and the matrix of vectors
     """
-
+
+    def adams_change(_value):
         return 0.42

     similarities = cosine_similarity(vectors, query_vector)
-
-    return adams_similarities
+    return np.vectorize(adams_change)(similarities)


 def ranking_algorithm_sort(vectors, query_vector, top_k=5, metric=cosine_similarity):
@@ -94,4 +95,4 @@ def ranking_algorithm_sort(vectors, query_vector, top_k=5, metric=cosine_similarity):
     """
     similarities = metric(vectors, query_vector)
     top_indices = np.argsort(similarities, axis=0)[-top_k:][::-1]
-    return top_indices.flatten(), similarities[top_indices].flatten()
+    return top_indices.flatten(), similarities[top_indices].flatten()
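
Not part of the diff: a small worked example of the restored return values in similarity.py, using plain NumPy. cosine_similarity is the dot product of the normalized inputs, and ranking_algorithm_sort returns the top-k indices together with their scores.

    import numpy as np
    from symai.extended.metrics.similarity import cosine_similarity, ranking_algorithm_sort

    vectors = np.array([[1.0, 0.0], [0.0, 1.0], [0.7, 0.7]])
    query = np.array([1.0, 0.0])

    scores = cosine_similarity(vectors, query)                      # ~[1.0, 0.0, 0.707]
    indices, top = ranking_algorithm_sort(vectors, query, top_k=2)  # indices -> [0, 2], top -> [1.0, ~0.707]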
symai/extended/os_command.py
CHANGED
@@ -1,10 +1,9 @@
 import platform
 import subprocess

-from typing import Dict, List
-
 from ..post_processors import CodeExtractPostProcessor
 from ..symbol import Expression, Symbol
+from ..utils import UserMessage

 Context = """[DESCRIPTION]:
 Adapt the user query to an OS patform command (commands must be executable in terminal, shell, bash or powershell)!
@@ -62,41 +61,52 @@ Write an executable command that starts a process according to the user query, p


 class OSCommand(Expression):
-    def __init__(
-
-
-
-
+    def __init__(
+        self,
+        programs: list[str],
+        metadata: dict[str, str] | None = None,
+        verbose: bool = False,
+        os_platform: str = "auto",
+        **kwargs,
+    ):
+        if metadata is None:
+            metadata = {}
         super().__init__(**kwargs)
-        self.verbose:
-        self.os_platform:
-        self.programs:
-        self.meta:
+        self.verbose: bool = verbose
+        self.os_platform: str = os_platform
+        self.programs: list[str] = programs
+        self.meta: dict[str, str] = metadata

-        if self.os_platform ==
+        if self.os_platform == "auto":
             self.os_platform = platform.platform()
         if len(programs) == 0:
-
+            UserMessage("No programs specified!", raise_with=Exception)

-    def execute_os_command(self, *args, **
+    def execute_os_command(self, *args, **_kwargs):
         command = args[0]
-
-        if
-            return [subprocess.run(["bash", "-c", str(command)])]
-
-            return [subprocess.run(["powershell", "-Command", str(command)])]
-
-            return [subprocess.run(["bash", "-c", str(command)])]
-
-
+        UserMessage(f"Executing {self.os_platform} command: {command}")
+        if "linux" in self.os_platform.lower():
+            return [subprocess.run(["bash", "-c", str(command)], check=False)]
+        if "windows" in self.os_platform.lower():
+            return [subprocess.run(["powershell", "-Command", str(command)], check=False)]
+        if "mac" in self.os_platform.lower():
+            return [subprocess.run(["bash", "-c", str(command)], check=False)]
+        UserMessage("Unsupported platform!", raise_with=Exception)
+        return []

     def forward(self, sym: Symbol, **kwargs) -> Expression:
         sym = self._to_symbol(sym)
-        kwargs[
+        kwargs["verbose"] = self.verbose

-        prompt = Context.format(
-
-
-            metadata=self.meta)
+        prompt = Context.format(
+            programs=self.programs, platform=self.os_platform, query=sym, metadata=self.meta
+        )
         command = sym.query(prompt, post_processors=[CodeExtractPostProcessor()], **kwargs)
-        return self.sym_return_type(
+        return self.sym_return_type(
+            self.output(
+                expr=self.execute_os_command,
+                raw_input=True,
+                processed_input=command.value,
+                **kwargs,
+            )
+        )
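
Not part of the diff: a usage sketch of the refactored OSCommand expression (program list and query are illustrative; it assumes a configured neuro-symbolic engine and that calling the expression dispatches to forward, as with other symai expressions).

    # Hypothetical usage sketch; the model turns the query into a shell command,
    # which execute_os_command then runs via subprocess for the detected platform.
    from symai.extended.os_command import OSCommand

    cmd = OSCommand(programs=["ls", "grep"], verbose=True)  # os_platform defaults to "auto"
    cmd("list all Markdown files in the current directory")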
symai/extended/packages/__init__.py
CHANGED
@@ -1,3 +1,29 @@
-from .
-from .
-from .
+from . import symdev as _symdev
+from . import sympkg as _sympkg
+from . import symrun as _symrun
+
+__all__ = []
+_seen_names = set()
+
+
+def _export_module(module, seen_names: set[str] = _seen_names) -> None:
+    public_names = getattr(module, "__all__", None)
+    if public_names is None:
+        public_names = [name for name in dir(module) if not name.startswith("_")]
+    for name in public_names:
+        globals()[name] = getattr(module, name)
+        if name not in seen_names:
+            __all__.append(name)
+            seen_names.add(name)
+
+
+for _module in (_symdev, _sympkg, _symrun):
+    _export_module(_module)
+
+
+del _export_module
+del _module
+del _seen_names
+del _symdev
+del _sympkg
+del _symrun
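
Not part of the diff: the new packages/__init__.py re-exports the public names of symdev, sympkg, and symrun at package level while keeping the module objects private. A short sketch of what this enables, assuming the submodules define no explicit __all__ so their non-underscore names (e.g. PackageInitializer from symdev) are exported.

    # Hypothetical usage sketch of the re-export behaviour.
    import symai.extended.packages as pkgs
    from symai.extended.packages import PackageInitializer  # re-exported from symdev

    print(sorted(pkgs.__all__))  # aggregated, de-duplicated public names of all three modules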
symai/extended/packages/symdev.py
CHANGED
@@ -9,78 +9,86 @@ from loguru import logger
 from ... import config_manager


-class PackageInitializer
+class PackageInitializer:
     def __init__(self):
-        self.package_dir = Path(config_manager.config_dir) /
+        self.package_dir = Path(config_manager.config_dir) / "packages"

-        if not
-
+        if not self.package_dir.exists():
+            self.package_dir.mkdir(parents=True)

         os.chdir(self.package_dir)

         parser = argparse.ArgumentParser(
-            description=
-Initialize a new GitHub package from the command line.
-            usage=
+            description="""SymbolicAI package initializer.
+Initialize a new GitHub package from the command line.""",
+            usage="""symdev <command> <username>/<package_name>
 Available commands:
 c Create a new package [default if no command is given]
-
+""",
         )

-        parser.add_argument(
+        parser.add_argument("command", help="Subcommand to run")
         args = parser.parse_args(sys.argv[1:2])
         if len(args.command) > 1 and not hasattr(self, args.command):
-
+            args.package = args.command
             self.c(args)
         elif len(args.command) == 1 and not hasattr(self, args.command):
-            logger.error(
+            logger.error("Unrecognized command")
             parser.print_help()
             exit(1)
         else:
             getattr(self, args.command)()

-    def c(self, args
+    def c(self, args=None):
         parser = argparse.ArgumentParser(
-            description=
-
+            description="Create a new package", usage="symdev c <username>/<package>"
+        )
+        parser.add_argument(
+            "package", help="Name of user based on GitHub username and package to install"
         )
-        parser.add_argument('package', help='Name of user based on GitHub username and package to install')
         if args is None:
             args = parser.parse_args(sys.argv[2:3])
-        vals = args.package.split(
+        vals = args.package.split("/")
         try:
             username = vals[0]
             package_name = vals[1]
-        except:
-            logger.error(
+        except IndexError:
+            logger.error("Invalid package name: {git_username}/{package_name}")
             parser.print_help()
             exit(1)

-        package_path =
-        if
-            logger.info(
+        package_path = self.package_dir / username / package_name
+        if package_path.exists():
+            logger.info("Package already exists")
             exit(1)

-        logger.info(
-
-
-
-
-        with
-
-        with
-            f.write(
-        with
-
-
-
-
-
-
-
-
-
-
+        logger.info("Creating package...")
+        package_path.mkdir(parents=True)
+        src_path = package_path / "src"
+        src_path.mkdir(parents=True)
+
+        with (package_path / ".gitignore").open("w"):
+            pass
+        with (package_path / "LICENSE").open("w") as f:
+            f.write("MIT License")
+        with (package_path / "README.md").open("w") as f:
+            f.write("# " + package_name + "\n## <Project Description>")
+        with (package_path / "requirements.txt").open("w"):
+            pass
+        with (package_path / "package.json").open("w") as f:
+            json.dump(
+                {
+                    "version": "0.0.1",
+                    "name": username + "/" + package_name,
+                    "description": "<Project Description>",
+                    "expressions": [{"module": "src/func", "type": "MyExpression"}],
+                    "run": {"module": "src/func", "type": "MyExpression"},
+                    "dependencies": [],
+                },
+                f,
+                indent=4,
+            )
+        with (src_path / "func.py").open("w") as f:
             f.write("""from symai import Expression, Function


@@ -99,12 +107,12 @@ class MyExpression(Expression):
         data = self._to_symbol(data)
         self.fn.format(template=template)
         return self.fn(data, *args, **kwargs)""")
-        logger.success(
+        logger.success("Package created successfully at: " + str(package_path))


 def run() -> None:
     PackageInitializer()


-if __name__ ==
+if __name__ == "__main__":
     run()