lionagi 0.0.205__tar.gz → 0.0.206__tar.gz
- {lionagi-0.0.205 → lionagi-0.0.206}/PKG-INFO +1 -1
- lionagi-0.0.206/lionagi/_services/litellm.py +33 -0
- lionagi-0.0.206/lionagi/_services/mlx_service.py +39 -0
- lionagi-0.0.206/lionagi/_services/ollama.py +33 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/services.py +40 -0
- lionagi-0.0.206/lionagi/_services/transformers.py +73 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/bridge/langchain.py +40 -6
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/bridge/llama_index.py +88 -11
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/branch/branch.py +4 -4
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/sessions/session.py +6 -3
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/loaders/chunker.py +2 -1
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tools/tool_manager.py +5 -5
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/__init__.py +2 -1
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/call_util.py +13 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/sys_util.py +63 -1
- lionagi-0.0.206/lionagi/version.py +1 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi.egg-info/PKG-INFO +1 -1
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi.egg-info/SOURCES.txt +1 -0
- lionagi-0.0.205/lionagi/_services/litellm.py +0 -1
- lionagi-0.0.205/lionagi/_services/ollama.py +0 -1
- lionagi-0.0.205/lionagi/_services/transformers.py +0 -46
- lionagi-0.0.205/lionagi/version.py +0 -1
- {lionagi-0.0.205 → lionagi-0.0.206}/LICENSE +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/README.md +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/README.rst +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/anthropic.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/anyscale.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/azure.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/base_service.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/bedrock.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/everlyai.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/gemini.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/gpt4all.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/huggingface.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/localai.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/mistralai.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/oai.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/openllm.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/openrouter.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/perplexity.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/predibase.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/rungpt.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/vllm.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/xinference.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/agents/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/agents/planner.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/agents/prompter.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/agents/scorer.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/agents/summarizer.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/agents/validator.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/bridge/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/configs/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/configs/oai_configs.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/configs/openrouter_configs.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/branch/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/branch/branch_manager.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/branch/cluster.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/branch/conversation.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/core_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/flow/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/flow/flow.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/flow/flow_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/instruction_set/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/instruction_set/instruction_set.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/messages/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/messages/messages.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/sessions/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/chroma.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/deeplake.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/elasticsearch.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/lantern.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/pinecone.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/postgres.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/datastores/qdrant.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/loaders/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/loaders/load_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/loaders/reader.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/models/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/models/base_model.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/models/imodel.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/async_queue.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/base_condition.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/base_node.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/base_tool.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/data_logger.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/data_node.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/schema/status_tracker.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/structures/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/structures/graph.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/structures/relationship.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/structures/structure.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/test_utils/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/test_utils/test_api_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/test_utils/test_call_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/test_utils/test_encrypt_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/test_utils/test_io_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/test_utils/test_nested_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tests/test_utils/test_sys_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tools/__init__.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tools/tool_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/api_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/encrypt_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/io_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/nested_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/pd_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/url_util.py +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi.egg-info/dependency_links.txt +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi.egg-info/requires.txt +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/lionagi.egg-info/top_level.txt +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/pyproject.toml +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/setup.cfg +0 -0
- {lionagi-0.0.205 → lionagi-0.0.206}/setup.py +0 -0
lionagi-0.0.206/lionagi/_services/litellm.py (new file)
@@ -0,0 +1,33 @@
+from ..utils.sys_util import install_import, is_package_installed
+from .base_service import BaseService
+
+
+class LiteLLMService(BaseService):
+    def __init__(self, model: str = None, **kwargs):
+        super().__init__()
+
+        try:
+            if not is_package_installed('litellm'):
+                install_import(
+                    package_name='litellm',
+                    import_name='acompletion'
+                )
+            from litellm import acompletion
+            self.acompletion = acompletion
+        except:
+            raise ImportError(f'Unable to import required module from litellm. Please make sure that litellm is installed.')
+
+        self.model = model
+        self.kwargs = kwargs
+
+    async def serve_chat(self, messages, **kwargs):
+        payload = {'messages': messages}
+        kwargs = {**self.kwargs, **kwargs}
+
+        try:
+            completion = await self.acompletion(model=self.model, messages=messages, **kwargs)
+            return payload, completion
+        except Exception as e:
+            self.status_tracker.num_tasks_failed += 1
+            raise e
+
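A minimal usage sketch for the new service (assuming litellm is installed and provider credentials are configured; the model id below is illustrative):

    import asyncio
    from lionagi._services.litellm import LiteLLMService

    async def main():
        service = LiteLLMService(model='gpt-3.5-turbo')  # any litellm-supported model id
        payload, completion = await service.serve_chat(
            [{'role': 'user', 'content': 'Hello!'}]
        )
        print(completion)

    asyncio.run(main())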
lionagi-0.0.206/lionagi/_services/mlx_service.py (new file)
@@ -0,0 +1,39 @@
+from ..utils.sys_util import install_import, is_package_installed, as_dict
+from .base_service import BaseService
+
+
+class MlXService(BaseService):
+    def __init__(self, model="mlx-community/OLMo-7B-hf-4bit-mlx", **kwargs):
+
+        if not is_package_installed('mlx_lm'):
+            install_import(package_name='mlx_lm')
+
+        from mlx_lm import load, generate
+        super().__init__()
+
+        model_, tokenizer = load(model, **kwargs)
+
+        self.model_name = model
+        self.model = model_
+        self.tokenizer = tokenizer
+        self.generate = generate
+
+    async def serve_chat(self, messages, **kwargs):
+        prompts = [as_dict(msg['content'])['instruction'] for msg in messages if msg['role'] == 'user']
+
+        payload = {'messages': messages}
+
+        try:
+            response = self.generate(
+                self.model, self.tokenizer, prompt=f"{prompts[-1]} \nOutput: ", **kwargs
+            )
+            completion = {
+                'model': self.model_name,
+                'choices': [{'message': response}]
+            }
+
+            return payload, completion
+        except Exception as e:
+            self.status_tracker.num_tasks_failed += 1
+            raise e
+
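Note the message shape serve_chat assumes: user messages carry a dict-like 'content' with an 'instruction' key, and only the last user prompt reaches mlx_lm.generate. A sketch of a conforming input (values illustrative):

    messages = [
        {'role': 'system', 'content': 'You are a helpful assistant.'},
        {'role': 'user', 'content': '{"instruction": "Summarize MLX in one line."}'},
    ]
    # payload, completion = await MlXService().serve_chat(messages)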
lionagi-0.0.206/lionagi/_services/ollama.py (new file)
@@ -0,0 +1,33 @@
+from ..utils.sys_util import install_import, is_package_installed
+from .base_service import BaseService
+
+class OllamaService(BaseService):
+    def __init__(self, model: str = None, **kwargs):
+        super().__init__()
+
+        try:
+            if not is_package_installed('ollama'):
+                install_import(
+                    package_name='ollama',
+                    import_name='Client'
+                )
+            import ollama
+            self.ollama = ollama
+        except:
+            raise ImportError(f'Unable to import required module from ollama. Please make sure that ollama is installed.')
+
+        self.model = model
+        self.client = self.ollama.Client(**kwargs)
+
+    async def serve_chat(self, messages, **kwargs):
+        self.ollama.pull(self.model)
+        payload = {'messages': messages}
+
+        try:
+            completion = self.client.chat(model=self.model, messages=messages, **kwargs)
+            completion['choices'] = [{'message': completion.pop('message')}]
+            return payload, completion
+        except Exception as e:
+            self.status_tracker.num_tasks_failed += 1
+            raise e
+
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/_services/services.py
@@ -89,3 +89,43 @@ class Services:
 
         from .anthropic import AnthropicService
         return AnthropicService(**kwargs)
+
+    @staticmethod
+    def Ollama(**kwargs):
+        """
+        A service to interact with Ollama
+
+        Attributes:
+            model (str): name of the model to use
+            kwargs (Optional[Any]): additional kwargs for calling the model
+        """
+
+        from .ollama import OllamaService
+        return OllamaService(**kwargs)
+
+    @staticmethod
+    def LiteLLM(**kwargs):
+        """
+        A service to interact with LiteLLM
+
+        Attributes:
+            model (str): name of the model to use
+            kwargs (Optional[Any]): additional kwargs for calling the model
+        """
+
+        from .litellm import LiteLLMService
+        return LiteLLMService(**kwargs)
+
+    @staticmethod
+    def MLX(**kwargs):
+        """
+        A service to interact with MLX
+
+        Attributes:
+            model (str): name of the model to use
+            kwargs (Optional[Any]): additional kwargs for calling the model
+        """
+
+        from .mlx_service import MlXService
+        return MlXService(**kwargs)
+
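These entry points let callers construct a backend without importing its module directly. A hedged sketch (model names illustrative; each call needs its backend package installed, and MLX() downloads the default model on first use):

    from lionagi._services.services import Services

    ollama = Services.Ollama(model='llama2')
    litellm = Services.LiteLLM(model='gpt-3.5-turbo')
    mlx = Services.MLX()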
lionagi-0.0.206/lionagi/_services/transformers.py (new file)
@@ -0,0 +1,73 @@
+from typing import Union, Dict, Any
+import subprocess
+
+from ..utils.sys_util import install_import, get_cpu_architecture, is_package_installed
+from .base_service import BaseService
+
+
+def get_pytorch_install_command():
+    cpu_arch = get_cpu_architecture()
+
+    if cpu_arch == 'apple_silicon':
+        return "pip install --pre torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/nightly/cpu"
+    else:
+        # Default CPU installation
+        return "pip install torch torchvision torchaudio"
+
+def install_pytorch():
+    command = get_pytorch_install_command()
+    try:
+        subprocess.run(command.split(), check=True)
+        print("PyTorch installed successfully.")
+    except subprocess.CalledProcessError as e:
+        print(f"Failed to install PyTorch: {e}")
+
+class TransformersService(BaseService):
+    def __init__(self, task: str = None, model: Union[str, Any] = None, config: Union[str, Dict, Any] = None, device='cpu', **kwargs):
+        super().__init__()
+        self.task = task
+        self.model = model
+        self.config = config
+        try:
+            from transformers import pipeline
+            self.pipeline = pipeline
+        except ImportError:
+            try:
+                if not is_package_installed('torch'):
+                    in_ = input("PyTorch is required for transformers. Would you like to install it now? (y/n): ")
+                    if in_ == 'y':
+                        install_pytorch()
+                if not is_package_installed('transformers'):
+                    in_ = input("transformers is required. Would you like to install it now? (y/n): ")
+                    if in_ == 'y':
+                        install_import(
+                            package_name='transformers',
+                            import_name='pipeline'
+                        )
+                    from transformers import pipeline
+                    self.pipeline = pipeline
+            except Exception as e:
+                raise ImportError(f'Unable to import required module from transformers. Please make sure that transformers is installed. Error: {e}')
+
+        self.pipe = self.pipeline(task=task, model=model, config=config, device=device, **kwargs)
+
+    async def serve_chat(self, messages, **kwargs):
+        if self.task:
+            if self.task != 'conversational':
+                raise ValueError(f"Invalid transformers pipeline task: {self.task}.")
+
+        payload = {'messages': messages}
+        conversation = self.pipe(str(messages), **kwargs)
+
+        texts = conversation[-1]['generated_text']
+        msgs = str(texts.split(']')[1:]).replace('\\n', '').replace("[\'", "").replace('\\', '')
+
+        completion = {
+            "model": self.pipe.model,
+            "choices": [{
+                "message": msgs
+            }]
+        }
+
+        return payload, completion
+
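Compared with the 0.0.205 implementation removed further below, serve_chat is now natively async, prompts the pipeline with str(messages) rather than a transformers Conversation object, and recovers the reply by string-splitting 'generated_text'. A usage sketch (model name illustrative; torch and transformers must be installed or accepted at the interactive prompts):

    import asyncio
    from lionagi._services.transformers import TransformersService

    async def main():
        service = TransformersService(task='conversational', model='microsoft/DialoGPT-medium')
        payload, completion = await service.serve_chat(
            [{'role': 'user', 'content': 'Hello!'}]
        )
        print(completion['choices'][0]['message'])

    asyncio.run(main())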
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/bridge/langchain.py
@@ -1,5 +1,5 @@
 from typing import Union, Callable, List, Dict, Any, TypeVar
-from ..utils.sys_util import change_dict_key
+from ..utils.sys_util import change_dict_key, install_import
 from ..schema.data_node import DataNode
 
 
@@ -43,7 +43,18 @@ def to_langchain_document(datanode: T, **kwargs: Any) -> Any:
     >>> isinstance(lc_document, LangchainDocument)
     True
     """
-
+    try:
+        from langchain.schema import Document
+    except ImportError:
+        try:
+            install_import(
+                package_name='langchain',
+                module_name='schema',
+                import_name='Document',
+            )
+            from langchain.schema import Document
+        except Exception as e:
+            raise ImportError(f'Unable to import required module from langchain. Please make sure that langchain is installed. Error: {e}')
 
     dnode = datanode.to_dict()
     change_dict_key(dnode, old_key='content', new_key='page_content')
@@ -73,11 +84,24 @@ def langchain_loader(loader: Union[str, Callable],
     >>> isinstance(data, dict)
     True
     """
-
-
+    try:
+        import langchain.document_loaders as document_loaders
+    except ImportError:
+        try:
+            install_import(
+                package_name='langchain',
+                module_name='document_loaders',
+            )
+            import langchain.document_loaders as document_loaders
+        except Exception as e:
+            raise ImportError(f'Unable to import required module from langchain. Please make sure that langchain is installed. Error: {e}')
+
     try:
         if isinstance(loader, str):
-
+            try:
+                loader = getattr(document_loaders, loader)
+            except ImportError as e:
+                raise ValueError(f'Unable to import {loader} from langchain.document_loaders. Some dependencies of LangChain are not installed. Error: {e}')
         else:
             loader = loader
     except Exception as e:
@@ -110,7 +134,17 @@ def langchain_text_splitter(data: Union[str, List],
     ValueError: If the specified text splitter is invalid or if the splitting fails.
     """
 
-
+    try:
+        import langchain.text_splitter as text_splitter
+    except ImportError:
+        try:
+            install_import(
+                package_name='langchain',
+                module_name='text_splitter'
+            )
+            import langchain.text_splitter as text_splitter
+        except Exception as e:
+            raise ImportError(f'Unable to import required module from langchain. Please make sure that langchain is installed. Error: {e}')
 
     try:
         if isinstance(splitter, str):
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/bridge/llama_index.py
@@ -1,5 +1,5 @@
 from typing import Union, Callable, List, Any, Dict, TypeVar
-from ..utils.sys_util import change_dict_key
+from ..utils.sys_util import change_dict_key, install_import, is_package_installed
 from ..schema.data_node import DataNode
 
 
@@ -38,7 +38,18 @@ def to_llama_index_textnode(datanode: T, **kwargs: Any) -> Any:
     datanode = DataNode(...)
     textnode = to_llama_index_textnode(datanode, additional_arg=1)
     """
-
+    try:
+        from llama_index.schema import TextNode
+    except ImportError:
+        try:
+            install_import(
+                package_name='llama_index',
+                module_name='schema',
+                import_name='TextNode'
+            )
+            from llama_index.schema import TextNode
+        except Exception as e:
+            raise ImportError(f'Unable to import required module from llama_index. Please make sure that llama_index is installed. Error: {e}')
 
     dnode = datanode.to_dict()
     change_dict_key(dnode, old_key='content', new_key='text')
@@ -67,14 +78,36 @@ def get_llama_reader(reader: Union[str, Callable]) -> Callable:
     def custom_reader(): pass
     reader = get_llama_reader(custom_reader)
     """
+
     try:
         if isinstance(reader, str):
             if reader == 'SimpleDirectoryReader':
-
-
-
-
-
+                try:
+                    from llama_index import SimpleDirectoryReader
+                    return SimpleDirectoryReader
+                except (ImportError, ModuleNotFoundError):
+                    try:
+                        install_import(
+                            package_name='llama_index',
+                            import_name='SimpleDirectoryReader'
+                        )
+                        from llama_index import SimpleDirectoryReader
+                        return SimpleDirectoryReader
+                    except Exception as e:
+                        raise ImportError(f'Failed to import SimpleDirectoryReader. Error: {e}')
+            else:
+                try:
+                    from llama_index import download_loader
+                    return download_loader(reader)
+                except ImportError:
+                    try:
+                        install_import(
+                            package_name='llama_index',
+                            import_name='download_loader'
+                        )
+                        return download_loader(reader)
+                    except Exception as e:
+                        raise ImportError(f'Failed to import download_loader from LlamaIndex. Error: {e}')
         else:
             return reader
     except Exception as e:
@@ -133,11 +166,43 @@ def get_llama_parser(parser: Union[str, Callable]) -> Callable:
     def custom_parser(): pass
     parser = get_llama_parser(custom_parser)
     """
-
-
+
+    try:
+        import llama_index.node_parser as node_parser
+    except ImportError:
+        try:
+            install_import(
+                package_name='llama_index',
+                module_name='node_parser'
+            )
+            import llama_index.node_parser as node_parser
+        except ImportError:
+            raise ImportError('Failed to import Llama Index. Please install Llama Index to use this function.')
+        except Exception as e:
+            raise ValueError(f'Invalid node parser: {parser}. Error: {e}')
 
     try:
-
+        import llama_index.text_splitter as text_splitter
+    except ImportError:
+        try:
+            install_import(
+                package_name='llama_index',
+                module_name='text_splitter'
+            )
+            import llama_index.text_splitter as text_splitter
+        except ImportError:
+            raise ImportError('Failed to import Llama Index. Please install Llama Index to use this function.')
+
+    try:
+        if parser == 'CodeSplitter':
+            if not is_package_installed('tree_sitter_languages'):
+                install_import(package_name='tree_sitter_languages')
+
+        a = getattr(node_parser, parser)
+        if a is not None:
+            return a
+        else:
+            raise ImportError(f'Failed to import {parser} from Llama Index.')
     except Exception as e1:
         try:
             if isinstance(parser, str):
@@ -172,13 +237,25 @@ def llama_index_node_parser(documents: List[Any],
     Example:
         nodes = llama_index_node_parser(documents, "DefaultNodeParser")
     """
-    parser = get_llama_parser(parser)
 
     try:
+        parser = get_llama_parser(parser)
         parser_obj = parser(*parser_args, **parser_kwargs)
         nodes = parser_obj.get_nodes_from_documents(documents, **parsing_kwargs)
        return nodes
 
+    except ImportError as e:
+        module_name = str(e).split("\'")[-2]
+        try:
+            install_import(package_name=module_name)
+            parser = get_llama_parser(parser)
+            parser_obj = parser(*parser_args, **parser_kwargs)
+            nodes = parser_obj.get_nodes_from_documents(documents, **parsing_kwargs)
+            return nodes
+        except Exception as e:
+            raise ImportError(f'Failed to install and import {module_name}. Error: {e}')
+
+
     except Exception as e1:
         try:
             parser_obj = parser.from_defaults(*parser_args, **parser_kwargs)
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/branch/branch.py
@@ -7,7 +7,7 @@ from collections import deque
 import asyncio
 from dotenv import load_dotenv
 
-from lionagi.utils import as_dict, get_flattened_keys, lcall
+from lionagi.utils import as_dict, get_flattened_keys, alcall, lcall, mcall, to_list
 from lionagi.schema import Tool
 from lionagi._services.base_service import StatusTracker, BaseService
 from lionagi._services.oai import OpenAIService
@@ -375,10 +375,10 @@ class Branch(Conversation):
             self.tool_manager.get_function_call
         )
 
-
+        outs = await alcall(func_calls, self.tool_manager.invoke)
+
+        outs = to_list(outs, flatten=True)
 
-        tasks = [self.tool_manager.invoke(i) for i in func_calls]
-        outs = await asyncio.gather(*tasks)
         for out_, f in zip(outs, func_calls):
             self.add_message(
                 response={
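The manual asyncio.gather fan-out is replaced by lionagi's own alcall plus a to_list flatten. Roughly, alcall applies an async callable over a list concurrently; a sketch of the assumed semantics, not the library's actual implementation:

    import asyncio

    async def alcall_sketch(inputs, func):
        # approximate stand-in for `await alcall(inputs, func)`:
        # run func over every input concurrently and collect the results
        return await asyncio.gather(*(func(i) for i in inputs))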
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/core/sessions/session.py
@@ -8,9 +8,9 @@ from ..messages.messages import System, Instruction
 from ..branch.branch import Branch
 from ..branch.branch_manager import BranchManager
 
-
+
 load_dotenv()
-
+
 
 class Session:
     """
@@ -33,7 +33,7 @@ class Session:
         sender: Optional[str] = None,
         dir: Optional[str] = None,
         llmconfig: Optional[Dict[str, Any]] = None,
-        service: OpenAIService = OpenAIService(),
+        service: OpenAIService = None,
         branches: Optional[Dict[str, Branch]] = None,
         default_branch: Optional[Branch] = None,
         default_branch_name: str = 'main',
@@ -52,6 +52,9 @@ class Session:
         """
 
         self.branches = branches if isinstance(branches, dict) else {}
+        if service is None:
+            service = OpenAIService()
+
         self.default_branch = default_branch if default_branch else Branch(name=default_branch_name, service=service, llmconfig=llmconfig)
         self.default_branch_name = default_branch_name
         if system:
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/loaders/chunker.py
@@ -140,7 +140,8 @@ def chunk(documents,
     elif chunker_type == ChunkerType.LLAMAINDEX:
         if documents_convert_func:
             documents = documents_convert_func(documents, 'llama_index')
-        nodes = llama_index_node_parser(documents, chunker, chunker_args, chunker_kwargs, chunking_kwargs)
+        nodes = llama_index_node_parser(documents, chunker, chunker_args, chunker_kwargs, chunking_kwargs)
+
     if isinstance(to_datanode, bool) and to_datanode is True:
         nodes = lcall(nodes, from_llama_index)
     elif isinstance(to_datanode, Callable):
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/tools/tool_manager.py
@@ -1,7 +1,7 @@
 import json
 import asyncio
 from typing import Dict, Union, List, Tuple, Any
-from lionagi.utils import lcall
+from lionagi.utils.call_util import lcall, is_coroutine_func, _call_handler
 from lionagi.schema import BaseNode, Tool
 
 
@@ -74,10 +74,10 @@ class ToolManager(BaseNode):
             func = tool.func
             parser = tool.parser
             try:
-
-
-                else
-
+                tasks = [await _call_handler(func, **kwargs)]
+                out = await asyncio.gather(*tasks)
+                return parser(out) if parser else out
+
             except Exception as e:
                 raise ValueError(f"Error when invoking function {name} with arguments {kwargs} with error message {e}")
         else:
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/__init__.py
@@ -1,7 +1,7 @@
 from .sys_util import (
     get_timestamp, create_copy, create_path, split_path,
     get_bins, change_dict_key, str_to_num, create_id,
-    as_dict)
+    as_dict, is_package_installed, install_import)
 
 from .nested_util import (
     to_readable_dict, nfilter, nset, nget,
@@ -19,6 +19,7 @@ from .call_util import (
 
 
 __all__ = [
+    "is_package_installed", "install_import",
     'get_timestamp', 'create_copy', 'create_path', 'split_path',
     'get_bins', 'change_dict_key', 'str_to_num', 'create_id',
     'as_dict', 'to_list', 'to_readable_dict', 'nfilter', 'nset',
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/call_util.py
@@ -2,6 +2,8 @@ import asyncio
 import functools
 import logging
 import time
+
+from concurrent.futures import ThreadPoolExecutor
 from typing import Any, Callable, Generator, Iterable, List, Dict, Optional, Tuple
 
 from aiocache import cached
@@ -643,6 +645,17 @@ class CallDecorator:
         reducing the frequency of resource-intensive operations.
         """
         return Throttle(period)
+
+    @staticmethod
+    def force_async(fn):
+        pool = ThreadPoolExecutor()
+
+        @functools.wraps(fn)
+        def wrapper(*args, **kwargs):
+            future = pool.submit(fn, *args, **kwargs)
+            return asyncio.wrap_future(future)  # make it awaitable
+
+        return wrapper
 
 class Throttle:
     """
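force_async (lifted out of the old transformers service, removed further below) turns a blocking function into an awaitable by running it in a thread pool. A usage sketch:

    import asyncio
    import time
    from lionagi.utils.call_util import CallDecorator

    @CallDecorator.force_async
    def blocking_io():
        time.sleep(1)  # stands in for any blocking call
        return 'done'

    async def main():
        print(await blocking_io())  # the wrapped call returns an awaitable future

    asyncio.run(main())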
{lionagi-0.0.205 → lionagi-0.0.206}/lionagi/utils/sys_util.py
@@ -1,15 +1,27 @@
 import os
+import subprocess
+import sys
 import copy
 from datetime import datetime
 import hashlib
 import re
-
+import importlib.util
+import platform
 import json
 import logging
 
 from typing import Any, List, Dict, Union
 
 
+
+def get_cpu_architecture():
+    arch = platform.machine()
+    if 'arm' in arch or 'aarch64' in arch:
+        return 'apple_silicon'
+    else:
+        return 'other_cpu'
+
+
 def get_timestamp() -> str:
     """
     Generates a current timestamp in ISO format with colons and periods replaced by
@@ -352,3 +364,53 @@ def is_schema(dict_: Dict, schema: Dict):
 def timestamp_to_datetime(timestamp):
     return datetime.fromtimestamp(timestamp)
 
+def install_import(package_name, module_name=None, import_name=None, pip_name=None):
+
+    """
+    For a simple package import where package name matches pip name
+    import xx
+    install_import('xx')
+
+
+    For importing a module from a package
+    import xx.yy
+    install_import('xx', 'yy')
+
+    For importing a specific attribute from a module/sub-module
+    from xx.yy import zz
+    install_import('xx', 'yy', 'zz')
+
+    For cases where pip name differs from package name
+    install_import('xx', pip_name='different_pip_name')
+
+    For full example with different pip name
+    install_import('xx', 'yy', 'zz', 'different_pip_name')
+    """
+
+    if pip_name is None:
+        pip_name = package_name  # Defaults to package_name if pip_name is not explicitly provided
+
+    full_import_path = package_name if module_name is None else f"{package_name}.{module_name}"
+
+    try:
+        if import_name:
+            # For importing a specific name from a module or sub-module
+            module = __import__(full_import_path, fromlist=[import_name])
+            getattr(module, import_name)
+        else:
+            # For importing the module or package itself
+            __import__(full_import_path)
+        print(f"Successfully imported {import_name or full_import_path}.")
+    except ImportError:
+        print(f"Module {full_import_path} or attribute {import_name} not found. Installing {pip_name}...")
+        subprocess.check_call([sys.executable, "-m", "pip", "install", pip_name])
+        # Retry the import after installation
+        if import_name:
+            module = __import__(full_import_path, fromlist=[import_name])
+            getattr(module, import_name)
+        else:
+            __import__(full_import_path)
+
+def is_package_installed(package_name):
+    package_spec = importlib.util.find_spec(package_name)
+    return package_spec is not None
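The docstring above covers the call patterns; a concrete sketch using a real package whose pip name differs from its import name:

    from lionagi.utils.sys_util import install_import, is_package_installed

    if not is_package_installed('yaml'):
        # pip package is 'pyyaml', import name is 'yaml'
        install_import(package_name='yaml', pip_name='pyyaml')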
lionagi-0.0.206/lionagi/version.py (new file)
@@ -0,0 +1 @@
+__version__ = "0.0.206"
lionagi-0.0.205/lionagi/_services/litellm.py (removed)
@@ -1 +0,0 @@
-# TODO
lionagi-0.0.205/lionagi/_services/ollama.py (removed)
@@ -1 +0,0 @@
-# TODO
lionagi-0.0.205/lionagi/_services/transformers.py (removed)
@@ -1,46 +0,0 @@
-from typing import Union, Dict, Any
-from transformers import pipeline, Conversation
-
-from .base_service import BaseService
-
-import functools
-from concurrent.futures import ThreadPoolExecutor
-import asyncio
-
-
-def force_async(fn):
-    pool = ThreadPoolExecutor()
-
-    @functools.wraps(fn)
-    def wrapper(*args, **kwargs):
-        future = pool.submit(fn, *args, **kwargs)
-        return asyncio.wrap_future(future)  # make it awaitable
-
-    return wrapper
-
-
-class TransformersService(BaseService):
-    def __init__(self, task: str = None, model: Union[str, Any] = None, config: Union[str, Dict, Any] = None, **kwargs):
-        super().__init__()
-        self.task = task
-        self.model = model
-        self.config = config
-        self.pipe = pipeline(task=task, model=model, config=config, **kwargs)
-
-    @force_async
-    def serve_chat(self, messages, **kwargs):
-        if self.task:
-            if self.task != 'conversational':
-                raise ValueError(f"Invalid transformers pipeline task: {self.task}. Valid task: 'conversational'")
-
-        payload = {'messages': messages}
-        conversation = self.pipe(Conversation(messages), **kwargs)
-        completion = {"Conversation id": conversation.uuid,
-                      "model": self.pipe.model,
-                      "choices": [{
-                          "message": conversation.messages[-1]
-                      }]
-                      }
-
-        return payload, completion
-
lionagi-0.0.205/lionagi/version.py (removed)
@@ -1 +0,0 @@
-__version__ = "0.0.205"