lionagi 0.0.115__py3-none-any.whl → 0.0.204__py3-none-any.whl
- lionagi/__init__.py +1 -2
- lionagi/_services/__init__.py +5 -0
- lionagi/_services/anthropic.py +79 -0
- lionagi/_services/base_service.py +414 -0
- lionagi/_services/oai.py +98 -0
- lionagi/_services/openrouter.py +44 -0
- lionagi/_services/services.py +91 -0
- lionagi/_services/transformers.py +46 -0
- lionagi/bridge/langchain.py +26 -16
- lionagi/bridge/llama_index.py +50 -20
- lionagi/configs/oai_configs.py +2 -14
- lionagi/configs/openrouter_configs.py +2 -2
- lionagi/core/__init__.py +7 -8
- lionagi/core/branch/branch.py +589 -0
- lionagi/core/branch/branch_manager.py +139 -0
- lionagi/core/branch/conversation.py +484 -0
- lionagi/core/core_util.py +59 -0
- lionagi/core/flow/flow.py +19 -0
- lionagi/core/flow/flow_util.py +62 -0
- lionagi/core/instruction_set/__init__.py +0 -5
- lionagi/core/instruction_set/instruction_set.py +343 -0
- lionagi/core/messages/messages.py +176 -0
- lionagi/core/sessions/__init__.py +0 -5
- lionagi/core/sessions/session.py +428 -0
- lionagi/loaders/chunker.py +51 -47
- lionagi/loaders/load_util.py +2 -2
- lionagi/loaders/reader.py +45 -39
- lionagi/models/imodel.py +53 -0
- lionagi/schema/async_queue.py +158 -0
- lionagi/schema/base_node.py +318 -147
- lionagi/schema/base_tool.py +31 -1
- lionagi/schema/data_logger.py +74 -38
- lionagi/schema/data_node.py +57 -6
- lionagi/structures/graph.py +132 -10
- lionagi/structures/relationship.py +58 -20
- lionagi/structures/structure.py +36 -25
- lionagi/tests/test_utils/test_api_util.py +219 -0
- lionagi/tests/test_utils/test_call_util.py +785 -0
- lionagi/tests/test_utils/test_encrypt_util.py +323 -0
- lionagi/tests/test_utils/test_io_util.py +238 -0
- lionagi/tests/test_utils/test_nested_util.py +338 -0
- lionagi/tests/test_utils/test_sys_util.py +358 -0
- lionagi/tools/tool_manager.py +186 -0
- lionagi/tools/tool_util.py +266 -3
- lionagi/utils/__init__.py +21 -61
- lionagi/utils/api_util.py +359 -71
- lionagi/utils/call_util.py +839 -264
- lionagi/utils/encrypt_util.py +283 -16
- lionagi/utils/io_util.py +178 -93
- lionagi/utils/nested_util.py +672 -0
- lionagi/utils/pd_util.py +57 -0
- lionagi/utils/sys_util.py +284 -156
- lionagi/utils/url_util.py +55 -0
- lionagi/version.py +1 -1
- {lionagi-0.0.115.dist-info → lionagi-0.0.204.dist-info}/METADATA +21 -17
- lionagi-0.0.204.dist-info/RECORD +106 -0
- lionagi/core/conversations/__init__.py +0 -5
- lionagi/core/conversations/conversation.py +0 -107
- lionagi/core/flows/__init__.py +0 -8
- lionagi/core/flows/flow.py +0 -8
- lionagi/core/flows/flow_util.py +0 -62
- lionagi/core/instruction_set/instruction_sets.py +0 -7
- lionagi/core/sessions/sessions.py +0 -185
- lionagi/endpoints/__init__.py +0 -5
- lionagi/endpoints/audio.py +0 -17
- lionagi/endpoints/chatcompletion.py +0 -54
- lionagi/messages/__init__.py +0 -11
- lionagi/messages/instruction.py +0 -15
- lionagi/messages/message.py +0 -110
- lionagi/messages/response.py +0 -33
- lionagi/messages/system.py +0 -12
- lionagi/objs/__init__.py +0 -11
- lionagi/objs/abc_objs.py +0 -39
- lionagi/objs/async_queue.py +0 -135
- lionagi/objs/messenger.py +0 -85
- lionagi/objs/tool_manager.py +0 -253
- lionagi/services/__init__.py +0 -11
- lionagi/services/base_api_service.py +0 -230
- lionagi/services/oai.py +0 -34
- lionagi/services/openrouter.py +0 -31
- lionagi/tests/test_api_util.py +0 -46
- lionagi/tests/test_call_util.py +0 -115
- lionagi/tests/test_convert_util.py +0 -202
- lionagi/tests/test_encrypt_util.py +0 -33
- lionagi/tests/test_flat_util.py +0 -426
- lionagi/tests/test_sys_util.py +0 -0
- lionagi/utils/convert_util.py +0 -229
- lionagi/utils/flat_util.py +0 -599
- lionagi-0.0.115.dist-info/RECORD +0 -110
- /lionagi/{services → _services}/anyscale.py +0 -0
- /lionagi/{services → _services}/azure.py +0 -0
- /lionagi/{services → _services}/bedrock.py +0 -0
- /lionagi/{services → _services}/everlyai.py +0 -0
- /lionagi/{services → _services}/gemini.py +0 -0
- /lionagi/{services → _services}/gpt4all.py +0 -0
- /lionagi/{services → _services}/huggingface.py +0 -0
- /lionagi/{services → _services}/litellm.py +0 -0
- /lionagi/{services → _services}/localai.py +0 -0
- /lionagi/{services → _services}/mistralai.py +0 -0
- /lionagi/{services → _services}/ollama.py +0 -0
- /lionagi/{services → _services}/openllm.py +0 -0
- /lionagi/{services → _services}/perplexity.py +0 -0
- /lionagi/{services → _services}/predibase.py +0 -0
- /lionagi/{services → _services}/rungpt.py +0 -0
- /lionagi/{services → _services}/vllm.py +0 -0
- /lionagi/{services → _services}/xinference.py +0 -0
- /lionagi/{endpoints/assistants.py → agents/__init__.py} +0 -0
- /lionagi/{tools → agents}/planner.py +0 -0
- /lionagi/{tools → agents}/prompter.py +0 -0
- /lionagi/{tools → agents}/scorer.py +0 -0
- /lionagi/{tools → agents}/summarizer.py +0 -0
- /lionagi/{tools → agents}/validator.py +0 -0
- /lionagi/{endpoints/embeddings.py → core/branch/__init__.py} +0 -0
- /lionagi/{services/anthropic.py → core/branch/cluster.py} +0 -0
- /lionagi/{endpoints/finetune.py → core/flow/__init__.py} +0 -0
- /lionagi/{endpoints/image.py → core/messages/__init__.py} +0 -0
- /lionagi/{endpoints/moderation.py → models/__init__.py} +0 -0
- /lionagi/{endpoints/vision.py → models/base_model.py} +0 -0
- /lionagi/{objs → schema}/status_tracker.py +0 -0
- /lionagi/tests/{test_io_util.py → test_utils/__init__.py} +0 -0
- {lionagi-0.0.115.dist-info → lionagi-0.0.204.dist-info}/LICENSE +0 -0
- {lionagi-0.0.115.dist-info → lionagi-0.0.204.dist-info}/WHEEL +0 -0
- {lionagi-0.0.115.dist-info → lionagi-0.0.204.dist-info}/top_level.txt +0 -0
lionagi/_services/services.py ADDED
@@ -0,0 +1,91 @@
+class Services:
+
+    @staticmethod
+    def OpenAI(**kwargs):
+        """
+        A service to interact with OpenAI's API endpoints.
+
+        Attributes:
+            api_key (Optional[str]): The API key used for authentication.
+            schema (Dict[str, Any]): The schema defining the service's endpoints.
+            status_tracker (StatusTracker): The object tracking the status of API calls.
+            endpoints (Dict[str, EndPoint]): A dictionary of endpoint objects.
+            base_url (str): The base URL for the OpenAI API.
+            available_endpoints (list): A list of available API endpoints, including
+                'chat/completions'
+            key_scheme (str): The environment variable name for API key.
+            token_encoding_name (str): The default token encoding scheme.
+        """
+
+        from .oai import OpenAIService
+        return OpenAIService(**kwargs)
+
+    @staticmethod
+    def OpenRouter(**kwargs):
+        """
+        A service to interact with OpenRouter's API endpoints.
+
+        Attributes:
+            api_key (Optional[str]): The API key used for authentication.
+            schema (Dict[str, Any]): The schema defining the service's endpoints.
+            status_tracker (StatusTracker): The object tracking the status of API calls.
+            endpoints (Dict[str, EndPoint]): A dictionary of endpoint objects.
+            base_url (str): The base URL for the OpenAI API.
+            available_endpoints (list): A list of available API endpoints, including
+                'chat/completions'
+            key_scheme (str): The environment variable name for API key.
+            token_encoding_name (str): The default token encoding scheme.
+        """
+
+        from .openrouter import OpenRouterService
+        return OpenRouterService(**kwargs)
+
+    @staticmethod
+    def Transformers(**kwargs):
+        """
+        A service to interact with Transformers' pipeline
+
+        Attributes:
+            task (str): The specific task to be performed by the transformer model.
+                Currently, only 'conversational' tasks are supported.
+            model (Union[str, Any]): Identifier for the transformer model to be used. This
+                can be a model name or a path to a model.
+            config (Union[str, Dict, Any]): Configuration for the transformer model. Can
+                include tokenizer information among others.
+            pipe (pipeline): The loaded transformer pipeline for the specified task, model,
+                and configuration.
+
+        Warnings:
+            - Ensure the selected model is suitable for conversational tasks to avoid
+              unexpected behavior.
+            - As this service heavily relies on external libraries (Hugging Face's
+              Transformers), ensure they are installed and updated to compatible versions.
+
+        Dependencies:
+            - Requires the `transformers` library by Hugging Face and `asyncio` for
+              asynchronous operations.
+        """
+
+        from .transformers import TransformersService
+        return TransformersService(**kwargs)
+
+
+    @staticmethod
+    def Anthropic(**kwargs):
+        """
+        A service to interact with Anthropic's API endpoints.
+
+        Attributes:
+            api_key (Optional[str]): The API key used for authentication.
+            schema (Dict[str, Any]): The schema defining the service's endpoints.
+            status_tracker (StatusTracker): The object tracking the status of API calls.
+            endpoints (Dict[str, EndPoint]): A dictionary of endpoint objects.
+            base_url (str): The base URL for the Anthropic API.
+            available_endpoints (list): A list of available API endpoints, including
+                'chat/completions'
+            key_scheme (str): The environment variable name for API key.
+            token_encoding_name (str): The default token encoding scheme.
+        """
+
+        from .anthropic import AnthropicService
+        return AnthropicService(**kwargs)
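The new Services class is a thin factory: each static method lazily imports the matching service module and forwards **kwargs to its constructor, so optional dependencies (e.g. transformers) are only loaded when that provider is requested. A minimal usage sketch, assuming the class is importable as lionagi._services.services.Services (the path in the listing above) and that the OpenAI key is picked up from the environment variable named by key_scheme; both are assumptions, not shown in the diff:

    # A minimal sketch, not taken from the package docs.
    from lionagi._services.services import Services

    service = Services.OpenAI()                      # assumes the API key comes from the environment
    # service = Services.Anthropic(api_key="...")    # hypothetical explicit-key variant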
lionagi/_services/transformers.py ADDED
@@ -0,0 +1,46 @@
+from typing import Union, str, Dict, Any
+from transformers import pipeline, Conversation
+
+from .base_service import BaseService
+
+import functools
+from concurrent.futures import ThreadPoolExecutor
+import asyncio
+
+
+def force_async(fn):
+    pool = ThreadPoolExecutor()
+
+    @functools.wraps(fn)
+    def wrapper(*args, **kwargs):
+        future = pool.submit(fn, *args, **kwargs)
+        return asyncio.wrap_future(future)  # make it awaitable
+
+    return wrapper
+
+
+class TransformersService(BaseService):
+    def __init__(self, task: str = None, model: Union[str, Any] = None, config: Union[str, Dict, Any] = None, **kwargs):
+        super().__init__()
+        self.task = task
+        self.model = model
+        self.config = config
+        self.pipe = pipeline(task=task, model=model, config=config, **kwargs)
+
+    @force_async
+    def serve_chat(self, messages, **kwargs):
+        if self.task:
+            if self.task != 'conversational':
+                raise ValueError(f"Invalid transformers pipeline task: {self.task}. Valid task: 'conversational'")
+
+        payload = {'messages': messages}
+        conversation = self.pipe(Conversation(messages), **kwargs)
+        completion = {"Conversation id": conversation.uuid,
+                      "model": self.pipe.model,
+                      "choices": [{
+                          "message": conversation.messages[-1]
+                      }]
+                      }
+
+        return payload, completion
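The force_async decorator is what lets the synchronous Hugging Face pipeline be awaited like the HTTP-based services: the blocking call runs in a worker thread and the caller awaits the wrapped future. A self-contained sketch of the same pattern using only the standard library; blocking_generate below is a made-up stand-in for the pipeline call, not part of lionagi:

    import asyncio
    import functools
    import time
    from concurrent.futures import ThreadPoolExecutor


    def force_async(fn):
        # Run the blocking function in a thread pool and hand back an awaitable future,
        # mirroring the decorator in the new module.
        pool = ThreadPoolExecutor()

        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            future = pool.submit(fn, *args, **kwargs)
            return asyncio.wrap_future(future)

        return wrapper


    @force_async
    def blocking_generate(prompt: str) -> str:
        time.sleep(0.1)                  # stand-in for a slow pipeline call
        return f"echo: {prompt}"


    async def main():
        # The decorated call can be awaited without blocking the event loop.
        print(await blocking_generate("hello"))


    asyncio.run(main())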
lionagi/bridge/langchain.py CHANGED
@@ -1,6 +1,7 @@
 from typing import Union, Callable, List, Dict, Any, TypeVar
-from
-from
+from ..utils.sys_util import change_dict_key
+from ..schema.data_node import DataNode
+
 
 T = TypeVar('T', bound='DataNode')
 
@@ -8,11 +9,17 @@ def from_langchain(lc_doc: Any) -> T:
     """
     Converts a langchain document into a DataNode object.
 
-
+    Args:
         lc_doc (Any): The langchain document to be converted.
 
     Returns:
-
+        T: A DataNode object created from the langchain document.
+
+    Examples:
+        >>> lc_doc = LangchainDocument(...)
+        >>> data_node = from_langchain(lc_doc)
+        >>> isinstance(data_node, DataNode)
+        True
     """
     info_json = lc_doc.to_json()
     info_node = {'lc_id': info_json['id']}
@@ -23,13 +30,18 @@ def to_langchain_document(datanode: T, **kwargs: Any) -> Any:
     """
     Converts a DataNode into a langchain Document.
 
-
-        datanode (
-
+    Args:
+        datanode (T): The DataNode to be converted.
         **kwargs: Additional keyword arguments to be included in the Document.
 
     Returns:
         Any: A langchain Document created from the DataNode.
+
+    Examples:
+        >>> data_node = DataNode(...)
+        >>> lc_document = to_langchain_document(data_node, author="John Doe")
+        >>> isinstance(lc_document, LangchainDocument)
+        True
     """
     from langchain.schema import Document
 
@@ -45,11 +57,9 @@ def langchain_loader(loader: Union[str, Callable],
     """
     Loads data using a specified langchain loader.
 
-
+    Args:
         loader (Union[str, Callable]): The name of the loader function or the loader function itself.
-
         loader_args (List[Any]): Positional arguments to pass to the loader function.
-
         loader_kwargs (Dict[str, Any]): Keyword arguments to pass to the loader function.
 
     Returns:
@@ -57,6 +67,11 @@ def langchain_loader(loader: Union[str, Callable],
 
     Raises:
         ValueError: If the specified loader is invalid or if the loader fails to load data.
+
+    Examples:
+        >>> data = langchain_loader("json_loader", loader_args=["data.json"])
+        >>> isinstance(data, dict)
+        True
     """
     import langchain.document_loaders as document_loaders
 
@@ -79,17 +94,13 @@ def langchain_text_splitter(data: Union[str, List],
                             splitter: Union[str, Callable],
                             splitter_args: List[Any] = [],
                             splitter_kwargs: Dict[str, Any] = {}) -> List[str]:
-
     """
     Splits text or a list of documents using a specified langchain text splitter.
 
-
+    Args:
         data (Union[str, List]): The input text or list of documents to be split.
-
         splitter (Union[str, Callable]): The name of the text splitter function or the function itself.
-
         splitter_args (List[Any]): Positional arguments to pass to the splitter function.
-
         splitter_kwargs (Dict[str, Any]): Keyword arguments to pass to the splitter function.
 
     Returns:
@@ -148,4 +159,3 @@ def langchain_text_splitter(data: Union[str, List],
     # return docs
     # except Exception as e:
     #     raise ValueError(f'Failed to split. Error: {e}')
-    #
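Taken together, from_langchain and to_langchain_document give a round trip between langchain Documents and lionagi DataNodes. A small sketch, assuming langchain is installed and the module is importable as lionagi.bridge.langchain (the path in the listing above); the Document fields are illustrative:

    from langchain.schema import Document
    from lionagi.bridge.langchain import from_langchain, to_langchain_document

    lc_doc = Document(page_content="hello world", metadata={"source": "demo.txt"})
    data_node = from_langchain(lc_doc)                               # langchain Document -> DataNode
    lc_again = to_langchain_document(data_node, author="John Doe")   # DataNode -> langchain Document
    print(type(data_node).__name__, type(lc_again).__name__)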
lionagi/bridge/llama_index.py CHANGED
@@ -1,6 +1,7 @@
 from typing import Union, Callable, List, Any, Dict, TypeVar
-from
-from
+from ..utils.sys_util import change_dict_key
+from ..schema.data_node import DataNode
+
 
 T = TypeVar('T', bound='DataNode')
 
@@ -8,13 +9,16 @@ def from_llama_index(llama_node: Any, **kwargs: Any) -> T:
     """
     Converts a Llama Index node into a DataNode object.
 
-
+    Args:
         llama_node (Any): The Llama Index node to be converted.
-
         **kwargs: Additional keyword arguments for JSON serialization.
 
     Returns:
-
+        T: A DataNode object created from the Llama Index node.
+
+    Example:
+        llama_node = LlamaIndexNode(...)
+        datanode = from_llama_index(llama_node, serialize_dates=True)
     """
     llama_dict = llama_node.to_dict(**kwargs)
     return DataNode.from_dict(llama_dict)
@@ -23,15 +27,17 @@ def to_llama_index_textnode(datanode: T, **kwargs: Any) -> Any:
     """
     Converts a DataNode into a Llama Index TextNode.
 
-
-        datanode (
-
+    Args:
+        datanode (T): The DataNode to be converted.
         **kwargs: Additional keyword arguments to be included in the TextNode.
 
     Returns:
-
+        Any: A Llama Index TextNode created from the DataNode.
+
+    Example:
+        datanode = DataNode(...)
+        textnode = to_llama_index_textnode(datanode, additional_arg=1)
     """
-    # to llama_index textnode
     from llama_index.schema import TextNode
 
     dnode = datanode.to_dict()
@@ -46,7 +52,7 @@ def get_llama_reader(reader: Union[str, Callable]) -> Callable:
     """
     Gets a Llama Index reader function.
 
-
+    Args:
         reader (Union[str, Callable]): The name of the reader function or the reader function itself.
 
     Returns:
@@ -54,6 +60,12 @@ def get_llama_reader(reader: Union[str, Callable]) -> Callable:
 
     Raises:
         ValueError: If the specified reader is invalid.
+
+    Example:
+        reader = get_llama_reader("SimpleDirectoryReader")
+        # or for a custom function
+        def custom_reader(): pass
+        reader = get_llama_reader(custom_reader)
     """
     try:
         if isinstance(reader, str):
@@ -76,15 +88,11 @@ def llama_index_reader(reader: Union[str, Callable],
     """
     Loads documents using a specified Llama Index reader.
 
-
+    Args:
         reader (Union[str, Callable]): The name of the reader function or the reader function itself.
-
         reader_args (List[Any]): Positional arguments to pass to the reader function.
-
         reader_kwargs (Dict[str, Any]): Keyword arguments to pass to the reader function.
-
         load_data_args (List[Any]): Positional arguments for the load_data method.
-
         load_data_kwargs (Dict[str, Any]): Keyword arguments for the load_data method.
 
     Returns:
@@ -92,6 +100,9 @@ def llama_index_reader(reader: Union[str, Callable],
 
     Raises:
         ValueError: If the specified reader is invalid or if the reader fails to load documents.
+
+    Example:
+        documents = llama_index_reader("SimpleDirectoryReader", reader_args=["/path/to/data"])
     """
     reader = get_llama_reader(reader)
 
@@ -104,6 +115,24 @@
         raise ValueError(f'Failed to read. Error: {e}')
 
 def get_llama_parser(parser: Union[str, Callable]) -> Callable:
+    """
+    Gets a Llama Index parser function or object.
+
+    Args:
+        parser (Union[str, Callable]): The name of the parser function or the parser function itself.
+
+    Returns:
+        Callable: The Llama Index parser function or object.
+
+    Raises:
+        ValueError: If the specified parser is invalid.
+
+    Example:
+        parser = get_llama_parser("DefaultNodeParser")
+        # or for a custom function
+        def custom_parser(): pass
+        parser = get_llama_parser(custom_parser)
+    """
     import llama_index.node_parser as node_parser
     import llama_index.text_splitter as text_splitter
 
@@ -127,20 +156,21 @@ def llama_index_node_parser(documents: List[Any],
     """
     Parses documents into nodes using a specified Llama Index node parser.
 
-
+    Args:
         documents (List[Any]): The documents to parse.
-
         parser (Union[str, Callable]): The name of the parser function or the parser function itself.
-
         parser_args (List[Any]): Positional arguments to pass to the parser function.
-
         parser_kwargs (Dict[str, Any]): Keyword arguments to pass to the parser function.
+        parsing_kwargs (Dict[str, Any]): Keyword arguments for the parsing process.
 
     Returns:
         List[Any]: A list of nodes parsed from the documents.
 
     Raises:
         ValueError: If the specified parser is invalid or if the parser fails to parse the documents.
+
+    Example:
+        nodes = llama_index_node_parser(documents, "DefaultNodeParser")
     """
     parser = get_llama_parser(parser)
 
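The llama_index bridge mirrors the langchain one: from_llama_index and to_llama_index_textnode convert between Llama Index nodes and DataNodes. A minimal sketch, assuming llama_index is installed and the module is importable as lionagi.bridge.llama_index; the TextNode content is illustrative:

    from llama_index.schema import TextNode
    from lionagi.bridge.llama_index import from_llama_index, to_llama_index_textnode

    node = TextNode(text="hello world")
    data_node = from_llama_index(node)               # Llama Index node -> DataNode
    text_node = to_llama_index_textnode(data_node)   # DataNode -> TextNode
    print(type(data_node).__name__, type(text_node).__name__)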
lionagi/configs/oai_configs.py CHANGED
@@ -2,7 +2,7 @@
 
 # ChatCompletion
 oai_chat_llmconfig = {
-    "model": "gpt-4-
+    "model": "gpt-4-turbo-preview",
     "frequency_penalty": 0,
     "max_tokens": None,
     "n": 1,
@@ -46,18 +46,6 @@ oai_finetune_schema = {
 }
 
 
-
-# Embeddings
-
-
-
-
-
-
-
-
-
-
 # Audio ---- create speech
 
 oai_audio_speech_llmconfig = {
@@ -127,7 +115,7 @@ oai_audio_translations_schema = {
 
 oai_schema = {
 
-    "chat": oai_chat_schema,
+    "chat/completions": oai_chat_schema,
     "finetune": oai_finetune_schema,
     "audio_speech": oai_audio_speech_schema,
     "audio_transcriptions": oai_audio_transcriptions_schema,

lionagi/configs/openrouter_configs.py CHANGED
@@ -1,5 +1,5 @@
 openrouter_chat_llmconfig = {
-    "model": "gpt-4-
+    "model": "gpt-4-turbo-preview",
     "frequency_penalty": 0,
     "max_tokens": None,
     "n": 1,
@@ -43,7 +43,7 @@ openrouter_finetune_schema = {
 
 openrouter_schema = {
 
-    "chat": openrouter_chat_schema,
+    "chat/completions": openrouter_chat_schema,
     "finetune": openrouter_finetune_schema
 
 }
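The practical effect of the config change is that the chat entry in both schemas is now keyed by the endpoint path rather than the bare name "chat", and the default chat model moves to "gpt-4-turbo-preview"; openrouter_schema mirrors the same rename. Illustrative only, with the import path taken from the file listing above:

    from lionagi.configs.oai_configs import oai_chat_llmconfig, oai_schema

    assert oai_chat_llmconfig["model"] == "gpt-4-turbo-preview"   # new default shown in the diff
    chat_schema = oai_schema["chat/completions"]                  # was oai_schema["chat"] in 0.0.115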
lionagi/core/__init__.py CHANGED
@@ -1,12 +1,11 @@
-
-from .
-from .sessions import Session
-from .flows import run_session #, Flow
+from .messages.messages import System, Instruction
+from .branch.branch import Branch
+from .sessions.session import Session
 
 
 __all__ = [
-
-
-
-
+    'System',
+    'Instruction',
+    'Branch',
+    'Session'
 ]
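The reorganized core package re-exports the new message, branch, and session types directly from lionagi.core. A sketch of the new surface; the constructor arguments below are assumptions for illustration, not taken from the diff:

    from lionagi.core import System, Instruction, Branch, Session

    system = System("You are a helpful assistant.")      # assumed constructor signature
    instruction = Instruction("Summarize the project.")  # assumed constructor signature
    session = Session(system)                            # assumed constructor signature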
|