lionagi 0.0.201__py3-none-any.whl → 0.0.204__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
- lionagi/_services/anthropic.py +79 -1
- lionagi/_services/base_service.py +1 -1
- lionagi/_services/services.py +61 -25
- lionagi/_services/transformers.py +46 -0
- lionagi/agents/__init__.py +0 -0
- lionagi/configs/oai_configs.py +1 -1
- lionagi/configs/openrouter_configs.py +1 -1
- lionagi/core/__init__.py +3 -7
- lionagi/core/branch/__init__.py +0 -0
- lionagi/core/branch/branch.py +589 -0
- lionagi/core/branch/branch_manager.py +139 -0
- lionagi/core/branch/cluster.py +1 -0
- lionagi/core/branch/conversation.py +484 -0
- lionagi/core/core_util.py +59 -0
- lionagi/core/flow/__init__.py +0 -0
- lionagi/core/flow/flow.py +19 -0
- lionagi/core/instruction_set/__init__.py +0 -0
- lionagi/core/instruction_set/instruction_set.py +343 -0
- lionagi/core/messages/__init__.py +0 -0
- lionagi/core/messages/messages.py +176 -0
- lionagi/core/sessions/__init__.py +0 -0
- lionagi/core/sessions/session.py +428 -0
- lionagi/models/__init__.py +0 -0
- lionagi/models/base_model.py +0 -0
- lionagi/models/imodel.py +53 -0
- lionagi/schema/data_logger.py +75 -155
- lionagi/tests/test_utils/test_call_util.py +658 -657
- lionagi/tools/tool_manager.py +121 -188
- lionagi/utils/__init__.py +5 -10
- lionagi/utils/call_util.py +667 -585
- lionagi/utils/io_util.py +3 -0
- lionagi/utils/nested_util.py +17 -211
- lionagi/utils/pd_util.py +57 -0
- lionagi/utils/sys_util.py +220 -184
- lionagi/utils/url_util.py +55 -0
- lionagi/version.py +1 -1
- {lionagi-0.0.201.dist-info → lionagi-0.0.204.dist-info}/METADATA +12 -8
- {lionagi-0.0.201.dist-info → lionagi-0.0.204.dist-info}/RECORD +47 -32
- lionagi/core/branch.py +0 -193
- lionagi/core/conversation.py +0 -341
- lionagi/core/flow.py +0 -8
- lionagi/core/instruction_set.py +0 -150
- lionagi/core/messages.py +0 -243
- lionagi/core/sessions.py +0 -474
- /lionagi/{tools → agents}/planner.py +0 -0
- /lionagi/{tools → agents}/prompter.py +0 -0
- /lionagi/{tools → agents}/scorer.py +0 -0
- /lionagi/{tools → agents}/summarizer.py +0 -0
- /lionagi/{tools → agents}/validator.py +0 -0
- /lionagi/core/{flow_util.py → flow/flow_util.py} +0 -0
- {lionagi-0.0.201.dist-info → lionagi-0.0.204.dist-info}/LICENSE +0 -0
- {lionagi-0.0.201.dist-info → lionagi-0.0.204.dist-info}/WHEEL +0 -0
- {lionagi-0.0.201.dist-info → lionagi-0.0.204.dist-info}/top_level.txt +0 -0
lionagi/_services/anthropic.py
CHANGED
```diff
@@ -1 +1,79 @@
-
+from os import getenv
+from .base_service import BaseService, PayloadCreation
+
+class AnthropicService(BaseService):
+    """
+    A service to interact with Anthropic's API endpoints.
+
+    Attributes:
+        base_url (str): The base URL for the Anthropic API.
+        available_endpoints (list): A list of available API endpoints.
+        schema (dict): The schema configuration for the API.
+        key_scheme (str): The environment variable name for Anthropic API key.
+        token_encoding_name (str): The default token encoding scheme.
+
+    Examples:
+        >>> service = AnthropicService(api_key="your_api_key")
+        >>> asyncio.run(service.serve("Hello, world!", "chat/completions"))
+        (payload, completion)
+    """
+
+    base_url = "https://api.anthropic.com/v1/"
+    available_endpoints = ['chat/completions']
+    schema = {}  # TODO
+    key_scheme = "ANTHROPIC_API_KEY"
+    token_encoding_name = "cl100k_base"
+
+    def __init__(self, api_key=None, key_scheme=None, schema=None, token_encoding_name: str = "cl100k_base", **kwargs):
+        key_scheme = key_scheme or self.key_scheme
+        super().__init__(
+            api_key=api_key or getenv(key_scheme),
+            schema=schema or self.schema,
+            token_encoding_name=token_encoding_name,
+            **kwargs
+        )
+        self.active_endpoint = []
+
+    async def serve(self, input_, endpoint="chat/completions", method="post", **kwargs):
+        """
+        Serves the input using the specified endpoint and method.
+
+        Args:
+            input_: The input text to be processed.
+            endpoint: The API endpoint to use for processing.
+            method: The HTTP method to use for the request.
+            **kwargs: Additional keyword arguments to pass to the payload creation.
+
+        Returns:
+            A tuple containing the payload and the completion response from the API.
+        """
+        if endpoint not in self.active_endpoint:
+            await self.init_endpoint(endpoint)
+        if endpoint == "chat/completions":
+            return await self.serve_chat(input_, **kwargs)
+        else:
+            return ValueError(f'{endpoint} is currently not supported')
+
+    async def serve_chat(self, messages, **kwargs):
+        """
+        Serves the chat completion request with the given messages.
+
+        Args:
+            messages: The messages to be included in the chat completion.
+            **kwargs: Additional keyword arguments for payload creation.
+
+        Returns:
+            A tuple containing the payload and the completion response from the API.
+        """
+        if "chat/completions" not in self.active_endpoint:
+            await self.init_endpoint("chat/completions")
+            self.active_endpoint.append("chat/completions")
+        payload = PayloadCreation.chat_completion(
+            messages, self.endpoints["chat/completions"].config, self.schema["chat/completions"], **kwargs)
+
+        try:
+            completion = await self.call_api(payload, "chat/completions", "post")
+            return payload, completion
+        except Exception as e:
+            self.status_tracker.num_tasks_failed += 1
+            raise e
```
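The docstring's own example translates into a two-step call pattern: construct the service (omitting `api_key` falls back to the `ANTHROPIC_API_KEY` environment variable), then await `serve`, which lazily initializes the endpoint and hands back a `(payload, completion)` tuple. A minimal sketch with a placeholder key, assuming the 0.0.204 module layout; note that `serve` returns (rather than raises) a `ValueError` for unsupported endpoints, so callers should check the result.

```python
import asyncio

from lionagi._services.anthropic import AnthropicService

# Placeholder key; omitting api_key falls back to getenv("ANTHROPIC_API_KEY").
service = AnthropicService(api_key="your_api_key")

async def main():
    # serve() initializes "chat/completions" on first use, then delegates to
    # serve_chat(), which returns a (payload, completion) tuple.
    payload, completion = await service.serve("Hello, world!", "chat/completions")
    print(payload, completion)

asyncio.run(main())
```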
lionagi/_services/base_service.py
CHANGED
```diff
@@ -283,6 +283,7 @@ class BaseService:
             'interval': interval
         }
 
+
     async def init_endpoint(self, endpoint_: Optional[Union[List[str], List[EndPoint], str, EndPoint]] = None) -> None:
         """
         Initializes the specified endpoint or all endpoints if none is specified.
@@ -411,4 +412,3 @@ class PayloadCreation:
             optional_=schema['optional'],
             input_key="training_file",
             **kwargs)
-
```
lionagi/_services/services.py
CHANGED
```diff
@@ -1,11 +1,9 @@
-from .oai import OpenAIService
-from .openrouter import OpenRouterService
-
 class Services:
 
     @staticmethod
     def OpenAI(**kwargs):
-        """
+        """
+        A service to interact with OpenAI's API endpoints.
 
         Attributes:
             api_key (Optional[str]): The API key used for authentication.
@@ -13,25 +11,19 @@ class Services:
             status_tracker (StatusTracker): The object tracking the status of API calls.
             endpoints (Dict[str, EndPoint]): A dictionary of endpoint objects.
             base_url (str): The base URL for the OpenAI API.
-            available_endpoints (list): A list of available API endpoints
-
+            available_endpoints (list): A list of available API endpoints, including
+                'chat/completions'
             key_scheme (str): The environment variable name for API key.
             token_encoding_name (str): The default token encoding scheme.
-
-        Examples:
-            >>> service = OpenAIService(api_key="your_api_key")
-            >>> asyncio.run(service.serve("Hello, world!", "chat/completions"))
-            (payload, completion)
-
-            >>> service = OpenAIService()
-            >>> asyncio.run(service.serve("Convert this text to speech.", "audio_speech"))
-            ValueError: 'audio_speech' is currently not supported
         """
+
+        from .oai import OpenAIService
         return OpenAIService(**kwargs)
 
     @staticmethod
     def OpenRouter(**kwargs):
-        """
+        """
+        A service to interact with OpenRouter's API endpoints.
 
         Attributes:
             api_key (Optional[str]): The API key used for authentication.
@@ -39,17 +31,61 @@ class Services:
             status_tracker (StatusTracker): The object tracking the status of API calls.
             endpoints (Dict[str, EndPoint]): A dictionary of endpoint objects.
             base_url (str): The base URL for the OpenAI API.
-            available_endpoints (list): A list of available API endpoints
+            available_endpoints (list): A list of available API endpoints, including
+                'chat/completions'
             key_scheme (str): The environment variable name for API key.
             token_encoding_name (str): The default token encoding scheme.
+        """
 
-
-
-            >>> asyncio.run(service.serve("Hello, world!", "chat/completions"))
-            (payload, completion)
+        from .openrouter import OpenRouterService
+        return OpenRouterService(**kwargs)
 
-
-
-            ValueError: 'audio_speech' is currently not supported
+    @staticmethod
+    def Transformers(**kwargs):
         """
-
+        A service to interact with Transformers' pipeline
+
+        Attributes:
+            task (str): The specific task to be performed by the transformer model.
+                Currently, only 'conversational' tasks are supported.
+            model (Union[str, Any]): Identifier for the transformer model to be used. This
+                can be a model name or a path to a model.
+            config (Union[str, Dict, Any]): Configuration for the transformer model. Can
+                include tokenizer information among others.
+            pipe (pipeline): The loaded transformer pipeline for the specified task, model,
+                and configuration.
+
+        Warnings:
+            - Ensure the selected model is suitable for conversational tasks to avoid
+            unexpected behavior.
+            - As this service heavily relies on external libraries (Hugging Face's
+            Transformers), ensure they are installed and updated to compatible versions.
+
+        Dependencies:
+            - Requires the `transformers` library by Hugging Face and `asyncio` for
+            asynchronous operations.
+        """
+
+        from .transformers import TransformersService
+        return TransformersService(**kwargs)
+
+
+    @staticmethod
+    def Anthropic(**kwargs):
+        """
+        A service to interact with Anthropic's API endpoints.
+
+        Attributes:
+            api_key (Optional[str]): The API key used for authentication.
+            schema (Dict[str, Any]): The schema defining the service's endpoints.
+            status_tracker (StatusTracker): The object tracking the status of API calls.
+            endpoints (Dict[str, EndPoint]): A dictionary of endpoint objects.
+            base_url (str): The base URL for the Anthropic API.
+            available_endpoints (list): A list of available API endpoints, including
+                'chat/completions'
+            key_scheme (str): The environment variable name for API key.
+            token_encoding_name (str): The default token encoding scheme.
+        """
+
+        from .anthropic import AnthropicService
+        return AnthropicService(**kwargs)
```
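A notable change here: the backend imports move from module level into each factory body, so a missing optional dependency (for example, `transformers`) no longer breaks importing `Services` itself and only fails when the corresponding factory is called. A short sketch of the resulting usage, with hypothetical model and key values:

```python
from lionagi._services.services import Services

# Each factory imports its backend lazily and forwards **kwargs
# to the underlying service constructor.
openai_service = Services.OpenAI()         # key read from the environment
anthropic_service = Services.Anthropic()   # key read from ANTHROPIC_API_KEY

# Only this call requires the `transformers` package to be installed;
# the model id here is a hypothetical example.
hf_service = Services.Transformers(task="conversational", model="some/chat-model")
```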
lionagi/_services/transformers.py
ADDED
```diff
@@ -0,0 +1,46 @@
+from typing import Union, str, Dict, Any
+from transformers import pipeline, Conversation
+
+from .base_service import BaseService
+
+import functools
+from concurrent.futures import ThreadPoolExecutor
+import asyncio
+
+
+def force_async(fn):
+    pool = ThreadPoolExecutor()
+
+    @functools.wraps(fn)
+    def wrapper(*args, **kwargs):
+        future = pool.submit(fn, *args, **kwargs)
+        return asyncio.wrap_future(future)  # make it awaitable
+
+    return wrapper
+
+
+class TransformersService(BaseService):
+    def __init__(self, task: str = None, model: Union[str, Any] = None, config: Union[str, Dict, Any] = None, **kwargs):
+        super().__init__()
+        self.task = task
+        self.model = model
+        self.config = config
+        self.pipe = pipeline(task=task, model=model, config=config, **kwargs)
+
+    @force_async
+    def serve_chat(self, messages, **kwargs):
+        if self.task:
+            if self.task != 'conversational':
+                raise ValueError(f"Invalid transformers pipeline task: {self.task}. Valid task: 'conversational'")
+
+        payload = {'messages': messages}
+        conversation = self.pipe(Conversation(messages), **kwargs)
+        completion = {"Conversation id": conversation.uuid,
+                      "model": self.pipe.model,
+                      "choices": [{
+                          "message": conversation.messages[-1]
+                      }]
+                      }
+
+        return payload, completion
+
```
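Two details in this new module are worth calling out. First, `from typing import str` will raise an `ImportError` at import time, since `str` is a builtin and not a `typing` export, so the file as shipped cannot be imported unchanged. Second, the `force_async` decorator is a reusable pattern: it runs a blocking function on a `ThreadPoolExecutor` and wraps the resulting `concurrent.futures.Future` into an awaitable, which is how the synchronous `pipeline` call is exposed through the async `serve_chat` interface. A self-contained sketch of that pattern, with a toy blocking function standing in for the pipeline:

```python
import asyncio
import functools
import time
from concurrent.futures import ThreadPoolExecutor


def force_async(fn):
    # One pool per decorated function; submit() returns a
    # concurrent.futures.Future, which wrap_future makes awaitable.
    pool = ThreadPoolExecutor()

    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        future = pool.submit(fn, *args, **kwargs)
        return asyncio.wrap_future(future)

    return wrapper


@force_async
def slow_generate(prompt: str) -> str:
    time.sleep(0.5)  # stand-in for a blocking transformers pipeline call
    return f"reply to: {prompt}"


async def main():
    # The blocking work runs on a pool thread, so the event loop stays free.
    print(await slow_generate("hello"))

asyncio.run(main())
```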
lionagi/configs/oai_configs.py
CHANGED
lionagi/core/__init__.py
CHANGED
```diff
@@ -1,15 +1,11 @@
-from .messages import System, Instruction
-from .conversation import Conversation
-from .instruction_set import InstructionSet
-from .branch import Branch
-from .sessions import Session
+from .messages.messages import System, Instruction
+from .branch.branch import Branch
+from .sessions.session import Session
 
 
 __all__ = [
     'System',
     'Instruction',
-    'Conversation',
-    'InstructionSet',
     'Branch',
     'Session'
 ]
```
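The practical effect of this reorganization is that the public names are re-exported from their new subpackage homes, so top-level imports keep working, while `Conversation` and `InstructionSet` drop out of the package's public surface. A quick sketch of what still imports cleanly under 0.0.204 (the commented path is an assumption based on the new file layout):

```python
# Unchanged public surface after the move to subpackages:
from lionagi.core import System, Instruction, Branch, Session

# Conversation and InstructionSet are no longer re-exported here; if needed,
# they would have to come from the new modules directly, e.g. (assumed path):
# from lionagi.core.instruction_set.instruction_set import InstructionSet
```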