lionagi 0.0.114__tar.gz → 0.0.116__tar.gz

Files changed (139)
  1. {lionagi-0.0.114 → lionagi-0.0.116}/PKG-INFO +2 -2
  2. {lionagi-0.0.114 → lionagi-0.0.116}/README.md +1 -1
  3. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/__init__.py +7 -4
  4. lionagi-0.0.116/lionagi/bridge/__init__.py +22 -0
  5. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/bridge/langchain.py +23 -3
  6. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/bridge/llama_index.py +5 -3
  7. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/configs/__init__.py +1 -1
  8. lionagi-0.0.116/lionagi/configs/oai_configs.py +136 -0
  9. lionagi-0.0.116/lionagi/core/__init__.py +12 -0
  10. lionagi-0.0.116/lionagi/core/conversations/__init__.py +5 -0
  11. lionagi-0.0.116/lionagi/core/conversations/conversation.py +107 -0
  12. lionagi-0.0.116/lionagi/core/flows/__init__.py +8 -0
  13. lionagi-0.0.116/lionagi/core/flows/flow.py +8 -0
  14. lionagi-0.0.116/lionagi/core/flows/flow_util.py +62 -0
  15. lionagi-0.0.116/lionagi/core/instruction_set/__init__.py +5 -0
  16. lionagi-0.0.116/lionagi/core/instruction_set/instruction_sets.py +7 -0
  17. lionagi-0.0.116/lionagi/core/sessions/__init__.py +5 -0
  18. lionagi-0.0.116/lionagi/core/sessions/sessions.py +187 -0
  19. lionagi-0.0.116/lionagi/endpoints/__init__.py +5 -0
  20. lionagi-0.0.116/lionagi/endpoints/audio.py +17 -0
  21. lionagi-0.0.116/lionagi/endpoints/chatcompletion.py +54 -0
  22. {lionagi-0.0.114/lionagi/loader → lionagi-0.0.116/lionagi/loaders}/__init__.py +7 -1
  23. {lionagi-0.0.114/lionagi/loader → lionagi-0.0.116/lionagi/loaders}/chunker.py +6 -12
  24. lionagi-0.0.114/lionagi/utils/load_utils.py → lionagi-0.0.116/lionagi/loaders/load_util.py +47 -6
  25. {lionagi-0.0.114/lionagi/loader → lionagi-0.0.116/lionagi/loaders}/reader.py +4 -12
  26. lionagi-0.0.116/lionagi/messages/__init__.py +11 -0
  27. lionagi-0.0.116/lionagi/messages/instruction.py +15 -0
  28. lionagi-0.0.116/lionagi/messages/message.py +110 -0
  29. lionagi-0.0.116/lionagi/messages/response.py +33 -0
  30. lionagi-0.0.116/lionagi/messages/system.py +12 -0
  31. lionagi-0.0.116/lionagi/objs/__init__.py +11 -0
  32. lionagi-0.0.116/lionagi/objs/abc_objs.py +39 -0
  33. lionagi-0.0.116/lionagi/objs/async_queue.py +135 -0
  34. lionagi-0.0.116/lionagi/objs/messenger.py +85 -0
  35. lionagi-0.0.116/lionagi/objs/status_tracker.py +37 -0
  36. lionagi-0.0.114/lionagi/objs/tool_registry.py → lionagi-0.0.116/lionagi/objs/tool_manager.py +8 -6
  37. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/schema/__init__.py +3 -3
  38. lionagi-0.0.116/lionagi/schema/base_node.py +251 -0
  39. lionagi-0.0.116/lionagi/schema/base_tool.py +14 -0
  40. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/schema/data_logger.py +2 -3
  41. lionagi-0.0.116/lionagi/schema/data_node.py +37 -0
  42. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/__init__.py +1 -4
  43. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/base_api_service.py +15 -5
  44. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/oai.py +2 -2
  45. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/openrouter.py +2 -3
  46. lionagi-0.0.116/lionagi/services/predibase.py +0 -0
  47. lionagi-0.0.116/lionagi/services/rungpt.py +0 -0
  48. lionagi-0.0.116/lionagi/services/vllm.py +0 -0
  49. lionagi-0.0.116/lionagi/services/xinference.py +0 -0
  50. lionagi-0.0.116/lionagi/structures/graph.py +96 -0
  51. {lionagi-0.0.114/lionagi/structure → lionagi-0.0.116/lionagi/structures}/relationship.py +10 -2
  52. lionagi-0.0.116/lionagi/structures/structure.py +102 -0
  53. lionagi-0.0.116/lionagi/tests/__init__.py +0 -0
  54. lionagi-0.0.116/lionagi/tests/test_api_util.py +46 -0
  55. lionagi-0.0.116/lionagi/tests/test_call_util.py +115 -0
  56. lionagi-0.0.116/lionagi/tests/test_convert_util.py +202 -0
  57. lionagi-0.0.116/lionagi/tests/test_encrypt_util.py +33 -0
  58. lionagi-0.0.114/lionagi/tests/test_flatten_util.py → lionagi-0.0.116/lionagi/tests/test_flat_util.py +1 -1
  59. lionagi-0.0.116/lionagi/tests/test_io_util.py +0 -0
  60. lionagi-0.0.116/lionagi/tests/test_sys_util.py +0 -0
  61. lionagi-0.0.116/lionagi/tools/__init__.py +5 -0
  62. lionagi-0.0.116/lionagi/tools/tool_util.py +7 -0
  63. lionagi-0.0.116/lionagi/utils/__init__.py +69 -0
  64. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/utils/api_util.py +19 -17
  65. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/utils/call_util.py +2 -1
  66. lionagi-0.0.116/lionagi/utils/convert_util.py +229 -0
  67. lionagi-0.0.116/lionagi/utils/encrypt_util.py +16 -0
  68. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/utils/flat_util.py +38 -0
  69. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/utils/io_util.py +2 -2
  70. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/utils/sys_util.py +45 -10
  71. lionagi-0.0.116/lionagi/version.py +1 -0
  72. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi.egg-info/PKG-INFO +2 -2
  73. lionagi-0.0.116/lionagi.egg-info/SOURCES.txt +115 -0
  74. {lionagi-0.0.114 → lionagi-0.0.116}/setup.py +2 -1
  75. lionagi-0.0.114/lionagi/bridge/__init__.py +0 -7
  76. lionagi-0.0.114/lionagi/configs/oai_configs.py +0 -49
  77. lionagi-0.0.114/lionagi/core/__init__.py +0 -15
  78. lionagi-0.0.114/lionagi/core/conversations.py +0 -108
  79. lionagi-0.0.114/lionagi/core/flows.py +0 -1
  80. lionagi-0.0.114/lionagi/core/instruction_sets.py +0 -1
  81. lionagi-0.0.114/lionagi/core/messages.py +0 -166
  82. lionagi-0.0.114/lionagi/core/sessions.py +0 -297
  83. lionagi-0.0.114/lionagi/objs/__init__.py +0 -7
  84. lionagi-0.0.114/lionagi/objs/messenger.py +0 -163
  85. lionagi-0.0.114/lionagi/schema/base_schema.py +0 -252
  86. lionagi-0.0.114/lionagi/schema/base_tool.py +0 -9
  87. lionagi-0.0.114/lionagi/services/chatcompletion.py +0 -48
  88. lionagi-0.0.114/lionagi/services/service_objs.py +0 -282
  89. lionagi-0.0.114/lionagi/structure/structure.py +0 -160
  90. lionagi-0.0.114/lionagi/tools/coder.py +0 -1
  91. lionagi-0.0.114/lionagi/tools/sandbox.py +0 -1
  92. lionagi-0.0.114/lionagi/utils/__init__.py +0 -49
  93. lionagi-0.0.114/lionagi/utils/tool_util.py +0 -92
  94. lionagi-0.0.114/lionagi/utils/type_util.py +0 -81
  95. lionagi-0.0.114/lionagi/version.py +0 -1
  96. lionagi-0.0.114/lionagi.egg-info/SOURCES.txt +0 -89
  97. {lionagi-0.0.114 → lionagi-0.0.116}/LICENSE +0 -0
  98. {lionagi-0.0.114 → lionagi-0.0.116}/README.rst +0 -0
  99. /lionagi-0.0.114/lionagi/configs/openrouter_config.py → /lionagi-0.0.116/lionagi/configs/openrouter_configs.py +0 -0
  100. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/__init__.py +0 -0
  101. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/chroma.py +0 -0
  102. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/deeplake.py +0 -0
  103. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/elasticsearch.py +0 -0
  104. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/lantern.py +0 -0
  105. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/pinecone.py +0 -0
  106. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/postgres.py +0 -0
  107. {lionagi-0.0.114/lionagi/datastore → lionagi-0.0.116/lionagi/datastores}/qdrant.py +0 -0
  108. /lionagi-0.0.114/lionagi/services/anyscale.py → /lionagi-0.0.116/lionagi/endpoints/assistants.py +0 -0
  109. /lionagi-0.0.114/lionagi/services/bedrock.py → /lionagi-0.0.116/lionagi/endpoints/embeddings.py +0 -0
  110. /lionagi-0.0.114/lionagi/services/everlyai.py → /lionagi-0.0.116/lionagi/endpoints/finetune.py +0 -0
  111. /lionagi-0.0.114/lionagi/services/gemini.py → /lionagi-0.0.116/lionagi/endpoints/image.py +0 -0
  112. /lionagi-0.0.114/lionagi/services/gpt4all.py → /lionagi-0.0.116/lionagi/endpoints/moderation.py +0 -0
  113. /lionagi-0.0.114/lionagi/services/huggingface.py → /lionagi-0.0.116/lionagi/endpoints/vision.py +0 -0
  114. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/schema/base_condition.py +0 -0
  115. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/anthropic.py +0 -0
  116. /lionagi-0.0.114/lionagi/services/localai.py → /lionagi-0.0.116/lionagi/services/anyscale.py +0 -0
  117. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/azure.py +0 -0
  118. /lionagi-0.0.114/lionagi/services/mistralai.py → /lionagi-0.0.116/lionagi/services/bedrock.py +0 -0
  119. /lionagi-0.0.114/lionagi/services/openllm.py → /lionagi-0.0.116/lionagi/services/everlyai.py +0 -0
  120. /lionagi-0.0.114/lionagi/services/perplexity.py → /lionagi-0.0.116/lionagi/services/gemini.py +0 -0
  121. /lionagi-0.0.114/lionagi/services/predibase.py → /lionagi-0.0.116/lionagi/services/gpt4all.py +0 -0
  122. /lionagi-0.0.114/lionagi/services/rungpt.py → /lionagi-0.0.116/lionagi/services/huggingface.py +0 -0
  123. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/litellm.py +0 -0
  124. /lionagi-0.0.114/lionagi/services/vllm.py → /lionagi-0.0.116/lionagi/services/localai.py +0 -0
  125. /lionagi-0.0.114/lionagi/services/xinference.py → /lionagi-0.0.116/lionagi/services/mistralai.py +0 -0
  126. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/services/ollama.py +0 -0
  127. /lionagi-0.0.114/lionagi/tests/__init__.py → /lionagi-0.0.116/lionagi/services/openllm.py +0 -0
  128. /lionagi-0.0.114/lionagi/tools/__init__.py → /lionagi-0.0.116/lionagi/services/perplexity.py +0 -0
  129. {lionagi-0.0.114/lionagi/structure → lionagi-0.0.116/lionagi/structures}/__init__.py +0 -0
  130. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/tools/planner.py +0 -0
  131. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/tools/prompter.py +0 -0
  132. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/tools/scorer.py +0 -0
  133. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/tools/summarizer.py +0 -0
  134. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi/tools/validator.py +0 -0
  135. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi.egg-info/dependency_links.txt +0 -0
  136. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi.egg-info/requires.txt +0 -0
  137. {lionagi-0.0.114 → lionagi-0.0.116}/lionagi.egg-info/top_level.txt +0 -0
  138. {lionagi-0.0.114 → lionagi-0.0.116}/pyproject.toml +0 -0
  139. {lionagi-0.0.114 → lionagi-0.0.116}/setup.cfg +0 -0

{lionagi-0.0.114 → lionagi-0.0.116}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lionagi
- Version: 0.0.114
+ Version: 0.0.116
  Summary: Towards automated general intelligence.
  Author: HaiyangLi
  Author-email: Haiyang Li <ocean@lionagi.ai>

@@ -224,7 +224,7 @@ Requires-Dist: httpx==0.25.1



- [PyPI](https://pypi.org/project/lionagi/) | [Documentation](https://lionagi.readthedocs.io/en/latest/) | [Discord](https://discord.gg/7RGWqpSxze)
+ [PyPI](https://pypi.org/project/lionagi/) | [Documentation](https://lionagi.readthedocs.io/en/latest/) | [Discord](https://discord.gg/mzDD5JtYRp)


  # LionAGI

{lionagi-0.0.114 → lionagi-0.0.116}/README.md

@@ -2,7 +2,7 @@



- [PyPI](https://pypi.org/project/lionagi/) | [Documentation](https://lionagi.readthedocs.io/en/latest/) | [Discord](https://discord.gg/7RGWqpSxze)
+ [PyPI](https://pypi.org/project/lionagi/) | [Documentation](https://lionagi.readthedocs.io/en/latest/) | [Discord](https://discord.gg/mzDD5JtYRp)


  # LionAGI

{lionagi-0.0.114 → lionagi-0.0.116}/lionagi/__init__.py

@@ -17,13 +17,16 @@ Copyright 2023 HaiyangLi <ocean@lionagi.ai>
  import logging
  from .version import __version__

+
  from .utils import *
  from .schema import *
- from .structure import *
- from .core import *
+ from .structures import *
+ from .loaders import *
+ from .messages import *
  from .objs import *
- # from .datastore import *
- # from .structure import *
+ from .tools import *
+ from .core import *
+


  logger = logging.getLogger(__name__)
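
After this change the package's top level star-imports the renamed subpackages (structures, loaders, messages, tools) in addition to core, so the classes introduced later in this diff become importable directly from lionagi. A minimal sketch, assuming those star-imports actually re-export the names shown below:

    # Illustrative only: relies on the star-imports above exposing these symbols.
    from lionagi import Session, Conversation, run_session
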
lionagi-0.0.116/lionagi/bridge/__init__.py (new file)

@@ -0,0 +1,22 @@
+ from .langchain import(
+     from_langchain, to_langchain_document, langchain_loader,
+     langchain_loader, langchain_text_splitter
+ )
+
+ from .llama_index import (
+     from_llama_index, to_llama_index_textnode, get_llama_reader,
+     llama_index_reader, get_llama_parser, llama_index_node_parser
+ )
+
+ __all__ = [
+     'from_langchain',
+     'to_langchain_document',
+     'langchain_loader',
+     'from_llama_index',
+     'to_llama_index_textnode',
+     'get_llama_reader',
+     'llama_index_reader',
+     'get_llama_parser',
+     'llama_index_node_parser',
+     'langchain_text_splitter'
+ ]

{lionagi-0.0.114 → lionagi-0.0.116}/lionagi/bridge/langchain.py

@@ -1,7 +1,8 @@
- from typing import Union, Callable, List, Dict, Any
- from ..schema.base_schema import T, DataNode
- from ..utils.sys_util import change_dict_key
+ from typing import Union, Callable, List, Dict, Any, TypeVar
+ from lionagi.schema.data_node import DataNode
+ from lionagi.utils.sys_util import change_dict_key

+ T = TypeVar('T', bound='DataNode')

  def from_langchain(lc_doc: Any) -> T:
      """

@@ -79,6 +80,25 @@ def langchain_text_splitter(data: Union[str, List],
                              splitter_args: List[Any] = [],
                              splitter_kwargs: Dict[str, Any] = {}) -> List[str]:

+     """
+     Splits text or a list of documents using a specified langchain text splitter.
+
+     Parameters:
+         data (Union[str, List]): The input text or list of documents to be split.
+
+         splitter (Union[str, Callable]): The name of the text splitter function or the function itself.
+
+         splitter_args (List[Any]): Positional arguments to pass to the splitter function.
+
+         splitter_kwargs (Dict[str, Any]): Keyword arguments to pass to the splitter function.
+
+     Returns:
+         List[str]: A list of chunks obtained by splitting the input.
+
+     Raises:
+         ValueError: If the specified text splitter is invalid or if the splitting fails.
+     """
+
      import langchain.text_splitter as text_splitter

      try:
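
For reference, a hedged sketch of calling the documented signature above. The splitter name and the chunk_size/chunk_overlap options belong to langchain itself and are assumptions here; how langchain_text_splitter applies splitter_kwargs internally is not shown in this diff:

    # Sketch based on the docstring above; not verified against the 0.0.116 wheel.
    from lionagi.bridge.langchain import langchain_text_splitter

    chunks = langchain_text_splitter(
        "a long document ...",
        splitter="RecursiveCharacterTextSplitter",                  # resolved against langchain.text_splitter
        splitter_kwargs={"chunk_size": 512, "chunk_overlap": 32},   # assumed splitter options
    )
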
{lionagi-0.0.114 → lionagi-0.0.116}/lionagi/bridge/llama_index.py

@@ -1,7 +1,8 @@
- from typing import Union, Callable, List, Any, Dict
- from ..schema.base_schema import DataNode, T
- from ..utils.sys_util import change_dict_key
+ from typing import Union, Callable, List, Any, Dict, TypeVar
+ from lionagi.schema.data_node import DataNode
+ from lionagi.utils.sys_util import change_dict_key

+ T = TypeVar('T', bound='DataNode')

  def from_llama_index(llama_node: Any, **kwargs: Any) -> T:
      """

@@ -36,6 +37,7 @@ def to_llama_index_textnode(datanode: T, **kwargs: Any) -> Any:
      dnode = datanode.to_dict()
      change_dict_key(dnode, old_key='content', new_key='text')
      change_dict_key(dnode, old_key='node_id', new_key='id_')
+     dnode['text'] = str(dnode['text'])

      dnode = {**dnode, **kwargs}
      return TextNode.from_dict(dnode)
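
The one-line addition above coerces the node content to a string before the llama_index TextNode is built, so non-string content no longer breaks the conversion. A hedged sketch of the round trip; DataNode's constructor fields are assumed from the key renames above, not taken from its own source:

    # Sketch only: requires llama_index to be installed.
    from lionagi.schema.data_node import DataNode
    from lionagi.bridge.llama_index import to_llama_index_textnode

    node = DataNode(content={"summary": "dict content is now stringified"})
    text_node = to_llama_index_textnode(node)
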
{lionagi-0.0.114 → lionagi-0.0.116}/lionagi/configs/__init__.py

@@ -1,5 +1,5 @@
  from .oai_configs import oai_schema
- from .openrouter_config import openrouter_schema
+ from .openrouter_configs import openrouter_schema

  __all__ = [
      "oai_schema",

lionagi-0.0.116/lionagi/configs/oai_configs.py (new file)

@@ -0,0 +1,136 @@
+ # Default configs for the OpenAI API
+
+ # ChatCompletion
+ oai_chat_llmconfig = {
+     "model": "gpt-4-1106-preview",
+     "frequency_penalty": 0,
+     "max_tokens": None,
+     "n": 1,
+     "presence_penalty": 0,
+     "response_format": {"type": "text"},
+     "seed": None,
+     "stop": None,
+     "stream": False,
+     "temperature": 0.7,
+     "top_p": 1,
+     "tools": None,
+     "tool_choice": "none",
+     "user": None
+ }
+
+ oai_chat_schema = {
+     "required" : ["model", "frequency_penalty", "n", "presence_penalty", "response_format", "temperature", "top_p"],
+     "optional": ["seed", "stop", "stream", "tools", "tool_choice", "user", "max_tokens"],
+     "input": "messages",
+     "config": oai_chat_llmconfig
+ }
+
+ # Finetune
+ oai_finetune_llmconfig = {
+     "model": "gpt-3.5-turbo",
+     "hyperparameters": {
+         "batch_size": "auto",
+         "learning_rate_multiplier": "auto",
+         "n_epochs": "auto"
+     },
+     "suffix": None,
+     "training_file": None,
+ }
+
+ oai_finetune_schema = {
+     "required" : ["model", "training_file"],
+     "optional": ["hyperparameters", "suffix", "validate_file"],
+     "input": ["training_file"],
+     "config": oai_finetune_llmconfig
+ }
+
+ # Embeddings
+
+ # Audio ---- create speech
+ oai_audio_speech_llmconfig = {
+     "model": "tts-1",
+     "voice": "alloy",
+     "response_format": "mp3",
+     "speed": 1
+ }
+ oai_audio_speech_schema = {
+     "required" : ["model", "voice"],
+     "optional": ["response_format", "speed"],
+     "input": "input",
+     "config": oai_audio_speech_llmconfig
+ }
+
+ # Audio ----------- create transcription
+ oai_audio_transcriptions_llmconfig = {
+     "model": "whisper-1",
+     "language": None,
+     "prompt": None,
+     "response_format": "json",
+     "temperature": 0
+ }
+ oai_audio_transcriptions_schema = {
+     "required" : ["model", "voice"],
+     "optional": ["response_format", "language", "prompt", "response_format", "temperature"],
+     "input": "file",
+     "config": oai_audio_transcriptions_llmconfig
+ }
+
+ # Audio ------------ translations
+ oai_audio_translations_llmconfig = {
+     "model": "whisper-1",
+     "prompt": None,
+     "response_format": "json",
+     "temperature": 0
+ }
+
+ oai_audio_translations_schema = {
+     "required" : ["model"],
+     "optional": ["response_format", "speed", "prompt", "temperature"],
+     "input": "file",
+     "config": oai_audio_translations_llmconfig
+ }
+
+ # images
+
+ oai_schema = {
+     "chat": oai_chat_schema,
+     "finetune": oai_finetune_schema,
+     "audio_speech": oai_audio_speech_schema,
+     "audio_transcriptions": oai_audio_transcriptions_schema,
+     "audio_translations": oai_audio_translations_schema,
+ }
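
Each endpoint entry in the new file shares the same shape: required and optional parameter lists, an input field name, and a config dict of defaults. A small sketch of how a caller might read that registry; only the dictionary layout comes from the file above, the helper itself is hypothetical:

    # Sketch: reads the schema defined above; payload_defaults is not part of lionagi.
    from lionagi.configs.oai_configs import oai_schema

    def payload_defaults(endpoint: str) -> dict:
        entry = oai_schema[endpoint]                    # e.g. "chat", "audio_speech"
        config = dict(entry["config"])                  # default parameter values
        missing = [k for k in entry["required"] if config.get(k) is None]
        return {"defaults": config, "unset_required": missing, "input_field": entry["input"]}

    print(payload_defaults("chat")["input_field"])      # -> "messages"
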
lionagi-0.0.116/lionagi/core/__init__.py (new file)

@@ -0,0 +1,12 @@
+ # from .instruction_set import InstructionSet
+ from .conversations import Conversation
+ from .sessions import Session
+ from .flows import run_session #, Flow
+
+
+ __all__ = [
+     "Conversation",
+     "Session",
+     "run_session",
+     # "Flow"
+ ]

lionagi-0.0.116/lionagi/core/conversations/__init__.py (new file)

@@ -0,0 +1,5 @@
+ from .conversation import Conversation
+
+ __all__ = [
+     "Conversation"
+ ]

lionagi-0.0.116/lionagi/core/conversations/conversation.py (new file)

@@ -0,0 +1,107 @@
+ from typing import List, Any
+
+ from lionagi.schema.base_node import BaseNode
+ from lionagi.messages import Message, Response
+ from lionagi.objs.messenger import Messenger
+
+
+ class Conversation(BaseNode):
+     """
+     A conversation that handles messages and responses.
+
+     Attributes:
+         response_counts (int): A counter for the number of responses in the conversation.
+         messages (List[Message]): A list of message objects in the conversation.
+         msgr (Messenger): An instance of Messenger to create message objects.
+         responses (List[Response]): A list of response objects in the conversation.
+     """
+
+     response_counts: int = 0
+     messages: List[Message] = []
+     msgr: Any = Messenger()
+     responses: List[Response] = []
+
+     def initiate_conversation(
+         self, system=None, instruction=None,
+         context=None, name=None
+     ):
+         """
+         Initiates a new conversation, erasing any previous messages and responses.
+
+         Parameters:
+             system (Any, optional): System information to include in the initial message. Defaults to None.
+             instruction (Any, optional): Instruction details to include in the conversation. Defaults to None.
+             context (Any, optional): Contextual information relevant to the conversation. Defaults to None.
+             name (str, optional): The name associated with the conversation. Defaults to None.
+
+         Returns:
+             None
+         """
+         self.messages, self.responses = [], []
+         self.add_messages(system=system)
+         self.add_messages(instruction=instruction, context=context, name=name)
+
+     # modify the message adding to accommodate tools
+     def add_messages(
+         self, system=None, instruction=None,
+         context=None, response=None, name=None
+     ):
+         """
+         Adds a new message object to the conversation messages list based on the provided parameters.
+
+         Parameters:
+             system (Any, optional): System information to include in the message. Defaults to None.
+             instruction (Any, optional): Instruction details to include in the message. Defaults to None.
+             context (Any, optional): Contextual information relevant to the message. Defaults to None.
+             response (Any, optional): Response details to include in the message. Defaults to None.
+             name (str, optional): The name associated with the message. Defaults to None.
+
+         Returns:
+             None
+         """
+         msg = self.msgr.create_message(
+             system=system, instruction=instruction,
+             context=context, response=response, name=name
+         )
+         self.messages.append(msg)
+
+     def change_system(self, system):
+         """
+         Changes the system information of the first message in the conversation.
+
+         Parameters:
+             system (Any): The new system information to be set.
+
+         Returns:
+             None
+         """
+         self.messages[0] = self.msgr.create_message(system=system)
+
+     def keep_last_n_exchanges(self, n: int):
+         """
+         Keeps only the last n exchanges in the conversation, trimming older messages.
+         An exchange is a sequence of messages starting with a user message.
+         The first message in the conversation, typically a system message, is always retained.
+
+         Parameters:
+             n (int): The number of exchanges to keep in the conversation.
+
+         Returns:
+             None: The method modifies the conversation in place and does not return a value.
+
+         Raises:
+             ValueError: If n is not a positive integer.
+
+         Note:
+             This function assumes the first message in the conversation is a system message and each
+             user message marks the beginning of a new exchange.
+         """
+         response_indices = [
+             index for index, message in enumerate(self.messages[1:])
+             if message.role == "user"
+         ]
+         if len(response_indices) >= n:
+             first_index_to_keep = response_indices[-n] + 1
+             self.messages = [self.system] + self.messages[first_index_to_keep:]
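
A brief usage sketch of the Conversation API as it appears above. The message contents are placeholders, and the accepted shapes for instruction, context, and response are assumed from the calls shown rather than from Messenger's own source:

    # Sketch based solely on the methods defined above.
    from lionagi.core.conversations import Conversation

    conv = Conversation()
    conv.initiate_conversation(
        system="You are a helpful assistant",
        instruction="Summarize the attached notes",
        context={"notes": "..."},
        name="user",
    )
    conv.add_messages(response={"content": "Here is the summary ..."})
    conv.keep_last_n_exchanges(1)   # keeps the system message plus the latest user exchange
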
lionagi-0.0.116/lionagi/core/flows/__init__.py (new file)

@@ -0,0 +1,8 @@
+ from .flow_util import run_session
+ from .flow import Flow
+
+
+ __all__ = [
+     "run_session",
+     "Flow"
+ ]

lionagi-0.0.116/lionagi/core/flows/flow.py (new file)

@@ -0,0 +1,8 @@
+ from lionagi.structures import Relationship, Structure
+ from ..sessions import Session
+ from .flow_util import run_session
+
+ # represents structured sessions
+
+ class Flow(Structure):
+     ...

lionagi-0.0.116/lionagi/core/flows/flow_util.py (new file)

@@ -0,0 +1,62 @@
+ from ..sessions import Session
+
+ def get_config(temperature, max_tokens, key_scheme, n):
+     f = lambda i: {
+         "temperature": temperature[i],
+         "max_tokens": max_tokens[i],
+     }
+     return {
+         "key": f"{key_scheme}{n+1}",
+         "config": f(n)
+     }
+
+ async def run_workflow(
+     session, prompts, temperature, max_tokens,
+     key_scheme, num_prompts, context
+ ):
+     for i in range(num_prompts):
+         key_, config_ = get_config(temperature, max_tokens, key_scheme, i)
+         if i == 0:
+             await session.initiate(instruction=prompts[key_], context=context, **config_)
+         else:
+             await session.followup(instruction=prompts[key_], **config_)
+
+     return session
+
+ async def run_auto_workflow(
+     session, prompts, temperature, max_tokens,
+     key_scheme, num_prompts, context
+ ):
+     for i in range(num_prompts):
+         key_, config_ = get_config(temperature, max_tokens, key_scheme, i)
+         if i == 0:
+             await session.initiate(instruction=prompts[key_], context=context, **config_)
+         else:
+             await session.auto_followup(instruction=prompts[key_], **config_)
+
+     return session
+
+ async def run_session(
+     prompts, dir, llmconfig, key_scheme, num_prompts,
+     temperature, max_tokens, type_=None, tools=None
+ ):
+     prompts_ = prompts.copy()
+     session = Session(
+         system=prompts_.pop('system', 'You are a helpful assistant'),
+         dir=dir,
+         llmconfig=llmconfig
+     )
+     if tools:
+         session.register_tools(tools)
+     if type_ is None:
+         session = await run_workflow(
+             session, prompts_, temperature, max_tokens,
+             key_scheme=key_scheme, num_prompts=num_prompts
+         )
+     elif type_ == 'auto':
+         session = await run_auto_workflow(
+             session, prompts_, temperature, max_tokens,
+             key_scheme=key_scheme, num_prompts=num_prompts
+         )
+
+     return session
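
run_session drives a numbered sequence of prompts through a Session, with prompt keys built as key_scheme plus a 1-based index (see get_config above). The sketch below shows the intended interface only: as committed, run_workflow is invoked without its context argument, so this call is illustrative rather than a verified run. The llmconfig value reuses oai_chat_llmconfig from the new configs module, and an OpenAI API key is assumed to be configured:

    # Interface sketch based on the signatures above; not a verified run of 0.0.116.
    import asyncio
    from lionagi.core.flows.flow_util import run_session
    from lionagi.configs.oai_configs import oai_chat_llmconfig

    prompts = {
        "system": "You are a careful analyst.",
        "step1": "List the key claims in the text.",
        "step2": "Rank those claims by supporting evidence.",
    }

    session = asyncio.run(run_session(
        prompts, dir="data/logs/", llmconfig=oai_chat_llmconfig,
        key_scheme="step", num_prompts=2,
        temperature=[0.3, 0.7], max_tokens=[512, 512],
    ))
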
lionagi-0.0.116/lionagi/core/instruction_set/__init__.py (new file)

@@ -0,0 +1,5 @@
+ from .instruction_sets import InstructionSet
+
+ __all__ = [
+     "InstructionSet"
+ ]

lionagi-0.0.116/lionagi/core/instruction_set/instruction_sets.py (new file)

@@ -0,0 +1,7 @@
+ from lionagi.structures import Relationship, Structure
+ from ..messages import Message, Instruction
+
+ # dynamically structured preconfigured instructions
+
+ class InstructionSet(Structure):
+     ...

lionagi-0.0.116/lionagi/core/sessions/__init__.py (new file)

@@ -0,0 +1,5 @@
+ from .sessions import Session
+
+ __all__ = [
+     "Session"
+ ]