lionagi 0.0.114__py3-none-any.whl → 0.0.116__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (99) hide show
  1. lionagi/__init__.py +7 -4
  2. lionagi/bridge/__init__.py +19 -4
  3. lionagi/bridge/langchain.py +23 -3
  4. lionagi/bridge/llama_index.py +5 -3
  5. lionagi/configs/__init__.py +1 -1
  6. lionagi/configs/oai_configs.py +88 -1
  7. lionagi/core/__init__.py +6 -9
  8. lionagi/core/conversations/__init__.py +5 -0
  9. lionagi/core/conversations/conversation.py +107 -0
  10. lionagi/core/flows/__init__.py +8 -0
  11. lionagi/core/flows/flow.py +8 -0
  12. lionagi/core/flows/flow_util.py +62 -0
  13. lionagi/core/instruction_set/__init__.py +5 -0
  14. lionagi/core/instruction_set/instruction_sets.py +7 -0
  15. lionagi/core/sessions/__init__.py +5 -0
  16. lionagi/core/sessions/sessions.py +187 -0
  17. lionagi/endpoints/__init__.py +5 -0
  18. lionagi/endpoints/assistants.py +0 -0
  19. lionagi/endpoints/audio.py +17 -0
  20. lionagi/endpoints/chatcompletion.py +54 -0
  21. lionagi/endpoints/embeddings.py +0 -0
  22. lionagi/endpoints/finetune.py +0 -0
  23. lionagi/endpoints/image.py +0 -0
  24. lionagi/endpoints/moderation.py +0 -0
  25. lionagi/endpoints/vision.py +0 -0
  26. lionagi/{loader → loaders}/__init__.py +7 -1
  27. lionagi/{loader → loaders}/chunker.py +6 -12
  28. lionagi/{utils/load_utils.py → loaders/load_util.py} +47 -6
  29. lionagi/{loader → loaders}/reader.py +4 -12
  30. lionagi/messages/__init__.py +11 -0
  31. lionagi/messages/instruction.py +15 -0
  32. lionagi/messages/message.py +110 -0
  33. lionagi/messages/response.py +33 -0
  34. lionagi/messages/system.py +12 -0
  35. lionagi/objs/__init__.py +10 -6
  36. lionagi/objs/abc_objs.py +39 -0
  37. lionagi/objs/async_queue.py +135 -0
  38. lionagi/objs/messenger.py +70 -148
  39. lionagi/objs/status_tracker.py +37 -0
  40. lionagi/objs/{tool_registry.py → tool_manager.py} +8 -6
  41. lionagi/schema/__init__.py +3 -3
  42. lionagi/schema/base_node.py +251 -0
  43. lionagi/schema/base_tool.py +8 -3
  44. lionagi/schema/data_logger.py +2 -3
  45. lionagi/schema/data_node.py +37 -0
  46. lionagi/services/__init__.py +1 -4
  47. lionagi/services/base_api_service.py +15 -5
  48. lionagi/services/oai.py +2 -2
  49. lionagi/services/openrouter.py +2 -3
  50. lionagi/structures/graph.py +96 -0
  51. lionagi/{structure → structures}/relationship.py +10 -2
  52. lionagi/structures/structure.py +102 -0
  53. lionagi/tests/test_api_util.py +46 -0
  54. lionagi/tests/test_call_util.py +115 -0
  55. lionagi/tests/test_convert_util.py +202 -0
  56. lionagi/tests/test_encrypt_util.py +33 -0
  57. lionagi/tests/{test_flatten_util.py → test_flat_util.py} +1 -1
  58. lionagi/tests/test_io_util.py +0 -0
  59. lionagi/tests/test_sys_util.py +0 -0
  60. lionagi/tools/__init__.py +5 -0
  61. lionagi/tools/tool_util.py +7 -0
  62. lionagi/utils/__init__.py +55 -35
  63. lionagi/utils/api_util.py +19 -17
  64. lionagi/utils/call_util.py +2 -1
  65. lionagi/utils/convert_util.py +229 -0
  66. lionagi/utils/encrypt_util.py +16 -0
  67. lionagi/utils/flat_util.py +38 -0
  68. lionagi/utils/io_util.py +2 -2
  69. lionagi/utils/sys_util.py +45 -10
  70. lionagi/version.py +1 -1
  71. {lionagi-0.0.114.dist-info → lionagi-0.0.116.dist-info}/METADATA +2 -2
  72. lionagi-0.0.116.dist-info/RECORD +110 -0
  73. lionagi/core/conversations.py +0 -108
  74. lionagi/core/flows.py +0 -1
  75. lionagi/core/instruction_sets.py +0 -1
  76. lionagi/core/messages.py +0 -166
  77. lionagi/core/sessions.py +0 -297
  78. lionagi/schema/base_schema.py +0 -252
  79. lionagi/services/chatcompletion.py +0 -48
  80. lionagi/services/service_objs.py +0 -282
  81. lionagi/structure/structure.py +0 -160
  82. lionagi/tools/coder.py +0 -1
  83. lionagi/tools/sandbox.py +0 -1
  84. lionagi/utils/tool_util.py +0 -92
  85. lionagi/utils/type_util.py +0 -81
  86. lionagi-0.0.114.dist-info/RECORD +0 -84
  87. /lionagi/configs/{openrouter_config.py → openrouter_configs.py} +0 -0
  88. /lionagi/{datastore → datastores}/__init__.py +0 -0
  89. /lionagi/{datastore → datastores}/chroma.py +0 -0
  90. /lionagi/{datastore → datastores}/deeplake.py +0 -0
  91. /lionagi/{datastore → datastores}/elasticsearch.py +0 -0
  92. /lionagi/{datastore → datastores}/lantern.py +0 -0
  93. /lionagi/{datastore → datastores}/pinecone.py +0 -0
  94. /lionagi/{datastore → datastores}/postgres.py +0 -0
  95. /lionagi/{datastore → datastores}/qdrant.py +0 -0
  96. /lionagi/{structure → structures}/__init__.py +0 -0
  97. {lionagi-0.0.114.dist-info → lionagi-0.0.116.dist-info}/LICENSE +0 -0
  98. {lionagi-0.0.114.dist-info → lionagi-0.0.116.dist-info}/WHEEL +0 -0
  99. {lionagi-0.0.114.dist-info → lionagi-0.0.116.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,54 @@
1
+ from lionagi.objs.abc_objs import BaseEndpoint
2
+
3
+
4
class ChatCompletion(BaseEndpoint):
    """
    Represents an endpoint for chat completions in an API.

    Builds request payloads from a message list, an LLM configuration, and a
    schema describing which configuration keys are required or optional.

    Attributes:
        endpoint (str): The API endpoint path for chat completions.
    """
    endpoint: str = "chat/completions"

    @classmethod
    def create_payload(cls, messages, llmconfig, schema, **kwargs):
        """
        Create a payload for a chat completion request.

        Required parameters are taken from the merged configuration
        ('llmconfig' updated by 'kwargs'); optional parameters are included
        only when present, truthy, and not the literal string "none".

        Parameters:
            messages (list): Message objects to include in the payload.
            llmconfig (dict): Configuration settings for the language model.
            schema (dict): Maps 'required' and 'optional' to lists of
                parameter names to pull from the merged configuration.
            **kwargs: Overrides or additions applied on top of 'llmconfig'.

        Returns:
            dict: The request payload; always contains 'messages'.

        Raises:
            KeyError: If a required schema key is missing from the merged
                configuration.

        Example:
            payload = ChatCompletion.create_payload(
                messages=[{"text": "Hello, world!"}],
                llmconfig={"max_tokens": 100},
                schema={"required": ["max_tokens"], "optional": ["temperature"]}
            )
        """
        # kwargs take precedence over llmconfig.
        config = {**llmconfig, **kwargs}
        payload = {"messages": messages}
        for key in schema['required']:
            payload[key] = config[key]

        for key in schema['optional']:
            # .get avoids a KeyError when an optional key is absent from the
            # config entirely (the original indexed unconditionally).
            value = config.get(key)
            if value and str(value).lower() != "none":
                payload[key] = value
        return payload

    # def process_response(self, session, payload, completion):
    #     ...
+
File without changes
File without changes
File without changes
File without changes
File without changes
@@ -1,3 +1,4 @@
1
+ from .load_util import dir_to_path, dir_to_nodes, chunk_text, read_text, file_to_chunks
1
2
  from .reader import load, ReaderType, text_reader
2
3
  from .chunker import chunk, datanodes_convert, ChunkerType, text_chunker
3
4
 
@@ -8,5 +9,10 @@ __all__ = [
8
9
  'text_reader',
9
10
  'text_chunker',
10
11
  'ReaderType',
11
- 'ChunkerType'
12
+ 'ChunkerType',
13
+ 'dir_to_path',
14
+ 'dir_to_nodes',
15
+ 'chunk_text',
16
+ 'read_text',
17
+ 'file_to_chunks'
12
18
  ]
@@ -1,17 +1,11 @@
1
- from enum import Enum
1
+ # use utils, schema and bridge
2
2
  from typing import Union, Callable
3
3
 
4
- from ..bridge.langchain import langchain_text_splitter, from_langchain
5
- from ..bridge.llama_index import llama_index_node_parser, from_llama_index
6
- from ..schema.base_schema import DataNode
7
- from ..utils import lcall, file_to_chunks
8
-
9
- # define an enum to represent different types of chunkers
10
- class ChunkerType(str, Enum):
11
- PLAIN = 'plain' # default
12
- LANGCHAIN = 'langchain' # using langchain functions
13
- LLAMAINDEX = 'llama_index' # using llamaindex functions
14
- SELFDEFINED = 'self_defined' # create custom functions
4
+ from lionagi.utils import lcall
5
+ from lionagi.schema import DataNode
6
+ from lionagi.bridge import langchain_text_splitter, from_langchain, llama_index_node_parser, from_llama_index
7
+ from .load_util import ChunkerType, file_to_chunks
8
+
15
9
 
16
10
  # Function to convert documents to a specific format based on the chunker type
17
11
  def datanodes_convert(documents, chunker_type):
@@ -1,16 +1,30 @@
1
+ # use utils and schema
1
2
  import math
3
+ from enum import Enum
2
4
  from pathlib import Path
3
5
  from typing import List, Union, Dict, Any, Tuple
4
6
 
5
- from .type_util import to_list
6
- from .call_util import lcall
7
- from .io_util import to_csv
8
- from ..schema.base_schema import DataNode
7
+ from lionagi.utils import to_list, lcall
8
+ from lionagi.schema import DataNode
9
9
 
10
+ class ReaderType(str, Enum):
11
+ PLAIN = 'plain'
12
+ LANGCHAIN = 'langchain'
13
+ LLAMAINDEX = 'llama_index'
14
+ SELFDEFINED = 'self_defined'
10
15
 
16
+
17
class ChunkerType(str, Enum):
    """Enumerates the supported chunker backends for splitting documents."""
    PLAIN = 'plain'              # default
    LANGCHAIN = 'langchain'      # using langchain functions
    LLAMAINDEX = 'llama_index'   # using llamaindex functions
    SELFDEFINED = 'self_defined' # create custom functions
22
+
23
+
11
24
  def dir_to_path(
12
25
  dir: str, ext: str, recursive: bool = False,
13
- flatten: bool = True) -> List[Path]:
26
+ flatten: bool = True
27
+ ) -> List[Path]:
14
28
  """
15
29
  Generates a list of file paths from a directory with the given file extension.
16
30
 
@@ -39,7 +53,32 @@ def dir_to_path(
39
53
  except:
40
54
  raise ValueError("Invalid directory or extension, please check the path")
41
55
 
42
- def dir_to_nodes(dir: str, ext, recursive: bool = False, flatten: bool = True, clean_text: bool = True):
56
+ def dir_to_nodes(
57
+ dir: str, ext: Union[List[str], str],
58
+ recursive: bool = False, flatten: bool = True,
59
+ clean_text: bool = True
60
+ ) -> List[DataNode]:
61
+ """
62
+ Converts directory contents into DataNode objects based on specified file extensions.
63
+
64
+ This function first retrieves a list of file paths from the specified directory, matching the given file extension. It then reads the content of these files, optionally cleaning the text, and converts each file's content into a DataNode object.
65
+
66
+ Parameters:
67
+ dir (str): The directory path from which to read files.
68
+ ext: The file extension(s) to include. Can be a single string or a list/tuple of strings.
69
+ recursive (bool, optional): If True, the function searches for files recursively in subdirectories. Defaults to False.
70
+ flatten (bool, optional): If True, flattens the directory structure in the returned paths. Defaults to True.
71
+ clean_text (bool, optional): If True, cleans the text read from files. Defaults to True.
72
+
73
+ Returns:
74
+ list: A list of DataNode objects created from the files in the specified directory.
75
+
76
+ Example:
77
+ nodes = dir_to_nodes("/path/to/dir", ".txt", recursive=True)
78
+ # This would read all .txt files in /path/to/dir and its subdirectories,
79
+ # converting them into DataNode objects.
80
+ """
81
+
43
82
  path_list = dir_to_path(dir, ext, recursive, flatten)
44
83
  files_info = lcall(path_list, read_text, clean=clean_text)
45
84
  nodes = lcall(files_info, lambda x: DataNode(content=x[0], metadata=x[1]))
@@ -186,6 +225,8 @@ def _file_to_chunks(input: Dict[str, Any],
186
225
  except Exception as e:
187
226
  raise ValueError(f"An error occurred while chunking the file. {e}")
188
227
 
228
+
229
+ # TODO: finish documenting and cleaning up file_to_chunks
189
230
  def file_to_chunks(input,
190
231
  # project='project',
191
232
  # output_dir='data/logs/sources/',
@@ -1,17 +1,9 @@
1
- from enum import Enum
1
+ # use utils, schema, and bridge
2
2
  from typing import Union, Callable
3
3
 
4
- from lionagi.bridge.langchain import langchain_loader, from_langchain
5
- from lionagi.bridge.llama_index import llama_index_reader, from_llama_index
6
- from lionagi.utils.call_util import lcall
7
- from lionagi.utils.load_utils import dir_to_nodes
8
-
9
-
10
- class ReaderType(str, Enum):
11
- PLAIN = 'PLAIN'
12
- LANGCHAIN = 'langchain'
13
- LLAMAINDEX = 'llama_index'
14
- SELFDEFINED = 'self_defined'
4
+ from lionagi.utils import lcall
5
+ from lionagi.bridge import langchain_loader, from_langchain, llama_index_reader, from_llama_index
6
+ from .load_util import dir_to_nodes, ReaderType
15
7
 
16
8
 
17
9
  def _datanode_parser(nodes, parser):
@@ -0,0 +1,11 @@
1
+ from .message import Message
2
+ from .system import System
3
+ from .instruction import Instruction
4
+ from .response import Response
5
+
6
+ __all__ = [
7
+ "Message",
8
+ "System",
9
+ "Instruction",
10
+ "Response"
11
+ ]
@@ -0,0 +1,15 @@
1
+ from typing import Any, Optional
2
+ from .message import Message
3
+
4
class Instruction(Message):
    """A user-role message carrying an instruction and optional extra context."""

    def _create_message(self, instruction: Any, context=None, name: Optional[str] = None) -> None:
        # Delegate role/content/name setup to the Message base class.
        self._create_roled_message(
            role_="user",
            content_key="instruction",
            content=instruction,
            name=name,
        )
        # When context is provided, store it alongside the instruction.
        if context:
            self.content["context"] = context
@@ -0,0 +1,110 @@
1
+ import json
2
+ from typing import Any, Optional
3
+ from lionagi.schema import BaseNode
4
+
5
class Message(BaseNode):
    """
    Represents a message within a communication system, extending from BaseNode.

    Encapsulates a message's role, content, and name, and provides helpers to
    build and render the message.

    Attributes:
        role (Optional[str]): The role associated with the message (e.g., 'user', 'system'). Defaults to None.
        name (Optional[str]): The name associated with the message, often reflecting the role. Defaults to None.

    Properties:
        message: The message as a dictionary including role and content.
        message_content: Only the content part of the message.
    """

    role: Optional[str] = None
    name: Optional[str] = None

    @property
    def message(self):
        """
        Return the message as a dictionary with 'role' and 'content' keys.
        """
        return self._to_message()

    @property
    def message_content(self):
        """
        Return only the content part of the rendered message.
        """
        return self.message['content']

    def _to_message(self):
        """
        Convert the message into a dictionary format.

        Returns:
            dict: A dictionary with 'role' and 'content' keys; dict content
                is JSON-serialized.
        """
        out = {
            "role": self.role,
            "content": json.dumps(self.content) if isinstance(self.content, dict) else self.content
        }
        return out

    def _create_roled_message(
        self, role_: str, content: Any, content_key: str,
        name: Optional[str] = None
    ) -> None:
        """
        Set the role, content, and name of the message.

        Parameters:
            role_ (str): The role to assign to the message.
            content (Any): The content of the message.
            content_key (str): The key under which the content is stored.
            name (Optional[str]): The name for the message. Defaults to the role.
        """
        self.role = role_
        self.content = {content_key: content}
        self.name = name or role_

    def get_role(self):
        """
        Return the message's role as a stripped, lowercase string.
        """
        return str(self.role).strip().lower()

    def get_name(self):
        """
        Return the message's name as a stripped, lowercase string.
        """
        return str(self.name).strip().lower()

    def __str__(self):
        """
        Informal string representation of the Message, intended to be readable.
        Includes role, name, and a brief preview of the content.

        Fix: the original truncation condition tested len(self.content), which
        counts dict keys (or raises TypeError for unsized content) rather than
        the length of the rendered preview string.
        """
        text = str(self.content)
        content_preview = text[:75] + '...' if self.content and len(text) > 75 else text
        return f"Message(role={self.role}, name={self.name}, content='{content_preview}')"
@@ -0,0 +1,33 @@
1
+ from typing import Any, Optional
2
+ from .message import Message
3
+
4
class Response(Message):
    """
    An assistant-role message built from an API response.

    Handles both regular text replies and function/tool-call responses; falls
    back to storing the raw response when it lacks the expected structure.
    """

    def _create_message(self, response: Any, name: Optional[str] = None) -> None:
        """
        Populate role, name, and content from an API response object.

        Parameters:
            response (Any): The raw response; expected to contain a 'message'
                mapping with either 'content' or 'tool_calls'.
            name (Optional[str]): Override for the message name.

        Raises:
            ValueError: If 'content' is "None" but the tool-call structure is
                malformed.
        """
        self.role = "assistant"
        try:
            response = response["message"]
            if str(response['content']) == "None":
                try:
                    func_list = []
                    for call in response['tool_calls']:
                        if call['type'] == 'function':
                            func_content = {
                                "function": ("func_" + call['function']['name']),
                                "arguments": call['function']['arguments']
                            }
                            func_list.append(func_content)

                    self.name = name or "func_request"
                    self.content = {'function_list': func_list}
                except (KeyError, TypeError, IndexError):
                    # Narrowed from a bare `except:` so unexpected errors are
                    # not masked as a malformed-response condition.
                    raise ValueError("Response message must be one of regular response or function calling")
            else:
                self.content = response['content']
                self.name = name or "assistant"
        except (KeyError, TypeError):
            # Narrowed from a bare `except:`; the original swallowed the
            # ValueError raised above, making it unreachable to callers.
            self.name = name or "func_call"
            self.content = response
@@ -0,0 +1,12 @@
1
+ from typing import Any, Optional
2
+ from .message import Message
3
+
4
class System(Message):
    """A system-role message wrapping system-level content."""

    def _create_message(self, system: Any, name: Optional[str] = None) -> None:
        # Build the message with the fixed "system" role and content key.
        self._create_roled_message(
            content=system,
            content_key="system",
            role_="system",
            name=name,
        )
lionagi/objs/__init__.py CHANGED
@@ -1,7 +1,11 @@
1
- # # from .messenger import Messenger
2
- # from .tool_registry import ToolRegistry
1
+ from .messenger import Messenger
2
+ from .tool_manager import ToolManager
3
+ from .async_queue import AsyncQueue
4
+ from .status_tracker import StatusTracker
3
5
 
4
- # __all__ = [
5
- # 'Messenger',
6
- # 'ToolRegistry'
7
- # ]
6
+ __all__ = [
7
+ 'Messenger',
8
+ 'ToolManager',
9
+ 'AsyncQueue',
10
+ 'StatusTracker'
11
+ ]
@@ -0,0 +1,39 @@
1
+ from abc import abstractmethod, ABC, abstractproperty
2
+ from typing import Any, Dict, NoReturn
3
+
4
+
5
class BaseService(ABC):
    """
    Abstract interface for API services.

    Subclasses must implement construction and an async `serve` entry point.
    """

    @abstractmethod
    def __init__(self) -> None:
        ...

    @abstractmethod
    async def serve(self) -> Any:
        ...
14
+
15
+
16
class RateLimiter(ABC):
    """
    Abstract base for rate limiters tracking request and token capacity.

    Attributes:
        max_requests_per_minute (int): Hard cap on requests per minute.
        max_tokens_per_minute (int): Hard cap on tokens per minute.
        available_request_capacity (int): Remaining request budget.
        available_token_capacity (int): Remaining token budget.
    """

    def __init__(self, max_requests_per_minute: int, max_tokens_per_minute: int) -> None:
        # Start with full capacity; replenisher implementations restore it over time.
        self.max_requests_per_minute = max_requests_per_minute
        self.max_tokens_per_minute = max_tokens_per_minute
        self.available_request_capacity = max_requests_per_minute
        self.available_token_capacity = max_tokens_per_minute

    @abstractmethod
    async def rate_limit_replenisher(self) -> NoReturn:
        ...

    @abstractmethod
    def calculate_num_token(self, payload: Dict[str, Any], api_endpoint: str) -> int:
        ...
31
+
32
+
33
class BaseEndpoint(ABC):
    """
    Abstract base for API endpoints.

    Subclasses must provide an 'endpoint' path (a plain class-attribute
    override satisfies the abstract property) and implement create_payload().
    """

    # abc.abstractproperty is deprecated since Python 3.3; stacking
    # @property + @abstractmethod is the supported equivalent.
    @property
    @abstractmethod
    def endpoint(self) -> str:
        ...

    @abstractmethod
    def create_payload(self, **kwargs):
        ...
@@ -0,0 +1,135 @@
1
+ import asyncio
2
+ from typing import Any, Callable
3
+
4
class AsyncQueue:
    """
    An asyncio-backed queue supporting orderly, stoppable processing.

    Items are enqueued and dequeued asynchronously; an internal stop event
    allows consumers to halt processing gracefully.

    Attributes:
        queue (asyncio.Queue): Holds items awaiting processing.
        _stop_event (asyncio.Event): Signals that processing should stop.
    """

    def __init__(self) -> None:
        """Create an empty queue and an unset stop event."""
        self.queue = asyncio.Queue()
        self._stop_event = asyncio.Event()

    async def enqueue(self, item: Any) -> None:
        """
        Add *item* to the queue for later processing.

        Example:
            >>> async_queue = AsyncQueue()
            >>> asyncio.run(async_queue.enqueue('Task 1'))
        """
        await self.queue.put(item)

    async def dequeue(self) -> Any:
        """
        Remove and return the next item, waiting if the queue is empty.

        Returns:
            Any: The next item from the queue.
        """
        return await self.queue.get()

    async def join(self) -> None:
        """Block until every enqueued item has been processed."""
        await self.queue.join()

    async def stop(self) -> None:
        """Signal that no further items should be processed."""
        self._stop_event.set()

    def stopped(self) -> bool:
        """
        Report whether a stop has been signaled.

        Returns:
            bool: True if stop() has been called, False otherwise.
        """
        return self._stop_event.is_set()

    async def process_requests(self, func: Callable[[Any], Any]) -> None:
        """
        Apply coroutine *func* to each dequeued item.

        Processing ends when the stop event is set or when the sentinel
        value ``None`` is dequeued.

        Parameters:
            func (Callable[[Any], Any]): Coroutine invoked on each item.
        """
        while True:
            if self.stopped():
                break
            item = await self.dequeue()
            if item is None:
                # `None` is the sentinel requesting shutdown.
                await self.stop()
                break
            await func(item)