mojentic 1.0.0__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- _examples/streaming.py +0 -1
- mojentic/llm/chat_session.py +24 -1
- mojentic/llm/chat_session_spec.py +40 -0
- {mojentic-1.0.0.dist-info → mojentic-1.0.1.dist-info}/METADATA +11 -11
- {mojentic-1.0.0.dist-info → mojentic-1.0.1.dist-info}/RECORD +8 -8
- {mojentic-1.0.0.dist-info → mojentic-1.0.1.dist-info}/WHEEL +1 -1
- {mojentic-1.0.0.dist-info → mojentic-1.0.1.dist-info}/licenses/LICENSE.md +0 -0
- {mojentic-1.0.0.dist-info → mojentic-1.0.1.dist-info}/top_level.txt +0 -0
_examples/streaming.py
CHANGED
mojentic/llm/chat_session.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import Iterator, List, Optional
 
 from mojentic.llm import LLMBroker
 from mojentic.llm.gateways.models import LLMMessage, MessageRole
@@ -78,6 +78,29 @@ class ChatSession:
         self.insert_message(LLMMessage(role=MessageRole.Assistant, content=response))
         return response
 
+    def send_stream(self, query) -> Iterator[str]:
+        """
+        Send a query to the LLM and yield response chunks as they arrive. Records the query and
+        the full assembled response in the ongoing chat session after the stream is consumed.
+
+        Parameters
+        ----------
+        query : str
+            The query to send to the LLM.
+
+        Yields
+        ------
+        str
+            Content chunks from the LLM response as they arrive.
+        """
+        self.insert_message(LLMMessage(role=MessageRole.User, content=query))
+        accumulated = []
+        for chunk in self.llm.generate_stream(self.messages, tools=self.tools, temperature=self.temperature):
+            accumulated.append(chunk)
+            yield chunk
+        self._ensure_all_messages_are_sized()
+        self.insert_message(LLMMessage(role=MessageRole.Assistant, content="".join(accumulated)))
+
     def insert_message(self, message: LLMMessage):
         """
         Add a message onto the end of the chat session. If the total token count exceeds the max context, the oldest
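The new ChatSession.send_stream generator records the user query immediately, yields each chunk from LLMBroker.generate_stream as it arrives, and only appends the assembled assistant message to the history once the stream has been fully consumed. A minimal usage sketch follows; the broker construction and model name are assumptions for illustration, not part of this diff:

```python
# Sketch only: LLMBroker's constructor arguments are assumed, not shown in this diff.
from mojentic.llm import LLMBroker
from mojentic.llm.chat_session import ChatSession

llm = LLMBroker(model="llama3.1")   # assumed model name / broker configuration
session = ChatSession(llm)          # assumed: ChatSession takes the broker as its first argument

# Chunks print as they arrive; the full reply is appended to session.messages
# only after the generator is exhausted.
for chunk in session.send_stream("Summarize the mojentic framework in one sentence."):
    print(chunk, end="", flush=True)
print()
```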
mojentic/llm/chat_session_spec.py
CHANGED

@@ -94,6 +94,46 @@ class DescribeChatSession:
         assert chat_session.messages[1].content == "Query message 2"
         assert chat_session.messages[2].content == INTENDED_RESPONSE_MESSAGE
 
+class DescribeStreamingSend:
+
+    def should_yield_content_chunks(self, chat_session):
+        chat_session.llm.generate_stream.return_value = iter(["Hello", " world"])
+
+        chunks = list(chat_session.send_stream("Query message"))
+
+        assert chunks == ["Hello", " world"]
+
+    def should_grow_message_history_after_stream_consumed(self, chat_session):
+        chat_session.llm.generate_stream.return_value = iter(["Response"])
+
+        list(chat_session.send_stream("Query message"))
+
+        assert len(chat_session.messages) == 3
+
+    def should_record_full_assembled_response_in_history(self, chat_session):
+        chat_session.llm.generate_stream.return_value = iter(["Hello", " world"])
+
+        list(chat_session.send_stream("Query message"))
+
+        assert chat_session.messages[2].content == "Hello world"
+
+    def should_record_user_message_in_history(self, chat_session):
+        chat_session.llm.generate_stream.return_value = iter(["Response"])
+
+        list(chat_session.send_stream("Query message"))
+
+        assert chat_session.messages[1].role == MessageRole.User
+        assert chat_session.messages[1].content == "Query message"
+
+    def should_respect_context_capacity(self, chat_session):
+        chat_session.llm.generate_stream.return_value = iter(["Response 1"])
+        list(chat_session.send_stream("Query 1"))
+
+        chat_session.llm.generate_stream.return_value = iter(["Response 2"])
+        list(chat_session.send_stream("Query 2"))
+
+        assert len(chat_session.messages) == 3
+
 class DescribeMessageRoles:
     """
     Specifications for message role handling
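These specs exercise send_stream through a chat_session fixture whose broker is a mock: generate_stream is stubbed with an iterator, the generator is drained with list(), and the history assertions run afterwards. The fixture itself sits outside this diff; below is a hypothetical sketch of the kind of fixture the specs assume, with names and construction being guesses rather than the project's actual conftest:

```python
# Hypothetical fixture sketch; the real fixture is not part of this diff.
from unittest.mock import MagicMock

import pytest

from mojentic.llm.chat_session import ChatSession


@pytest.fixture
def chat_session():
    # Mocked broker: each spec stubs streaming output with
    # chat_session.llm.generate_stream.return_value = iter([...]).
    llm = MagicMock()
    # The specs expect three messages after a single exchange, which suggests the
    # session starts out holding one (system) message before the user query.
    return ChatSession(llm)
```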
{mojentic-1.0.0.dist-info → mojentic-1.0.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mojentic
-Version: 1.0.0
+Version: 1.0.1
 Summary: Mojentic is an agentic framework that aims to provide a simple and flexible way to assemble teams of agents to solve complex problems.
 Author-email: Stacey Vetzal <stacey@vetzal.com>
 Project-URL: Homepage, https://github.com/svetzal/mojentic
@@ -11,28 +11,28 @@ Classifier: Operating System :: OS Independent
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown
 License-File: LICENSE.md
-Requires-Dist: pydantic>=2.
+Requires-Dist: pydantic>=2.12.5
 Requires-Dist: structlog>=25.5.0
 Requires-Dist: numpy>=2.3.5
-Requires-Dist: ollama>=0.1
-Requires-Dist: openai>=2.
-Requires-Dist: anthropic>=0.
+Requires-Dist: ollama>=0.6.1
+Requires-Dist: openai>=2.12.0
+Requires-Dist: anthropic>=0.75.0
 Requires-Dist: tiktoken>=0.12.0
 Requires-Dist: parsedatetime>=2.6
 Requires-Dist: pytz>=2025.2
 Requires-Dist: serpapi>=0.1.5
 Requires-Dist: colorama>=0.4.6
 Provides-Extra: dev
-Requires-Dist: pytest>=9.0.
+Requires-Dist: pytest>=9.0.2; extra == "dev"
 Requires-Dist: pytest-asyncio>=1.3.0; extra == "dev"
 Requires-Dist: pytest-spec>=5.2.0; extra == "dev"
 Requires-Dist: pytest-cov>=7.0.0; extra == "dev"
-Requires-Dist: pytest-mock>=3.
+Requires-Dist: pytest-mock>=3.15.0; extra == "dev"
 Requires-Dist: flake8>=7.3.0; extra == "dev"
-Requires-Dist: bandit>=1.
-Requires-Dist: pip-audit>=2.
-Requires-Dist: mkdocs>=1.
-Requires-Dist: mkdocs-material>=9.
+Requires-Dist: bandit>=1.9.2; extra == "dev"
+Requires-Dist: pip-audit>=2.10.0; extra == "dev"
+Requires-Dist: mkdocs>=1.6.1; extra == "dev"
+Requires-Dist: mkdocs-material>=9.7.0; extra == "dev"
 Requires-Dist: mkdocs-llmstxt>=0.4.0; extra == "dev"
 Requires-Dist: mkdocstrings[python]>=0.21.0; extra == "dev"
 Requires-Dist: griffe-fieldz>=0.3.0; extra == "dev"
{mojentic-1.0.0.dist-info → mojentic-1.0.1.dist-info}/RECORD
CHANGED

@@ -34,7 +34,7 @@ _examples/simple_llm_repl.py,sha256=l3Ul67AttvLrteC4Eikos2562ujxH9RAfJbr2WNkx2A,
 _examples/simple_structured.py,sha256=V5c1RfhHb8ay8q7Xek_l8A2KEaIugKd0QHOePny6cQY,1312
 _examples/simple_tool.py,sha256=Yr4y7q4-GyvvHEiRPFqVWcAe14IFHcatqVw5ypuaNZs,1279
 _examples/solver_chat_session.py,sha256=uVBsqXRmyj3Vh0JrfZeCw5GzgwrL3-CR_xZOVTnYXGU,2015
-_examples/streaming.py,sha256=
+_examples/streaming.py,sha256=7iU0ji_-R6T90UgW47b2eFAcKhq5sAqNJ9kLEdH-m6Y,1450
 _examples/tell_user_example.py,sha256=l3nYYyKh45e4wbMtHONnwmYPPlkSFv5Erh1dEukyUL4,1268
 _examples/tracer_demo.py,sha256=KfHr0mpC3G97tIKo470OVDwYWYE7jRLAj0Bp_gZ1D4M,6984
 _examples/tracer_qt_viewer.py,sha256=AOxupJGzkVyNZ_1LATKFz0s9VN-cdiwJ1waJGtVfUtA,14988
@@ -74,8 +74,8 @@ mojentic/agents/simple_recursive_agent_spec.py,sha256=rcIT2BWjT-sc2WevZ0ts9qi9Fe
 mojentic/context/__init__.py,sha256=RKDcfejikUZMDuFYIfJpmLnoXoRCOCfLjOTiicjq1Yo,80
 mojentic/context/shared_working_memory.py,sha256=Zt9MNGErEkDIUAaHvyhEOiTaEobI9l0MV4Z59lQFBr0,396
 mojentic/llm/__init__.py,sha256=pHWdS6XRdPKhEWv1YpXaD5B5mUPojWM9ncYB-bXI2Qo,484
-mojentic/llm/chat_session.py,sha256=
-mojentic/llm/chat_session_spec.py,sha256=
+mojentic/llm/chat_session.py,sha256=SacT4WLjUuoRpG4puNDdTpinlfEIQI8sC3bs2loFOS8,4909
+mojentic/llm/chat_session_spec.py,sha256=Qek3kFmRYFnuS8vSnrhQ1vnanuatrShCpqUV0ffRi-g,5492
 mojentic/llm/llm_broker.py,sha256=d59MvUBNgVAZbL4T6GUp-tMroTwwmcTJfyOzJSvejAw,16924
 mojentic/llm/llm_broker_spec.py,sha256=N0wSAIakWXn-4cxwG3dPR0MycZNTW-lQl9jWHlchC2w,8662
 mojentic/llm/message_composers.py,sha256=8_5fA-J1I3BZ_0YlgZkQhsn_u7H8yMGEVNYHUPYW1X8,12142
@@ -142,8 +142,8 @@ mojentic/tracer/tracer_system.py,sha256=KPSVIfGVOjSx6Vj_SvrisqJXKT6ddwBc_UCMQC6D
 mojentic/tracer/tracer_system_spec.py,sha256=8hpQlmAWyjUvk7ihy339L0buQ-eH5rluaFvyMl-mSH4,8830
 mojentic/utils/__init__.py,sha256=WvNYbtVeliMZn2sMX53CrOQlQLJBXi4mJNoocG7s_kI,116
 mojentic/utils/formatting.py,sha256=YtXh0aYzLB9GKP8ZD6u1By1OBqPOXUtHirtq0GmHNag,948
-mojentic-1.0.
-mojentic-1.0.
-mojentic-1.0.
-mojentic-1.0.
-mojentic-1.0.
+mojentic-1.0.1.dist-info/licenses/LICENSE.md,sha256=txSgV8n5zY1W3NiF5HHsCwlaW0e8We1cSC6TuJUqxXA,1060
+mojentic-1.0.1.dist-info/METADATA,sha256=-5_SMLRWZTgDjAMo4DNnkdaGyDIjxxtER_4dOFvlpKk,8711
+mojentic-1.0.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+mojentic-1.0.1.dist-info/top_level.txt,sha256=Q-BvPQ8Eu1jnEqK8Xkr6A9C8Xa1z38oPZRHuA5MCTqg,19
+mojentic-1.0.1.dist-info/RECORD,,
File without changes
|
|
File without changes
|