jetflow 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jetflow-1.0.0/LICENSE +21 -0
- jetflow-1.0.0/PKG-INFO +327 -0
- jetflow-1.0.0/README.md +282 -0
- jetflow-1.0.0/jetflow/__init__.py +57 -0
- jetflow-1.0.0/jetflow/__version__.py +3 -0
- jetflow-1.0.0/jetflow/actions/__init__.py +8 -0
- jetflow-1.0.0/jetflow/actions/python_exec/__init__.py +211 -0
- jetflow-1.0.0/jetflow/actions/python_exec/utils.py +104 -0
- jetflow-1.0.0/jetflow/clients/__init__.py +20 -0
- jetflow-1.0.0/jetflow/clients/anthropic/__init__.py +6 -0
- jetflow-1.0.0/jetflow/clients/anthropic/async_.py +346 -0
- jetflow-1.0.0/jetflow/clients/anthropic/sync.py +340 -0
- jetflow-1.0.0/jetflow/clients/base.py +79 -0
- jetflow-1.0.0/jetflow/clients/openai/__init__.py +6 -0
- jetflow-1.0.0/jetflow/clients/openai/async_.py +459 -0
- jetflow-1.0.0/jetflow/clients/openai/sync.py +456 -0
- jetflow-1.0.0/jetflow/core/__init__.py +25 -0
- jetflow-1.0.0/jetflow/core/_action_wrappers.py +280 -0
- jetflow-1.0.0/jetflow/core/action.py +111 -0
- jetflow-1.0.0/jetflow/core/agent/__init__.py +6 -0
- jetflow-1.0.0/jetflow/core/agent/async_.py +442 -0
- jetflow-1.0.0/jetflow/core/agent/sync.py +584 -0
- jetflow-1.0.0/jetflow/core/chain.py +384 -0
- jetflow-1.0.0/jetflow/core/errors.py +1 -0
- jetflow-1.0.0/jetflow/core/events.py +97 -0
- jetflow-1.0.0/jetflow/core/message.py +185 -0
- jetflow-1.0.0/jetflow/core/response.py +63 -0
- jetflow-1.0.0/jetflow/py.typed +0 -0
- jetflow-1.0.0/jetflow/utils/__init__.py +7 -0
- jetflow-1.0.0/jetflow/utils/pricing.py +115 -0
- jetflow-1.0.0/jetflow/utils/usage.py +28 -0
- jetflow-1.0.0/jetflow/utils/verbose_logger.py +130 -0
- jetflow-1.0.0/jetflow.egg-info/PKG-INFO +327 -0
- jetflow-1.0.0/jetflow.egg-info/SOURCES.txt +37 -0
- jetflow-1.0.0/jetflow.egg-info/dependency_links.txt +1 -0
- jetflow-1.0.0/jetflow.egg-info/requires.txt +25 -0
- jetflow-1.0.0/jetflow.egg-info/top_level.txt +1 -0
- jetflow-1.0.0/pyproject.toml +83 -0
- jetflow-1.0.0/setup.cfg +4 -0
jetflow-1.0.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Lucas Astorian
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
jetflow-1.0.0/PKG-INFO
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: jetflow
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: Lightweight, production-ready framework for building agentic workflows with LLMs
|
|
5
|
+
Author-email: Lucas Astorian <lucas@intellifin.ai>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/lucasastorian/jetflow
|
|
8
|
+
Project-URL: Repository, https://github.com/lucasastorian/jetflow
|
|
9
|
+
Project-URL: Issues, https://github.com/lucasastorian/jetflow/issues
|
|
10
|
+
Keywords: llm,agents,ai,langchain,openai,anthropic,gemini
|
|
11
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
18
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
19
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
20
|
+
Requires-Python: >=3.10
|
|
21
|
+
Description-Content-Type: text/markdown
|
|
22
|
+
License-File: LICENSE
|
|
23
|
+
Requires-Dist: pydantic>=2.0
|
|
24
|
+
Requires-Dist: httpx>=0.24.0
|
|
25
|
+
Provides-Extra: openai
|
|
26
|
+
Requires-Dist: openai>=1.0.0; extra == "openai"
|
|
27
|
+
Requires-Dist: jiter>=0.5.0; extra == "openai"
|
|
28
|
+
Requires-Dist: tenacity>=8.0.0; extra == "openai"
|
|
29
|
+
Provides-Extra: anthropic
|
|
30
|
+
Requires-Dist: anthropic>=0.18.0; extra == "anthropic"
|
|
31
|
+
Requires-Dist: jiter>=0.5.0; extra == "anthropic"
|
|
32
|
+
Requires-Dist: tenacity>=8.0.0; extra == "anthropic"
|
|
33
|
+
Provides-Extra: all
|
|
34
|
+
Requires-Dist: openai>=1.0.0; extra == "all"
|
|
35
|
+
Requires-Dist: anthropic>=0.18.0; extra == "all"
|
|
36
|
+
Requires-Dist: jiter>=0.5.0; extra == "all"
|
|
37
|
+
Requires-Dist: tenacity>=8.0.0; extra == "all"
|
|
38
|
+
Provides-Extra: dev
|
|
39
|
+
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
40
|
+
Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
|
|
41
|
+
Requires-Dist: black>=23.0.0; extra == "dev"
|
|
42
|
+
Requires-Dist: mypy>=1.0.0; extra == "dev"
|
|
43
|
+
Requires-Dist: ruff>=0.1.0; extra == "dev"
|
|
44
|
+
Dynamic: license-file
|
|
45
|
+
|
|
46
|
+
# ⚡ Jetflow
|
|
47
|
+
|
|
48
|
+
[](https://pypi.org/project/jetflow)
|
|
49
|
+
[](https://www.python.org/downloads/)
|
|
50
|
+
[](LICENSE)
|
|
51
|
+
|
|
52
|
+
**Stop rebuilding the same agent patterns.**
|
|
53
|
+
|
|
54
|
+
Jetflow gives you **typed tools**, **short agent loops**, and **clean multi-agent composition**—all with **full cost visibility**.
|
|
55
|
+
|
|
56
|
+
* **Move fast.** Stand up real agents in minutes, not weeks.
|
|
57
|
+
* **Control cost.** See tokens and dollars per run.
|
|
58
|
+
* **Debug cleanly.** Read the full transcript, not a black box.
|
|
59
|
+
* **Scale simply.** Treat agents as tools. Chain them when it helps.
|
|
60
|
+
|
|
61
|
+
> **One mental model:** *schema-in → action calls → formatted exit*.
|
|
62
|
+
> Agents and actions share the same computational shape. That makes composition boring—in the good way.
|
|
63
|
+
|
|
64
|
+
---
|
|
65
|
+
|
|
66
|
+
## Why Jetflow (vs CrewAI/LangChain)
|
|
67
|
+
|
|
68
|
+
A lightweight, developer-first agent toolkit for real applications. LLM-agnostic, easy to set up and debug, and flexible from single agents to multi-agent chains.
|
|
69
|
+
|
|
70
|
+
| Dimension | Jetflow | CrewAI | LangChain |
|
|
71
|
+
|---|---|---|---|
|
|
72
|
+
| Target user | Developers integrating agents into apps | Non-dev “crew” workflows | Broad framework users |
|
|
73
|
+
| Abstraction | Low-level, code-first | High-level roles/crews | Many abstractions (chains/graphs) |
|
|
74
|
+
| Architecture | Explicit tools + short loops | Multi-agent by default | Varies by components |
|
|
75
|
+
| Setup/Debug | Minutes; small surface; full transcript | Heavier config/orchestration | Larger surface; callbacks/tools |
|
|
76
|
+
| LLM support | Vendor-neutral (OpenAI, Anthropic, pluggable) | Provider adapters | Large ecosystem |
|
|
77
|
+
| Orchestration | Single, multi-agent, sequential agent chains | Teams/crews | Chains, agents, graphs |
|
|
78
|
+
|
|
79
|
+
## Install
|
|
80
|
+
|
|
81
|
+
```bash
|
|
82
|
+
pip install "jetflow[openai]"     # OpenAI
|
|
83
|
+
pip install "jetflow[anthropic]"  # Anthropic
|
|
84
|
+
pip install "jetflow[all]"        # Both
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
```bash
|
|
88
|
+
export OPENAI_API_KEY=...
|
|
89
|
+
export ANTHROPIC_API_KEY=...
|
|
90
|
+
```
|
|
91
|
+
|
|
92
|
+
**Async support:** Full async/await API available. Use `AsyncAgent`, `AsyncChain`, and `@async_action`.
|
|
93
|
+
|
|
94
|
+
---
|
|
95
|
+
|
|
96
|
+
## Quick Start 1 — Single Agent
|
|
97
|
+
|
|
98
|
+
Typed tool → short loop → visible cost.
|
|
99
|
+
|
|
100
|
+
```python
|
|
101
|
+
from pydantic import BaseModel, Field
|
|
102
|
+
from jetflow import Agent, action
|
|
103
|
+
from jetflow.clients.openai import OpenAIClient
|
|
104
|
+
|
|
105
|
+
class Calculate(BaseModel):
|
|
106
|
+
"""Evaluate a safe arithmetic expression"""
|
|
107
|
+
expression: str = Field(description="e.g. '25 * 4 + 10'")
|
|
108
|
+
|
|
109
|
+
@action(schema=Calculate)
|
|
110
|
+
def calculator(p: Calculate) -> str:
|
|
111
|
+
env = {"__builtins__": {}}
|
|
112
|
+
fns = {"abs": abs, "round": round, "min": min, "max": max, "sum": sum, "pow": pow}
|
|
113
|
+
return str(eval(p.expression, env, fns))
|
|
114
|
+
|
|
115
|
+
agent = Agent(
|
|
116
|
+
client=OpenAIClient(model="gpt-5"),
|
|
117
|
+
actions=[calculator],
|
|
118
|
+
system_prompt="Answer clearly. Use tools when needed."
|
|
119
|
+
)
|
|
120
|
+
|
|
121
|
+
resp = agent.run("What is 25 * 4 + 10?")
|
|
122
|
+
print(resp.content) # -> "110"
|
|
123
|
+
print(f"Cost: ${resp.usage.estimated_cost:.4f}")
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
**Why teams use this:** strong schemas reduce junk calls, a short loop keeps latency predictable, and you see spend immediately.
|
|
127
|
+
|
|
128
|
+
---
|
|
129
|
+
|
|
130
|
+
## Quick Start 2 — Multi-Agent (agents as tools)
|
|
131
|
+
|
|
132
|
+
Let a **fast** model gather facts; let a **strong** model reason. Child agents return **one formatted result** via an exit action.
|
|
133
|
+
|
|
134
|
+
```python
|
|
135
|
+
from pydantic import BaseModel
|
|
136
|
+
from jetflow import Agent, action
|
|
137
|
+
from jetflow.clients.openai import OpenAIClient
|
|
138
|
+
|
|
139
|
+
# Child agent: research → returns a concise note
|
|
140
|
+
class ResearchNote(BaseModel):
|
|
141
|
+
summary: str
|
|
142
|
+
sources: list[str]
|
|
143
|
+
def format(self) -> str:
|
|
144
|
+
return f"{self.summary}\n\n" + "\n".join(f"- {s}" for s in self.sources)
|
|
145
|
+
|
|
146
|
+
@action(schema=ResearchNote, exit=True)
|
|
147
|
+
def FinishedResearch(note: ResearchNote) -> str:
|
|
148
|
+
return note.format()
|
|
149
|
+
|
|
150
|
+
researcher = Agent(
|
|
151
|
+
client=OpenAIClient(model="gpt-5-mini"),
|
|
152
|
+
    actions=[web_search, FinishedResearch],  # replace web_search with your search tool
|
|
153
|
+
system_prompt="Search broadly. Deduplicate. Return concise notes.",
|
|
154
|
+
require_action=True
|
|
155
|
+
)
|
|
156
|
+
|
|
157
|
+
# Parent agent: deep analysis over the returned note
|
|
158
|
+
class FinalReport(BaseModel):
|
|
159
|
+
headline: str
|
|
160
|
+
bullets: list[str]
|
|
161
|
+
def format(self) -> str:
|
|
162
|
+
return f"{self.headline}\n\n" + "\n".join(f"- {b}" for b in self.bullets)
|
|
163
|
+
|
|
164
|
+
@action(schema=FinalReport, exit=True)
|
|
165
|
+
def Finished(report: FinalReport) -> str:
|
|
166
|
+
return report.format()
|
|
167
|
+
|
|
168
|
+
analyst = Agent(
|
|
169
|
+
client=OpenAIClient(model="gpt-5"),
|
|
170
|
+
actions=[researcher.to_action("research", "Search and summarize"), Finished],
|
|
171
|
+
system_prompt="Use research notes. Quantify impacts. Be precise.",
|
|
172
|
+
require_action=True
|
|
173
|
+
)
|
|
174
|
+
|
|
175
|
+
resp = analyst.run("Compare NVDA vs AMD inference margins using latest earnings calls.")
|
|
176
|
+
print(resp.content)
|
|
177
|
+
```
|
|
178
|
+
|
|
179
|
+
**What this buys you:** fast models scout, strong models conclude; strict boundaries prevent prompt bloat; parents get one crisp payload per child.
|
|
180
|
+
|
|
181
|
+
---
|
|
182
|
+
|
|
183
|
+
## Quick Start 3 — Sequential Agent Chains (shared transcript, sequential hand-off)
|
|
184
|
+
|
|
185
|
+
Run agents **in order** over the **same** message history. Classic "fast search → slow analysis".
|
|
186
|
+
|
|
187
|
+
```python
|
|
188
|
+
from jetflow import Agent, Chain
|
|
189
|
+
from jetflow.clients.openai import OpenAIClient
|
|
190
|
+
|
|
191
|
+
search_agent = Agent(
|
|
192
|
+
client=OpenAIClient(model="gpt-5-mini"),
|
|
193
|
+
    actions=[web_search, FinishedResearch],  # your web_search tool
|
|
194
|
+
system_prompt="Fast breadth-first search.",
|
|
195
|
+
require_action=True
|
|
196
|
+
)
|
|
197
|
+
|
|
198
|
+
analysis_agent = Agent(
|
|
199
|
+
client=OpenAIClient(model="gpt-5"),
|
|
200
|
+
    actions=[calculator, Finished],
|
|
201
|
+
system_prompt="Read prior messages. Analyze. Show working.",
|
|
202
|
+
require_action=True
|
|
203
|
+
)
|
|
204
|
+
|
|
205
|
+
chain = Chain([search_agent, analysis_agent])
|
|
206
|
+
resp = chain.run("Find ARM CPU commentary in recent earnings calls, then quantify margin impacts.")
|
|
207
|
+
print(resp.content)
|
|
208
|
+
print(f"Total cost: ${resp.usage.estimated_cost:.4f}")
|
|
209
|
+
```
|
|
210
|
+
|
|
211
|
+
**Why chains win:** you share context only when it compounds value, swap models per stage to balance speed and accuracy, and keep each agent narrowly focused.
|
|
212
|
+
|
|
213
|
+
---
|
|
214
|
+
|
|
215
|
+
## Async Support
|
|
216
|
+
|
|
217
|
+
Full async/await API. Same patterns, async primitives.
|
|
218
|
+
|
|
219
|
+
```python
|
|
220
|
+
from jetflow import AsyncAgent, AsyncChain, async_action
|
|
221
|
+
|
|
222
|
+
@async_action(schema=Calculate)
|
|
223
|
+
async def async_calculator(p: Calculate) -> str:
|
|
224
|
+
return str(eval(p.expression))
|
|
225
|
+
|
|
226
|
+
agent = AsyncAgent(
|
|
227
|
+
client=OpenAIClient(model="gpt-5"),
|
|
228
|
+
actions=[async_calculator]
|
|
229
|
+
)
|
|
230
|
+
|
|
231
|
+
resp = await agent.run("What is 25 * 4 + 10?")
|
|
232
|
+
```
|
|
233
|
+
|
|
234
|
+
**Use async when:** making concurrent API calls, handling many agents in parallel, or building async web services.
|
|
235
|
+
|
|
236
|
+
---
|
|
237
|
+
|
|
238
|
+
## Streaming
|
|
239
|
+
|
|
240
|
+
Stream events in real-time as the agent executes. Perfect for UI updates, progress bars, and live feedback.
|
|
241
|
+
|
|
242
|
+
```python
|
|
243
|
+
from jetflow import ContentDelta, ActionStart, ActionEnd, MessageEnd
|
|
244
|
+
|
|
245
|
+
with agent.stream("What is 25 * 4 + 10?") as events:
|
|
246
|
+
for event in events:
|
|
247
|
+
if isinstance(event, ContentDelta):
|
|
248
|
+
print(event.delta, end="", flush=True) # Stream text as it arrives
|
|
249
|
+
|
|
250
|
+
elif isinstance(event, ActionStart):
|
|
251
|
+
print(f"\n[Calling {event.name}...]")
|
|
252
|
+
|
|
253
|
+
elif isinstance(event, ActionEnd):
|
|
254
|
+
print(f"✓ {event.name}({event.body})")
|
|
255
|
+
|
|
256
|
+
elif isinstance(event, MessageEnd):
|
|
257
|
+
final = event.message # Complete message with all content
|
|
258
|
+
```
|
|
259
|
+
|
|
260
|
+
**Two modes:**
|
|
261
|
+
- **`mode="deltas"`** (default): Stream granular events (ContentDelta, ActionStart, ActionDelta, ActionEnd)
|
|
262
|
+
- **`mode="messages"`**: Stream only complete Message objects (MessageEnd events)
|
|
263
|
+
|
|
264
|
+
**Works for chains too:**
|
|
265
|
+
```python
|
|
266
|
+
with chain.stream("Research and analyze") as events:
|
|
267
|
+
for event in events:
|
|
268
|
+
if isinstance(event, ContentDelta):
|
|
269
|
+
print(event.delta, end="")
|
|
270
|
+
```
|
|
271
|
+
|
|
272
|
+
---
|
|
273
|
+
|
|
274
|
+
## Why Jetflow (in one breath)
|
|
275
|
+
|
|
276
|
+
* **Fewer moving parts.** Agents, actions, messages—nothing else.
|
|
277
|
+
* **Deterministic endings.** Use `require_action=True` + a `format()` exit to get one reliable result.
|
|
278
|
+
* **Real observability.** Full transcript + token and dollar accounting.
|
|
279
|
+
* **Composability that sticks.** Treat agents as tools; add chains when you need shared context.
|
|
280
|
+
* **Provider-agnostic.** OpenAI + Anthropic with matching streaming semantics.
|
|
281
|
+
|
|
282
|
+
---
|
|
283
|
+
|
|
284
|
+
## Production in 60 Seconds
|
|
285
|
+
|
|
286
|
+
* **Guard exits.** For anything that matters, set `require_action=True` and finish with a formattable exit action.
|
|
287
|
+
* **Budget hard-stops.** Choose `max_iter` and fail closed; treat errors as tool messages, not exceptions.
|
|
288
|
+
* **Pick models per stage.** Cheap for search/IO, strong for reasoning, writer for polish.
|
|
289
|
+
* **Log the transcript.** Store `response.messages` and `response.usage` for repro and cost tracking.
|
|
290
|
+
* **Test like code.** Snapshot transcripts for golden tests; track cost deltas PR-to-PR.
|
|
291
|
+
|
|
292
|
+
---
|
|
293
|
+
|
|
294
|
+
## Built-in Actions
|
|
295
|
+
|
|
296
|
+
Jetflow includes one useful action: **safe Python execution**.
|
|
297
|
+
|
|
298
|
+
```python
|
|
299
|
+
from jetflow.actions import python_exec
|
|
300
|
+
|
|
301
|
+
agent = Agent(
|
|
302
|
+
client=OpenAIClient(model="gpt-5"),
|
|
303
|
+
actions=[python_exec]
|
|
304
|
+
)
|
|
305
|
+
|
|
306
|
+
resp = agent.run("Calculate compound interest: principal=10000, rate=0.05, years=10")
|
|
307
|
+
```
|
|
308
|
+
|
|
309
|
+
Variables persist across calls. Perfect for data analysis workflows.
|
|
310
|
+
|
|
311
|
+
---
|
|
312
|
+
|
|
313
|
+
## Docs
|
|
314
|
+
|
|
315
|
+
📚 [Full Documentation](https://jetflow.readthedocs.io)
|
|
316
|
+
|
|
317
|
+
- [Quickstart](https://jetflow.readthedocs.io/quickstart) — 5-minute tutorial
|
|
318
|
+
- [Single Agent](https://jetflow.readthedocs.io/single-agent) — Actions, control flow, debugging
|
|
319
|
+
- [Composition](https://jetflow.readthedocs.io/composition) — Agents as tools
|
|
320
|
+
- [Chains](https://jetflow.readthedocs.io/chains) — Multi-stage workflows
|
|
321
|
+
- [API Reference](https://jetflow.readthedocs.io/api) — Complete API docs
|
|
322
|
+
|
|
323
|
+
---
|
|
324
|
+
|
|
325
|
+
## License
|
|
326
|
+
|
|
327
|
+
MIT © 2025 Lucas Astorian
|
jetflow-1.0.0/README.md
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
1
|
+
# ⚡ Jetflow
|
|
2
|
+
|
|
3
|
+
[](https://pypi.org/project/jetflow)
|
|
4
|
+
[](https://www.python.org/downloads/)
|
|
5
|
+
[](LICENSE)
|
|
6
|
+
|
|
7
|
+
**Stop rebuilding the same agent patterns.**
|
|
8
|
+
|
|
9
|
+
Jetflow gives you **typed tools**, **short agent loops**, and **clean multi-agent composition**—all with **full cost visibility**.
|
|
10
|
+
|
|
11
|
+
* **Move fast.** Stand up real agents in minutes, not weeks.
|
|
12
|
+
* **Control cost.** See tokens and dollars per run.
|
|
13
|
+
* **Debug cleanly.** Read the full transcript, not a black box.
|
|
14
|
+
* **Scale simply.** Treat agents as tools. Chain them when it helps.
|
|
15
|
+
|
|
16
|
+
> **One mental model:** *schema-in → action calls → formatted exit*.
|
|
17
|
+
> Agents and actions share the same computational shape. That makes composition boring—in the good way.
|
|
18
|
+
|
|
19
|
+
---
|
|
20
|
+
|
|
21
|
+
## Why Jetflow (vs CrewAI/LangChain)
|
|
22
|
+
|
|
23
|
+
A lightweight, developer-first agent toolkit for real applications. LLM-agnostic, easy to set up and debug, and flexible from single agents to multi-agent chains.
|
|
24
|
+
|
|
25
|
+
| Dimension | Jetflow | CrewAI | LangChain |
|
|
26
|
+
|---|---|---|---|
|
|
27
|
+
| Target user | Developers integrating agents into apps | Non-dev “crew” workflows | Broad framework users |
|
|
28
|
+
| Abstraction | Low-level, code-first | High-level roles/crews | Many abstractions (chains/graphs) |
|
|
29
|
+
| Architecture | Explicit tools + short loops | Multi-agent by default | Varies by components |
|
|
30
|
+
| Setup/Debug | Minutes; small surface; full transcript | Heavier config/orchestration | Larger surface; callbacks/tools |
|
|
31
|
+
| LLM support | Vendor-neutral (OpenAI, Anthropic, pluggable) | Provider adapters | Large ecosystem |
|
|
32
|
+
| Orchestration | Single, multi-agent, sequential agent chains | Teams/crews | Chains, agents, graphs |
|
|
33
|
+
|
|
34
|
+
## Install
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
pip install "jetflow[openai]"     # OpenAI
|
|
38
|
+
pip install "jetflow[anthropic]"  # Anthropic
|
|
39
|
+
pip install "jetflow[all]"        # Both
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
export OPENAI_API_KEY=...
|
|
44
|
+
export ANTHROPIC_API_KEY=...
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
**Async support:** Full async/await API available. Use `AsyncAgent`, `AsyncChain`, and `@async_action`.
|
|
48
|
+
|
|
49
|
+
---
|
|
50
|
+
|
|
51
|
+
## Quick Start 1 — Single Agent
|
|
52
|
+
|
|
53
|
+
Typed tool → short loop → visible cost.
|
|
54
|
+
|
|
55
|
+
```python
|
|
56
|
+
from pydantic import BaseModel, Field
|
|
57
|
+
from jetflow import Agent, action
|
|
58
|
+
from jetflow.clients.openai import OpenAIClient
|
|
59
|
+
|
|
60
|
+
class Calculate(BaseModel):
|
|
61
|
+
"""Evaluate a safe arithmetic expression"""
|
|
62
|
+
expression: str = Field(description="e.g. '25 * 4 + 10'")
|
|
63
|
+
|
|
64
|
+
@action(schema=Calculate)
|
|
65
|
+
def calculator(p: Calculate) -> str:
|
|
66
|
+
env = {"__builtins__": {}}
|
|
67
|
+
fns = {"abs": abs, "round": round, "min": min, "max": max, "sum": sum, "pow": pow}
|
|
68
|
+
return str(eval(p.expression, env, fns))
|
|
69
|
+
|
|
70
|
+
agent = Agent(
|
|
71
|
+
client=OpenAIClient(model="gpt-5"),
|
|
72
|
+
actions=[calculator],
|
|
73
|
+
system_prompt="Answer clearly. Use tools when needed."
|
|
74
|
+
)
|
|
75
|
+
|
|
76
|
+
resp = agent.run("What is 25 * 4 + 10?")
|
|
77
|
+
print(resp.content) # -> "110"
|
|
78
|
+
print(f"Cost: ${resp.usage.estimated_cost:.4f}")
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
**Why teams use this:** strong schemas reduce junk calls, a short loop keeps latency predictable, and you see spend immediately.
|
|
82
|
+
|
|
83
|
+
---
|
|
84
|
+
|
|
85
|
+
## Quick Start 2 — Multi-Agent (agents as tools)
|
|
86
|
+
|
|
87
|
+
Let a **fast** model gather facts; let a **strong** model reason. Child agents return **one formatted result** via an exit action.
|
|
88
|
+
|
|
89
|
+
```python
|
|
90
|
+
from pydantic import BaseModel
|
|
91
|
+
from jetflow import Agent, action
|
|
92
|
+
from jetflow.clients.openai import OpenAIClient
|
|
93
|
+
|
|
94
|
+
# Child agent: research → returns a concise note
|
|
95
|
+
class ResearchNote(BaseModel):
|
|
96
|
+
summary: str
|
|
97
|
+
sources: list[str]
|
|
98
|
+
def format(self) -> str:
|
|
99
|
+
return f"{self.summary}\n\n" + "\n".join(f"- {s}" for s in self.sources)
|
|
100
|
+
|
|
101
|
+
@action(schema=ResearchNote, exit=True)
|
|
102
|
+
def FinishedResearch(note: ResearchNote) -> str:
|
|
103
|
+
return note.format()
|
|
104
|
+
|
|
105
|
+
researcher = Agent(
|
|
106
|
+
client=OpenAIClient(model="gpt-5-mini"),
|
|
107
|
+
    actions=[web_search, FinishedResearch],  # replace web_search with your search tool
|
|
108
|
+
system_prompt="Search broadly. Deduplicate. Return concise notes.",
|
|
109
|
+
require_action=True
|
|
110
|
+
)
|
|
111
|
+
|
|
112
|
+
# Parent agent: deep analysis over the returned note
|
|
113
|
+
class FinalReport(BaseModel):
|
|
114
|
+
headline: str
|
|
115
|
+
bullets: list[str]
|
|
116
|
+
def format(self) -> str:
|
|
117
|
+
return f"{self.headline}\n\n" + "\n".join(f"- {b}" for b in self.bullets)
|
|
118
|
+
|
|
119
|
+
@action(schema=FinalReport, exit=True)
|
|
120
|
+
def Finished(report: FinalReport) -> str:
|
|
121
|
+
return report.format()
|
|
122
|
+
|
|
123
|
+
analyst = Agent(
|
|
124
|
+
client=OpenAIClient(model="gpt-5"),
|
|
125
|
+
actions=[researcher.to_action("research", "Search and summarize"), Finished],
|
|
126
|
+
system_prompt="Use research notes. Quantify impacts. Be precise.",
|
|
127
|
+
require_action=True
|
|
128
|
+
)
|
|
129
|
+
|
|
130
|
+
resp = analyst.run("Compare NVDA vs AMD inference margins using latest earnings calls.")
|
|
131
|
+
print(resp.content)
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
**What this buys you:** fast models scout, strong models conclude; strict boundaries prevent prompt bloat; parents get one crisp payload per child.
|
|
135
|
+
|
|
136
|
+
---
|
|
137
|
+
|
|
138
|
+
## Quick Start 3 — Sequential Agent Chains (shared transcript, sequential hand-off)
|
|
139
|
+
|
|
140
|
+
Run agents **in order** over the **same** message history. Classic "fast search → slow analysis".
|
|
141
|
+
|
|
142
|
+
```python
|
|
143
|
+
from jetflow import Agent, Chain
|
|
144
|
+
from jetflow.clients.openai import OpenAIClient
|
|
145
|
+
|
|
146
|
+
search_agent = Agent(
|
|
147
|
+
client=OpenAIClient(model="gpt-5-mini"),
|
|
148
|
+
    actions=[web_search, FinishedResearch],  # your web_search tool
|
|
149
|
+
system_prompt="Fast breadth-first search.",
|
|
150
|
+
require_action=True
|
|
151
|
+
)
|
|
152
|
+
|
|
153
|
+
analysis_agent = Agent(
|
|
154
|
+
client=OpenAIClient(model="gpt-5"),
|
|
155
|
+
    actions=[calculator, Finished],
|
|
156
|
+
system_prompt="Read prior messages. Analyze. Show working.",
|
|
157
|
+
require_action=True
|
|
158
|
+
)
|
|
159
|
+
|
|
160
|
+
chain = Chain([search_agent, analysis_agent])
|
|
161
|
+
resp = chain.run("Find ARM CPU commentary in recent earnings calls, then quantify margin impacts.")
|
|
162
|
+
print(resp.content)
|
|
163
|
+
print(f"Total cost: ${resp.usage.estimated_cost:.4f}")
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
**Why chains win:** you share context only when it compounds value, swap models per stage to balance speed and accuracy, and keep each agent narrowly focused.
|
|
167
|
+
|
|
168
|
+
---
|
|
169
|
+
|
|
170
|
+
## Async Support
|
|
171
|
+
|
|
172
|
+
Full async/await API. Same patterns, async primitives.
|
|
173
|
+
|
|
174
|
+
```python
|
|
175
|
+
from jetflow import AsyncAgent, AsyncChain, async_action
|
|
176
|
+
|
|
177
|
+
@async_action(schema=Calculate)
|
|
178
|
+
async def async_calculator(p: Calculate) -> str:
|
|
179
|
+
return str(eval(p.expression))
|
|
180
|
+
|
|
181
|
+
agent = AsyncAgent(
|
|
182
|
+
client=OpenAIClient(model="gpt-5"),
|
|
183
|
+
actions=[async_calculator]
|
|
184
|
+
)
|
|
185
|
+
|
|
186
|
+
resp = await agent.run("What is 25 * 4 + 10?")
|
|
187
|
+
```
|
|
188
|
+
|
|
189
|
+
**Use async when:** making concurrent API calls, handling many agents in parallel, or building async web services.
|
|
190
|
+
|
|
191
|
+
---
|
|
192
|
+
|
|
193
|
+
## Streaming
|
|
194
|
+
|
|
195
|
+
Stream events in real-time as the agent executes. Perfect for UI updates, progress bars, and live feedback.
|
|
196
|
+
|
|
197
|
+
```python
|
|
198
|
+
from jetflow import ContentDelta, ActionStart, ActionEnd, MessageEnd
|
|
199
|
+
|
|
200
|
+
with agent.stream("What is 25 * 4 + 10?") as events:
|
|
201
|
+
for event in events:
|
|
202
|
+
if isinstance(event, ContentDelta):
|
|
203
|
+
print(event.delta, end="", flush=True) # Stream text as it arrives
|
|
204
|
+
|
|
205
|
+
elif isinstance(event, ActionStart):
|
|
206
|
+
print(f"\n[Calling {event.name}...]")
|
|
207
|
+
|
|
208
|
+
elif isinstance(event, ActionEnd):
|
|
209
|
+
print(f"✓ {event.name}({event.body})")
|
|
210
|
+
|
|
211
|
+
elif isinstance(event, MessageEnd):
|
|
212
|
+
final = event.message # Complete message with all content
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
**Two modes:**
|
|
216
|
+
- **`mode="deltas"`** (default): Stream granular events (ContentDelta, ActionStart, ActionDelta, ActionEnd)
|
|
217
|
+
- **`mode="messages"`**: Stream only complete Message objects (MessageEnd events)
|
|
218
|
+
|
|
219
|
+
**Works for chains too:**
|
|
220
|
+
```python
|
|
221
|
+
with chain.stream("Research and analyze") as events:
|
|
222
|
+
for event in events:
|
|
223
|
+
if isinstance(event, ContentDelta):
|
|
224
|
+
print(event.delta, end="")
|
|
225
|
+
```
|
|
226
|
+
|
|
227
|
+
---
|
|
228
|
+
|
|
229
|
+
## Why Jetflow (in one breath)
|
|
230
|
+
|
|
231
|
+
* **Fewer moving parts.** Agents, actions, messages—nothing else.
|
|
232
|
+
* **Deterministic endings.** Use `require_action=True` + a `format()` exit to get one reliable result.
|
|
233
|
+
* **Real observability.** Full transcript + token and dollar accounting.
|
|
234
|
+
* **Composability that sticks.** Treat agents as tools; add chains when you need shared context.
|
|
235
|
+
* **Provider-agnostic.** OpenAI + Anthropic with matching streaming semantics.
|
|
236
|
+
|
|
237
|
+
---
|
|
238
|
+
|
|
239
|
+
## Production in 60 Seconds
|
|
240
|
+
|
|
241
|
+
* **Guard exits.** For anything that matters, set `require_action=True` and finish with a formattable exit action.
|
|
242
|
+
* **Budget hard-stops.** Choose `max_iter` and fail closed; treat errors as tool messages, not exceptions.
|
|
243
|
+
* **Pick models per stage.** Cheap for search/IO, strong for reasoning, writer for polish.
|
|
244
|
+
* **Log the transcript.** Store `response.messages` and `response.usage` for repro and cost tracking.
|
|
245
|
+
* **Test like code.** Snapshot transcripts for golden tests; track cost deltas PR-to-PR.
|
|
246
|
+
|
|
247
|
+
---
|
|
248
|
+
|
|
249
|
+
## Built-in Actions
|
|
250
|
+
|
|
251
|
+
Jetflow includes one useful action: **safe Python execution**.
|
|
252
|
+
|
|
253
|
+
```python
|
|
254
|
+
from jetflow.actions import python_exec
|
|
255
|
+
|
|
256
|
+
agent = Agent(
|
|
257
|
+
client=OpenAIClient(model="gpt-5"),
|
|
258
|
+
actions=[python_exec]
|
|
259
|
+
)
|
|
260
|
+
|
|
261
|
+
resp = agent.run("Calculate compound interest: principal=10000, rate=0.05, years=10")
|
|
262
|
+
```
|
|
263
|
+
|
|
264
|
+
Variables persist across calls. Perfect for data analysis workflows.
|
|
265
|
+
|
|
266
|
+
---
|
|
267
|
+
|
|
268
|
+
## Docs
|
|
269
|
+
|
|
270
|
+
📚 [Full Documentation](https://jetflow.readthedocs.io)
|
|
271
|
+
|
|
272
|
+
- [Quickstart](https://jetflow.readthedocs.io/quickstart) — 5-minute tutorial
|
|
273
|
+
- [Single Agent](https://jetflow.readthedocs.io/single-agent) — Actions, control flow, debugging
|
|
274
|
+
- [Composition](https://jetflow.readthedocs.io/composition) — Agents as tools
|
|
275
|
+
- [Chains](https://jetflow.readthedocs.io/chains) — Multi-stage workflows
|
|
276
|
+
- [API Reference](https://jetflow.readthedocs.io/api) — Complete API docs
|
|
277
|
+
|
|
278
|
+
---
|
|
279
|
+
|
|
280
|
+
## License
|
|
281
|
+
|
|
282
|
+
MIT © 2025 Lucas Astorian
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Jetflow - Lightweight Agent Coordination Framework
|
|
3
|
+
|
|
4
|
+
A lightweight, production-ready framework for building agentic workflows with LLMs.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from jetflow.__version__ import __version__
|
|
8
|
+
from jetflow.core.agent import Agent, AsyncAgent
|
|
9
|
+
from jetflow.core.action import action, async_action
|
|
10
|
+
from jetflow.core.message import Message, Action, Thought
|
|
11
|
+
from jetflow.core.response import AgentResponse, ActionResult, ChainResponse
|
|
12
|
+
from jetflow.core.chain import Chain, AsyncChain
|
|
13
|
+
from jetflow.core.events import (
|
|
14
|
+
StreamEvent,
|
|
15
|
+
MessageStart,
|
|
16
|
+
MessageEnd,
|
|
17
|
+
ContentDelta,
|
|
18
|
+
ThoughtStart,
|
|
19
|
+
ThoughtDelta,
|
|
20
|
+
ThoughtEnd,
|
|
21
|
+
ActionStart,
|
|
22
|
+
ActionDelta,
|
|
23
|
+
ActionEnd,
|
|
24
|
+
ActionExecutionStart,
|
|
25
|
+
ActionExecuted
|
|
26
|
+
)
|
|
27
|
+
from jetflow.utils.usage import Usage
|
|
28
|
+
|
|
29
|
+
__all__ = [
|
|
30
|
+
"__version__",
|
|
31
|
+
"Agent",
|
|
32
|
+
"AsyncAgent",
|
|
33
|
+
"Chain",
|
|
34
|
+
"AsyncChain",
|
|
35
|
+
"action",
|
|
36
|
+
"async_action",
|
|
37
|
+
"Message",
|
|
38
|
+
"Action",
|
|
39
|
+
"Thought",
|
|
40
|
+
"AgentResponse",
|
|
41
|
+
"ActionResult",
|
|
42
|
+
"ChainResponse",
|
|
43
|
+
"Usage",
|
|
44
|
+
# Streaming events
|
|
45
|
+
"StreamEvent",
|
|
46
|
+
"MessageStart",
|
|
47
|
+
"MessageEnd",
|
|
48
|
+
"ContentDelta",
|
|
49
|
+
"ThoughtStart",
|
|
50
|
+
"ThoughtDelta",
|
|
51
|
+
"ThoughtEnd",
|
|
52
|
+
"ActionStart",
|
|
53
|
+
"ActionDelta",
|
|
54
|
+
"ActionEnd",
|
|
55
|
+
"ActionExecutionStart",
|
|
56
|
+
"ActionExecuted",
|
|
57
|
+
]
|