cycls 0.0.2.64__tar.gz → 0.0.2.65__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cycls-0.0.2.65/PKG-INFO +269 -0
- cycls-0.0.2.65/README.md +245 -0
- cycls-0.0.2.65/cycls/__init__.py +2 -0
- {cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/dev-theme/index.html +21 -28
- {cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/web.py +7 -25
- {cycls-0.0.2.64 → cycls-0.0.2.65}/pyproject.toml +2 -2
- cycls-0.0.2.64/PKG-INFO +0 -141
- cycls-0.0.2.64/README.md +0 -118
- cycls-0.0.2.64/cycls/__init__.py +0 -3
- cycls-0.0.2.64/cycls/ui.py +0 -6
- {cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/default-theme/assets/index-B0ZKcm_V.css +0 -0
- {cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/default-theme/assets/index-D5EDcI4J.js +0 -0
- {cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/default-theme/index.html +0 -0
- {cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/runtime.py +0 -0
- {cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/sdk.py +0 -0
cycls-0.0.2.65/PKG-INFO
ADDED
@@ -0,0 +1,269 @@
+Metadata-Version: 2.4
+Name: cycls
+Version: 0.0.2.65
+Summary: Distribute Intelligence
+Author: Mohammed J. AlRujayi
+Author-email: mj@cycls.com
+Requires-Python: >=3.9,<4.0
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Provides-Extra: modal
+Requires-Dist: cloudpickle (>=3.1.1,<4.0.0)
+Requires-Dist: docker (>=7.1.0,<8.0.0)
+Requires-Dist: fastapi (>=0.111.0,<0.112.0)
+Requires-Dist: httpx (>=0.27.0,<0.28.0)
+Requires-Dist: modal (>=1.1.0,<2.0.0) ; extra == "modal"
+Requires-Dist: pyjwt (>=2.8.0,<3.0.0)
+Description-Content-Type: text/markdown
+
+<h3 align="center">
+  Distribute Intelligence
+</h3>
+
+<h4 align="center">
+  <a href="https://cycls.com">Website</a> |
+  <a href="https://docs.cycls.com">Docs</a>
+</h4>
+
+<h4 align="center">
+  <a href="https://pypi.python.org/pypi/cycls"><img src="https://img.shields.io/pypi/v/cycls.svg?label=cycls+pypi&color=blueviolet" alt="cycls Python package on PyPi" /></a>
+  <a href="https://blog.cycls.com"><img src="https://img.shields.io/badge/newsletter-blueviolet.svg?logo=substack&label=cycls" alt="Cycls newsletter" /></a>
+  <a href="https://x.com/cyclsai">
+    <img src="https://img.shields.io/twitter/follow/CyclsAI" alt="Cycls Twitter" />
+  </a>
+</h4>
+
+---
+
+# Cycls
+
+The open-source SDK for distributing AI agents.
+
+## Distribute Intelligence
+
+AI capabilities shouldn't be locked in notebooks or trapped behind months of infrastructure work. Cycls turns your Python functions into production services - complete with APIs, interfaces, auth, and analytics. You focus on the intelligence. Cycls handles the distribution.
+
+Write a function. Deploy it as an API, a web interface, or both. Add authentication, analytics, and monetization with flags.
+
+```python
+import cycls
+
+agent = cycls.Agent(pip=["openai"])
+
+@agent("my-agent", auth=True, analytics=True)
+async def chat(context):
+    from openai import AsyncOpenAI
+    client = AsyncOpenAI()
+
+    response = await client.chat.completions.create(
+        model="gpt-4o",
+        messages=context.messages,
+        stream=True
+    )
+
+    async for chunk in response:
+        if chunk.choices[0].delta.content:
+            yield chunk.choices[0].delta.content
+
+agent.deploy(prod=True)  # Live at https://my-agent.cycls.ai
+```
+
+## Installation
+
+```bash
+pip install cycls
+```
+
+Requires Docker.
+
+## What You Get
+
+- **Streaming API** - OpenAI-compatible `/chat/completions` endpoint
+- **Web Interface** - Chat UI served automatically
+- **Authentication** - `auth=True` enables JWT-based access control
+- **Analytics** - `analytics=True` tracks usage
+- **Monetization** - `tier="cycls_pass"` integrates with [Cycls Pass](https://cycls.ai) subscriptions
+- **Native UI Components** - Render thinking bubbles, tables, code blocks in responses
+
+## Deploying
+
+```python
+agent.deploy(prod=False)  # Development: localhost:8080
+agent.deploy(prod=True)   # Production: https://agent-name.cycls.ai
+```
+
+Get an API key at [cycls.com](https://cycls.com).
+
+## Native UI Components
+
+Yield structured objects for rich streaming responses:
+
+```python
+@agent()
+async def demo(context):
+    yield {"type": "thinking", "thinking": "Analyzing the request..."}
+    yield "Here's what I found:\n\n"
+
+    yield {"type": "table", "headers": ["Name", "Status"]}
+    yield {"type": "table", "row": ["Server 1", "Online"]}
+    yield {"type": "table", "row": ["Server 2", "Offline"]}
+
+    yield {"type": "code", "code": "result = analyze(data)", "language": "python"}
+    yield {"type": "callout", "callout": "Analysis complete!", "style": "success"}
+```
+
+| Component | Streaming |
+|-----------|-----------|
+| `{"type": "thinking", "thinking": "..."}` | Yes |
+| `{"type": "code", "code": "...", "language": "..."}` | Yes |
+| `{"type": "table", "headers": [...]}` | Yes |
+| `{"type": "table", "row": [...]}` | Yes |
+| `{"type": "status", "status": "..."}` | Yes |
+| `{"type": "callout", "callout": "...", "style": "..."}` | Yes |
+| `{"type": "image", "src": "..."}` | Yes |
+
+### Reasoning Models
+
+```python
+@agent()
+async def chat(context):
+    from openai import AsyncOpenAI
+    client = AsyncOpenAI()
+
+    stream = await client.responses.create(
+        model="o3-mini",
+        input=context.messages,
+        stream=True,
+        reasoning={"effort": "medium", "summary": "auto"},
+    )
+
+    async for event in stream:
+        if event.type == "response.reasoning_summary_text.delta":
+            yield {"type": "thinking", "thinking": event.delta}
+        elif event.type == "response.output_text.delta":
+            yield event.delta
+```
+
+## Context Object
+
+```python
+@agent()
+async def chat(context):
+    context.messages      # [{"role": "user", "content": "..."}]
+    context.messages.raw  # Full data including UI component parts
+    context.user          # User(id, email, name, plans) when auth=True
+```
+
+## API Endpoints
+
+| Endpoint | Format |
+|----------|--------|
+| `POST chat/cycls` | Cycls streaming protocol |
+| `POST chat/completions` | OpenAI-compatible |
+
+## Streaming Protocol
+
+Cycls streams structured components over SSE:
+
+```
+data: {"type": "thinking", "thinking": "Let me "}
+data: {"type": "thinking", "thinking": "analyze..."}
+data: {"type": "text", "text": "Here's the answer"}
+data: {"type": "callout", "callout": "Done!", "style": "success"}
+data: [DONE]
+```
+
+See [docs/streaming-protocol.md](docs/streaming-protocol.md) for frontend integration.
+
+## Declarative Infrastructure
+
+Define your entire runtime in Python:
+
+```python
+agent = cycls.Agent(
+    pip=["openai", "pandas", "numpy"],
+    apt=["ffmpeg", "libmagic1"],
+    run_commands=["curl -sSL https://example.com/setup.sh | bash"],
+    copy=["./utils.py", "./models/", "/absolute/path/to/config.json"],
+    copy_public=["./assets/logo.png", "./static/"],
+)
+```
+
+### `pip` - Python Packages
+
+Install any packages from PyPI. These are installed during the container build.
+
+```python
+pip=["openai", "pandas", "numpy", "transformers"]
+```
+
+### `apt` - System Packages
+
+Install system-level dependencies via apt-get. Need ffmpeg for audio processing? ImageMagick for images? Just declare it.
+
+```python
+apt=["ffmpeg", "imagemagick", "libpq-dev"]
+```
+
+### `run_commands` - Shell Commands
+
+Run arbitrary shell commands during the container build. Useful for custom setup scripts, downloading assets, or any build-time configuration.
+
+```python
+run_commands=[
+    "curl -sSL https://example.com/setup.sh | bash",
+    "chmod +x /app/scripts/*.sh"
+]
+```
+
+### `copy` - Bundle Files and Directories
+
+Include local files and directories in your container. Works with both relative and absolute paths. Copies files and entire directory trees.
+
+```python
+copy=[
+    "./utils.py",                   # Single file, relative path
+    "./models/",                    # Entire directory
+    "/home/user/configs/app.json",  # Absolute path
+]
+```
+
+Then import them in your function:
+
+```python
+@agent()
+async def chat(context):
+    from utils import helper_function  # Your bundled module
+    ...
+```
+
+### `copy_public` - Static Files
+
+Files and directories served at the `/public` endpoint. Perfect for images, downloads, or any static assets your agent needs to reference.
+
+```python
+copy_public=["./assets/logo.png", "./downloads/"]
+```
+
+Access them at `https://your-agent.cycls.ai/public/logo.png`.
+
+---
+
+### What You Get
+
+- **One file** - Code, dependencies, configuration, and infrastructure together
+- **Instant deploys** - Unchanged code deploys in seconds from cache
+- **No drift** - What you see is what runs. Always.
+- **Just works** - Closures, lambdas, dynamic imports - your function runs exactly as written
+
+No YAML. No Dockerfiles. No infrastructure repo. The code is the deployment.
+
+## License
+
+MIT
+
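The new README above specifies the Cycls streaming protocol as plain SSE: one JSON component per `data:` line, terminated by `data: [DONE]`. For readers who want to consume that stream outside the bundled web UI, here is a minimal Python sketch; the `chat/cycls` path comes from the README's endpoint table, while the request body shape and the local URL are assumptions for illustration.

```python
# Minimal sketch of an SSE consumer for the Cycls streaming protocol described
# in the README above. The request payload and endpoint URL are assumptions;
# only the "data: ... / data: [DONE]" framing is taken from the documented protocol.
import json
import httpx


def stream_components(url: str, messages: list[dict]):
    """Yield parsed component dicts from an SSE response."""
    with httpx.stream("POST", url, json={"messages": messages}, timeout=None) as resp:
        for line in resp.iter_lines():
            if not line.startswith("data: "):
                continue  # skip blank keep-alive lines
            data = line[len("data: "):]
            if data == "[DONE]":
                break
            yield json.loads(data)  # e.g. {"type": "thinking", "thinking": "..."}


if __name__ == "__main__":
    # Hypothetical local agent started with agent.deploy(prod=False)
    for part in stream_components("http://localhost:8080/chat/cycls",
                                  [{"role": "user", "content": "hi"}]):
        print(part["type"], part)
```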
cycls-0.0.2.65/README.md
ADDED
@@ -0,0 +1,245 @@
+<h3 align="center">
+  Distribute Intelligence
+</h3>
+
+<h4 align="center">
+  <a href="https://cycls.com">Website</a> |
+  <a href="https://docs.cycls.com">Docs</a>
+</h4>
+
+<h4 align="center">
+  <a href="https://pypi.python.org/pypi/cycls"><img src="https://img.shields.io/pypi/v/cycls.svg?label=cycls+pypi&color=blueviolet" alt="cycls Python package on PyPi" /></a>
+  <a href="https://blog.cycls.com"><img src="https://img.shields.io/badge/newsletter-blueviolet.svg?logo=substack&label=cycls" alt="Cycls newsletter" /></a>
+  <a href="https://x.com/cyclsai">
+    <img src="https://img.shields.io/twitter/follow/CyclsAI" alt="Cycls Twitter" />
+  </a>
+</h4>
+
+---
+
+# Cycls
+
+The open-source SDK for distributing AI agents.
+
+## Distribute Intelligence
+
+AI capabilities shouldn't be locked in notebooks or trapped behind months of infrastructure work. Cycls turns your Python functions into production services - complete with APIs, interfaces, auth, and analytics. You focus on the intelligence. Cycls handles the distribution.
+
+Write a function. Deploy it as an API, a web interface, or both. Add authentication, analytics, and monetization with flags.
+
+```python
+import cycls
+
+agent = cycls.Agent(pip=["openai"])
+
+@agent("my-agent", auth=True, analytics=True)
+async def chat(context):
+    from openai import AsyncOpenAI
+    client = AsyncOpenAI()
+
+    response = await client.chat.completions.create(
+        model="gpt-4o",
+        messages=context.messages,
+        stream=True
+    )
+
+    async for chunk in response:
+        if chunk.choices[0].delta.content:
+            yield chunk.choices[0].delta.content
+
+agent.deploy(prod=True)  # Live at https://my-agent.cycls.ai
+```
+
+## Installation
+
+```bash
+pip install cycls
+```
+
+Requires Docker.
+
+## What You Get
+
+- **Streaming API** - OpenAI-compatible `/chat/completions` endpoint
+- **Web Interface** - Chat UI served automatically
+- **Authentication** - `auth=True` enables JWT-based access control
+- **Analytics** - `analytics=True` tracks usage
+- **Monetization** - `tier="cycls_pass"` integrates with [Cycls Pass](https://cycls.ai) subscriptions
+- **Native UI Components** - Render thinking bubbles, tables, code blocks in responses
+
+## Deploying
+
+```python
+agent.deploy(prod=False)  # Development: localhost:8080
+agent.deploy(prod=True)   # Production: https://agent-name.cycls.ai
+```
+
+Get an API key at [cycls.com](https://cycls.com).
+
+## Native UI Components
+
+Yield structured objects for rich streaming responses:
+
+```python
+@agent()
+async def demo(context):
+    yield {"type": "thinking", "thinking": "Analyzing the request..."}
+    yield "Here's what I found:\n\n"
+
+    yield {"type": "table", "headers": ["Name", "Status"]}
+    yield {"type": "table", "row": ["Server 1", "Online"]}
+    yield {"type": "table", "row": ["Server 2", "Offline"]}
+
+    yield {"type": "code", "code": "result = analyze(data)", "language": "python"}
+    yield {"type": "callout", "callout": "Analysis complete!", "style": "success"}
+```
+
+| Component | Streaming |
+|-----------|-----------|
+| `{"type": "thinking", "thinking": "..."}` | Yes |
+| `{"type": "code", "code": "...", "language": "..."}` | Yes |
+| `{"type": "table", "headers": [...]}` | Yes |
+| `{"type": "table", "row": [...]}` | Yes |
+| `{"type": "status", "status": "..."}` | Yes |
+| `{"type": "callout", "callout": "...", "style": "..."}` | Yes |
+| `{"type": "image", "src": "..."}` | Yes |
+
+### Reasoning Models
+
+```python
+@agent()
+async def chat(context):
+    from openai import AsyncOpenAI
+    client = AsyncOpenAI()
+
+    stream = await client.responses.create(
+        model="o3-mini",
+        input=context.messages,
+        stream=True,
+        reasoning={"effort": "medium", "summary": "auto"},
+    )
+
+    async for event in stream:
+        if event.type == "response.reasoning_summary_text.delta":
+            yield {"type": "thinking", "thinking": event.delta}
+        elif event.type == "response.output_text.delta":
+            yield event.delta
+```
+
+## Context Object
+
+```python
+@agent()
+async def chat(context):
+    context.messages      # [{"role": "user", "content": "..."}]
+    context.messages.raw  # Full data including UI component parts
+    context.user          # User(id, email, name, plans) when auth=True
+```
+
+## API Endpoints
+
+| Endpoint | Format |
+|----------|--------|
+| `POST chat/cycls` | Cycls streaming protocol |
+| `POST chat/completions` | OpenAI-compatible |
+
+## Streaming Protocol
+
+Cycls streams structured components over SSE:
+
+```
+data: {"type": "thinking", "thinking": "Let me "}
+data: {"type": "thinking", "thinking": "analyze..."}
+data: {"type": "text", "text": "Here's the answer"}
+data: {"type": "callout", "callout": "Done!", "style": "success"}
+data: [DONE]
+```
+
+See [docs/streaming-protocol.md](docs/streaming-protocol.md) for frontend integration.
+
+## Declarative Infrastructure
+
+Define your entire runtime in Python:
+
+```python
+agent = cycls.Agent(
+    pip=["openai", "pandas", "numpy"],
+    apt=["ffmpeg", "libmagic1"],
+    run_commands=["curl -sSL https://example.com/setup.sh | bash"],
+    copy=["./utils.py", "./models/", "/absolute/path/to/config.json"],
+    copy_public=["./assets/logo.png", "./static/"],
+)
+```
+
+### `pip` - Python Packages
+
+Install any packages from PyPI. These are installed during the container build.
+
+```python
+pip=["openai", "pandas", "numpy", "transformers"]
+```
+
+### `apt` - System Packages
+
+Install system-level dependencies via apt-get. Need ffmpeg for audio processing? ImageMagick for images? Just declare it.
+
+```python
+apt=["ffmpeg", "imagemagick", "libpq-dev"]
+```
+
+### `run_commands` - Shell Commands
+
+Run arbitrary shell commands during the container build. Useful for custom setup scripts, downloading assets, or any build-time configuration.
+
+```python
+run_commands=[
+    "curl -sSL https://example.com/setup.sh | bash",
+    "chmod +x /app/scripts/*.sh"
+]
+```
+
+### `copy` - Bundle Files and Directories
+
+Include local files and directories in your container. Works with both relative and absolute paths. Copies files and entire directory trees.
+
+```python
+copy=[
+    "./utils.py",                   # Single file, relative path
+    "./models/",                    # Entire directory
+    "/home/user/configs/app.json",  # Absolute path
+]
+```
+
+Then import them in your function:
+
+```python
+@agent()
+async def chat(context):
+    from utils import helper_function  # Your bundled module
+    ...
+```
+
+### `copy_public` - Static Files
+
+Files and directories served at the `/public` endpoint. Perfect for images, downloads, or any static assets your agent needs to reference.
+
+```python
+copy_public=["./assets/logo.png", "./downloads/"]
+```
+
+Access them at `https://your-agent.cycls.ai/public/logo.png`.
+
+---
+
+### What You Get
+
+- **One file** - Code, dependencies, configuration, and infrastructure together
+- **Instant deploys** - Unchanged code deploys in seconds from cache
+- **No drift** - What you see is what runs. Always.
+- **Just works** - Closures, lambdas, dynamic imports - your function runs exactly as written
+
+No YAML. No Dockerfiles. No infrastructure repo. The code is the deployment.
+
+## License
+
+MIT
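The README also documents an OpenAI-compatible `POST chat/completions` endpoint on every deployed agent. A hedged sketch of calling it with the official `openai` client follows; pointing `base_url` at the agent root, the placeholder API key, and the model label are assumptions, since the diff does not spell out the exact base path or auth header.

```python
# Sketch only: calls the OpenAI-compatible endpoint documented in the README.
# The base_url (a deployed agent such as https://my-agent.cycls.ai), the api_key
# placeholder, and the model label are assumptions for illustration.
from openai import OpenAI

client = OpenAI(
    base_url="https://my-agent.cycls.ai",  # assumed: agent root serves chat/completions
    api_key="YOUR_KEY_OR_PLACEHOLDER",     # assumed: actual auth depends on auth=True / JWT setup
)

stream = client.chat.completions.create(
    model="my-agent",  # assumed label; the agent decides which model it runs
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)

for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)
```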
{cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/dev-theme/index.html
@@ -104,7 +104,7 @@
 
       // Native component renderers
       const components = {
-        text: (props) => marked.parse(props.
+        text: (props) => marked.parse(props.text || '', { breaks: true }),
 
         thinking: (props) => `
           <div class="thinking-bubble rounded-lg p-4 my-3 italic text-[var(--text-secondary)]">
@@ -114,7 +114,7 @@
              </svg>
              Thinking
            </div>
-            <div>${props.
+            <div>${props.thinking}</div>
          </div>
        `,
 
@@ -141,8 +141,8 @@
 
        code: (props) => {
          const highlighted = props.language
-            ? hljs.highlight(props.
-            : hljs.highlightAuto(props.
+            ? hljs.highlight(props.code, { language: props.language }).value
+            : hljs.highlightAuto(props.code).value;
          return `
            <div class="my-3 rounded-lg overflow-hidden border border-[var(--border-color)]">
              <div class="bg-[var(--bg-secondary)] px-4 py-2 text-xs text-[var(--text-secondary)] flex justify-between items-center">
@@ -155,9 +155,9 @@
        },
 
        callout: (props) => `
-          <div class="callout-${props.
+          <div class="callout-${props.style || 'info'} border-l-4 rounded-r-lg p-4 my-3">
            ${props.title ? `<div class="font-semibold mb-1">${props.title}</div>` : ''}
-            <div class="text-sm">${props.
+            <div class="text-sm">${props.callout}</div>
          </div>
        `,
 
@@ -204,7 +204,7 @@
        } else {
          let html = '';
          for (const part of msg.parts || [])
-            html += components[part.
+            html += components[part.type]?.(part) || '';
          wrapper.innerHTML = `<div class="prose prose-invert max-w-none">${html}</div>`;
        }
        return wrapper;
@@ -238,24 +238,6 @@
        let assistantMsg = messages[messages.length - 1];
        let currentPart = null;
 
-        const decode = {
-          '+': ([, name, props]) => {
-            currentPart = {name, ...props};
-            if (props.headers) currentPart.rows = [];
-            assistantMsg.parts.push(currentPart);
-          },
-          '~': ([, props]) => {
-            if (!currentPart) return;
-            for (const [k, v] of Object.entries(props)) {
-              if (k === 'content') currentPart.content = (currentPart.content || '') + v;
-              else if (k === 'row') currentPart.rows.push(v);
-              else currentPart[k] = v;
-            }
-          },
-          '-': () => { currentPart = null; },
-          '=': ([, props]) => { assistantMsg.parts.push(props); }
-        };
-
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
@@ -270,8 +252,19 @@
            if (data === '[DONE]') continue;
 
            try {
-              const
-
+              const item = JSON.parse(data);
+              const type = item.type;
+
+              // Same type as current? Append content
+              if (currentPart && currentPart.type === type) {
+                if (item.row) currentPart.rows.push(item.row);
+                else if (item[type]) currentPart[type] = (currentPart[type] || '') + item[type];
+              } else {
+                // New component
+                currentPart = { ...item };
+                if (item.headers) currentPart.rows = [];
+                assistantMsg.parts.push(currentPart);
+              }
              render();
            } catch (e) {
              console.error('Parse error:', e, data);
@@ -279,7 +272,7 @@
            }
          }
          assistantMsg.parts = assistantMsg.parts.filter(p =>
-            p.
+            p.type !== 'text' || p.text?.trim()
          );
          render();
          console.log(messages);
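The rewritten stream handler in `dev-theme/index.html` above replaces the old `+`/`~`/`-`/`=` opcode decoder with a simpler rule: parse each `data:` line as JSON, then merge consecutive items that share the same `type`, appending text-like fields and collecting table `row`s under the part that carried `headers`. The following Python sketch is a paraphrase of that client-side accumulation logic for readers who would rather not trace the JavaScript; it is not code shipped in the package.

```python
# Paraphrase of the accumulation logic added to index.html: consecutive SSE
# items with the same "type" are merged into a single rendered part.
def accumulate(items):
    parts, current = [], None
    for item in items:
        t = item.get("type")
        if current is not None and current.get("type") == t:
            if "row" in item:                # table rows collect under the header part
                current.setdefault("rows", []).append(item["row"])
            elif item.get(t):                # text-like fields are appended
                current[t] = (current.get(t) or "") + item[t]
        else:                                # a new type starts a new part
            current = dict(item)
            if "headers" in item:
                current["rows"] = []
            parts.append(current)
    return parts


if __name__ == "__main__":
    stream = [
        {"type": "thinking", "thinking": "Let me "},
        {"type": "thinking", "thinking": "analyze..."},
        {"type": "table", "headers": ["Name", "Status"]},
        {"type": "table", "row": ["Server 1", "Online"]},
        {"type": "text", "text": "Done."},
    ]
    for part in accumulate(stream):
        print(part)
```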
{cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/web.py
@@ -15,36 +15,18 @@ async def openai_encoder(stream):
         if msg: yield f"data: {json.dumps({'choices': [{'delta': {'content': msg}}]})}\n\n"
     yield "data: [DONE]\n\n"
 
-
-
-
-
-        if self.cur: self.cur = None; return self.sse(["-"])
-
-    def process(self, item):
-        if not item: return
-        if not isinstance(item, dict): item = {"name": "text", "content": item}
-        n, done = item.get("name"), item.get("_complete")
-        p = {k: v for k, v in item.items() if k not in ("name", "_complete")}
-        if done:
-            if c := self.close(): yield c
-            yield self.sse(["=", {"name": n, **p}])
-        elif n != self.cur:
-            if c := self.close(): yield c
-            self.cur = n
-            yield self.sse(["+", n, p])
-        else:
-            yield self.sse(["~", p])
+def sse(item):
+    if not item: return None
+    if not isinstance(item, dict): item = {"type": "text", "text": item}
+    return f"data: {json.dumps(item)}\n\n"
 
 async def encoder(stream):
-    enc = Encoder()
     if inspect.isasyncgen(stream):
         async for item in stream:
-
+            if msg := sse(item): yield msg
     else:
         for item in stream:
-
-            if close := enc.close(): yield close
+            if msg := sse(item): yield msg
     yield "data: [DONE]\n\n"
 
 class Messages(list):
@@ -54,7 +36,7 @@ class Messages(list):
         text_messages = []
         for m in raw_messages:
             text_content = "".join(
-                p.get("
+                p.get("text", "") for p in m.get("parts", []) if p.get("type") == "text"
             )
             text_messages.append({
                 "role": m.get("role"),
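The `web.py` hunk above drops the stateful `Encoder` class in favour of a single `sse()` helper: every yielded item becomes one self-describing JSON frame, and bare strings are wrapped as `{"type": "text", "text": ...}`. The standalone sketch below re-types that idea so the resulting wire frames can be inspected; it keeps only the async-generator path and omits the sync branch and FastAPI wiring that the real module has.

```python
# Re-typed illustration of the new web.py encoding: one JSON SSE frame per
# yielded item, strings wrapped as text components, closed with [DONE].
import asyncio
import json


def sse(item):
    if not item:
        return None
    if not isinstance(item, dict):
        item = {"type": "text", "text": item}
    return f"data: {json.dumps(item)}\n\n"


async def encoder(stream):
    # Simplified: the packaged encoder also accepts plain (sync) generators.
    async for item in stream:
        if msg := sse(item):
            yield msg
    yield "data: [DONE]\n\n"


async def demo():
    async def agent_output():
        yield {"type": "thinking", "thinking": "Analyzing..."}
        yield "Here's what I found."
        yield {"type": "callout", "callout": "Done!", "style": "success"}

    async for frame in encoder(agent_output()):
        print(repr(frame))


if __name__ == "__main__":
    asyncio.run(demo())
```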
{cycls-0.0.2.64 → cycls-0.0.2.65}/pyproject.toml
@@ -1,10 +1,10 @@
 [tool.poetry]
 name = "cycls"
-version = "0.0.2.
+version = "0.0.2.65"
 
 packages = [{ include = "cycls" }]
 include = ["cycls/theme/**/*"]
-description = "
+description = "Distribute Intelligence"
 authors = ["Mohammed J. AlRujayi <mj@cycls.com>"]
 readme = "README.md"
 
cycls-0.0.2.64/PKG-INFO
DELETED
@@ -1,141 +0,0 @@
-Metadata-Version: 2.4
-Name: cycls
-Version: 0.0.2.64
-Summary: Cycls SDK
-Author: Mohammed J. AlRujayi
-Author-email: mj@cycls.com
-Requires-Python: >=3.9,<4.0
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
-Classifier: Programming Language :: Python :: 3.14
-Provides-Extra: modal
-Requires-Dist: cloudpickle (>=3.1.1,<4.0.0)
-Requires-Dist: docker (>=7.1.0,<8.0.0)
-Requires-Dist: fastapi (>=0.111.0,<0.112.0)
-Requires-Dist: httpx (>=0.27.0,<0.28.0)
-Requires-Dist: modal (>=1.1.0,<2.0.0) ; extra == "modal"
-Requires-Dist: pyjwt (>=2.8.0,<3.0.0)
-Description-Content-Type: text/markdown
-
-<h3 align="center">
-  The Distribution SDK for AI Agents.
-</h3>
-
-<h4 align="center">
-  <a href="https://cycls.com">Website</a> |
-  <a href="https://docs.cycls.com">Docs</a>
-</h4>
-
-<h4 align="center">
-  <a href="https://pypi.python.org/pypi/cycls"><img src="https://img.shields.io/pypi/v/cycls.svg?label=cycls+pypi&color=blueviolet" alt="cycls Python package on PyPi" /></a>
-  <a href="https://blog.cycls.com"><img src="https://img.shields.io/badge/newsletter-blueviolet.svg?logo=substack&label=cycls" alt="Cycls newsletter" /></a>
-  <a href="https://x.com/cyclsai">
-    <img src="https://img.shields.io/twitter/follow/CyclsAI" alt="Cycls Twitter" />
-  </a>
-</h4>
-
-
-# Cycls 🚲
-
-`cycls` is an open-source SDK for building and publishing AI agents. With a single decorator and one command, you can deploy your code as a web application complete with a front-end UI and an OpenAI-compatible API endpoint.
-
-## Key Features
-
-* ✨ **Zero-Config Deployment:** No YAML or Dockerfiles. `cycls` infers your dependencies, and APIs directly from your Python code.
-* 🚀 **One-Command Push to Cloud:** Go from local code to a globally scalable, serverless application with a single `agent.deploy()`.
-* 💻 **Instant Local Testing:** Run `agent.local()` to spin up a local server with hot-reloading for rapid iteration and debugging.
-* 🤖 **OpenAI-Compatible API:** Automatically serves a streaming `/chat/completions` endpoint.
-* 🌐 **Automatic Web UI:** Get a clean, interactive front-end for your agent out of the box, with no front-end code required.
-* 🔐 **Built-in Authentication:** Secure your agent for production with a simple `auth=True` flag that enables JWT-based authentication.
-* 📦 **Declarative Dependencies:** Define all your `pip`, `apt`, or local file dependencies directly in Python.
-
-
-## Installation
-
-```bash
-pip install cycls
-```
-
-**Note:** You must have [Docker](https://www.docker.com/get-started) installed and running on your machine.
-
-## How to Use
-### 1. Local Development: "Hello, World!"
-
-Create a file main.py. This simple example creates an agent that streams back the message "hi".
-
-```py
-import cycls
-
-# Initialize the agent
-agent = cycls.Agent()
-
-# Decorate your function to register it as an agent
-@agent()
-async def hello(context):
-    yield "Hello, World!"
-
-agent.deploy(prod=False)
-```
-
-Run it from your terminal:
-
-```bash
-python main.py
-```
-This will start a local server. Open your browser to http://localhost:8080 to interact with your agent.
-
-### 2. Cloud Deployment: An OpenAI-Powered Agent
-This example creates a more advanced agent that calls the OpenAI API. It will be deployed to the cloud with authentication enabled.
-
-```py
-import cycls
-
-# Initialize the agent with dependencies and API keys
-agent = cycls.Agent(
-    pip=["openai"],
-    key="YOUR_CYCLS_KEY" # Get yours from https://cycls.com
-)
-
-# A helper function to call the LLM
-async def llm(messages):
-    # Import inside the function: 'openai' is needed at runtime in the container.
-    import openai
-    client = openai.AsyncOpenAI(api_key="YOUR_OPENAI_API_KEY")
-    model = "gpt-4o"
-    response = await client.chat.completions.create(
-        model=model,
-        messages=messages,
-        temperature=1.0,
-        stream=True
-    )
-    # Yield the content from the streaming response
-    async def event_stream():
-        async for chunk in response:
-            content = chunk.choices[0].delta.content
-            if content:
-                yield content
-    return event_stream()
-
-# Register the function as an agent named "cake" and enable auth
-@agent("cake", auth=True)
-async def cake_agent(context):
-    # The context object contains the message history
-    return await llm(context.messages)
-
-# Deploy the agent to the cloud
-agent.deploy(prod=True)
-```
-
-Run the deployment command from your terminal:
-
-```bash
-python main.py
-```
-After a few moments, your agent will be live and accessible at a public URL like https://cake.cycls.ai.
-
-### License
-This project is licensed under the MIT License.
cycls-0.0.2.64/README.md
DELETED
@@ -1,118 +0,0 @@
-<h3 align="center">
-  The Distribution SDK for AI Agents.
-</h3>
-
-<h4 align="center">
-  <a href="https://cycls.com">Website</a> |
-  <a href="https://docs.cycls.com">Docs</a>
-</h4>
-
-<h4 align="center">
-  <a href="https://pypi.python.org/pypi/cycls"><img src="https://img.shields.io/pypi/v/cycls.svg?label=cycls+pypi&color=blueviolet" alt="cycls Python package on PyPi" /></a>
-  <a href="https://blog.cycls.com"><img src="https://img.shields.io/badge/newsletter-blueviolet.svg?logo=substack&label=cycls" alt="Cycls newsletter" /></a>
-  <a href="https://x.com/cyclsai">
-    <img src="https://img.shields.io/twitter/follow/CyclsAI" alt="Cycls Twitter" />
-  </a>
-</h4>
-
-
-# Cycls 🚲
-
-`cycls` is an open-source SDK for building and publishing AI agents. With a single decorator and one command, you can deploy your code as a web application complete with a front-end UI and an OpenAI-compatible API endpoint.
-
-## Key Features
-
-* ✨ **Zero-Config Deployment:** No YAML or Dockerfiles. `cycls` infers your dependencies, and APIs directly from your Python code.
-* 🚀 **One-Command Push to Cloud:** Go from local code to a globally scalable, serverless application with a single `agent.deploy()`.
-* 💻 **Instant Local Testing:** Run `agent.local()` to spin up a local server with hot-reloading for rapid iteration and debugging.
-* 🤖 **OpenAI-Compatible API:** Automatically serves a streaming `/chat/completions` endpoint.
-* 🌐 **Automatic Web UI:** Get a clean, interactive front-end for your agent out of the box, with no front-end code required.
-* 🔐 **Built-in Authentication:** Secure your agent for production with a simple `auth=True` flag that enables JWT-based authentication.
-* 📦 **Declarative Dependencies:** Define all your `pip`, `apt`, or local file dependencies directly in Python.
-
-
-## Installation
-
-```bash
-pip install cycls
-```
-
-**Note:** You must have [Docker](https://www.docker.com/get-started) installed and running on your machine.
-
-## How to Use
-### 1. Local Development: "Hello, World!"
-
-Create a file main.py. This simple example creates an agent that streams back the message "hi".
-
-```py
-import cycls
-
-# Initialize the agent
-agent = cycls.Agent()
-
-# Decorate your function to register it as an agent
-@agent()
-async def hello(context):
-    yield "Hello, World!"
-
-agent.deploy(prod=False)
-```
-
-Run it from your terminal:
-
-```bash
-python main.py
-```
-This will start a local server. Open your browser to http://localhost:8080 to interact with your agent.
-
-### 2. Cloud Deployment: An OpenAI-Powered Agent
-This example creates a more advanced agent that calls the OpenAI API. It will be deployed to the cloud with authentication enabled.
-
-```py
-import cycls
-
-# Initialize the agent with dependencies and API keys
-agent = cycls.Agent(
-    pip=["openai"],
-    key="YOUR_CYCLS_KEY" # Get yours from https://cycls.com
-)
-
-# A helper function to call the LLM
-async def llm(messages):
-    # Import inside the function: 'openai' is needed at runtime in the container.
-    import openai
-    client = openai.AsyncOpenAI(api_key="YOUR_OPENAI_API_KEY")
-    model = "gpt-4o"
-    response = await client.chat.completions.create(
-        model=model,
-        messages=messages,
-        temperature=1.0,
-        stream=True
-    )
-    # Yield the content from the streaming response
-    async def event_stream():
-        async for chunk in response:
-            content = chunk.choices[0].delta.content
-            if content:
-                yield content
-    return event_stream()
-
-# Register the function as an agent named "cake" and enable auth
-@agent("cake", auth=True)
-async def cake_agent(context):
-    # The context object contains the message history
-    return await llm(context.messages)
-
-# Deploy the agent to the cloud
-agent.deploy(prod=True)
-```
-
-Run the deployment command from your terminal:
-
-```bash
-python main.py
-```
-After a few moments, your agent will be live and accessible at a public URL like https://cake.cycls.ai.
-
-### License
-This project is licensed under the MIT License.
cycls-0.0.2.64/cycls/__init__.py
DELETED
cycls-0.0.2.64/cycls/ui.py
DELETED
@@ -1,6 +0,0 @@
-thinking = lambda content: {"name": "thinking", "content": content}
-status = lambda content: {"name": "status", "content": content}
-code = lambda content, language=None: {"name": "code", "content": content, "language": language}
-table = lambda headers=None, row=None: {"name": "table", "headers": headers} if headers else {"name": "table", "row": row} if row else None
-callout = lambda content, type="info", title=None: {"name": "callout", "content": content, "type": type, "title": title, "_complete": True}
-image = lambda src, alt=None, caption=None: {"name": "image", "src": src, "alt": alt, "caption": caption, "_complete": True}
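The deleted `ui.py` helpers built components keyed by `"name"`/`"content"`, matching the old opcode encoder that was removed from `web.py` in the same release. Under the new flat protocol documented in the 0.0.2.65 README you simply yield the dicts directly; if you still want helper shorthands, hypothetical equivalents keyed by `"type"` might look like the sketch below (not part of the package).

```python
# Hypothetical replacements for the removed ui.py helpers, rewritten for the
# new {"type": ..., "<type>": ...} component shape shown in the README.
# Not shipped in cycls 0.0.2.65; yielding plain dicts works the same way.
thinking = lambda text: {"type": "thinking", "thinking": text}
status = lambda text: {"type": "status", "status": text}
code = lambda source, language=None: {"type": "code", "code": source, "language": language}
table = lambda headers=None, row=None: (
    {"type": "table", "headers": headers} if headers
    else {"type": "table", "row": row} if row
    else None
)
callout = lambda text, style="info", title=None: {"type": "callout", "callout": text, "style": style, "title": title}
# alt/caption were fields of the old helper; whether the new renderer uses them is not shown in the diff.
image = lambda src, alt=None, caption=None: {"type": "image", "src": src, "alt": alt, "caption": caption}
```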
{cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/default-theme/assets/index-B0ZKcm_V.css: file without changes
{cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/default-theme/assets/index-D5EDcI4J.js: file without changes
{cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/default-theme/index.html: file without changes
{cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/runtime.py: file without changes
{cycls-0.0.2.64 → cycls-0.0.2.65}/cycls/sdk.py: file without changes