copex 0.2.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
copex-0.2.1/.gitignore ADDED
@@ -0,0 +1,44 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+ *.so
6
+ .Python
7
+ build/
8
+ develop-eggs/
9
+ dist/
10
+ downloads/
11
+ eggs/
12
+ .eggs/
13
+ lib/
14
+ lib64/
15
+ parts/
16
+ sdist/
17
+ var/
18
+ wheels/
19
+ *.egg-info/
20
+ .installed.cfg
21
+ *.egg
22
+
23
+ # Virtual environments
24
+ .venv/
25
+ venv/
26
+ ENV/
27
+
28
+ # IDE
29
+ .idea/
30
+ .vscode/
31
+ *.swp
32
+ *.swo
33
+
34
+ # Testing
35
+ .pytest_cache/
36
+ .coverage
37
+ htmlcov/
38
+
39
+ # Build
40
+ *.manifest
41
+ *.spec
42
+
43
+ # Copex data
44
+ .copex/
copex-0.2.1/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Arthur Ramos
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
copex-0.2.1/PKG-INFO ADDED
@@ -0,0 +1,502 @@
1
+ Metadata-Version: 2.4
2
+ Name: copex
3
+ Version: 0.2.1
4
+ Summary: Copilot Extended - Resilient wrapper for GitHub Copilot SDK with auto-retry, Ralph Wiggum loops, and more
5
+ Project-URL: Homepage, https://github.com/Arthur742Ramos/copex
6
+ Project-URL: Repository, https://github.com/Arthur742Ramos/copex
7
+ Project-URL: Issues, https://github.com/Arthur742Ramos/copex/issues
8
+ Author-email: Arthur Ramos <arthur742ramos@users.noreply.github.com>
9
+ License-Expression: MIT
10
+ License-File: LICENSE
11
+ Keywords: ai,copex,copilot,github,ralph-wiggum,retry,sdk
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Requires-Python: >=3.10
20
+ Requires-Dist: github-copilot-sdk>=0.1.0
21
+ Requires-Dist: prompt-toolkit>=3.0.0
22
+ Requires-Dist: pydantic>=2.0.0
23
+ Requires-Dist: rich>=13.0.0
24
+ Requires-Dist: tomli-w>=1.0.0
25
+ Requires-Dist: typer>=0.9.0
26
+ Provides-Extra: dev
27
+ Requires-Dist: pytest; extra == 'dev'
28
+ Requires-Dist: pytest-asyncio; extra == 'dev'
29
+ Requires-Dist: ruff; extra == 'dev'
30
+ Description-Content-Type: text/markdown
31
+
32
+ # Copex - Copilot Extended
33
+
34
+ [![PyPI version](https://badge.fury.io/py/copex.svg)](https://badge.fury.io/py/copex)
35
+ [![Python 3.10+](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
36
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
37
+ [![Tests](https://github.com/Arthur742Ramos/copex/actions/workflows/test.yml/badge.svg)](https://github.com/Arthur742Ramos/copex/actions/workflows/test.yml)
38
+
39
+ A resilient Python wrapper for the GitHub Copilot SDK with automatic retry, Ralph Wiggum loops, session persistence, metrics, parallel tools, and MCP integration.
40
+
41
+ ## Features
42
+
43
+ - 🔄 **Automatic Retry** - Handles 500 errors, rate limits, and transient failures with exponential backoff
44
+ - 🚀 **Auto-Continue** - Automatically sends "Keep going" on any error
45
+ - 🔁 **Ralph Wiggum Loops** - Iterative AI development with completion promises
46
+ - 💾 **Session Persistence** - Save/restore conversation history to disk
47
+ - 📍 **Checkpointing** - Resume interrupted Ralph loops after crashes
48
+ - 📊 **Metrics & Logging** - Track token usage, timing, and costs
49
+ - ⚡ **Parallel Tools** - Execute multiple tool calls concurrently
50
+ - 🔌 **MCP Integration** - Connect to external MCP servers for extended capabilities
51
+ - 🎯 **Model Selection** - Easy switching between GPT-5.2-codex, Claude, Gemini, and more
52
+ - 🧠 **Reasoning Effort** - Configure reasoning depth from `none` to `xhigh`
53
+ - 💻 **Beautiful CLI** - Rich terminal output with markdown rendering
54
+
55
+ ## Installation
56
+
57
+ ```bash
58
+ pip install copex
59
+ ```
60
+
61
+ Or install from source:
62
+
63
+ ```bash
64
+ git clone https://github.com/Arthur742Ramos/copex
65
+ cd copex
66
+ pip install -e .
67
+ ```
68
+
69
+ ## Prerequisites
70
+
71
+ - Python 3.10+
72
+ - [GitHub Copilot CLI](https://docs.github.com/en/copilot/how-tos/set-up/install-copilot-cli) installed
73
+ - Active Copilot subscription
74
+
75
+ **Note:** Copex automatically detects the Copilot CLI path on Windows, macOS, and Linux. If auto-detection fails, you can specify the path manually:
76
+
77
+ ```python
78
+ config = CopexConfig(cli_path="/path/to/copilot")
79
+ ```
80
+
81
+ Or check detection:
82
+
83
+ ```python
84
+ from copex import find_copilot_cli
85
+ print(f"Found CLI at: {find_copilot_cli()}")
86
+ ```
87
+
88
+ ## Quick Start
89
+
90
+ ### Python API
91
+
92
+ ```python
93
+ import asyncio
94
+ from copex import Copex, CopexConfig, Model, ReasoningEffort
95
+
96
+ async def main():
97
+ # Simple usage with defaults (gpt-5.2-codex, xhigh reasoning)
98
+ async with Copex() as copex:
99
+ response = await copex.chat("Explain async/await in Python")
100
+ print(response)
101
+
102
+ # Custom configuration
103
+ config = CopexConfig(
104
+ model=Model.GPT_5_2_CODEX,
105
+ reasoning_effort=ReasoningEffort.XHIGH,
106
+ retry={"max_retries": 10, "base_delay": 2.0},
107
+ auto_continue=True,
108
+ )
109
+
110
+ async with Copex(config) as copex:
111
+ # Get full response object with metadata
112
+ response = await copex.send("Write a binary search function")
113
+ print(f"Content: {response.content}")
114
+ print(f"Reasoning: {response.reasoning}")
115
+ print(f"Retries needed: {response.retries}")
116
+
117
+ asyncio.run(main())
118
+ ```
119
+
120
+ ### Ralph Wiggum Loops
121
+
122
+ The [Ralph Wiggum technique](https://ghuntley.com/ralph/) enables iterative AI development:
123
+
124
+ ```python
125
+ from copex import Copex, RalphWiggum
126
+
127
+ async def main():
128
+ async with Copex() as copex:
129
+ ralph = RalphWiggum(copex)
130
+
131
+ result = await ralph.loop(
132
+ prompt="Build a REST API with CRUD operations and tests",
133
+ completion_promise="ALL TESTS PASSING",
134
+ max_iterations=30,
135
+ )
136
+
137
+ print(f"Completed in {result.iteration} iterations")
138
+ print(f"Reason: {result.completion_reason}")
139
+ ```
140
+
141
+ **How it works:**
142
+ 1. The same prompt is fed to the AI repeatedly
143
+ 2. The AI sees its previous work in conversation history
144
+ 3. It iteratively improves until outputting `<promise>COMPLETION TEXT</promise>`
145
+ 4. Loop ends when promise matches or max iterations reached
146
+
147
+ ### Skills, Instructions & MCP
148
+
149
+ Copex is fully compatible with Copilot SDK features:
150
+
151
+ ```python
152
+ from copex import Copex, CopexConfig, Model, ReasoningEffort
153
+
154
+ config = CopexConfig(
155
+ model=Model.GPT_5_2_CODEX,
156
+ reasoning_effort=ReasoningEffort.XHIGH,
157
+
158
+ # Enable skills
159
+ skills=["code-review", "api-design", "security"],
160
+
161
+ # Custom instructions
162
+ instructions="Follow PEP 8. Use type hints. Prefer dataclasses.",
163
+ # Or load from file:
164
+ # instructions_file=".copilot/instructions.md",
165
+
166
+ # MCP servers (inline or from file)
167
+ mcp_servers=[
168
+ {"name": "github", "url": "https://api.github.com/mcp/"},
169
+ ],
170
+ # mcp_config_file=".copex/mcp.json",
171
+
172
+ # Tool filtering
173
+ available_tools=["repos", "issues", "code_security"],
174
+ excluded_tools=["delete_repo"],
175
+ )
176
+
177
+ async with Copex(config) as copex:
178
+ response = await copex.chat("Review this code for security issues")
179
+ ```
180
+
181
+ ### Streaming
182
+
183
+ ```python
184
+ async def stream_example():
185
+ async with Copex() as copex:
186
+ async for chunk in copex.stream("Write a REST API"):
187
+ if chunk.type == "message":
188
+ print(chunk.delta, end="", flush=True)
189
+ elif chunk.type == "reasoning":
190
+ print(f"[thinking: {chunk.delta}]", end="")
191
+ ```
192
+
193
+ ## CLI Usage
194
+
195
+ ### Single prompt
196
+
197
+ ```bash
198
+ # Basic usage
199
+ copex chat "Explain Docker containers"
200
+
201
+ # With options
202
+ copex chat "Write a Python web scraper" \
203
+ --model gpt-5.2-codex \
204
+ --reasoning xhigh \
205
+ --max-retries 10
206
+
207
+ # From stdin (for long prompts)
208
+ cat prompt.txt | copex chat
209
+
210
+ # Show reasoning output
211
+ copex chat "Solve this algorithm" --show-reasoning
212
+
213
+ # Raw output (for piping)
214
+ copex chat "Write a bash script" --raw > script.sh
215
+ ```
216
+
217
+ ### Ralph Wiggum loop
218
+
219
+ ```bash
220
+ # Run iterative development loop
221
+ copex ralph "Build a calculator with tests" --promise "ALL TESTS PASSING" -n 20
222
+
223
+ # Without completion promise (runs until max iterations)
224
+ copex ralph "Improve code coverage" --max-iterations 10
225
+ ```
226
+
227
+ ### Interactive mode
228
+
229
+ ```bash
230
+ copex interactive
231
+
232
+ # With specific model
233
+ copex interactive --model claude-sonnet-4.5 --reasoning high
234
+ ```
235
+
236
+ ### Other commands
237
+
238
+ ```bash
239
+ # List available models
240
+ copex models
241
+
242
+ # Create default config file
243
+ copex init
244
+ ```
245
+
246
+ ## Configuration
247
+
248
+ Create a config file at `~/.config/copex/config.toml`:
249
+
250
+ ```toml
251
+ model = "gpt-5.2-codex"
252
+ reasoning_effort = "xhigh"
253
+ streaming = true
254
+ timeout = 300.0
255
+ auto_continue = true
256
+ continue_prompt = "Keep going"
257
+
258
+ # Skills to enable
259
+ skills = ["code-review", "api-design", "test-writer"]
260
+
261
+ # Custom instructions (inline or file path)
262
+ instructions = "Follow our team coding standards. Prefer functional programming."
263
+ # instructions_file = ".copilot/instructions.md"
264
+
265
+ # MCP server config file
266
+ # mcp_config_file = ".copex/mcp.json"
267
+
268
+ # Tool filtering
269
+ # available_tools = ["repos", "issues", "code_security"]
270
+ excluded_tools = []
271
+
272
+ [retry]
273
+ max_retries = 5
274
+ retry_on_any_error = true
275
+ base_delay = 1.0
276
+ max_delay = 30.0
277
+ exponential_base = 2.0
278
+ ```
279
+
280
+ ## Available Models
281
+
282
+ | Model | Description |
283
+ |-------|-------------|
284
+ | `gpt-5.2-codex` | Latest Codex model (default) |
285
+ | `gpt-5.1-codex` | Previous Codex version |
286
+ | `gpt-5.1-codex-max` | High-capacity Codex |
287
+ | `gpt-5.1-codex-mini` | Fast, lightweight Codex |
288
+ | `claude-sonnet-4.5` | Claude Sonnet 4.5 |
289
+ | `claude-sonnet-4` | Claude Sonnet 4 |
290
+ | `claude-opus-4.5` | Claude Opus (premium) |
291
+ | `gemini-3-pro-preview` | Gemini 3 Pro |
292
+
293
+ ## Reasoning Effort Levels
294
+
295
+ | Level | Description |
296
+ |-------|-------------|
297
+ | `none` | No extended reasoning |
298
+ | `low` | Minimal reasoning |
299
+ | `medium` | Balanced reasoning |
300
+ | `high` | Deep reasoning |
301
+ | `xhigh` | Maximum reasoning (best for complex tasks) |
302
+
303
+ ## Error Handling
304
+
305
+ By default, Copex retries on **any error** (`retry_on_any_error=True`).
306
+
307
+ You can also be specific:
308
+
309
+ ```python
310
+ config = CopexConfig(
311
+ retry={
312
+ "retry_on_any_error": False,
313
+ "max_retries": 10,
314
+ "retry_on_errors": ["500", "timeout", "rate limit"],
315
+ }
316
+ )
317
+ ```
318
+
319
+ ## Credits
320
+
321
+ - **Ralph Wiggum technique**: [Geoffrey Huntley](https://ghuntley.com/ralph/)
322
+ - **GitHub Copilot SDK**: [github/copilot-sdk](https://github.com/github/copilot-sdk)
323
+
324
+ ## Contributing
325
+
326
+ Contributions welcome! Please open an issue or PR at [github.com/Arthur742Ramos/copex](https://github.com/Arthur742Ramos/copex).
327
+
328
+ ## License
329
+
330
+ MIT
331
+
332
+ ---
333
+
334
+ ## Advanced Features
335
+
336
+ ### Session Persistence
337
+
338
+ Save and restore conversation history:
339
+
340
+ ```python
341
+ from copex import Copex, SessionStore, PersistentSession
342
+
343
+ store = SessionStore() # Saves to ~/.copex/sessions/
344
+
345
+ # Create a persistent session
346
+ session = PersistentSession("my-project", store)
347
+
348
+ async with Copex() as copex:
349
+ response = await copex.chat("Hello!")
350
+ session.add_user_message("Hello!")
351
+ session.add_assistant_message(response)
352
+ # Auto-saved to disk
353
+
354
+ # Later, restore it
355
+ session = PersistentSession("my-project", store)
356
+ print(session.messages) # Previous messages loaded
357
+ ```
358
+
359
+ ### Checkpointing (Crash Recovery)
360
+
361
+ Resume Ralph loops after interruption:
362
+
363
+ ```python
364
+ from copex import Copex, CheckpointStore, CheckpointedRalph
365
+
366
+ store = CheckpointStore() # Saves to ~/.copex/checkpoints/
367
+
368
+ async with Copex() as copex:
369
+ ralph = CheckpointedRalph(copex, store, loop_id="my-api-project")
370
+
371
+ # Automatically resumes from last checkpoint if interrupted
372
+ result = await ralph.loop(
373
+ prompt="Build a REST API with tests",
374
+ completion_promise="ALL TESTS PASSING",
375
+ max_iterations=30,
376
+ resume=True, # Resume from checkpoint
377
+ )
378
+ ```
379
+
380
+ ### Metrics & Cost Tracking
381
+
382
+ Track token usage and estimate costs:
383
+
384
+ ```python
385
+ from copex import Copex, MetricsCollector
386
+
387
+ collector = MetricsCollector()
388
+
389
+ async with Copex() as copex:
390
+ # Track a request
391
+ req = collector.start_request(
392
+ model="gpt-5.2-codex",
393
+ prompt="Write a function..."
394
+ )
395
+
396
+ response = await copex.chat("Write a function...")
397
+
398
+ collector.complete_request(
399
+ req.request_id,
400
+ success=True,
401
+ response=response,
402
+ )
403
+
404
+ # Get summary
405
+ print(collector.print_summary())
406
+ # Session: 20260117_170000
407
+ # Requests: 5 (5 ok, 0 failed)
408
+ # Success Rate: 100.0%
409
+ # Total Tokens: 12,450
410
+ # Estimated Cost: $0.0234
411
+
412
+ # Export metrics
413
+ collector.export_json("metrics.json")
414
+ collector.export_csv("metrics.csv")
415
+ ```
416
+
417
+ ### Parallel Tools
418
+
419
+ Execute multiple tools concurrently:
420
+
421
+ ```python
422
+ from copex import Copex, ParallelToolExecutor
423
+
424
+ executor = ParallelToolExecutor()
425
+
426
+ @executor.tool("get_weather", "Get weather for a city")
427
+ async def get_weather(city: str) -> str:
428
+ return f"Weather in {city}: Sunny, 72°F"
429
+
430
+ @executor.tool("get_time", "Get time in timezone")
431
+ async def get_time(timezone: str) -> str:
432
+ return f"Time in {timezone}: 2:30 PM"
433
+
434
+ # Tools execute in parallel when AI calls multiple at once
435
+ async with Copex() as copex:
436
+ response = await copex.send(
437
+ "What's the weather in Seattle and the time in PST?",
438
+ tools=executor.get_tool_definitions(),
439
+ )
440
+ ```
441
+
442
+ ### MCP Server Integration
443
+
444
+ Connect to external MCP servers:
445
+
446
+ ```python
447
+ from copex import Copex, MCPManager, MCPServerConfig
448
+
449
+ manager = MCPManager()
450
+
451
+ # Add MCP servers
452
+ manager.add_server(MCPServerConfig(
453
+ name="github",
454
+ command="npx",
455
+ args=["-y", "@github/mcp-server"],
456
+ env={"GITHUB_TOKEN": "..."},
457
+ ))
458
+
459
+ manager.add_server(MCPServerConfig(
460
+ name="filesystem",
461
+ command="npx",
462
+ args=["-y", "@anthropic/mcp-server-filesystem", "/path/to/dir"],
463
+ ))
464
+
465
+ await manager.connect_all()
466
+
467
+ # Get all tools from all servers
468
+ all_tools = manager.get_all_tools()
469
+
470
+ # Call a tool
471
+ result = await manager.call_tool("github:search_repos", {"query": "copex"})
472
+
473
+ await manager.disconnect_all()
474
+ ```
475
+
476
+ **MCP Config File** (`~/.copex/mcp.json`):
477
+
478
+ ```json
479
+ {
480
+ "servers": {
481
+ "github": {
482
+ "command": "npx",
483
+ "args": ["-y", "@github/mcp-server"],
484
+ "env": {"GITHUB_TOKEN": "your-token"}
485
+ },
486
+ "browser": {
487
+ "command": "npx",
488
+ "args": ["-y", "@anthropic/mcp-server-puppeteer"]
489
+ }
490
+ }
491
+ }
492
+ ```
493
+
494
+ ```python
495
+ from copex import load_mcp_config, MCPManager
496
+
497
+ configs = load_mcp_config() # Loads from ~/.copex/mcp.json
498
+ manager = MCPManager()
499
+ for config in configs:
500
+ manager.add_server(config)
501
+ await manager.connect_all()
502
+ ```