copex 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
copex-0.1.0/.gitignore ADDED
@@ -0,0 +1,44 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+ *.so
6
+ .Python
7
+ build/
8
+ develop-eggs/
9
+ dist/
10
+ downloads/
11
+ eggs/
12
+ .eggs/
13
+ lib/
14
+ lib64/
15
+ parts/
16
+ sdist/
17
+ var/
18
+ wheels/
19
+ *.egg-info/
20
+ .installed.cfg
21
+ *.egg
22
+
23
+ # Virtual environments
24
+ .venv/
25
+ venv/
26
+ ENV/
27
+
28
+ # IDE
29
+ .idea/
30
+ .vscode/
31
+ *.swp
32
+ *.swo
33
+
34
+ # Testing
35
+ .pytest_cache/
36
+ .coverage
37
+ htmlcov/
38
+
39
+ # Build
40
+ *.manifest
41
+ *.spec
42
+
43
+ # Copex data
44
+ .copex/
copex-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Arthur Ramos
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
copex-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,501 @@
1
+ Metadata-Version: 2.4
2
+ Name: copex
3
+ Version: 0.1.0
4
+ Summary: Copilot Extended - Resilient wrapper for GitHub Copilot SDK with auto-retry, Ralph Wiggum loops, and more
5
+ Project-URL: Homepage, https://github.com/Arthur742Ramos/copex
6
+ Project-URL: Repository, https://github.com/Arthur742Ramos/copex
7
+ Project-URL: Issues, https://github.com/Arthur742Ramos/copex/issues
8
+ Author-email: Arthur Ramos <arthur742ramos@users.noreply.github.com>
9
+ License-Expression: MIT
10
+ License-File: LICENSE
11
+ Keywords: ai,copex,copilot,github,ralph-wiggum,retry,sdk
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Requires-Python: >=3.10
20
+ Requires-Dist: github-copilot-sdk>=0.1.0
21
+ Requires-Dist: pydantic>=2.0.0
22
+ Requires-Dist: rich>=13.0.0
23
+ Requires-Dist: tomli-w>=1.0.0
24
+ Requires-Dist: typer>=0.9.0
25
+ Provides-Extra: dev
26
+ Requires-Dist: pytest; extra == 'dev'
27
+ Requires-Dist: pytest-asyncio; extra == 'dev'
28
+ Requires-Dist: ruff; extra == 'dev'
29
+ Description-Content-Type: text/markdown
30
+
31
+ # Copex - Copilot Extended
32
+
33
+ [![PyPI version](https://badge.fury.io/py/copex.svg)](https://badge.fury.io/py/copex)
34
+ [![Python 3.10+](https://img.shields.io/badge/python-3.10+-blue.svg)](https://www.python.org/downloads/)
35
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
36
+ [![Tests](https://github.com/Arthur742Ramos/copex/actions/workflows/test.yml/badge.svg)](https://github.com/Arthur742Ramos/copex/actions/workflows/test.yml)
37
+
38
+ A resilient Python wrapper for the GitHub Copilot SDK with automatic retry, Ralph Wiggum loops, session persistence, metrics, parallel tools, and MCP integration.
39
+
40
+ ## Features
41
+
42
+ - 🔄 **Automatic Retry** - Handles 500 errors, rate limits, and transient failures with exponential backoff
43
+ - 🚀 **Auto-Continue** - Automatically sends "Keep going" on any error
44
+ - 🔁 **Ralph Wiggum Loops** - Iterative AI development with completion promises
45
+ - 💾 **Session Persistence** - Save/restore conversation history to disk
46
+ - 📍 **Checkpointing** - Resume interrupted Ralph loops after crashes
47
+ - 📊 **Metrics & Logging** - Track token usage, timing, and costs
48
+ - ⚡ **Parallel Tools** - Execute multiple tool calls concurrently
49
+ - 🔌 **MCP Integration** - Connect to external MCP servers for extended capabilities
50
+ - 🎯 **Model Selection** - Easy switching between GPT-5.2-codex, Claude, Gemini, and more
51
+ - 🧠 **Reasoning Effort** - Configure reasoning depth from `none` to `xhigh`
52
+ - 💻 **Beautiful CLI** - Rich terminal output with markdown rendering
53
+
54
+ ## Installation
55
+
56
+ ```bash
57
+ pip install copex
58
+ ```
59
+
60
+ Or install from source:
61
+
62
+ ```bash
63
+ git clone https://github.com/Arthur742Ramos/copex
64
+ cd copex
65
+ pip install -e .
66
+ ```
67
+
68
+ ## Prerequisites
69
+
70
+ - Python 3.10+
71
+ - [GitHub Copilot CLI](https://docs.github.com/en/copilot/how-tos/set-up/install-copilot-cli) installed
72
+ - Active Copilot subscription
73
+
74
+ **Note:** Copex automatically detects the Copilot CLI path on Windows, macOS, and Linux. If auto-detection fails, you can specify the path manually:
75
+
76
+ ```python
77
+ config = CopexConfig(cli_path="/path/to/copilot")
78
+ ```
79
+
80
+ Or check detection:
81
+
82
+ ```python
83
+ from copex import find_copilot_cli
84
+ print(f"Found CLI at: {find_copilot_cli()}")
85
+ ```
86
+
87
+ ## Quick Start
88
+
89
+ ### Python API
90
+
91
+ ```python
92
+ import asyncio
93
+ from copex import Copex, CopexConfig, Model, ReasoningEffort
94
+
95
+ async def main():
96
+ # Simple usage with defaults (gpt-5.2-codex, xhigh reasoning)
97
+ async with Copex() as copex:
98
+ response = await copex.chat("Explain async/await in Python")
99
+ print(response)
100
+
101
+ # Custom configuration
102
+ config = CopexConfig(
103
+ model=Model.GPT_5_2_CODEX,
104
+ reasoning_effort=ReasoningEffort.XHIGH,
105
+ retry={"max_retries": 10, "base_delay": 2.0},
106
+ auto_continue=True,
107
+ )
108
+
109
+ async with Copex(config) as copex:
110
+ # Get full response object with metadata
111
+ response = await copex.send("Write a binary search function")
112
+ print(f"Content: {response.content}")
113
+ print(f"Reasoning: {response.reasoning}")
114
+ print(f"Retries needed: {response.retries}")
115
+
116
+ asyncio.run(main())
117
+ ```
118
+
119
+ ### Ralph Wiggum Loops
120
+
121
+ The [Ralph Wiggum technique](https://ghuntley.com/ralph/) enables iterative AI development:
122
+
123
+ ```python
124
+ from copex import Copex, RalphWiggum
125
+
126
+ async def main():
127
+ async with Copex() as copex:
128
+ ralph = RalphWiggum(copex)
129
+
130
+ result = await ralph.loop(
131
+ prompt="Build a REST API with CRUD operations and tests",
132
+ completion_promise="ALL TESTS PASSING",
133
+ max_iterations=30,
134
+ )
135
+
136
+ print(f"Completed in {result.iteration} iterations")
137
+ print(f"Reason: {result.completion_reason}")
138
+ ```
139
+
140
+ **How it works:**
141
+ 1. The same prompt is fed to the AI repeatedly
142
+ 2. The AI sees its previous work in conversation history
143
+ 3. It iteratively improves until outputting `<promise>COMPLETION TEXT</promise>`
144
+ 4. Loop ends when promise matches or max iterations reached
145
+
146
+ ### Skills, Instructions & MCP
147
+
148
+ Copex is fully compatible with Copilot SDK features:
149
+
150
+ ```python
151
+ from copex import Copex, CopexConfig, Model, ReasoningEffort
152
+
153
+ config = CopexConfig(
154
+ model=Model.GPT_5_2_CODEX,
155
+ reasoning_effort=ReasoningEffort.XHIGH,
156
+
157
+ # Enable skills
158
+ skills=["code-review", "api-design", "security"],
159
+
160
+ # Custom instructions
161
+ instructions="Follow PEP 8. Use type hints. Prefer dataclasses.",
162
+ # Or load from file:
163
+ # instructions_file=".copilot/instructions.md",
164
+
165
+ # MCP servers (inline or from file)
166
+ mcp_servers=[
167
+ {"name": "github", "url": "https://api.github.com/mcp/"},
168
+ ],
169
+ # mcp_config_file=".copex/mcp.json",
170
+
171
+ # Tool filtering
172
+ available_tools=["repos", "issues", "code_security"],
173
+ excluded_tools=["delete_repo"],
174
+ )
175
+
176
+ async with Copex(config) as copex:
177
+ response = await copex.chat("Review this code for security issues")
178
+ ```
179
+
180
+ ### Streaming
181
+
182
+ ```python
183
+ async def stream_example():
184
+ async with Copex() as copex:
185
+ async for chunk in copex.stream("Write a REST API"):
186
+ if chunk.type == "message":
187
+ print(chunk.delta, end="", flush=True)
188
+ elif chunk.type == "reasoning":
189
+ print(f"[thinking: {chunk.delta}]", end="")
190
+ ```
191
+
192
+ ## CLI Usage
193
+
194
+ ### Single prompt
195
+
196
+ ```bash
197
+ # Basic usage
198
+ copex chat "Explain Docker containers"
199
+
200
+ # With options
201
+ copex chat "Write a Python web scraper" \
202
+ --model gpt-5.2-codex \
203
+ --reasoning xhigh \
204
+ --max-retries 10
205
+
206
+ # From stdin (for long prompts)
207
+ cat prompt.txt | copex chat
208
+
209
+ # Show reasoning output
210
+ copex chat "Solve this algorithm" --show-reasoning
211
+
212
+ # Raw output (for piping)
213
+ copex chat "Write a bash script" --raw > script.sh
214
+ ```
215
+
216
+ ### Ralph Wiggum loop
217
+
218
+ ```bash
219
+ # Run iterative development loop
220
+ copex ralph "Build a calculator with tests" --promise "ALL TESTS PASSING" -n 20
221
+
222
+ # Without completion promise (runs until max iterations)
223
+ copex ralph "Improve code coverage" --max-iterations 10
224
+ ```
225
+
226
+ ### Interactive mode
227
+
228
+ ```bash
229
+ copex interactive
230
+
231
+ # With specific model
232
+ copex interactive --model claude-sonnet-4.5 --reasoning high
233
+ ```
234
+
235
+ ### Other commands
236
+
237
+ ```bash
238
+ # List available models
239
+ copex models
240
+
241
+ # Create default config file
242
+ copex init
243
+ ```
244
+
245
+ ## Configuration
246
+
247
+ Create a config file at `~/.config/copex/config.toml`:
248
+
249
+ ```toml
250
+ model = "gpt-5.2-codex"
251
+ reasoning_effort = "xhigh"
252
+ streaming = true
253
+ timeout = 300.0
254
+ auto_continue = true
255
+ continue_prompt = "Keep going"
256
+
257
+ # Skills to enable
258
+ skills = ["code-review", "api-design", "test-writer"]
259
+
260
+ # Custom instructions (inline or file path)
261
+ instructions = "Follow our team coding standards. Prefer functional programming."
262
+ # instructions_file = ".copilot/instructions.md"
263
+
264
+ # MCP server config file
265
+ # mcp_config_file = ".copex/mcp.json"
266
+
267
+ # Tool filtering
268
+ # available_tools = ["repos", "issues", "code_security"]
269
+ excluded_tools = []
270
+
271
+ [retry]
272
+ max_retries = 5
273
+ retry_on_any_error = true
274
+ base_delay = 1.0
275
+ max_delay = 30.0
276
+ exponential_base = 2.0
277
+ ```
278
+
279
+ ## Available Models
280
+
281
+ | Model | Description |
282
+ |-------|-------------|
283
+ | `gpt-5.2-codex` | Latest Codex model (default) |
284
+ | `gpt-5.1-codex` | Previous Codex version |
285
+ | `gpt-5.1-codex-max` | High-capacity Codex |
286
+ | `gpt-5.1-codex-mini` | Fast, lightweight Codex |
287
+ | `claude-sonnet-4.5` | Claude Sonnet 4.5 |
288
+ | `claude-sonnet-4` | Claude Sonnet 4 |
289
+ | `claude-opus-4.5` | Claude Opus (premium) |
290
+ | `gemini-3-pro-preview` | Gemini 3 Pro |
291
+
292
+ ## Reasoning Effort Levels
293
+
294
+ | Level | Description |
295
+ |-------|-------------|
296
+ | `none` | No extended reasoning |
297
+ | `low` | Minimal reasoning |
298
+ | `medium` | Balanced reasoning |
299
+ | `high` | Deep reasoning |
300
+ | `xhigh` | Maximum reasoning (best for complex tasks) |
301
+
302
+ ## Error Handling
303
+
304
+ By default, Copex retries on **any error** (`retry_on_any_error=True`).
305
+
306
+ You can also be specific:
307
+
308
+ ```python
309
+ config = CopexConfig(
310
+ retry={
311
+ "retry_on_any_error": False,
312
+ "max_retries": 10,
313
+ "retry_on_errors": ["500", "timeout", "rate limit"],
314
+ }
315
+ )
316
+ ```
317
+
318
+ ## Credits
319
+
320
+ - **Ralph Wiggum technique**: [Geoffrey Huntley](https://ghuntley.com/ralph/)
321
+ - **GitHub Copilot SDK**: [github/copilot-sdk](https://github.com/github/copilot-sdk)
322
+
323
+ ## Contributing
324
+
325
+ Contributions welcome! Please open an issue or PR at [github.com/Arthur742Ramos/copex](https://github.com/Arthur742Ramos/copex).
326
+
327
+ ## License
328
+
329
+ MIT
330
+
331
+ ---
332
+
333
+ ## Advanced Features
334
+
335
+ ### Session Persistence
336
+
337
+ Save and restore conversation history:
338
+
339
+ ```python
340
+ from copex import Copex, SessionStore, PersistentSession
341
+
342
+ store = SessionStore() # Saves to ~/.copex/sessions/
343
+
344
+ # Create a persistent session
345
+ session = PersistentSession("my-project", store)
346
+
347
+ async with Copex() as copex:
348
+ response = await copex.chat("Hello!")
349
+ session.add_user_message("Hello!")
350
+ session.add_assistant_message(response)
351
+ # Auto-saved to disk
352
+
353
+ # Later, restore it
354
+ session = PersistentSession("my-project", store)
355
+ print(session.messages) # Previous messages loaded
356
+ ```
357
+
358
+ ### Checkpointing (Crash Recovery)
359
+
360
+ Resume Ralph loops after interruption:
361
+
362
+ ```python
363
+ from copex import Copex, CheckpointStore, CheckpointedRalph
364
+
365
+ store = CheckpointStore() # Saves to ~/.copex/checkpoints/
366
+
367
+ async with Copex() as copex:
368
+ ralph = CheckpointedRalph(copex, store, loop_id="my-api-project")
369
+
370
+ # Automatically resumes from last checkpoint if interrupted
371
+ result = await ralph.loop(
372
+ prompt="Build a REST API with tests",
373
+ completion_promise="ALL TESTS PASSING",
374
+ max_iterations=30,
375
+ resume=True, # Resume from checkpoint
376
+ )
377
+ ```
378
+
379
+ ### Metrics & Cost Tracking
380
+
381
+ Track token usage and estimate costs:
382
+
383
+ ```python
384
+ from copex import Copex, MetricsCollector
385
+
386
+ collector = MetricsCollector()
387
+
388
+ async with Copex() as copex:
389
+ # Track a request
390
+ req = collector.start_request(
391
+ model="gpt-5.2-codex",
392
+ prompt="Write a function..."
393
+ )
394
+
395
+ response = await copex.chat("Write a function...")
396
+
397
+ collector.complete_request(
398
+ req.request_id,
399
+ success=True,
400
+ response=response,
401
+ )
402
+
403
+ # Get summary
404
+ print(collector.print_summary())
405
+ # Session: 20260117_170000
406
+ # Requests: 5 (5 ok, 0 failed)
407
+ # Success Rate: 100.0%
408
+ # Total Tokens: 12,450
409
+ # Estimated Cost: $0.0234
410
+
411
+ # Export metrics
412
+ collector.export_json("metrics.json")
413
+ collector.export_csv("metrics.csv")
414
+ ```
415
+
416
+ ### Parallel Tools
417
+
418
+ Execute multiple tools concurrently:
419
+
420
+ ```python
421
+ from copex import Copex, ParallelToolExecutor
422
+
423
+ executor = ParallelToolExecutor()
424
+
425
+ @executor.tool("get_weather", "Get weather for a city")
426
+ async def get_weather(city: str) -> str:
427
+ return f"Weather in {city}: Sunny, 72°F"
428
+
429
+ @executor.tool("get_time", "Get time in timezone")
430
+ async def get_time(timezone: str) -> str:
431
+ return f"Time in {timezone}: 2:30 PM"
432
+
433
+ # Tools execute in parallel when AI calls multiple at once
434
+ async with Copex() as copex:
435
+ response = await copex.send(
436
+ "What's the weather in Seattle and the time in PST?",
437
+ tools=executor.get_tool_definitions(),
438
+ )
439
+ ```
440
+
441
+ ### MCP Server Integration
442
+
443
+ Connect to external MCP servers:
444
+
445
+ ```python
446
+ from copex import Copex, MCPManager, MCPServerConfig
447
+
448
+ manager = MCPManager()
449
+
450
+ # Add MCP servers
451
+ manager.add_server(MCPServerConfig(
452
+ name="github",
453
+ command="npx",
454
+ args=["-y", "@github/mcp-server"],
455
+ env={"GITHUB_TOKEN": "..."},
456
+ ))
457
+
458
+ manager.add_server(MCPServerConfig(
459
+ name="filesystem",
460
+ command="npx",
461
+ args=["-y", "@anthropic/mcp-server-filesystem", "/path/to/dir"],
462
+ ))
463
+
464
+ await manager.connect_all()
465
+
466
+ # Get all tools from all servers
467
+ all_tools = manager.get_all_tools()
468
+
469
+ # Call a tool
470
+ result = await manager.call_tool("github:search_repos", {"query": "copex"})
471
+
472
+ await manager.disconnect_all()
473
+ ```
474
+
475
+ **MCP Config File** (`~/.copex/mcp.json`):
476
+
477
+ ```json
478
+ {
479
+ "servers": {
480
+ "github": {
481
+ "command": "npx",
482
+ "args": ["-y", "@github/mcp-server"],
483
+ "env": {"GITHUB_TOKEN": "your-token"}
484
+ },
485
+ "browser": {
486
+ "command": "npx",
487
+ "args": ["-y", "@anthropic/mcp-server-puppeteer"]
488
+ }
489
+ }
490
+ }
491
+ ```
492
+
493
+ ```python
494
+ from copex import load_mcp_config, MCPManager
495
+
496
+ configs = load_mcp_config() # Loads from ~/.copex/mcp.json
497
+ manager = MCPManager()
498
+ for config in configs:
499
+ manager.add_server(config)
500
+ await manager.connect_all()
501
+ ```