bpsai-pair 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of bpsai-pair has been flagged; consult the registry's advisory page for details.
- bpsai_pair/__init__.py +25 -0
- bpsai_pair/__main__.py +4 -0
- bpsai_pair/adapters.py +9 -0
- bpsai_pair/cli.py +514 -0
- bpsai_pair/config.py +310 -0
- bpsai_pair/data/cookiecutter-paircoder/cookiecutter.json +12 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.agentpackignore +1 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.editorconfig +17 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.github/PULL_REQUEST_TEMPLATE.md +47 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.github/workflows/ci.yml +90 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.github/workflows/project_tree.yml +33 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.gitignore +5 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.gitleaks.toml +17 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.pre-commit-config.yaml +38 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/CODEOWNERS +9 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/CONTRIBUTING.md +35 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/SECURITY.md +14 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/agents.md +6 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/agents.md.bak +196 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/development.md +1 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/development.md.bak +10 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/directory_notes/.gitkeep +1 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/project_tree.md +7 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/prompts/deep_research.yml +28 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/prompts/implementation.yml +25 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/prompts/roadmap.yml +14 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/scripts/README.md +11 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/src/.gitkeep +1 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/templates/adr.md +19 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/templates/directory_note.md +17 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/tests/example_contract/README.md +3 -0
- bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/tests/example_integration/README.md +3 -0
- bpsai_pair/init_bundled_cli.py +47 -0
- bpsai_pair/jsonio.py +6 -0
- bpsai_pair/ops.py +451 -0
- bpsai_pair/pyutils.py +26 -0
- bpsai_pair/utils.py +11 -0
- bpsai_pair-0.2.0.dist-info/METADATA +29 -0
- bpsai_pair-0.2.0.dist-info/RECORD +42 -0
- bpsai_pair-0.2.0.dist-info/WHEEL +5 -0
- bpsai_pair-0.2.0.dist-info/entry_points.txt +3 -0
- bpsai_pair-0.2.0.dist-info/top_level.txt +1 -0
bpsai_pair/ops.py
ADDED
|
@@ -0,0 +1,451 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Operations module for cross-platform compatibility.
|
|
3
|
+
Replaces shell scripts with Python implementations.
|
|
4
|
+
"""
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
import subprocess
|
|
9
|
+
import tarfile
|
|
10
|
+
import tempfile
|
|
11
|
+
from datetime import datetime, timezone
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import List, Optional, Set
|
|
14
|
+
import shutil
|
|
15
|
+
import json
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class GitOps:
    """Helpers for invoking git via subprocess.

    All methods are static and take an explicit repository path, so the
    caller's working directory is never changed.
    """

    @staticmethod
    def is_repo(path: Path) -> bool:
        """Return True if *path* contains a ``.git`` entry (directory or file)."""
        return (path / ".git").exists()

    @staticmethod
    def is_clean(path: Path) -> bool:
        """Return True if the working tree has no unstaged, staged, or untracked changes.

        Any failure to run git (missing binary, bad path) is reported as
        "not clean" rather than raising.
        """
        try:
            # Unstaged changes: `git diff --quiet` exits non-zero when dirty.
            result = subprocess.run(
                ["git", "diff", "--quiet"],
                cwd=path,
                capture_output=True
            )
            if result.returncode != 0:
                return False

            # Staged (index) changes.
            staged = subprocess.run(
                ["git", "diff", "--cached", "--quiet"],
                cwd=path,
                capture_output=True
            )
            if staged.returncode != 0:
                return False

            # Untracked files, respecting .gitignore via --exclude-standard.
            untracked = subprocess.run(
                ["git", "ls-files", "--other", "--exclude-standard"],
                cwd=path,
                capture_output=True,
                text=True
            )
            if untracked.stdout.strip():
                return False

            return True
        except (subprocess.SubprocessError, OSError):
            # git not installed or path unusable: treat as dirty, don't crash.
            return False

    @staticmethod
    def current_branch(path: Path) -> str:
        """Return the current branch name, or '' if it cannot be determined."""
        result = subprocess.run(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
            cwd=path,
            capture_output=True,
            text=True
        )
        return result.stdout.strip() if result.returncode == 0 else ""

    @staticmethod
    def create_branch(path: Path, branch: str, from_branch: str = "main") -> bool:
        """Create and checkout *branch* off *from_branch*.

        Pulls --ff-only first when an upstream is configured.  Returns False
        if *from_branch* does not exist or the checkout fails.
        """
        # Check if source branch exists.
        check = subprocess.run(
            ["git", "rev-parse", "--verify", from_branch],
            cwd=path,
            capture_output=True
        )
        if check.returncode != 0:
            return False

        # Checkout source branch.
        subprocess.run(["git", "checkout", from_branch], cwd=path, capture_output=True)

        # Pull only if an upstream tracking branch is configured.
        upstream = subprocess.run(
            ["git", "rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"],
            cwd=path,
            capture_output=True
        )
        if upstream.returncode == 0:
            subprocess.run(["git", "pull", "--ff-only"], cwd=path, capture_output=True)

        # Create the new branch.
        result = subprocess.run(
            ["git", "checkout", "-b", branch],
            cwd=path,
            capture_output=True
        )
        return result.returncode == 0

    @staticmethod
    def add_commit(path: Path, files: List[Path], message: str) -> bool:
        """Stage *files* and commit with *message*; True on commit success."""
        for f in files:
            subprocess.run(["git", "add", str(f)], cwd=path, capture_output=True)

        result = subprocess.run(
            ["git", "commit", "-m", message],
            cwd=path,
            capture_output=True
        )
        return result.returncode == 0
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
class ProjectTree:
    """Generate project tree snapshots."""

    @staticmethod
    def generate(root: Path, excludes: Optional[Set[str]] = None) -> str:
        """Render *root* as an ASCII tree, skipping excluded names.

        A pattern beginning with '*' is treated as a suffix match
        (e.g. '*.egg-info'); all other patterns match the exact name.
        """
        skip_names = excludes if excludes is not None else {
            '.git', '.venv', 'venv', '__pycache__',
            'node_modules', 'dist', 'build', '.mypy_cache',
            '.pytest_cache', '.tox', '*.egg-info', '.DS_Store'
        }

        def excluded(entry: Path) -> bool:
            for pat in skip_names:
                if pat.startswith('*'):
                    if entry.name.endswith(pat[1:]):
                        return True
                elif entry.name == pat:
                    return True
            return False

        lines = ["."]

        def render(directory: Path, prefix: str = "") -> None:
            # Directories first, then files, each group alphabetical.
            entries = [
                e for e in sorted(directory.iterdir(),
                                  key=lambda e: (e.is_file(), e.name))
                if not excluded(e)
            ]
            last_index = len(entries) - 1
            for index, entry in enumerate(entries):
                connector = "└── " if index == last_index else "├── "
                lines.append(f"{prefix}{connector}{entry.name}")
                if entry.is_dir():
                    render(entry, prefix + ("    " if index == last_index else "│   "))

        render(root)
        return "\n".join(lines)
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
class ContextPacker:
    """Package context files for AI agents."""

    @staticmethod
    def read_ignore_patterns(ignore_file: Path) -> Set[str]:
        """Read patterns from a .agentpackignore file.

        Blank lines and '#' comments are skipped; trailing slashes are
        stripped so directory patterns compare by name.  When the file is
        missing, a conservative default set is returned.
        """
        if not ignore_file.exists():
            # Default patterns.
            return {
                '.git', '.venv', '__pycache__', 'node_modules',
                'dist', 'build', '*.log', '*.bak', '*.tgz',
                '*.tar.gz', '*.zip', '.env*'
            }
        patterns: Set[str] = set()
        with open(ignore_file, 'r') as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith('#'):
                    patterns.add(line.rstrip('/'))
        return patterns

    @staticmethod
    def should_exclude(path: Path, patterns: Set[str]) -> bool:
        """Return True if *path* matches any exclude pattern."""
        # Hoisted out of the per-pattern loop (was re-imported every iteration).
        import fnmatch

        path_str = str(path).replace('\\', '/')

        for pattern in patterns:
            # Wildcards: match against both the basename and the full path.
            if '*' in pattern:
                if fnmatch.fnmatch(path.name, pattern) or fnmatch.fnmatch(path_str, pattern):
                    return True
            # Directory patterns (only reachable via caller-supplied sets,
            # since read_ignore_patterns strips trailing slashes).
            elif pattern.endswith('/'):
                if path.is_dir() and path.name == pattern[:-1]:
                    return True
            # Exact name match.
            elif path.name == pattern:
                return True
            # Any path segment equals the pattern (e.g. 'node_modules' anywhere).
            elif pattern in path_str.split('/'):
                return True

        return False

    @staticmethod
    def pack(
        root: Path,
        output: Path,
        extra_files: Optional[List[str]] = None,
        dry_run: bool = False
    ) -> List[Path]:
        """Create a gzipped context tarball for AI agents.

        Returns the list of candidate files (after existence filtering).
        With dry_run=True the list is returned without writing anything.
        """
        # Default files to include.
        context_files = [
            root / "context" / "development.md",
            root / "context" / "agents.md",
            root / "context" / "project_tree.md",
        ]

        # Add directory_notes if present.
        dir_notes = root / "context" / "directory_notes"
        if dir_notes.exists():
            context_files.extend(dir_notes.rglob("*.md"))

        # Caller-requested extras, resolved relative to root.
        if extra_files:
            for extra in extra_files:
                extra_path = root / extra
                if extra_path.exists():
                    context_files.append(extra_path)

        # Filter out non-existent files.
        context_files = [f for f in context_files if f.exists()]

        if dry_run:
            return context_files

        patterns = ContextPacker.read_ignore_patterns(root / ".agentpackignore")

        with tarfile.open(output, "w:gz") as tar:
            for file_path in context_files:
                if not ContextPacker.should_exclude(file_path, patterns):
                    tar.add(file_path, arcname=str(file_path.relative_to(root)))

        return context_files
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
class FeatureOps:
    """Operations for feature branch management."""

    @staticmethod
    def create_feature(
        root: Path,
        name: str,
        branch_type: str = "feature",
        primary_goal: str = "",
        phase: str = "",
        force: bool = False
    ) -> bool:
        """Create a feature branch and update the context files.

        Creates `{branch_type}/{name}`, ensures the `context/` structure,
        writes/updates development.md, agents.md and project_tree.md, and
        commits them.

        Raises:
            ValueError: if the working tree is dirty (unless force=True)
                or the branch cannot be created.
        """
        if not force and not GitOps.is_clean(root):
            raise ValueError("Working tree not clean. Commit or stash changes, or use --force")

        branch_name = f"{branch_type}/{name}"
        if not GitOps.create_branch(root, branch_name):
            raise ValueError(f"Failed to create branch {branch_name}")

        # Ensure context directory structure.
        context_dir = root / "context"
        context_dir.mkdir(exist_ok=True)
        (context_dir / "directory_notes").mkdir(exist_ok=True)

        dev_file = FeatureOps._upsert_dev_file(context_dir, branch_name, primary_goal, phase)
        agents_file = FeatureOps._ensure_agents_file(context_dir)
        tree_file = FeatureOps._write_tree_snapshot(context_dir, root)

        GitOps.add_commit(
            root,
            [dev_file, agents_file, tree_file],
            f"feat(context): start {branch_name} — Primary Goal: {primary_goal or 'TBD'}"
        )

        return True

    @staticmethod
    def _upsert_dev_file(context_dir: Path, branch_name: str, primary_goal: str, phase: str) -> Path:
        """Create context/development.md, or update its Context Loop fields in place."""
        import re  # hoisted: previously imported inside each update branch

        dev_file = context_dir / "development.md"
        if not dev_file.exists():
            dev_file.write_text(f"""# Development Log

**Phase:** {phase or 'Phase 1'}
**Primary Goal:** {primary_goal or 'To be defined'}

## Context Sync (AUTO-UPDATED)

- **Overall goal is:** {primary_goal or 'To be defined'}
- **Last action was:** Created feature branch {branch_name}
- **Next action will be:** {phase or 'Define first task'}
- **Blockers:** None
""")
            return dev_file

        content = dev_file.read_text()

        # Rewrite each Context Loop line from its label to end-of-line.
        if primary_goal:
            content = re.sub(
                r'\*\*Primary Goal:\*\*.*',
                f'**Primary Goal:** {primary_goal}',
                content
            )
            content = re.sub(
                r'Overall goal is:.*',
                f'Overall goal is: {primary_goal}',
                content
            )
        if phase:
            content = re.sub(
                r'\*\*Phase:\*\*.*',
                f'**Phase:** {phase}',
                content
            )
            content = re.sub(
                r'Next action will be:.*',
                f'Next action will be: {phase}',
                content
            )
        content = re.sub(
            r'Last action was:.*',
            f'Last action was: Created feature branch {branch_name}',
            content
        )

        dev_file.write_text(content)
        return dev_file

    @staticmethod
    def _ensure_agents_file(context_dir: Path) -> Path:
        """Create context/agents.md with the standard guide if it is missing."""
        agents_file = context_dir / "agents.md"
        if not agents_file.exists():
            agents_file.write_text("""# Agents Guide

This project uses a **Context Loop**. Always keep these fields current:

- **Overall goal is:** Single-sentence mission
- **Last action was:** What just completed
- **Next action will be:** The very next step
- **Blockers:** Known issues or decisions needed

### Working Rules for Agents
- Do not modify or examine ignored directories (see `.agentpackignore`). Assume large assets exist even if excluded.
- Prefer minimal, reversible changes.
- After committing code, run `bpsai-pair context-sync` to update the loop.
- Request a new context pack when the tree or docs change significantly.

### Context Pack
Run `bpsai-pair pack --out agent_pack.tgz` and upload to your session.
""")
        return agents_file

    @staticmethod
    def _write_tree_snapshot(context_dir: Path, root: Path) -> Path:
        """(Re)write context/project_tree.md with a fresh tree snapshot."""
        # Fix: isoformat() on an aware datetime already ends in '+00:00';
        # the old code appended a literal 'Z' on top ('...+00:00Z').
        timestamp = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
        tree_file = context_dir / "project_tree.md"
        tree_file.write_text(f"""# Project Tree (snapshot)
_Generated: {timestamp}_

```
{ProjectTree.generate(root)}
```
""")
        return tree_file
|
|
383
|
+
|
|
384
|
+
|
|
385
|
+
class LocalCI:
    """Cross-platform local CI runner."""

    @staticmethod
    def _passes(cmd: List[str], cwd: Path) -> bool:
        """Run *cmd* in *cwd*; True on exit code 0, False if it fails or is missing."""
        try:
            subprocess.run(cmd, cwd=cwd, check=True)
            return True
        except (subprocess.CalledProcessError, OSError):
            # CalledProcessError: the tool ran and failed.
            # OSError (incl. FileNotFoundError): the tool is not installed.
            return False

    @staticmethod
    def run_python_checks(root: Path) -> dict:
        """Run ruff, mypy and pytest if *root* looks like a Python project.

        Returns a mapping of tool name to a 'passed' / failure string;
        empty dict when no pyproject.toml or requirements.txt is present.
        """
        results: dict = {}

        if not ((root / "pyproject.toml").exists() or (root / "requirements.txt").exists()):
            return results

        ruff_ok = (
            LocalCI._passes(["ruff", "format", "--check", "."], root)
            and LocalCI._passes(["ruff", "check", "."], root)
        )
        results["ruff"] = "passed" if ruff_ok else "failed or not installed"

        results["mypy"] = (
            "passed" if LocalCI._passes(["mypy", "."], root)
            else "failed or not installed"
        )
        results["pytest"] = (
            "passed" if LocalCI._passes(["pytest", "-q"], root)
            else "failed or not installed"
        )

        return results

    @staticmethod
    def run_node_checks(root: Path) -> dict:
        """Run npm lint and test if *root* has a package.json; empty dict otherwise."""
        results: dict = {}

        if not (root / "package.json").exists():
            return results

        results["eslint"] = (
            "passed" if LocalCI._passes(["npm", "run", "lint"], root)
            else "failed or not configured"
        )
        results["npm test"] = (
            "passed" if LocalCI._passes(["npm", "test"], root)
            else "failed or not configured"
        )

        return results

    @staticmethod
    def run_all(root: Path) -> dict:
        """Run all applicable CI checks, keyed by ecosystem."""
        return {
            "python": LocalCI.run_python_checks(root),
            "node": LocalCI.run_node_checks(root)
        }
|
bpsai_pair/pyutils.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Iterable, List
|
|
4
|
+
|
|
5
|
+
def project_files(root: Path, excludes: Iterable[str] | None = None) -> List[Path]:
    """
    Return project files relative to root, respecting simple directory/file excludes.
    Excludes are path prefixes relative to root, optionally with a trailing
    slash (e.g. '.git/', '.venv/', '__pycache__/').

    Fixes over the original: the dead directory pre-check is removed, and a
    pattern now only matches whole path segments — '.git/' no longer wrongly
    excludes '.gitignore' via a bare startswith() prefix match.
    """
    prefixes = [e.rstrip("/") for e in (excludes or [])]

    def _excluded(rel: str) -> bool:
        # Exclude when the path IS the pattern or lives under it as a
        # directory ('x' or 'x/...'), never on a partial-name prefix.
        return any(rel == p or rel.startswith(p + "/") for p in prefixes)

    out: List[Path] = []
    for p in root.rglob("*"):
        if p.is_file():
            rel = p.relative_to(root)
            # Normalize separators so excludes written with '/' work on Windows.
            if not _excluded(str(rel).replace("\\", "/")):
                out.append(rel)
    return out
|
bpsai_pair/utils.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
def repo_root() -> Path:
    """Return the current working directory, requiring it to be a git repo root."""
    cwd = Path.cwd()
    if (cwd / ".git").exists():
        return cwd
    raise SystemExit("Run from repo root (where .git exists).")
|
|
8
|
+
|
|
9
|
+
def ensure_executable(path: Path) -> None:
    """Make *path* executable by adding u+x, g+x and o+x to its current mode."""
    # 0o111 is the execute bit for user, group and other combined.
    path.chmod(path.stat().st_mode | 0o111)
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: bpsai-pair
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: CLI for AI pair-coding workflow
|
|
5
|
+
Author: BPS AI Software
|
|
6
|
+
Requires-Python: >=3.9
|
|
7
|
+
Description-Content-Type: text/markdown
|
|
8
|
+
Requires-Dist: typer>=0.12
|
|
9
|
+
Requires-Dist: rich>=13.7
|
|
10
|
+
Requires-Dist: pyyaml>=6.0
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# bpsai-pair CLI
|
|
14
|
+
|
|
15
|
+
## Quick start (local, un-packaged)
|
|
16
|
+
```
|
|
17
|
+
python -m tools.cli.bpsai_pair --help
|
|
18
|
+
python -m tools.cli.bpsai_pair init tools/cookiecutter-paircoder
|
|
19
|
+
python -m tools.cli.bpsai_pair feature auth-di --primary "Decouple auth via DI" --phase "Refactor auth + tests"
|
|
20
|
+
python -m tools.cli.bpsai_pair pack --extra README.md
|
|
21
|
+
python -m tools.cli.bpsai_pair context-sync --last "initialized scaffolding" --nxt "set up CI secrets" --blockers "none"
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## Install as a CLI
|
|
25
|
+
```
|
|
26
|
+
cd tools/cli
|
|
27
|
+
pip install -e .
|
|
28
|
+
# now available as: bpsai-pair --help
|
|
29
|
+
```
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
bpsai_pair/__init__.py,sha256=LpEf96Jx3tRBAzZZ8ZkPU7yWnTduFq_vZYru2jLa1Rs,379
|
|
2
|
+
bpsai_pair/__main__.py,sha256=Qd-f8z2Q2vpiEP2x6PBFsJrpACWDVxFKQk820MhFmHo,59
|
|
3
|
+
bpsai_pair/adapters.py,sha256=oowin5juQ9dTRwpHD8eJGBCahI2BoQZVEAhueDD821Y,323
|
|
4
|
+
bpsai_pair/cli.py,sha256=qc5Pcr8gUG-xuJtrqqMT-kxlHi6zUyPDIUrOIuE_110,18422
|
|
5
|
+
bpsai_pair/config.py,sha256=OIBaBkjz_jNsms4zjFxnmEN9gpqRLk1FTPq4ZBNkAUM,8750
|
|
6
|
+
bpsai_pair/init_bundled_cli.py,sha256=AjtdC7yt4p-FatYt5y65XEwH9CtSFKnRc09cgb0di3I,1906
|
|
7
|
+
bpsai_pair/jsonio.py,sha256=C_n42gPLRqjpif-AO0vjE3G1ae_v_PT3cywu7J4Xx-M,189
|
|
8
|
+
bpsai_pair/ops.py,sha256=wYVhZ5XNfRjBkr3Ztvp6j1bBW-0bOggyAQOCVHmjPuA,14706
|
|
9
|
+
bpsai_pair/pyutils.py,sha256=5ub27mF4OIaGDm7CDacXsU_9FUZVaPOnYp2NyavFydo,998
|
|
10
|
+
bpsai_pair/utils.py,sha256=TzaN27qKsBlRQCYHBcA06ufQERcc1fcyyOY7QlJQv8M,289
|
|
11
|
+
bpsai_pair/data/cookiecutter-paircoder/cookiecutter.json,sha256=FoaPEPabd5AneVMamzBSw6QNsLZ3DhJRkNtmPOVtSIY,346
|
|
12
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.agentpackignore,sha256=4NkboG3S3zI736evJVm4RTjmzFxx_hvMi6KTBaGnLxU,130
|
|
13
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.editorconfig,sha256=Qpd1apKmHcq5gVWI8jpoxAbY_LF_04tkclK06aE2lB0,235
|
|
14
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.gitignore,sha256=ylldDXITCRrj038OGp1pBwihuM4SA6vDvThOvilZV0Q,47
|
|
15
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.gitleaks.toml,sha256=STXJmj13hqxp9ejb2hImc1h72BPB_1qsgT_b86o0HYA,373
|
|
16
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.pre-commit-config.yaml,sha256=o9AmanEQcUJCYOlnKg4SNcA_rviJKbczGiijyp9oSdY,1021
|
|
17
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/CODEOWNERS,sha256=vWB4d6NzBm1qlhLs_4uFQFz6UDICn5Ehn0NUqvXlMYo,715
|
|
18
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/CONTRIBUTING.md,sha256=KdZ4GjoyWMSS_kSb4zI0riALXkGuXmsiT09yH5X3Ac4,1191
|
|
19
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/SECURITY.md,sha256=L6teX1-0xxToQek2m5lR8iEZlJdzg2TYwpSIIKZDTzE,495
|
|
20
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.github/PULL_REQUEST_TEMPLATE.md,sha256=VbjBqMsIcryg3YY8TL8NsGnnzE1Thz43MrBaI3-m5mU,1146
|
|
21
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.github/workflows/ci.yml,sha256=VisTKdEHgnL6902gqKczr5FXFZgJlmA0zw2FJBMjb2Q,2766
|
|
22
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/.github/workflows/project_tree.yml,sha256=QXS7BCkua_digYmhhSJtWhVcPrEhVJWbRTqIstdK1nw,1146
|
|
23
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/agents.md,sha256=yUgVqo5BuU5l3oX1hrRCzjLtvItvfArknxQ7ElrEzo4,836
|
|
24
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/agents.md.bak,sha256=jO9h6OnstcQmcejpO4BBnjX4USZUuSBDZ0U4GJf0tv0,6813
|
|
25
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/development.md,sha256=wQIGHsI6YDQ_hv1Ps-xYXf9vLmGi73rWbwTGOzt2S60,295
|
|
26
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/development.md.bak,sha256=Nm0e5ZNsTWawkcwU61m-6MFwseWouakPMWCwxCNElUY,251
|
|
27
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/project_tree.md,sha256=ppNgU2afWoJOnSJbOl9GoJ8EuNT85N005_8LjilEN4w,89
|
|
28
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/context/directory_notes/.gitkeep,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
|
29
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/prompts/deep_research.yml,sha256=lhHG4JRi9Zdsq5J8TNaw2HeupD5oD7mm5J0XPNg1H9A,958
|
|
30
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/prompts/implementation.yml,sha256=co2EmK7ojw3lqeNXf99rlU3wxn6mrSuVAZGMlOEZHt4,639
|
|
31
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/prompts/roadmap.yml,sha256=EIdCo_EZpPZ9GXXg71IDgMooychyarWPpulojrcGIg8,498
|
|
32
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/scripts/README.md,sha256=gnZc_VUQPU81z7SIh3VqTqh0JPJJecKWmqWh-eBgJpU,358
|
|
33
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/src/.gitkeep,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
|
34
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/templates/adr.md,sha256=y_9hvwCgzukpV2pYOjYGk4kD-a6YZZZJqflMjnHvbLE,460
|
|
35
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/templates/directory_note.md,sha256=FnNMtnD_CpvVzpp5VzQzS4qddFQ6jfyS8q36ff10DrA,346
|
|
36
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/tests/example_contract/README.md,sha256=x5ZpsFnN9QiMWtXJsYw9n27ik5Yf425N5tbzMKOpVQQ,109
|
|
37
|
+
bpsai_pair/data/cookiecutter-paircoder/{{cookiecutter.project_slug}}/tests/example_integration/README.md,sha256=_G9MzJQq1GR4fVSgGeUFVo2eyzDXJVQn6mqJ5He1Euk,150
|
|
38
|
+
bpsai_pair-0.2.0.dist-info/METADATA,sha256=jBUABNC5sNe8WFZ4oR_SPNAOBfsi8ITw3ufXBielzPk,813
|
|
39
|
+
bpsai_pair-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
40
|
+
bpsai_pair-0.2.0.dist-info/entry_points.txt,sha256=tK6yOUS1oseLus0bPQYd2cuLJgL1Zr3AGb_YPHVCtCI,101
|
|
41
|
+
bpsai_pair-0.2.0.dist-info/top_level.txt,sha256=kwTlUncK6pxJyQpZQdspexSc-kWtPvZkLUy4ji1P6LU,11
|
|
42
|
+
bpsai_pair-0.2.0.dist-info/RECORD,,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
bpsai_pair
|