@trac3er/oh-my-god 2.0.0 → 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +8 -8
- package/.claude-plugin/plugin.json +5 -4
- package/.claude-plugin/scripts/uninstall.sh +74 -3
- package/.claude-plugin/scripts/update.sh +78 -3
- package/.coveragerc +26 -0
- package/.mcp.json +4 -4
- package/CHANGELOG.md +14 -0
- package/CODE_OF_CONDUCT.md +27 -0
- package/CONTRIBUTING.md +62 -0
- package/OMG-setup.sh +1201 -355
- package/README.md +77 -56
- package/SECURITY.md +25 -0
- package/agents/__init__.py +1 -0
- package/agents/model_roles.py +196 -0
- package/agents/omg-architect-mode.md +3 -5
- package/agents/omg-backend-engineer.md +3 -5
- package/agents/omg-database-engineer.md +3 -5
- package/agents/omg-frontend-designer.md +4 -5
- package/agents/omg-implement-mode.md +4 -5
- package/agents/omg-infra-engineer.md +3 -5
- package/agents/omg-research-mode.md +4 -6
- package/agents/omg-security-auditor.md +3 -5
- package/agents/omg-testing-engineer.md +3 -5
- package/build/lib/yaml.py +321 -0
- package/commands/OMG:ai-commit.md +101 -14
- package/commands/OMG:arch.md +302 -19
- package/commands/OMG:ccg.md +12 -7
- package/commands/OMG:compat.md +25 -17
- package/commands/OMG:cost.md +173 -13
- package/commands/OMG:crazy.md +1 -1
- package/commands/OMG:create-agent.md +170 -20
- package/commands/OMG:deps.md +235 -17
- package/commands/OMG:domain-init.md +1 -1
- package/commands/OMG:escalate.md +41 -12
- package/commands/OMG:health-check.md +37 -13
- package/commands/OMG:init.md +122 -14
- package/commands/OMG:project-init.md +1 -1
- package/commands/OMG:session-branch.md +76 -9
- package/commands/OMG:session-fork.md +42 -5
- package/commands/OMG:session-merge.md +124 -8
- package/commands/OMG:setup.md +69 -12
- package/commands/OMG:stats.md +215 -14
- package/commands/OMG:teams.md +19 -10
- package/config/lsp_languages.yaml +8 -0
- package/hooks/__init__.py +0 -0
- package/hooks/_agent_registry.py +423 -0
- package/hooks/_analytics.py +291 -0
- package/hooks/_budget.py +31 -0
- package/hooks/_common.py +569 -0
- package/hooks/_compression_optimizer.py +119 -0
- package/hooks/_cost_ledger.py +176 -0
- package/hooks/_learnings.py +126 -0
- package/hooks/_memory.py +103 -0
- package/hooks/_protected_context.py +150 -0
- package/hooks/_token_counter.py +221 -0
- package/hooks/branch_manager.py +236 -0
- package/hooks/budget_governor.py +232 -0
- package/hooks/circuit-breaker.py +270 -0
- package/hooks/compression_feedback.py +254 -0
- package/hooks/config-guard.py +216 -0
- package/hooks/context_pressure.py +53 -0
- package/hooks/credential_store.py +1020 -0
- package/hooks/fetch-rate-limits.py +212 -0
- package/hooks/firewall.py +48 -0
- package/hooks/hashline-formatter-bridge.py +224 -0
- package/hooks/hashline-injector.py +273 -0
- package/hooks/hashline-validator.py +216 -0
- package/hooks/idle-detector.py +95 -0
- package/hooks/intentgate-keyword-detector.py +188 -0
- package/hooks/magic-keyword-router.py +195 -0
- package/hooks/policy_engine.py +505 -0
- package/hooks/post-tool-failure.py +19 -0
- package/hooks/post-write.py +219 -0
- package/hooks/post_write.py +46 -0
- package/hooks/pre-compact.py +398 -0
- package/hooks/pre-tool-inject.py +98 -0
- package/hooks/prompt-enhancer.py +672 -0
- package/hooks/quality-runner.py +191 -0
- package/hooks/query.py +512 -0
- package/hooks/secret-guard.py +61 -0
- package/hooks/secret_audit.py +144 -0
- package/hooks/session-end-capture.py +137 -0
- package/hooks/session-start.py +277 -0
- package/hooks/setup_wizard.py +582 -0
- package/hooks/shadow_manager.py +297 -0
- package/hooks/state_migration.py +225 -0
- package/hooks/stop-gate.py +7 -0
- package/hooks/stop_dispatcher.py +945 -0
- package/hooks/test-validator.py +361 -0
- package/hooks/test_generator_hook.py +123 -0
- package/hooks/todo-state-tracker.py +114 -0
- package/hooks/tool-ledger.py +149 -0
- package/hooks/trust_review.py +585 -0
- package/hud/omg-hud.mjs +31 -1
- package/lab/__init__.py +1 -0
- package/lab/pipeline.py +75 -0
- package/lab/policies.py +52 -0
- package/package.json +7 -18
- package/plugins/README.md +33 -61
- package/plugins/advanced/commands/OMG:deep-plan.md +3 -3
- package/plugins/advanced/commands/OMG:learn.md +1 -1
- package/plugins/advanced/commands/OMG:security-review.md +3 -3
- package/plugins/advanced/commands/OMG:ship.md +1 -1
- package/plugins/advanced/plugin.json +1 -1
- package/plugins/core/plugin.json +8 -3
- package/plugins/dephealth/__init__.py +0 -0
- package/plugins/dephealth/cve_scanner.py +188 -0
- package/plugins/dephealth/license_checker.py +135 -0
- package/plugins/dephealth/manifest_detector.py +423 -0
- package/plugins/dephealth/vuln_analyzer.py +169 -0
- package/plugins/testgen/__init__.py +0 -0
- package/plugins/testgen/codamosa_engine.py +402 -0
- package/plugins/testgen/edge_case_synthesizer.py +184 -0
- package/plugins/testgen/framework_detector.py +271 -0
- package/plugins/testgen/skeleton_generator.py +219 -0
- package/plugins/viz/__init__.py +0 -0
- package/plugins/viz/ast_parser.py +139 -0
- package/plugins/viz/diagram_generator.py +192 -0
- package/plugins/viz/graph_builder.py +444 -0
- package/plugins/viz/native_parsers.py +259 -0
- package/plugins/viz/regex_parser.py +112 -0
- package/pyproject.toml +81 -0
- package/rules/contextual/write-verify.md +2 -2
- package/rules/core/00-truth.md +1 -1
- package/rules/core/01-surgical.md +1 -1
- package/rules/core/02-circuit-breaker.md +2 -2
- package/rules/core/03-ensemble.md +3 -3
- package/rules/core/04-testing.md +3 -3
- package/runtime/__init__.py +32 -0
- package/runtime/adapters/__init__.py +13 -0
- package/runtime/adapters/claude.py +60 -0
- package/runtime/adapters/gpt.py +53 -0
- package/runtime/adapters/local.py +53 -0
- package/runtime/adoption.py +212 -0
- package/runtime/business_workflow.py +220 -0
- package/runtime/cli_provider.py +85 -0
- package/runtime/compat.py +1299 -0
- package/runtime/custom_agent_loader.py +366 -0
- package/runtime/dispatcher.py +47 -0
- package/runtime/ecosystem.py +371 -0
- package/runtime/legacy_compat.py +7 -0
- package/runtime/mcp_config_writers.py +115 -0
- package/runtime/mcp_lifecycle.py +153 -0
- package/runtime/mcp_memory_server.py +135 -0
- package/runtime/memory_parsers/__init__.py +0 -0
- package/runtime/memory_parsers/chatgpt_parser.py +257 -0
- package/runtime/memory_parsers/claude_import.py +107 -0
- package/runtime/memory_parsers/export.py +97 -0
- package/runtime/memory_parsers/gemini_import.py +91 -0
- package/runtime/memory_parsers/kimi_import.py +91 -0
- package/runtime/memory_store.py +215 -0
- package/runtime/omc_compat.py +7 -0
- package/runtime/providers/__init__.py +0 -0
- package/runtime/providers/codex_provider.py +112 -0
- package/runtime/providers/gemini_provider.py +128 -0
- package/runtime/providers/kimi_provider.py +151 -0
- package/runtime/providers/opencode_provider.py +144 -0
- package/runtime/subagent_dispatcher.py +362 -0
- package/runtime/team_router.py +1167 -0
- package/runtime/tmux_session_manager.py +169 -0
- package/scripts/check-omg-compat-contract-snapshot.py +137 -0
- package/scripts/check-omg-contract-snapshot.py +12 -0
- package/scripts/check-omg-public-ready.py +193 -0
- package/scripts/check-omg-standalone-clean.py +103 -0
- package/scripts/legacy_to_omg_migrate.py +29 -0
- package/scripts/migrate-legacy.py +464 -0
- package/scripts/omc_to_omg_migrate.py +12 -0
- package/scripts/omg.py +492 -0
- package/scripts/settings-merge.py +283 -0
- package/scripts/verify-standalone.sh +8 -4
- package/settings.json +126 -29
- package/templates/profile.yaml +1 -1
- package/tools/__init__.py +2 -0
- package/tools/browser_consent.py +289 -0
- package/tools/browser_stealth.py +481 -0
- package/tools/browser_tool.py +448 -0
- package/tools/changelog_generator.py +347 -0
- package/tools/commit_splitter.py +746 -0
- package/tools/config_discovery.py +151 -0
- package/tools/config_merger.py +449 -0
- package/tools/dashboard_generator.py +300 -0
- package/tools/git_inspector.py +298 -0
- package/tools/lsp_client.py +275 -0
- package/tools/lsp_discovery.py +231 -0
- package/tools/lsp_operations.py +392 -0
- package/tools/pr_generator.py +404 -0
- package/tools/python_repl.py +656 -0
- package/tools/python_sandbox.py +609 -0
- package/tools/search_providers/__init__.py +77 -0
- package/tools/search_providers/brave.py +115 -0
- package/tools/search_providers/exa.py +116 -0
- package/tools/search_providers/jina.py +104 -0
- package/tools/search_providers/perplexity.py +139 -0
- package/tools/search_providers/synthetic.py +74 -0
- package/tools/session_snapshot.py +736 -0
- package/tools/ssh_manager.py +912 -0
- package/tools/theme_engine.py +294 -0
- package/tools/theme_selector.py +137 -0
- package/tools/web_search.py +622 -0
- package/yaml.py +321 -0
- package/.claude-plugin/scripts/install.sh +0 -9
- package/bun.lock +0 -23
- package/bunfig.toml +0 -3
- package/hooks/_budget.ts +0 -1
- package/hooks/_common.ts +0 -63
- package/hooks/circuit-breaker.ts +0 -101
- package/hooks/config-guard.ts +0 -4
- package/hooks/firewall.ts +0 -20
- package/hooks/policy_engine.ts +0 -156
- package/hooks/post-tool-failure.ts +0 -22
- package/hooks/post-write.ts +0 -4
- package/hooks/pre-tool-inject.ts +0 -4
- package/hooks/prompt-enhancer.ts +0 -46
- package/hooks/quality-runner.ts +0 -24
- package/hooks/secret-guard.ts +0 -4
- package/hooks/session-end-capture.ts +0 -19
- package/hooks/session-start.ts +0 -19
- package/hooks/shadow_manager.ts +0 -81
- package/hooks/stop-gate.ts +0 -22
- package/hooks/stop_dispatcher.ts +0 -147
- package/hooks/test-generator-hook.ts +0 -4
- package/hooks/tool-ledger.ts +0 -27
- package/hooks/trust_review.ts +0 -175
- package/lab/pipeline.ts +0 -75
- package/lab/policies.ts +0 -68
- package/runtime/common.ts +0 -111
- package/runtime/compat.ts +0 -174
- package/runtime/dispatcher.ts +0 -25
- package/runtime/ecosystem.ts +0 -186
- package/runtime/provider_bootstrap.ts +0 -99
- package/runtime/provider_smoke.ts +0 -34
- package/runtime/release_readiness.ts +0 -186
- package/runtime/team_router.ts +0 -144
- package/scripts/check-omg-compat-contract-snapshot.ts +0 -20
- package/scripts/check-omg-standalone-clean.ts +0 -12
- package/scripts/check-runtime-clean.ts +0 -94
- package/scripts/omg.ts +0 -352
- package/scripts/settings-merge.ts +0 -93
- package/tools/commit_splitter.ts +0 -23
- package/tools/git_inspector.ts +0 -18
- package/tools/session_snapshot.ts +0 -47
- package/trac3er-oh-my-god-2.0.0.tgz +0 -0
- package/tsconfig.json +0 -15
|
@@ -0,0 +1,371 @@
|
|
|
1
|
+
"""Optional upstream ecosystem sync and integration helpers for OMG."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
import json
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
import subprocess
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
# Schema identifiers written into generated artifacts so consumers can
# validate what they are reading.
ECOSYSTEM_SCHEMA = "OmgEcosystemCatalog"
ECOSYSTEM_CATALOG_VERSION = "1.0.0"
ECOSYSTEM_LOCK_SCHEMA = "OmgEcosystemLock"
# Project-relative locations for synced repos, the lock file, and the
# generated per-repo playbook notes.
DEFAULT_ECOSYSTEM_REPO_DIR = ".omg/ecosystem/repos"
DEFAULT_ECOSYSTEM_LOCK_PATH = ".omg/state/ecosystem-lock.json"
DEFAULT_ECOSYSTEM_PLAYBOOK_DIR = ".omg/knowledge/ecosystem"
# Hard cap on how many user-supplied names a selection will consider.
MAX_SELECTION = 32


# Static catalog of upstream ecosystem repositories OMG can sync locally.
# Each entry carries: name (canonical id), aliases (alternate lookup
# spellings), repo (git URL), ref (branch), optional sparse_path
# (sparse-checkout subdirectory), route (OMG routing target), category,
# capabilities, and free-form notes.
ECOSYSTEM_REPOS: tuple[dict[str, Any], ...] = (
    {
        "name": "omg-superpowers",
        "aliases": ("omg-superpowers",),
        "repo": "https://github.com/trac3er00/OMG.git",
        "ref": "main",
        "route": "plan",
        "category": "tdd",
        "capabilities": ("tdd", "planning", "execution"),
        "notes": "Primary source for strict red-green-refactor and plan execution discipline.",
    },
    {
        "name": "ralph-wiggum",
        "aliases": ("ralph-wiggum", "ralph wiggum", "ralph"),
        "repo": "https://github.com/anthropics/claude-code.git",
        "ref": "main",
        # Only the plugin subdirectory is checked out (sparse clone).
        "sparse_path": "plugins/ralph-wiggum",
        "route": "runtime_ship",
        "category": "persistent-loop",
        "capabilities": ("persistent-mode", "completion-promises", "iteration"),
        "notes": "Provides loop-style persistent execution patterns via stop-hook based iteration.",
    },
    {
        "name": "claude-flow",
        "aliases": ("claude-flow",),
        "repo": "https://github.com/ruvnet/claude-flow.git",
        "ref": "main",
        "route": "ccg",
        "category": "orchestration",
        "capabilities": ("multi-agent", "coordination", "task-routing"),
        "notes": "Informs CCG-style orchestrated task dispatch and multi-agent coordination.",
    },
    {
        "name": "claude-mem",
        "aliases": ("claude-mem",),
        "repo": "https://github.com/thedotmack/claude-mem.git",
        "ref": "main",
        "route": "memory",
        "category": "memory",
        "capabilities": ("session-memory", "knowledge-capture", "recall"),
        "notes": "Complements OMG knowledge/state artifacts with memory-centric workflows.",
    },
    {
        "name": "memsearch",
        "aliases": ("memsearch", "memory-search"),
        "repo": "https://github.com/rjyo/memory-search.git",
        "ref": "main",
        "route": "memory",
        "category": "memory-search",
        "capabilities": ("semantic-search", "retrieval", "indexing"),
        "notes": "Adds focused memory retrieval and search patterns for long-running sessions.",
    },
    {
        "name": "beads",
        "aliases": ("beads",),
        "repo": "https://github.com/steveyegge/beads.git",
        "ref": "main",
        "route": "maintainer",
        "category": "context-engineering",
        "capabilities": ("context", "workflow", "agent-patterns"),
        "notes": "Source of context-engineering and disciplined workflow patterns.",
    },
    {
        "name": "planning-with-files",
        "aliases": ("planning-with-files", "planning with files"),
        "repo": "https://github.com/OthmanAdi/planning-with-files.git",
        # NOTE: this upstream's default branch is "master", not "main".
        "ref": "master",
        "route": "plan",
        "category": "planning",
        "capabilities": ("file-based-plans", "checklists", "handoff"),
        "notes": "Reinforces file-native planning artifacts and execution checklists.",
    },
    {
        "name": "hooks-mastery",
        "aliases": ("hooks-mastery", "hooks mastery"),
        "repo": "https://github.com/disler/claude-code-hooks-mastery.git",
        "ref": "main",
        "route": "health",
        "category": "hooks",
        "capabilities": ("hook-design", "hook-hardening", "hook-automation"),
        "notes": "Hardening references for robust, low-noise hook behavior.",
    },
    {
        "name": "compound-engineering",
        "aliases": ("compound-engineering", "compounding-engineering"),
        "repo": "https://github.com/EveryInc/compounding-engineering-plugin.git",
        "ref": "main",
        "route": "ccg",
        "category": "compound-workflows",
        "capabilities": ("iterative-improvement", "compound-results", "workflow-composition"),
        "notes": "Compound engineering workflow patterns for iterative gains over multiple passes.",
    },
)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _now() -> str:
|
|
115
|
+
return datetime.now(timezone.utc).isoformat()
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def _canonical(value: str) -> str:
|
|
119
|
+
return value.strip().lower().replace("_", "-").replace(" ", "-")
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def _run_git(args: list[str], *, cwd: Path | None = None) -> str:
    """Run a git subcommand and return its stripped stdout.

    Raises:
        RuntimeError: when git exits non-zero (message includes stderr).
    """
    result = subprocess.run(
        ["git", *args],
        cwd=None if cwd is None else str(cwd),
        capture_output=True,
        text=True,
        check=False,
    )
    if result.returncode == 0:
        return result.stdout.strip()
    detail = result.stderr.strip() or "unknown error"
    raise RuntimeError(f"git {' '.join(args)} failed: {detail}")
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def list_ecosystem_repos() -> list[dict[str, Any]]:
    """Return the catalog as fresh mutable copies (tuple fields become lists)."""

    def _as_mutable(entry: dict[str, Any]) -> dict[str, Any]:
        # Shallow-copy the entry but convert the two tuple fields so
        # callers can append/extend without touching the catalog.
        copy = dict(entry)
        copy["aliases"] = list(entry.get("aliases", ()))
        copy["capabilities"] = list(entry.get("capabilities", ()))
        return copy

    return [_as_mutable(entry) for entry in ECOSYSTEM_REPOS]
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def resolve_ecosystem_repo(name: str) -> dict[str, Any] | None:
    """Look up a catalog entry by canonical name or alias.

    Returns a shallow copy of the matching entry, or None for blank or
    unknown names. Matching is case/separator-insensitive via _canonical.
    """
    if not name.strip():
        return None
    wanted = _canonical(name)
    for entry in ECOSYSTEM_REPOS:
        candidates = [str(entry["name"])]
        candidates.extend(str(alias) for alias in entry.get("aliases", ()))
        if any(_canonical(candidate) == wanted for candidate in candidates):
            return dict(entry)
    return None
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def resolve_ecosystem_selection(names: list[str] | None) -> tuple[list[dict[str, Any]], list[str]]:
    """Resolve user-supplied names to catalog entries.

    Returns (selected, unknown). An empty/None *names* selects the full
    catalog. At most MAX_SELECTION names are considered; duplicates
    (after canonicalization) are silently dropped.
    """
    if not names:
        return list_ecosystem_repos(), []

    selected: list[dict[str, Any]] = []
    unknown: list[str] = []
    already_chosen: set[str] = set()
    for raw in names[:MAX_SELECTION]:
        entry = resolve_ecosystem_repo(raw)
        if entry is None:
            unknown.append(raw)
            continue
        canonical_name = str(entry["name"])
        if canonical_name not in already_chosen:
            already_chosen.add(canonical_name)
            selected.append(entry)
    return selected, unknown
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _read_lock(lock_path: Path) -> dict[str, Any]:
|
|
179
|
+
if not lock_path.exists():
|
|
180
|
+
return {}
|
|
181
|
+
try:
|
|
182
|
+
payload = json.loads(lock_path.read_text(encoding="utf-8"))
|
|
183
|
+
except Exception:
|
|
184
|
+
return {}
|
|
185
|
+
if not isinstance(payload, dict):
|
|
186
|
+
return {}
|
|
187
|
+
return payload
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def _clone_or_update_repo(
    *,
    repo: dict[str, Any],
    target: Path,
    update: bool,
    depth: int,
) -> dict[str, Any]:
    """Clone (or optionally refresh) one catalog repo into *target*.

    Returns a lock-entry dict describing what happened: action is
    "cloned" (fresh checkout), "updated" (existing checkout refreshed),
    or "cached" (checkout already present and *update* is False), plus
    the resulting commit and branch. Raises RuntimeError if any
    underlying git command fails (propagated from _run_git).
    """
    ref = str(repo.get("ref", "main"))
    repo_url = str(repo["repo"])
    sparse_path = str(repo.get("sparse_path", "")).strip()
    action = "cached"  # assume existing checkout until proven otherwise

    if not target.exists():
        target.parent.mkdir(parents=True, exist_ok=True)
        if sparse_path:
            # Sparse clone: blobless + sparse-checkout of just the subdirectory.
            _run_git(["clone", "--depth", str(depth), "--filter=blob:none", "--sparse", repo_url, str(target)])
            _run_git(["-C", str(target), "sparse-checkout", "set", sparse_path])
            # NOTE(review): non-"main" refs are checked out explicitly here;
            # a sparse repo whose default branch differs from the requested
            # "main" ref would rely on the clone default — confirm intended.
            if ref and ref != "main":
                _run_git(["-C", str(target), "checkout", ref])
        else:
            # Shallow, blobless, single-branch clone pinned to the ref.
            _run_git(
                [
                    "clone",
                    "--depth",
                    str(depth),
                    "--filter=blob:none",
                    "--branch",
                    ref,
                    "--single-branch",
                    repo_url,
                    str(target),
                ]
            )
        action = "cloned"
    elif update:
        # Refresh: shallow fetch of the ref, then force the local branch
        # to point at FETCH_HEAD (discards local drift).
        _run_git(["-C", str(target), "fetch", "--depth", str(depth), "origin", ref])
        _run_git(["-C", str(target), "checkout", "-B", ref, "FETCH_HEAD"])
        action = "updated"

    commit = _run_git(["-C", str(target), "rev-parse", "HEAD"])
    branch = _run_git(["-C", str(target), "rev-parse", "--abbrev-ref", "HEAD"])
    return {
        "name": repo["name"],
        "repo": repo_url,
        "ref": ref,
        # Path kept as segments so lock consumers stay platform-neutral.
        "repo_segments": [".omg", "ecosystem", "repos", str(repo["name"])],
        "action": action,
        "commit": commit,
        "branch": branch,
        "sparse_path": sparse_path,
    }
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
def _write_playbook(project_dir: Path, selected: list[dict[str, Any]]) -> list[str]:
    """Write one integration-notes markdown file per selected repo.

    Returns the list of file paths written (as strings).
    """
    playbook_dir = project_dir / DEFAULT_ECOSYSTEM_PLAYBOOK_DIR
    playbook_dir.mkdir(parents=True, exist_ok=True)
    written: list[str] = []
    for entry in selected:
        note_path = playbook_dir / f"{entry['name']}.md"
        body_lines = [
            f"# {entry['name']} Integration Notes",
            "",
            f"- Route: `{entry.get('route', '')}`",
            f"- Category: `{entry.get('category', '')}`",
            f"- Capabilities: {', '.join(entry.get('capabilities', []))}",
            f"- Source: {entry.get('repo', '')}",
            "",
            f"{entry.get('notes', '').strip()}",
        ]
        note_path.write_text("\n".join(body_lines) + "\n", encoding="utf-8")
        written.append(str(note_path))
    return written
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
def sync_ecosystem_repos(
    *,
    project_dir: str,
    names: list[str] | None = None,
    update: bool = False,
    depth: int = 1,
) -> dict[str, Any]:
    """Sync selected ecosystem repos into the project and write the lock file.

    Args:
        project_dir: Project root; repos land under DEFAULT_ECOSYSTEM_REPO_DIR
            and the lock under DEFAULT_ECOSYSTEM_LOCK_PATH.
        names: Optional repo names/aliases; None or empty selects the
            whole catalog.
        update: When True, refresh repos that are already checked out.
        depth: Shallow clone/fetch depth passed through to git.

    Returns:
        Summary dict with per-repo entries, unrecognized names, and the
        playbook files written. Per-repo git failures are recorded as
        status="error" entries rather than raised.
    """
    root = Path(project_dir)
    selected, unknown = resolve_ecosystem_selection(names)
    repo_root = root / DEFAULT_ECOSYSTEM_REPO_DIR
    lock_path = root / DEFAULT_ECOSYSTEM_LOCK_PATH
    lock_path.parent.mkdir(parents=True, exist_ok=True)

    entries: list[dict[str, Any]] = []
    for repo in selected:
        target = repo_root / str(repo["name"])
        try:
            synced = _clone_or_update_repo(repo=repo, target=target, update=update, depth=depth)
            synced["status"] = "ok"
            entries.append(synced)
        except Exception as exc:
            # One repo failing must not abort the rest of the sync;
            # record the failure and continue.
            entries.append(
                {
                    "name": repo["name"],
                    "repo": repo["repo"],
                    "ref": repo.get("ref", "main"),
                    "repo_segments": [".omg", "ecosystem", "repos", str(repo["name"])],
                    "status": "error",
                    "error": str(exc),
                }
            )

    # Playbook notes are written for every selected repo, even ones
    # whose git sync failed above.
    playbook_files = _write_playbook(root, selected)
    # Read the previous lock before overwriting so its timestamp can be
    # carried forward as lineage.
    previous = _read_lock(lock_path)
    payload = {
        "schema": ECOSYSTEM_LOCK_SCHEMA,
        "catalog_schema": ECOSYSTEM_SCHEMA,
        "catalog_version": ECOSYSTEM_CATALOG_VERSION,
        "generated_at": _now(),
        "selected_count": len(selected),
        "unknown_count": len(unknown),
        "selected": [repo["name"] for repo in selected],
        "unknown": unknown,
        "entries": entries,
        "playbook_files": playbook_files,
        "previous_generated_at": previous.get("generated_at", ""),
    }
    lock_path.write_text(json.dumps(payload, indent=2, ensure_ascii=True), encoding="utf-8")
    return {
        "status": "ok",
        "schema": ECOSYSTEM_LOCK_SCHEMA,
        "catalog_version": ECOSYSTEM_CATALOG_VERSION,
        "lock_path": str(lock_path),
        "repo_dir": str(repo_root),
        "selected": payload["selected"],
        "unknown": unknown,
        "entries": entries,
        "playbook_files": playbook_files,
    }
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
def ecosystem_status(*, project_dir: str) -> dict[str, Any]:
    """Report install state of every catalog repo plus lock-file metadata.

    For each repo: installed flag, and (when installed) the current
    commit/branch or the git error encountered while reading them.
    """
    root = Path(project_dir)
    repo_root = root / DEFAULT_ECOSYSTEM_REPO_DIR
    lock_path = root / DEFAULT_ECOSYSTEM_LOCK_PATH
    lock = _read_lock(lock_path)

    statuses: list[dict[str, Any]] = []
    for entry in list_ecosystem_repos():
        name = str(entry["name"])
        segments = [".omg", "ecosystem", "repos", name]
        checkout = repo_root / name
        if not checkout.exists():
            statuses.append(
                {
                    "name": entry["name"],
                    "installed": False,
                    "repo_segments": segments,
                }
            )
            continue
        commit = branch = error = ""
        try:
            commit = _run_git(["-C", str(checkout), "rev-parse", "HEAD"])
            branch = _run_git(["-C", str(checkout), "rev-parse", "--abbrev-ref", "HEAD"])
        except Exception as exc:
            error = str(exc)
        statuses.append(
            {
                "name": entry["name"],
                "installed": True,
                "repo_segments": segments,
                "commit": commit,
                "branch": branch,
                "error": error,
            }
        )

    return {
        "status": "ok",
        "schema": ECOSYSTEM_LOCK_SCHEMA,
        "catalog_schema": ECOSYSTEM_SCHEMA,
        "catalog_version": ECOSYSTEM_CATALOG_VERSION,
        "lock_exists": lock_path.exists(),
        "lock_generated_at": lock.get("generated_at", ""),
        "repo_dir": str(repo_root),
        "repos": statuses,
    }
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import cast
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def _atomic_write_text(path: Path, content: str) -> None:
|
|
10
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
11
|
+
tmp_path = path.with_name(f"{path.name}.tmp")
|
|
12
|
+
_ = tmp_path.write_text(content)
|
|
13
|
+
_ = os.replace(tmp_path, path)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _load_json(path: Path) -> dict[str, object]:
|
|
17
|
+
if not path.exists():
|
|
18
|
+
return {}
|
|
19
|
+
try:
|
|
20
|
+
parsed = cast(object, json.loads(path.read_text()))
|
|
21
|
+
if isinstance(parsed, dict):
|
|
22
|
+
return cast(dict[str, object], parsed)
|
|
23
|
+
return {}
|
|
24
|
+
except (json.JSONDecodeError, ValueError):
|
|
25
|
+
return {}
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _write_json(path: Path, data: dict[str, object]) -> None:
    """Serialize *data* as 2-space-indented JSON (with trailing newline) to *path*."""
    serialized = json.dumps(data, indent=2)
    _atomic_write_text(path, serialized + "\n")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def write_claude_mcp_config(project_dir: str, server_url: str, server_name: str = "memory-server") -> None:
    """Register an HTTP MCP server entry in the project's .mcp.json.

    Existing configuration is preserved; only mcpServers[server_name]
    is (re)written.
    """
    config_path = Path(project_dir) / ".mcp.json"
    config = _load_json(config_path)
    existing = config.get("mcpServers")
    if isinstance(existing, dict):
        servers = existing
    else:
        servers = {}
        config["mcpServers"] = servers
    servers[server_name] = {"type": "http", "url": server_url}
    _write_json(config_path, config)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def write_codex_mcp_config(server_url: str, server_name: str = "memory-server") -> None:
    """Register an HTTP MCP server in ~/.codex/config.toml.

    Splices a ``[mcp_servers.<name>]`` TOML table into the file by plain
    line manipulation (no TOML library): if a matching header exists
    (quoted or unquoted form), its block is replaced up to the next
    table header; otherwise the block is appended at the end.
    """
    config_path = Path.home() / ".codex" / "config.toml"
    config_path.parent.mkdir(parents=True, exist_ok=True)

    existing = config_path.read_text() if config_path.exists() else ""
    lines = existing.splitlines(keepends=True)

    # Accept either header spelling a user may already have in the file.
    header_unquoted = f"[mcp_servers.{server_name}]"
    header_quoted = f"[mcp_servers.\"{server_name}\"]"
    headers = {header_unquoted, header_quoted}

    # Locate the start of an existing block for this server, if any.
    start_idx: int | None = None
    for idx, line in enumerate(lines):
        if line.strip() in headers:
            start_idx = idx
            break

    # Replacement block always uses the unquoted header form.
    block = [
        f"{header_unquoted}\n",
        'type = "http"\n',
        f'url = "{server_url}"\n',
        "\n",
    ]

    if start_idx is None:
        # No existing entry: append, ensuring a trailing newline first.
        if existing and not existing.endswith("\n"):
            existing += "\n"
        content = existing + "".join(block)
        _atomic_write_text(config_path, content)
        return

    # Existing entry: find where its block ends — the next line that
    # looks like a TOML table header — and splice the new block in.
    end_idx = len(lines)
    for idx in range(start_idx + 1, len(lines)):
        stripped = lines[idx].strip()
        if stripped.startswith("[") and stripped.endswith("]"):
            end_idx = idx
            break

    updated_lines = lines[:start_idx] + block + lines[end_idx:]
    _atomic_write_text(config_path, "".join(updated_lines))
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def write_gemini_mcp_config(server_url: str, server_name: str = "memory-server") -> None:
    """Register an MCP server (Gemini's httpUrl form) in ~/.gemini/settings.json."""
    config_path = Path.home() / ".gemini" / "settings.json"
    config = _load_json(config_path)
    section = config.get("mcpServers")
    if not isinstance(section, dict):
        section = {}
        config["mcpServers"] = section
    section[server_name] = {"httpUrl": server_url}
    _write_json(config_path, config)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def write_opencode_mcp_config(server_url: str, server_name: str = "memory-server") -> None:
    """Register a remote MCP server in ~/.config/opencode/opencode.json."""
    config_path = Path.home() / ".config" / "opencode" / "opencode.json"
    config = _load_json(config_path)
    mcp_section = config.get("mcp")
    if not isinstance(mcp_section, dict):
        mcp_section = {}
        config["mcp"] = mcp_section
    mcp_section[server_name] = {"type": "remote", "url": server_url}
    _write_json(config_path, config)
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def write_kimi_mcp_config(server_url: str, server_name: str = "memory-server") -> None:
    """Register an HTTP MCP server in ~/.kimi/mcp.json."""
    config_path = Path.home() / ".kimi" / "mcp.json"
    config = _load_json(config_path)
    registry = config.get("mcpServers")
    if not isinstance(registry, dict):
        registry = {}
        config["mcpServers"] = registry
    registry[server_name] = {"type": "http", "url": server_url}
    _write_json(config_path, config)
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
"""MCP server lifecycle manager — start/stop/health/ensure."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import os
|
|
5
|
+
import signal
|
|
6
|
+
import subprocess
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def get_pid_file_path() -> str:
    """Return the path to the memory server's PID file (under ~/.omg)."""
    pid_file = Path.home() / ".omg" / "shared-memory" / "server.pid"
    return str(pid_file)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def get_server_url() -> str:
    """Return the MCP server URL built from mcp_memory_server's host/port."""
    # Local import avoids a circular dependency at module load time.
    from runtime.mcp_memory_server import get_host, get_port

    host, port = get_host(), get_port()
    return f"http://{host}:{port}/mcp"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _read_pid() -> int | None:
    """Read the stored server PID; None when the file is missing or malformed."""
    pid_file = Path(get_pid_file_path())
    try:
        raw = pid_file.read_text()
    except FileNotFoundError:
        return None
    try:
        return int(raw.strip())
    except ValueError:
        return None
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def is_server_running() -> bool:
    """Check whether the recorded memory-server PID refers to a live process."""
    pid = _read_pid()
    if pid is None:
        return False
    try:
        os.kill(pid, 0)  # signal 0: existence probe, delivers nothing
    except ProcessLookupError:
        return False
    except PermissionError:
        # Process exists but belongs to another user — still running.
        return True
    return True
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _wait_for_health(url: str, timeout: float = 5.0) -> bool:
|
|
48
|
+
"""Wait for server health endpoint to respond."""
|
|
49
|
+
import urllib.request
|
|
50
|
+
|
|
51
|
+
deadline = time.monotonic() + timeout
|
|
52
|
+
while time.monotonic() < deadline:
|
|
53
|
+
try:
|
|
54
|
+
with urllib.request.urlopen(url, timeout=1) as resp:
|
|
55
|
+
if resp.status == 200:
|
|
56
|
+
return True
|
|
57
|
+
except Exception:
|
|
58
|
+
time.sleep(0.1)
|
|
59
|
+
return False
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _wait_for_exit(pid: int, timeout: float = 5.0) -> bool:
|
|
63
|
+
"""Wait for a process to exit."""
|
|
64
|
+
deadline = time.monotonic() + timeout
|
|
65
|
+
while time.monotonic() < deadline:
|
|
66
|
+
try:
|
|
67
|
+
os.kill(pid, 0)
|
|
68
|
+
time.sleep(0.1)
|
|
69
|
+
except (ProcessLookupError, PermissionError):
|
|
70
|
+
return True
|
|
71
|
+
return False
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def start_memory_server() -> dict[str, Any]:
    """Start the MCP memory server as a detached background process.

    Writes the child PID to the PID file, then waits briefly for the
    /health endpoint before reporting success. Returns a status dict
    ("already_running" / "started" / "error"); never raises.
    """
    if is_server_running():
        return {"status": "already_running", "url": get_server_url()}

    pid_path = Path(get_pid_file_path())
    pid_path.parent.mkdir(parents=True, exist_ok=True)

    try:
        # Launch the sibling server module with the current interpreter,
        # silencing its output.
        server_script = str(Path(__file__).parent / "mcp_memory_server.py")
        proc = subprocess.Popen(
            [sys.executable, server_script],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )

        pid_path.write_text(str(proc.pid))

        # NOTE(review): str.replace swaps every "/mcp" occurrence in the
        # URL, not just the trailing path — fine for the default URL
        # shape; confirm hosts can never contain "/mcp".
        health_url = get_server_url().replace("/mcp", "/health")
        if _wait_for_health(health_url):
            return {"status": "started", "pid": proc.pid, "url": get_server_url()}
        else:
            # NOTE(review): on health timeout the child keeps running and
            # the PID file remains, even though "error" is returned —
            # confirm intended.
            return {
                "status": "error",
                "message": "Server did not respond within timeout",
            }
    except Exception as exc:
        return {"status": "error", "message": str(exc)}
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def stop_memory_server() -> dict[str, Any]:
    """Stop the MCP memory server via SIGTERM and clean up the PID file.

    Returns a status dict ("not_running" / "stopped" / "error");
    never raises.
    """
    if not is_server_running():
        return {"status": "not_running"}

    pid = _read_pid()
    if pid is None:
        return {"status": "not_running"}

    pid_path = Path(get_pid_file_path())
    try:
        os.kill(pid, signal.SIGTERM)
        _wait_for_exit(pid)
        if pid_path.exists():
            pid_path.unlink()
        return {"status": "stopped", "pid": pid}
    except ProcessLookupError:
        # Already gone between the liveness check and the signal —
        # treat as stopped, but still remove the stale PID file.
        if pid_path.exists():
            pid_path.unlink()
        return {"status": "stopped", "pid": pid}
    except Exception as exc:
        return {"status": "error", "message": str(exc)}
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def check_memory_server() -> dict[str, Any]:
    """Report whether the memory server is running, plus its URL and PID."""
    if not is_server_running():
        return {"running": False, "url": None, "pid": None}
    return {
        "running": True,
        "url": get_server_url(),
        "pid": _read_pid(),
    }
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def ensure_memory_server() -> dict[str, Any]:
    """Idempotently make sure the memory server is up, starting it if needed."""
    already_up = is_server_running()
    if already_up:
        return {"status": "already_running", "url": get_server_url()}
    return start_memory_server()
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
# Feature flag: auto-start on import
# Opt-in module-level side effect: importing this module launches the
# memory server only when OMG_MEMORY_AUTOSTART=1 is set.
if os.environ.get("OMG_MEMORY_AUTOSTART") == "1":
    ensure_memory_server()
|