arkaos 2.14.0 → 2.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/VERSION CHANGED
@@ -1 +1 @@
1
- 2.14.0
1
+ 2.15.0
@@ -0,0 +1,25 @@
1
+ """ArkaOS Sync Engine — Hybrid sync for /arka update."""
2
+
3
+ from core.sync.engine import run_sync
4
+ from core.sync.schema import (
5
+ ChangeManifest,
6
+ DescriptorSyncResult,
7
+ FeatureSpec,
8
+ McpSyncResult,
9
+ Project,
10
+ SettingsSyncResult,
11
+ SkillSyncResult,
12
+ SyncReport,
13
+ )
14
+
15
+ __all__ = [
16
+ "run_sync",
17
+ "ChangeManifest",
18
+ "DescriptorSyncResult",
19
+ "FeatureSpec",
20
+ "McpSyncResult",
21
+ "Project",
22
+ "SettingsSyncResult",
23
+ "SkillSyncResult",
24
+ "SyncReport",
25
+ ]
@@ -0,0 +1,166 @@
1
+ """Descriptor syncer for the ArkaOS Sync Engine.
2
+
3
+ Syncs project descriptor YAML frontmatter: auto-pauses inactive projects,
4
+ archives missing paths, and updates detected stacks.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import subprocess
10
+ from datetime import datetime, timezone
11
+ from pathlib import Path
12
+
13
+ import yaml
14
+
15
+ from core.sync.schema import DescriptorSyncResult, Project
16
+
17
+ _PAUSE_THRESHOLD_DAYS = 30
18
+ _REACTIVATE_THRESHOLD_DAYS = 7
19
+
20
+
21
+ # ---------------------------------------------------------------------------
22
+ # Public API
23
+ # ---------------------------------------------------------------------------
24
+
25
+
26
def sync_descriptor(project: Project) -> DescriptorSyncResult:
    """Sync a single project's descriptor file.

    Reads the descriptor, checks if the project path exists, compares the
    detected stack, checks git activity, and writes any updates back.
    Returns an "unchanged" result when the project has no usable descriptor.
    """
    descriptor = project.descriptor_path
    if not descriptor or not Path(descriptor).exists():
        # Nothing to sync — report the project as untouched.
        return DescriptorSyncResult(path=project.path, status="unchanged")

    try:
        return _do_sync(project)
    except Exception as exc:  # noqa: BLE001
        # One broken descriptor must never abort the whole sync run.
        return DescriptorSyncResult(path=project.path, status="error", error=str(exc))
45
+
46
+
47
def _do_sync(project: Project) -> DescriptorSyncResult:
    """Execute the descriptor sync logic for a single project."""
    desc_path = Path(project.descriptor_path)  # type: ignore[arg-type]
    frontmatter, body = _split_frontmatter(desc_path.read_text())
    changes: list[str] = []

    # A vanished project directory trumps every other check: archive and stop.
    if not Path(project.path).exists():
        frontmatter["status"] = "archived"
        changes.append("status: archived (path not found)")
        _write_descriptor(desc_path, frontmatter, body)
        return DescriptorSyncResult(path=project.path, status="updated", changes=changes)

    _check_stack(frontmatter, project.stack, changes)
    _check_activity(frontmatter, project.path, changes)

    if changes:
        _write_descriptor(desc_path, frontmatter, body)
        return DescriptorSyncResult(path=project.path, status="updated", changes=changes)
    return DescriptorSyncResult(path=project.path, status="unchanged")
72
+
73
+
74
def sync_all_descriptors(projects: list[Project]) -> list[DescriptorSyncResult]:
    """Sync descriptor files for all projects, one result per project."""
    return list(map(sync_descriptor, projects))
77
+
78
+
79
+ # ---------------------------------------------------------------------------
80
+ # Private helpers
81
+ # ---------------------------------------------------------------------------
82
+
83
+
84
def _split_frontmatter(text: str) -> tuple[dict, str]:
    """Split a markdown file into its YAML frontmatter dict and body string.

    Expects the file to start with '---' and have a closing '---' marker.
    Returns ({}, full_text) if frontmatter markers are not found.
    """
    if text.startswith("---"):
        pieces = text.split("---", 2)
        if len(pieces) == 3:
            # pieces = [prefix-before-first-marker, yaml, body]
            _, raw_yaml, body = pieces
            return (yaml.safe_load(raw_yaml) or {}, body)
    return {}, text
101
+
102
+
103
def _normalize_stack_item(item: str) -> str:
    """Normalize a stack item to its lowercase first word for comparison.

    Returns "" for empty or whitespace-only items instead of raising
    IndexError (the previous ``split()[0]`` crashed on such input, which
    can occur with an empty string entry in descriptor YAML).
    """
    words = item.strip().lower().split()
    return words[0] if words else ""
106
+
107
+
108
def _check_stack(
    frontmatter: dict, detected_stack: list[str], changes: list[str]
) -> None:
    """Compare frontmatter stack with detected stack and update if different."""
    current: list[str] = frontmatter.get("stack") or []
    current_keys = {_normalize_stack_item(s) for s in current}
    detected_keys = {_normalize_stack_item(s) for s in detected_stack}

    # Only overwrite when we actually detected something and it differs.
    if detected_stack and current_keys != detected_keys:
        frontmatter["stack"] = detected_stack
        changes.append(f"stack updated: {current} -> {detected_stack}")
119
+
120
+
121
def _check_activity(
    frontmatter: dict, project_path: str, changes: list[str]
) -> None:
    """Check git activity and auto-pause or auto-reactivate the project."""
    days = _get_last_commit_days(project_path)
    if days is None:
        # No usable git history — leave the status untouched.
        return

    status = frontmatter.get("status", "active")
    should_pause = status == "active" and days > _PAUSE_THRESHOLD_DAYS
    should_wake = status == "paused" and days < _REACTIVATE_THRESHOLD_DAYS

    if should_pause:
        frontmatter["status"] = "paused"
        changes.append(f"status: active -> paused ({days}d since last commit)")
    elif should_wake:
        frontmatter["status"] = "active"
        changes.append(f"status: paused -> active ({days}d since last commit)")
137
+
138
+
139
def _get_last_commit_days(project_path: str) -> int | None:
    """Return days since the last git commit in project_path, or None.

    Uses ``--format=%cI`` (strict ISO 8601, e.g. "2024-01-15T10:30:45+01:00")
    instead of ``%ci``: the ``%ci`` form ("2024-01-15 10:30:45 +0100") is not
    valid ISO 8601, so ``datetime.fromisoformat`` raised ValueError and the
    broad except made this function always return None, silently disabling
    the auto-pause/reactivate logic.

    Returns None when git is unavailable, the path is not a repository,
    the command times out, or the output cannot be parsed.
    """
    try:
        result = subprocess.run(
            ["git", "log", "-1", "--format=%cI"],
            cwd=project_path,
            capture_output=True,
            text=True,
            timeout=10,
        )
        if result.returncode != 0:
            # Not a git repo (or git error) — treat as "no history".
            return None
        raw = result.stdout.strip()
        if not raw:
            return None

        commit_dt = datetime.fromisoformat(raw)
        if commit_dt.tzinfo is None:
            # Defensive: %cI always carries an offset, but never compare
            # naive against aware datetimes.
            commit_dt = commit_dt.replace(tzinfo=timezone.utc)

        now = datetime.now(tz=timezone.utc)
        return (now - commit_dt).days
    except Exception:  # noqa: BLE001
        return None
161
+
162
+
163
def _write_descriptor(desc_path: Path, frontmatter: dict, body: str) -> None:
    """Write updated frontmatter and preserved body back to the descriptor file."""
    dumped = yaml.dump(frontmatter, default_flow_style=False, allow_unicode=True)
    # Re-assemble the '---' fenced frontmatter; body already starts with the
    # text that followed the closing marker, so no extra separator is added.
    desc_path.write_text("".join(["---\n", dumped, "---", body]))
@@ -0,0 +1,256 @@
1
+ """Project discovery for the ArkaOS Sync Engine.
2
+
3
+ Discovers projects from 3 sources: descriptors, filesystem, and ecosystems.
4
+ Detects tech stacks and deduplicates across sources.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ from pathlib import Path
11
+
12
+ import yaml
13
+
14
+ from core.sync.schema import Project
15
+
16
+
17
+ # --- Stack detection helpers ---
18
+
19
def _detect_from_composer(project_path: Path) -> list[str]:
    """Detect PHP (and Laravel) from composer.json, if present."""
    composer = project_path / "composer.json"
    if not composer.exists():
        return []
    try:
        manifest = json.loads(composer.read_text())
    except (json.JSONDecodeError, OSError):
        return []
    # laravel/framework in "require" marks a Laravel app; otherwise plain PHP.
    if "laravel/framework" in manifest.get("require", {}):
        return ["php", "laravel"]
    return ["php"]
31
+
32
+
33
def _detect_from_package_json(project_path: Path) -> list[str]:
    """Detect the JavaScript framework flavor from package.json, if present."""
    pkg = project_path / "package.json"
    if not pkg.exists():
        return []
    try:
        data = json.loads(pkg.read_text())
        deps = {
            **data.get("dependencies", {}),
            **data.get("devDependencies", {}),
        }
        # Ordered checks: meta-frameworks (nuxt/next) win over their
        # underlying view libraries (vue/react).
        for marker, stack in (
            ("nuxt", ["javascript", "nuxt", "vue"]),
            ("vue", ["javascript", "vue"]),
            ("next", ["javascript", "next", "react"]),
            ("react", ["javascript", "react"]),
        ):
            if marker in deps:
                return list(stack)
        return ["javascript"]
    except (json.JSONDecodeError, OSError):
        return []
57
+
58
+
59
def _detect_from_pyproject(project_path: Path) -> list[str]:
    """Detect Python from the presence of pyproject.toml."""
    return ["python"] if (project_path / "pyproject.toml").exists() else []
64
+
65
+
66
def detect_stack(project_path: Path) -> list[str]:
    """Detect tech stack from project files.

    Checks composer.json, package.json, and pyproject.toml in order.
    Returns a deduplicated list of detected technologies (first occurrence
    wins; original order preserved).
    """
    detected = (
        _detect_from_composer(project_path)
        + _detect_from_package_json(project_path)
        + _detect_from_pyproject(project_path)
    )
    # dict.fromkeys keeps insertion order while dropping duplicates —
    # replaces the manual seen-set/append loop.
    return list(dict.fromkeys(detected))
83
+
84
+
85
+ # --- Descriptor discovery helpers ---
86
+
87
def _parse_descriptor_frontmatter(text: str) -> dict:
    """Extract YAML frontmatter from a markdown file.

    Returns {} when the '---' fences are absent or the YAML is malformed.
    """
    if not text.startswith("---"):
        return {}
    pieces = text.split("---", 2)
    if len(pieces) < 3:
        return {}
    try:
        parsed = yaml.safe_load(pieces[1])
    except yaml.YAMLError:
        # Malformed YAML is treated the same as no frontmatter at all.
        return {}
    return parsed or {}
98
+
99
+
100
def _read_descriptor_item(item: Path) -> dict:
    """Read a descriptor file and return its frontmatter as a dict."""
    try:
        text = item.read_text()
    except OSError:
        # Unreadable file — behave like an empty descriptor.
        return {}
    return _parse_descriptor_frontmatter(text)
106
+
107
+
108
def _process_descriptor_item(item: Path, descriptor_dir: Path) -> "Project | None":
    """Parse a single descriptor file and return a Project or None.

    Returns None when the descriptor has no ``path`` field or the path no
    longer exists on disk. ``descriptor_dir`` is currently unused; it is kept
    to preserve the call signature.
    """
    fm = _read_descriptor_item(item)
    raw_path = fm.get("path", "")
    if not raw_path:
        return None
    project_path = Path(raw_path)
    if not project_path.exists():
        return None
    # `or` fallback: an explicit `name: null` in the frontmatter previously
    # produced name=None via get(..., default); fall back to the directory
    # name in that case too.
    name = fm.get("name") or project_path.name
    return Project(
        path=str(project_path),
        name=name,
        ecosystem=fm.get("ecosystem") or None,
        stack=detect_stack(project_path),
        descriptor_path=str(item),
        has_mcp_json=(project_path / ".mcp.json").exists(),
        has_settings=(project_path / ".claude").is_dir(),
    )
128
+
129
+
130
def discover_from_descriptors(descriptor_dir: Path) -> list[Project]:
    """Discover projects from .md descriptor files with YAML frontmatter.

    Reads .md files in descriptor_dir and PROJECT.md in subdirectories.
    Skips entries whose paths don't exist on the filesystem.
    """
    if not descriptor_dir.exists():
        return []

    candidates = list(descriptor_dir.glob("*.md"))
    # Also support a one-folder-per-project layout: <dir>/<project>/PROJECT.md
    candidates.extend(
        child / "PROJECT.md"
        for child in descriptor_dir.iterdir()
        if child.is_dir() and (child / "PROJECT.md").exists()
    )

    parsed = (_process_descriptor_item(c, descriptor_dir) for c in candidates)
    return [project for project in parsed if project is not None]
153
+
154
+
155
+ # --- Filesystem discovery ---
156
+
157
def discover_from_filesystem(scan_dirs: list[Path]) -> list[Project]:
    """Discover projects by scanning directories for .mcp.json or .claude/ markers."""
    found: list[Project] = []
    for base in scan_dirs:
        if not base.exists():
            continue
        for candidate in base.iterdir():
            if not candidate.is_dir():
                continue
            mcp_marker = (candidate / ".mcp.json").is_file()
            claude_dir = (candidate / ".claude").is_dir()
            claude_md = (candidate / "CLAUDE.md").is_file()
            # At least one marker must be present to count as a project.
            if not (mcp_marker or claude_dir or claude_md):
                continue
            found.append(
                Project(
                    path=str(candidate),
                    name=candidate.name,
                    stack=detect_stack(candidate),
                    has_mcp_json=mcp_marker,
                    has_settings=claude_dir,
                )
            )
    return found
181
+
182
+
183
+ # --- Ecosystem discovery ---
184
+
185
def discover_from_ecosystems(ecosystems_file: Path) -> list[Project]:
    """Discover projects from an ecosystems.json registry file."""
    if not ecosystems_file.exists():
        return []
    try:
        registry = json.loads(ecosystems_file.read_text())
    except (json.JSONDecodeError, OSError):
        return []

    found: list[Project] = []
    for eco_key, eco_data in registry.get("ecosystems", {}).items():
        for proj_name, path_str in eco_data.get("project_paths", {}).items():
            proj_path = Path(path_str)
            if not proj_path.exists():
                # Stale registry entry — skip silently.
                continue
            found.append(
                Project(
                    path=str(proj_path),
                    name=proj_name,
                    ecosystem=eco_key,
                    stack=detect_stack(proj_path),
                    has_mcp_json=(proj_path / ".mcp.json").exists(),
                    has_settings=(proj_path / ".claude").is_dir(),
                )
            )
    return found
211
+
212
+
213
+ # --- Merge and deduplication helpers ---
214
+
215
def _merge_project(primary: Project, secondary: Project) -> Project:
    """Merge two Project records — primary data wins over secondary.

    Scalar fields fall back to the secondary record only when the primary
    has no data; boolean markers are OR-ed so either source may set them.
    """
    return Project(
        path=primary.path,
        name=primary.name,
        ecosystem=primary.ecosystem or secondary.ecosystem,
        stack=primary.stack or secondary.stack,
        descriptor_path=primary.descriptor_path or secondary.descriptor_path,
        has_mcp_json=primary.has_mcp_json or secondary.has_mcp_json,
        has_settings=primary.has_settings or secondary.has_settings,
    )
226
+
227
+
228
def _deduplicate(projects: list[Project]) -> list[Project]:
    """Deduplicate projects by resolved absolute path. First entry wins."""
    by_path: dict[str, Project] = {}
    for candidate in projects:
        # Resolve symlinks / relative segments so equivalent paths collide.
        key = str(Path(candidate.path).resolve())
        existing = by_path.get(key)
        by_path[key] = (
            candidate if existing is None else _merge_project(existing, candidate)
        )
    return list(by_path.values())
238
+
239
+
240
def discover_all_projects(
    descriptor_dir: Path,
    scan_dirs: list[Path],
    ecosystems_file: Path,
) -> list[Project]:
    """Discover all projects from all sources, deduplicate, and sort by name.

    Descriptor data wins over filesystem/ecosystem data during merging.
    """
    # Order matters: _deduplicate keeps the first record per path, so
    # descriptor entries take precedence, then ecosystems, then raw scans.
    combined = (
        discover_from_descriptors(descriptor_dir)
        + discover_from_ecosystems(ecosystems_file)
        + discover_from_filesystem(scan_dirs)
    )
    return sorted(_deduplicate(combined), key=lambda p: p.name)
@@ -0,0 +1,177 @@
1
+ """Engine Orchestrator for the ArkaOS Sync Engine.
2
+
3
+ Coordinates all sync phases and provides a CLI entry point for /arka update.
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ import argparse
9
+ import json
10
+ import re
11
+ import sys
12
+ from pathlib import Path
13
+
14
+ from core.sync.manifest import build_manifest
15
+ from core.sync.discovery import discover_all_projects
16
+ from core.sync.mcp_syncer import sync_all_mcps
17
+ from core.sync.settings_syncer import sync_all_settings
18
+ from core.sync.descriptor_syncer import sync_all_descriptors
19
+ from core.sync.reporter import build_report, format_report, write_sync_state
20
+ from core.sync.schema import SyncReport
21
+
22
+
23
+ # ---------------------------------------------------------------------------
24
+ # Public API
25
+ # ---------------------------------------------------------------------------
26
+
27
+
28
def run_sync(arkaos_home: Path, skills_dir: Path, home_path: str) -> SyncReport:
    """Orchestrate all deterministic sync phases and return a SyncReport."""
    # Phase 1: version bookkeeping and change manifest.
    previous_version = _read_previous_version(arkaos_home)
    current_version = _read_current_version(arkaos_home)
    manifest = build_manifest(
        previous_version, current_version, _resolve_features_dir(arkaos_home)
    )

    # Phase 2: project discovery from descriptors, scans, and ecosystems.
    projects = _discover_projects(arkaos_home, skills_dir)

    # Phase 3: per-project syncers (MCP registry, settings, descriptors).
    registry_path = skills_dir / "arka" / "mcps" / "registry.json"
    mcp_results = sync_all_mcps(projects, registry_path, home_path)
    settings_results = sync_all_settings(mcp_results)
    descriptor_results = sync_all_descriptors(projects)

    # Phase 4: aggregate the report and persist sync state for the next run.
    report = build_report(
        previous_version,
        current_version,
        mcp_results,
        settings_results,
        descriptor_results,
        [],  # skill results: none produced by the deterministic phases
        new_features=manifest.new_features,
        deprecated_features=manifest.deprecated_features,
    )
    write_sync_state(arkaos_home / "sync-state.json", report)
    return report
58
+
59
+
60
def main() -> None:
    """CLI entry point for the sync engine."""
    parser = argparse.ArgumentParser(description="ArkaOS Sync Engine")
    parser.add_argument("--home", required=True, help="ArkaOS home directory")
    parser.add_argument("--skills", required=True, help="Skills directory")
    parser.add_argument(
        "--output",
        choices=["text", "json"],
        default="text",
        help="Output format",
    )
    args = parser.parse_args()

    report = run_sync(
        arkaos_home=Path(args.home),
        skills_dir=Path(args.skills),
        home_path=str(Path.home()),
    )

    rendered = (
        report.model_dump_json(indent=2)
        if args.output == "json"
        else format_report(report)
    )
    print(rendered)
83
+
84
+
85
+ # ---------------------------------------------------------------------------
86
+ # Private helpers
87
+ # ---------------------------------------------------------------------------
88
+
89
+
90
def _read_previous_version(arkaos_home: Path) -> str:
    """Read version field from sync-state.json, defaulting to pending-sync."""
    fallback = "pending-sync"
    state_file = arkaos_home / "sync-state.json"
    if not state_file.exists():
        return fallback
    try:
        state = json.loads(state_file.read_text())
        # `or fallback` also covers an explicit null/empty version value.
        return state.get("version", fallback) or fallback
    except (json.JSONDecodeError, OSError):
        return fallback
100
+
101
+
102
def _read_current_version(arkaos_home: Path) -> str:
    """Read version from VERSION file in the ArkaOS repo."""
    repo_path = _read_repo_path(arkaos_home)
    if repo_path is None:
        return "unknown"
    try:
        # OSError covers both a missing VERSION file and unreadable content.
        return (repo_path / "VERSION").read_text().strip()
    except OSError:
        return "unknown"
114
+
115
+
116
def _read_repo_path(arkaos_home: Path) -> Path | None:
    """Read the absolute repo path from .repo-path file."""
    marker = arkaos_home / ".repo-path"
    try:
        raw = marker.read_text().strip()
    except OSError:
        # Missing or unreadable marker — repo location unknown.
        return None
    return Path(raw) if raw else None
126
+
127
+
128
def _resolve_features_dir(arkaos_home: Path) -> Path:
    """Resolve the features directory from repo or fallback config."""
    repo_path = _read_repo_path(arkaos_home)
    if repo_path is not None:
        candidate = repo_path / "core" / "sync" / "features"
        if candidate.exists():
            return candidate
    # Repo unavailable or has no features dir: use the home-config copy.
    return arkaos_home / "config" / "sync" / "features"
138
+
139
+
140
def _parse_scan_dirs(projects_dir_str: str) -> list[Path]:
    """Parse a projectsDir string, extracting all paths starting with /.

    Each comma-separated segment may carry trailing notes after the path,
    e.g. "/home/me/code (main projects)" — only the leading path is kept.
    """
    found: list[Path] = []
    for segment in re.split(r",\s*", projects_dir_str.strip()):
        hit = re.match(r"(/[^\s]+)", segment.strip())
        if hit is not None:
            found.append(Path(hit.group(1)))
    return found
149
+
150
+
151
def _discover_projects(arkaos_home: Path, skills_dir: Path) -> list:
    """Combine profile.json dirs, descriptor dir, and ecosystems into projects."""
    arka_root = skills_dir / "arka"
    return discover_all_projects(
        arka_root / "projects",
        _load_scan_dirs_from_profile(arkaos_home),
        arka_root / "knowledge" / "ecosystems.json",
    )
159
+
160
+
161
def _load_scan_dirs_from_profile(arkaos_home: Path) -> list[Path]:
    """Read projectsDir from profile.json and parse into scan directory paths."""
    profile_file = arkaos_home / "profile.json"
    if not profile_file.exists():
        return []
    try:
        profile = json.loads(profile_file.read_text())
        projects_dir = profile.get("projectsDir", "")
        # Empty/missing projectsDir means nothing to scan.
        return _parse_scan_dirs(projects_dir) if projects_dir else []
    except (json.JSONDecodeError, OSError):
        return []
174
+
175
+
176
if __name__ == "__main__":
    # main() returns None, so sys.exit(None) exits with status 0 on success;
    # argparse errors inside main() raise SystemExit(2) themselves.
    sys.exit(main())
@@ -0,0 +1,16 @@
1
+ name: forge-integration
2
+ added_in: "2.14.0"
3
+ mandatory: true
4
+ section_title: "Forge Integration"
5
+ detection_pattern: "arka-forge"
6
+ deprecated_in: null
7
+ content: |
8
+ ## Forge Integration
9
+
10
+ Complex requests (complexity score >= 5) are automatically routed to
11
+ The Forge for multi-agent planning before execution.
12
+
13
+ - Phase 0.5: Forge analysis (after spec creation, before squad planning)
14
+ - Complexity assessment: automatic via Synapse L8 (ForgeContextLayer)
15
+ - Manual invocation: `/forge` command
16
+ - Handoff: Forge outputs structured plan → squad executes phases
@@ -0,0 +1,15 @@
1
+ name: quality-gate
2
+ added_in: "2.10.0"
3
+ mandatory: true
4
+ section_title: "Quality Gate"
5
+ detection_pattern: "Marta.*CQO|Quality Gate"
6
+ deprecated_in: null
7
+ content: |
8
+ ## Quality Gate
9
+
10
+ Mandatory on every workflow. Nothing ships without approval.
11
+
12
+ - **Marta (CQO):** Orchestrates review, absolute veto power
13
+ - **Eduardo (Copy Director):** Reviews all text output
14
+ - **Francisca (Tech Director):** Reviews all code and technical output
15
+ - Verdict: APPROVED or REJECTED (binary, no partial)