@spekn/cli 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +58 -0
- package/dist/main.js +3707 -611
- package/dist/tui/index.mjs +2 -2
- package/package.json +29 -12
- package/dist/__tests__/export-cli.test.d.ts +0 -1
- package/dist/__tests__/export-cli.test.js +0 -70
- package/dist/__tests__/tui-args-policy.test.d.ts +0 -1
- package/dist/__tests__/tui-args-policy.test.js +0 -50
- package/dist/acp-S2MHZOAD.mjs +0 -23
- package/dist/acp-UCCI44JY.mjs +0 -25
- package/dist/auth/credentials-store.d.ts +0 -2
- package/dist/auth/credentials-store.js +0 -5
- package/dist/auth/device-flow.d.ts +0 -36
- package/dist/auth/device-flow.js +0 -189
- package/dist/auth/jwt.d.ts +0 -1
- package/dist/auth/jwt.js +0 -6
- package/dist/auth/session.d.ts +0 -67
- package/dist/auth/session.js +0 -86
- package/dist/auth-login.d.ts +0 -34
- package/dist/auth-login.js +0 -202
- package/dist/auth-logout.d.ts +0 -25
- package/dist/auth-logout.js +0 -115
- package/dist/auth-status.d.ts +0 -24
- package/dist/auth-status.js +0 -109
- package/dist/backlog-generate.d.ts +0 -11
- package/dist/backlog-generate.js +0 -308
- package/dist/backlog-health.d.ts +0 -11
- package/dist/backlog-health.js +0 -287
- package/dist/bridge-login.d.ts +0 -40
- package/dist/bridge-login.js +0 -277
- package/dist/chunk-3PAYRI4G.mjs +0 -2428
- package/dist/chunk-M4CS3A25.mjs +0 -2426
- package/dist/commands/auth/login.d.ts +0 -30
- package/dist/commands/auth/login.js +0 -164
- package/dist/commands/auth/logout.d.ts +0 -25
- package/dist/commands/auth/logout.js +0 -115
- package/dist/commands/auth/status.d.ts +0 -24
- package/dist/commands/auth/status.js +0 -109
- package/dist/commands/backlog/generate.d.ts +0 -11
- package/dist/commands/backlog/generate.js +0 -308
- package/dist/commands/backlog/health.d.ts +0 -11
- package/dist/commands/backlog/health.js +0 -287
- package/dist/commands/bridge/login.d.ts +0 -36
- package/dist/commands/bridge/login.js +0 -258
- package/dist/commands/export.d.ts +0 -35
- package/dist/commands/export.js +0 -485
- package/dist/commands/marketplace-export.d.ts +0 -21
- package/dist/commands/marketplace-export.js +0 -214
- package/dist/commands/project-clean.d.ts +0 -1
- package/dist/commands/project-clean.js +0 -126
- package/dist/commands/repo/common.d.ts +0 -105
- package/dist/commands/repo/common.js +0 -775
- package/dist/commands/repo/detach.d.ts +0 -2
- package/dist/commands/repo/detach.js +0 -120
- package/dist/commands/repo/register.d.ts +0 -21
- package/dist/commands/repo/register.js +0 -175
- package/dist/commands/repo/sync.d.ts +0 -22
- package/dist/commands/repo/sync.js +0 -873
- package/dist/commands/skills-import-local.d.ts +0 -16
- package/dist/commands/skills-import-local.js +0 -352
- package/dist/commands/spec/drift-check.d.ts +0 -3
- package/dist/commands/spec/drift-check.js +0 -186
- package/dist/commands/spec/frontmatter.d.ts +0 -11
- package/dist/commands/spec/frontmatter.js +0 -219
- package/dist/commands/spec/lint.d.ts +0 -11
- package/dist/commands/spec/lint.js +0 -499
- package/dist/commands/spec/parse.d.ts +0 -11
- package/dist/commands/spec/parse.js +0 -162
- package/dist/export.d.ts +0 -35
- package/dist/export.js +0 -485
- package/dist/main.d.ts +0 -1
- package/dist/marketplace-export.d.ts +0 -21
- package/dist/marketplace-export.js +0 -214
- package/dist/project-clean.d.ts +0 -1
- package/dist/project-clean.js +0 -126
- package/dist/project-context.d.ts +0 -99
- package/dist/project-context.js +0 -376
- package/dist/repo-common.d.ts +0 -101
- package/dist/repo-common.js +0 -671
- package/dist/repo-detach.d.ts +0 -2
- package/dist/repo-detach.js +0 -102
- package/dist/repo-ingest.d.ts +0 -29
- package/dist/repo-ingest.js +0 -305
- package/dist/repo-register.d.ts +0 -21
- package/dist/repo-register.js +0 -175
- package/dist/repo-sync.d.ts +0 -16
- package/dist/repo-sync.js +0 -152
- package/dist/resources/prompt-loader.d.ts +0 -1
- package/dist/resources/prompt-loader.js +0 -62
- package/dist/skills-import-local.d.ts +0 -16
- package/dist/skills-import-local.js +0 -352
- package/dist/spec-drift-check.d.ts +0 -3
- package/dist/spec-drift-check.js +0 -186
- package/dist/spec-frontmatter.d.ts +0 -11
- package/dist/spec-frontmatter.js +0 -219
- package/dist/spec-lint.d.ts +0 -11
- package/dist/spec-lint.js +0 -499
- package/dist/spec-parse.d.ts +0 -11
- package/dist/spec-parse.js +0 -162
- package/dist/stubs/dotenv.d.ts +0 -5
- package/dist/stubs/dotenv.js +0 -6
- package/dist/stubs/typeorm.d.ts +0 -22
- package/dist/stubs/typeorm.js +0 -28
- package/dist/tui/app.d.ts +0 -7
- package/dist/tui/app.js +0 -122
- package/dist/tui/args.d.ts +0 -8
- package/dist/tui/args.js +0 -57
- package/dist/tui/capabilities/policy.d.ts +0 -7
- package/dist/tui/capabilities/policy.js +0 -64
- package/dist/tui/components/frame.d.ts +0 -8
- package/dist/tui/components/frame.js +0 -8
- package/dist/tui/components/status-bar.d.ts +0 -8
- package/dist/tui/components/status-bar.js +0 -8
- package/dist/tui/index.d.ts +0 -2
- package/dist/tui/index.js +0 -23
- package/dist/tui/keymap/use-global-keymap.d.ts +0 -19
- package/dist/tui/keymap/use-global-keymap.js +0 -82
- package/dist/tui/navigation/nav-items.d.ts +0 -3
- package/dist/tui/navigation/nav-items.js +0 -18
- package/dist/tui/screens/bridge.d.ts +0 -8
- package/dist/tui/screens/bridge.js +0 -19
- package/dist/tui/screens/decisions.d.ts +0 -5
- package/dist/tui/screens/decisions.js +0 -28
- package/dist/tui/screens/export.d.ts +0 -5
- package/dist/tui/screens/export.js +0 -16
- package/dist/tui/screens/home.d.ts +0 -5
- package/dist/tui/screens/home.js +0 -33
- package/dist/tui/screens/locked.d.ts +0 -5
- package/dist/tui/screens/locked.js +0 -9
- package/dist/tui/screens/specs.d.ts +0 -5
- package/dist/tui/screens/specs.js +0 -31
- package/dist/tui/services/client.d.ts +0 -1
- package/dist/tui/services/client.js +0 -18
- package/dist/tui/services/context-service.d.ts +0 -19
- package/dist/tui/services/context-service.js +0 -246
- package/dist/tui/shared-enums.d.ts +0 -16
- package/dist/tui/shared-enums.js +0 -19
- package/dist/tui/state/use-app-state.d.ts +0 -35
- package/dist/tui/state/use-app-state.js +0 -177
- package/dist/tui/types.d.ts +0 -77
- package/dist/tui/types.js +0 -2
- package/dist/tui-bundle.d.ts +0 -1
- package/dist/tui-bundle.js +0 -5
- package/dist/tui-entry.mjs +0 -1407
- package/dist/utils/cli-runtime.d.ts +0 -5
- package/dist/utils/cli-runtime.js +0 -22
- package/dist/utils/help-error.d.ts +0 -7
- package/dist/utils/help-error.js +0 -14
- package/dist/utils/interaction.d.ts +0 -19
- package/dist/utils/interaction.js +0 -93
- package/dist/utils/structured-log.d.ts +0 -7
- package/dist/utils/structured-log.js +0 -112
- package/dist/utils/trpc-url.d.ts +0 -4
- package/dist/utils/trpc-url.js +0 -15
|
@@ -1,873 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
"use strict";
|
|
3
|
-
/**
|
|
4
|
-
* repo-sync CLI command
|
|
5
|
-
*
|
|
6
|
-
* Syncs metadata for the current git repository with Spekn.
|
|
7
|
-
* Looks up the repo by its remote URL and updates name and default branch.
|
|
8
|
-
* By default, also runs ingestion drift analysis against project spec storage.
|
|
9
|
-
* Must be run from inside a local git clone.
|
|
10
|
-
*
|
|
11
|
-
* Usage: spekn repo sync --project-id <uuid> [--analyze|--no-analyze] [--import-to-project] [--api-url <url>]
|
|
12
|
-
*/
|
|
13
|
-
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
14
|
-
if (k2 === undefined) k2 = k;
|
|
15
|
-
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
16
|
-
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
17
|
-
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
18
|
-
}
|
|
19
|
-
Object.defineProperty(o, k2, desc);
|
|
20
|
-
}) : (function(o, m, k, k2) {
|
|
21
|
-
if (k2 === undefined) k2 = k;
|
|
22
|
-
o[k2] = m[k];
|
|
23
|
-
}));
|
|
24
|
-
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
25
|
-
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
26
|
-
}) : function(o, v) {
|
|
27
|
-
o["default"] = v;
|
|
28
|
-
});
|
|
29
|
-
var __importStar = (this && this.__importStar) || (function () {
|
|
30
|
-
var ownKeys = function(o) {
|
|
31
|
-
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
32
|
-
var ar = [];
|
|
33
|
-
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
34
|
-
return ar;
|
|
35
|
-
};
|
|
36
|
-
return ownKeys(o);
|
|
37
|
-
};
|
|
38
|
-
return function (mod) {
|
|
39
|
-
if (mod && mod.__esModule) return mod;
|
|
40
|
-
var result = {};
|
|
41
|
-
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
42
|
-
__setModuleDefault(result, mod);
|
|
43
|
-
return result;
|
|
44
|
-
};
|
|
45
|
-
})();
|
|
46
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
47
|
-
exports.runRepoSyncCli = runRepoSyncCli;
|
|
48
|
-
exports.main = main;
|
|
49
|
-
exports.parseArgs = parseArgs;
|
|
50
|
-
const fs = __importStar(require("node:fs"));
|
|
51
|
-
const path = __importStar(require("node:path"));
|
|
52
|
-
const common_1 = require("./common");
|
|
53
|
-
const project_context_1 = require("../../project-context");
|
|
54
|
-
const structured_log_1 = require("../../utils/structured-log");
|
|
55
|
-
const prompt_loader_1 = require("../../resources/prompt-loader");
|
|
56
|
-
const interaction_1 = require("../../utils/interaction");
|
|
57
|
-
const check_1 = require("@spekn/check");
|
|
58
|
-
function normalizeText(value) {
|
|
59
|
-
return value.replace(/\s+/g, " ").trim().toLowerCase();
|
|
60
|
-
}
|
|
61
|
-
function flattenIngestionText(result) {
|
|
62
|
-
const layers = result.layers ?? {};
|
|
63
|
-
const groups = [
|
|
64
|
-
layers.constraints ?? [],
|
|
65
|
-
layers.requirements ?? [],
|
|
66
|
-
layers.technicalContext ?? [],
|
|
67
|
-
layers.implementationGuidance ?? [],
|
|
68
|
-
];
|
|
69
|
-
const lines = [];
|
|
70
|
-
for (const group of groups) {
|
|
71
|
-
for (const item of group) {
|
|
72
|
-
const text = normalizeText(`${item.title ?? ""} ${item.content ?? ""}`);
|
|
73
|
-
if (text.length >= 24)
|
|
74
|
-
lines.push(text);
|
|
75
|
-
}
|
|
76
|
-
}
|
|
77
|
-
return lines;
|
|
78
|
-
}
|
|
79
|
-
function decideImport(analyzed, findings, latestSpecContent) {
|
|
80
|
-
const hasHighConflict = findings.some((finding) => finding?.type === "conflict" && finding?.severity === "high");
|
|
81
|
-
if (hasHighConflict) {
|
|
82
|
-
return { shouldImport: false, reason: "high-conflict" };
|
|
83
|
-
}
|
|
84
|
-
const localEntries = flattenIngestionText(analyzed);
|
|
85
|
-
if (localEntries.length === 0) {
|
|
86
|
-
return { shouldImport: false, reason: "duplicate" };
|
|
87
|
-
}
|
|
88
|
-
// Normalized line-level comparison for deduplication awareness
|
|
89
|
-
const existingNormalized = new Set(latestSpecContent
|
|
90
|
-
.split("\n")
|
|
91
|
-
.map((line) => line.toLowerCase().trim())
|
|
92
|
-
.filter(Boolean));
|
|
93
|
-
let novelItems = 0;
|
|
94
|
-
for (const entry of localEntries) {
|
|
95
|
-
if (!existingNormalized.has(entry))
|
|
96
|
-
novelItems++;
|
|
97
|
-
}
|
|
98
|
-
const noveltyRatio = localEntries.length > 0 ? novelItems / localEntries.length : 0;
|
|
99
|
-
// Only import if >30% novel content
|
|
100
|
-
return noveltyRatio > 0.3
|
|
101
|
-
? { shouldImport: true, reason: "new-content", noveltyRatio }
|
|
102
|
-
: { shouldImport: false, reason: "duplicate", noveltyRatio };
|
|
103
|
-
}
|
|
104
|
-
function buildSyncAnalysisPrompt(projectId, repoPath, files, commitContext, organizationId) {
|
|
105
|
-
const fileList = files.map((f) => ` - ${f.relativePath} [${f.category}]`).join("\n");
|
|
106
|
-
const orgIdInstruction = organizationId
|
|
107
|
-
? `\nORGANIZATION ID: ${organizationId}\nIMPORTANT: You MUST include "organizationId": "${organizationId}" as a parameter in EVERY Spekn MCP tool call.\n`
|
|
108
|
-
: "";
|
|
109
|
-
const template = (0, prompt_loader_1.loadPromptTemplate)("repo-sync-analysis.prompt.md");
|
|
110
|
-
return template
|
|
111
|
-
.replaceAll("{{PROJECT_ID}}", projectId)
|
|
112
|
-
.replaceAll("{{ORG_INSTRUCTION}}", orgIdInstruction)
|
|
113
|
-
.replaceAll("{{REPO_PATH}}", repoPath)
|
|
114
|
-
.replaceAll("{{COMMIT_CONTEXT}}", commitContext)
|
|
115
|
-
.replaceAll("{{FILE_LIST}}", fileList);
|
|
116
|
-
}
|
|
117
|
-
function loadGlobalContextLike() {
|
|
118
|
-
return (0, project_context_1.loadGlobalContext)() ?? {};
|
|
119
|
-
}
|
|
120
|
-
function getProjectsFromContext(globalContext) {
|
|
121
|
-
return Array.isArray(globalContext.projects) ? globalContext.projects : [];
|
|
122
|
-
}
|
|
123
|
-
function loadRepoSyncState(repoPath, projectId) {
|
|
124
|
-
const repoKey = path.resolve(repoPath);
|
|
125
|
-
const globalContext = loadGlobalContextLike();
|
|
126
|
-
const projects = getProjectsFromContext(globalContext);
|
|
127
|
-
const projectCheckpoint = projects.find((project) => project.id === projectId)?.repoSync
|
|
128
|
-
?.checkpoints?.[repoKey];
|
|
129
|
-
if (projectCheckpoint && typeof projectCheckpoint === "object") {
|
|
130
|
-
return projectCheckpoint;
|
|
131
|
-
}
|
|
132
|
-
return {};
|
|
133
|
-
}
|
|
134
|
-
function saveRepoSyncState(repoPath, projectId, organizationId, state) {
|
|
135
|
-
const repoKey = path.resolve(repoPath);
|
|
136
|
-
const globalContext = loadGlobalContextLike();
|
|
137
|
-
const projects = getProjectsFromContext(globalContext);
|
|
138
|
-
const now = new Date().toISOString();
|
|
139
|
-
const projectIndex = projects.findIndex((project) => project.id === projectId);
|
|
140
|
-
const nextProjects = [...projects];
|
|
141
|
-
if (projectIndex >= 0) {
|
|
142
|
-
const project = nextProjects[projectIndex];
|
|
143
|
-
nextProjects[projectIndex] = {
|
|
144
|
-
...project,
|
|
145
|
-
organizationId: project.organizationId || organizationId,
|
|
146
|
-
repoSync: {
|
|
147
|
-
...(project.repoSync ?? {}),
|
|
148
|
-
checkpoints: {
|
|
149
|
-
...(project.repoSync?.checkpoints ?? {}),
|
|
150
|
-
[repoKey]: state,
|
|
151
|
-
},
|
|
152
|
-
},
|
|
153
|
-
lastUsed: project.lastUsed || now,
|
|
154
|
-
repoPaths: Array.from(new Set([...(project.repoPaths ?? []), repoPath])),
|
|
155
|
-
};
|
|
156
|
-
}
|
|
157
|
-
else {
|
|
158
|
-
nextProjects.unshift({
|
|
159
|
-
id: projectId,
|
|
160
|
-
organizationId,
|
|
161
|
-
lastUsed: now,
|
|
162
|
-
repoPaths: [repoPath],
|
|
163
|
-
repoSync: {
|
|
164
|
-
checkpoints: {
|
|
165
|
-
[repoKey]: state,
|
|
166
|
-
},
|
|
167
|
-
},
|
|
168
|
-
});
|
|
169
|
-
}
|
|
170
|
-
(0, project_context_1.saveGlobalContext)({
|
|
171
|
-
...globalContext,
|
|
172
|
-
projects: nextProjects.slice(0, 10),
|
|
173
|
-
repoSync: undefined,
|
|
174
|
-
});
|
|
175
|
-
}
|
|
176
|
-
function readHeadCommit(repoPath, deps) {
|
|
177
|
-
try {
|
|
178
|
-
return deps.execGit(["-C", repoPath, "rev-parse", "HEAD"]);
|
|
179
|
-
}
|
|
180
|
-
catch {
|
|
181
|
-
return null;
|
|
182
|
-
}
|
|
183
|
-
}
|
|
184
|
-
function readChangedFilesSince(repoPath, fromCommit, toCommit, deps) {
|
|
185
|
-
try {
|
|
186
|
-
const raw = deps.execGit([
|
|
187
|
-
"-C",
|
|
188
|
-
repoPath,
|
|
189
|
-
"diff",
|
|
190
|
-
"--name-only",
|
|
191
|
-
`${fromCommit}..${toCommit}`,
|
|
192
|
-
]);
|
|
193
|
-
return raw
|
|
194
|
-
.split(/\r?\n/)
|
|
195
|
-
.map((line) => line.trim())
|
|
196
|
-
.filter((line) => line.length > 0);
|
|
197
|
-
}
|
|
198
|
-
catch {
|
|
199
|
-
return [];
|
|
200
|
-
}
|
|
201
|
-
}
|
|
202
|
-
function looksLikeCodeFile(relativePath) {
|
|
203
|
-
const lower = relativePath.toLowerCase();
|
|
204
|
-
if (lower.endsWith(".md") || lower.endsWith(".mdx") || lower.endsWith(".txt")) {
|
|
205
|
-
return false;
|
|
206
|
-
}
|
|
207
|
-
if (lower.includes("/specs/") ||
|
|
208
|
-
lower.includes("/decisions/") ||
|
|
209
|
-
lower.includes("spec") ||
|
|
210
|
-
lower.includes("decision") ||
|
|
211
|
-
lower.includes("/docs/")) {
|
|
212
|
-
return false;
|
|
213
|
-
}
|
|
214
|
-
return true;
|
|
215
|
-
}
|
|
216
|
-
function looksLikeSpecOrDecisionUpdate(relativePath) {
|
|
217
|
-
const lower = relativePath.toLowerCase();
|
|
218
|
-
return (lower.includes("/specs/") ||
|
|
219
|
-
lower.includes("/decisions/") ||
|
|
220
|
-
lower.includes("spec") ||
|
|
221
|
-
lower.includes("decision") ||
|
|
222
|
-
lower.endsWith(".md") ||
|
|
223
|
-
lower.endsWith(".mdx"));
|
|
224
|
-
}
|
|
225
|
-
async function approveLowConfidenceImport(input) {
|
|
226
|
-
const selected = await (0, interaction_1.requestSelectionFromController)({
|
|
227
|
-
title: "Approve low-confidence sync import?",
|
|
228
|
-
message: `${input.filePath} has score=${input.score}, findings=${input.findingCount}, high=${input.highCount}. ` +
|
|
229
|
-
"Approve import into SaaS spec/decision context?",
|
|
230
|
-
options: [
|
|
231
|
-
{ value: "approve", label: "Approve import" },
|
|
232
|
-
{ value: "skip", label: "Skip import" },
|
|
233
|
-
],
|
|
234
|
-
allowSkip: true,
|
|
235
|
-
timeoutMs: 45_000,
|
|
236
|
-
});
|
|
237
|
-
if (selected === "approve")
|
|
238
|
-
return true;
|
|
239
|
-
if (selected === "skip")
|
|
240
|
-
return false;
|
|
241
|
-
input.stderr(`[warn] No explicit approval for low-confidence change (${input.filePath}); skipping import.\n`);
|
|
242
|
-
return false;
|
|
243
|
-
}
|
|
244
|
-
async function requestReviewNotesApproval(input) {
|
|
245
|
-
const hasLowConfidence = /\blow-confidence\b/i.test(input.analysisText) ||
|
|
246
|
-
/^\s*REVIEW_NOTES\b/im.test(input.analysisText);
|
|
247
|
-
if (!hasLowConfidence)
|
|
248
|
-
return "approve";
|
|
249
|
-
const selected = await (0, interaction_1.requestSelectionFromController)({
|
|
250
|
-
title: "ACP review notes detected",
|
|
251
|
-
message: "ACP analysis reported low-confidence review notes. Apply sync result now or stop for manual review?",
|
|
252
|
-
options: [
|
|
253
|
-
{ value: "approve", label: "Apply now" },
|
|
254
|
-
{ value: "review-later", label: "Apply and review later" },
|
|
255
|
-
{ value: "cancel", label: "Cancel sync" },
|
|
256
|
-
],
|
|
257
|
-
allowSkip: true,
|
|
258
|
-
timeoutMs: 45_000,
|
|
259
|
-
});
|
|
260
|
-
if (selected === "approve" || selected === "review-later" || selected === "cancel") {
|
|
261
|
-
return selected;
|
|
262
|
-
}
|
|
263
|
-
input.stderr("[warn] No explicit review decision received for low-confidence notes; defaulting to review-later.\n");
|
|
264
|
-
return "review-later";
|
|
265
|
-
}
|
|
266
|
-
function printDiscoveredSummary(files, stdout) {
|
|
267
|
-
const byCat = {
|
|
268
|
-
governance: files.filter((f) => f.category === "governance"),
|
|
269
|
-
spec: files.filter((f) => f.category === "spec"),
|
|
270
|
-
decision: files.filter((f) => f.category === "decision"),
|
|
271
|
-
config: files.filter((f) => f.category === "config"),
|
|
272
|
-
};
|
|
273
|
-
stdout(`\nDiscovered ${files.length} files:\n`);
|
|
274
|
-
if (byCat.governance.length > 0)
|
|
275
|
-
stdout(` Governance: ${byCat.governance.map((f) => f.relativePath).join(", ")}\n`);
|
|
276
|
-
if (byCat.spec.length > 0)
|
|
277
|
-
stdout(` Specs: ${byCat.spec.map((f) => f.relativePath).join(", ")}\n`);
|
|
278
|
-
if (byCat.decision.length > 0)
|
|
279
|
-
stdout(` Decisions: ${byCat.decision.map((f) => f.relativePath).join(", ")}\n`);
|
|
280
|
-
if (byCat.config.length > 0)
|
|
281
|
-
stdout(` Other: ${byCat.config.length} markdown files\n`);
|
|
282
|
-
}
|
|
283
|
-
const INGESTION_CONCURRENCY = 5;
|
|
284
|
-
async function mapWithConcurrency(items, concurrency, fn) {
|
|
285
|
-
const results = [];
|
|
286
|
-
let index = 0;
|
|
287
|
-
async function worker() {
|
|
288
|
-
while (index < items.length) {
|
|
289
|
-
const i = index++;
|
|
290
|
-
results[i] = await fn(items[i]);
|
|
291
|
-
}
|
|
292
|
-
}
|
|
293
|
-
await Promise.all(Array.from({ length: Math.min(concurrency, items.length) }, () => worker()));
|
|
294
|
-
return results;
|
|
295
|
-
}
|
|
296
|
-
function runGovernanceHealthCheck(repoPath, deps) {
|
|
297
|
-
try {
|
|
298
|
-
const config = (0, check_1.loadConfig)(repoPath);
|
|
299
|
-
const scan = (0, check_1.scanRepo)(repoPath, config);
|
|
300
|
-
const checkResults = check_1.checks.map((check) => check.run({ repoPath, config, scan }));
|
|
301
|
-
const score = (0, check_1.calculateScore)(checkResults);
|
|
302
|
-
const grade = (0, check_1.assignGrade)(score);
|
|
303
|
-
const allFindings = [];
|
|
304
|
-
for (const result of checkResults) {
|
|
305
|
-
allFindings.push(...result.findings);
|
|
306
|
-
}
|
|
307
|
-
// Sort findings by potential points (most impactful first)
|
|
308
|
-
const actionableFindings = allFindings
|
|
309
|
-
.filter((f) => f.speknCta)
|
|
310
|
-
.sort((a, b) => {
|
|
311
|
-
const severityOrder = { error: 3, warning: 2, info: 1 };
|
|
312
|
-
return (severityOrder[b.severity] ?? 0) - (severityOrder[a.severity] ?? 0);
|
|
313
|
-
});
|
|
314
|
-
// Render the health report
|
|
315
|
-
const maxScore = checkResults.reduce((sum, r) => sum + r.maxScore, 0);
|
|
316
|
-
deps.stdout(`\n\u{1F4CA} Governance Health: ${grade} (${score}/100, raw ${checkResults.reduce((s, r) => s + r.score, 0)}/${maxScore})\n`);
|
|
317
|
-
const passedChecks = checkResults.filter((r) => r.score === r.maxScore);
|
|
318
|
-
const failedChecks = checkResults.filter((r) => r.score < r.maxScore);
|
|
319
|
-
for (const finding of actionableFindings.slice(0, 5)) {
|
|
320
|
-
const icon = finding.severity === "error" ? "\u{26A0}" : finding.severity === "warning" ? "\u{26A0}" : "\u{2139}";
|
|
321
|
-
deps.stdout(` ${icon} ${finding.message}\n`);
|
|
322
|
-
}
|
|
323
|
-
for (const check of passedChecks) {
|
|
324
|
-
deps.stdout(` \u{2713} ${check.name}\n`);
|
|
325
|
-
}
|
|
326
|
-
if (failedChecks.length > 0 && actionableFindings.length > 5) {
|
|
327
|
-
deps.stdout(` ... and ${actionableFindings.length - 5} more recommendation(s)\n`);
|
|
328
|
-
}
|
|
329
|
-
return { grade, score, findings: allFindings, checkResults };
|
|
330
|
-
}
|
|
331
|
-
catch (error) {
|
|
332
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
333
|
-
deps.stderr(`[warn] Governance health check failed: ${msg}\n`);
|
|
334
|
-
return null;
|
|
335
|
-
}
|
|
336
|
-
}
|
|
337
|
-
async function runIngestionEngine(params) {
|
|
338
|
-
const { client, projectId, options, changedFileSet, hasBaselineCommit, deps } = params;
|
|
339
|
-
deps.stdout("\nScanning repository for spec-like files...\n");
|
|
340
|
-
const discoveredBase = (0, common_1.discoverFiles)(options.repoPath).filter((file) => /\.(md|mdx|rules|cursorrules)$/i.test(file.relativePath));
|
|
341
|
-
const discovered = hasBaselineCommit
|
|
342
|
-
? discoveredBase.filter((file) => changedFileSet.has(file.relativePath))
|
|
343
|
-
: discoveredBase;
|
|
344
|
-
const scopedDiscovered = discovered.slice(0, options.maxFiles);
|
|
345
|
-
if (scopedDiscovered.length === 0) {
|
|
346
|
-
deps.stdout("No spec-like files found to analyze.\n");
|
|
347
|
-
return true;
|
|
348
|
-
}
|
|
349
|
-
if (options.dryRun) {
|
|
350
|
-
deps.stdout(`[dry-run] Found ${scopedDiscovered.length} analyzable file(s):\n`);
|
|
351
|
-
for (const file of scopedDiscovered) {
|
|
352
|
-
deps.stdout(` - ${file.relativePath}\n`);
|
|
353
|
-
}
|
|
354
|
-
deps.stdout("[dry-run] Skipping ingestion analysis and import.\n");
|
|
355
|
-
return true;
|
|
356
|
-
}
|
|
357
|
-
// Run governance health check before file-level analysis
|
|
358
|
-
const healthReport = runGovernanceHealthCheck(options.repoPath, deps);
|
|
359
|
-
deps.stdout(`Analyzing ${scopedDiscovered.length} file(s) for drift against SaaS spec storage...\n`);
|
|
360
|
-
const clientAny = client;
|
|
361
|
-
const latestSpecs = await clientAny.specification.list.query({
|
|
362
|
-
projectId,
|
|
363
|
-
limit: 1,
|
|
364
|
-
offset: 0,
|
|
365
|
-
});
|
|
366
|
-
const latestSpecContent = String(latestSpecs?.[0]?.content ?? "");
|
|
367
|
-
let scannedCount = 0;
|
|
368
|
-
let importedCount = 0;
|
|
369
|
-
let skippedDuplicateCount = 0;
|
|
370
|
-
let skippedConflictCount = 0;
|
|
371
|
-
let skippedUnapprovedCount = 0;
|
|
372
|
-
let totalScore = 0;
|
|
373
|
-
let totalFindings = 0;
|
|
374
|
-
let highSeverityFindings = 0;
|
|
375
|
-
const fileResults = await mapWithConcurrency(scopedDiscovered, INGESTION_CONCURRENCY, async (file) => {
|
|
376
|
-
let content = "";
|
|
377
|
-
try {
|
|
378
|
-
content = fs.readFileSync(file.absolutePath, "utf-8");
|
|
379
|
-
}
|
|
380
|
-
catch {
|
|
381
|
-
deps.stderr(`[warn] Could not read ${file.relativePath}, skipping.\n`);
|
|
382
|
-
return null;
|
|
383
|
-
}
|
|
384
|
-
if (content.trim().length === 0)
|
|
385
|
-
return null;
|
|
386
|
-
try {
|
|
387
|
-
const analyzed = await clientAny.ingestion.analyzeContent.mutate({
|
|
388
|
-
content,
|
|
389
|
-
filename: file.relativePath,
|
|
390
|
-
});
|
|
391
|
-
const alignment = await clientAny.ingestion.analyzeAlignment.mutate({
|
|
392
|
-
projectId,
|
|
393
|
-
result: analyzed,
|
|
394
|
-
});
|
|
395
|
-
const score = Number(alignment?.score ?? 100);
|
|
396
|
-
const findings = Array.isArray(alignment?.findings)
|
|
397
|
-
? alignment.findings
|
|
398
|
-
: [];
|
|
399
|
-
const perFileHigh = findings.filter((finding) => finding?.severity === "high").length;
|
|
400
|
-
const importDecision = decideImport(analyzed, findings, latestSpecContent);
|
|
401
|
-
const importLabel = options.importToProject
|
|
402
|
-
? importDecision.shouldImport
|
|
403
|
-
? "import=applied"
|
|
404
|
-
: `import=skipped(${importDecision.reason})`
|
|
405
|
-
: "import=disabled";
|
|
406
|
-
deps.stdout(` - ${file.relativePath}: score=${score}, findings=${findings.length}, high=${perFileHigh}, ${importLabel}\n`);
|
|
407
|
-
const result = {
|
|
408
|
-
scanned: true,
|
|
409
|
-
imported: false,
|
|
410
|
-
skippedDuplicate: false,
|
|
411
|
-
skippedConflict: false,
|
|
412
|
-
skippedUnapproved: false,
|
|
413
|
-
score,
|
|
414
|
-
findings: findings.length,
|
|
415
|
-
highFindings: perFileHigh,
|
|
416
|
-
};
|
|
417
|
-
if (options.importToProject && importDecision.shouldImport) {
|
|
418
|
-
const lowConfidence = score < 85 || perFileHigh > 0 || findings.length >= 8;
|
|
419
|
-
const approved = lowConfidence
|
|
420
|
-
? await approveLowConfidenceImport({
|
|
421
|
-
filePath: file.relativePath,
|
|
422
|
-
score,
|
|
423
|
-
findingCount: findings.length,
|
|
424
|
-
highCount: perFileHigh,
|
|
425
|
-
stderr: deps.stderr,
|
|
426
|
-
})
|
|
427
|
-
: true;
|
|
428
|
-
if (approved) {
|
|
429
|
-
await clientAny.ingestion.confirmIngestion.mutate({
|
|
430
|
-
projectId,
|
|
431
|
-
result: analyzed,
|
|
432
|
-
});
|
|
433
|
-
result.imported = true;
|
|
434
|
-
}
|
|
435
|
-
else {
|
|
436
|
-
result.skippedUnapproved = true;
|
|
437
|
-
}
|
|
438
|
-
}
|
|
439
|
-
else if (options.importToProject &&
|
|
440
|
-
importDecision.reason === "duplicate") {
|
|
441
|
-
result.skippedDuplicate = true;
|
|
442
|
-
}
|
|
443
|
-
else if (options.importToProject &&
|
|
444
|
-
importDecision.reason === "high-conflict") {
|
|
445
|
-
result.skippedConflict = true;
|
|
446
|
-
}
|
|
447
|
-
return result;
|
|
448
|
-
}
|
|
449
|
-
catch (error) {
|
|
450
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
451
|
-
deps.stderr(`[warn] Analysis failed for ${file.relativePath}: ${message}\n`);
|
|
452
|
-
return null;
|
|
453
|
-
}
|
|
454
|
-
});
|
|
455
|
-
for (const r of fileResults) {
|
|
456
|
-
if (!r || !r.scanned)
|
|
457
|
-
continue;
|
|
458
|
-
scannedCount += 1;
|
|
459
|
-
totalScore += r.score;
|
|
460
|
-
totalFindings += r.findings;
|
|
461
|
-
highSeverityFindings += r.highFindings;
|
|
462
|
-
if (r.imported)
|
|
463
|
-
importedCount += 1;
|
|
464
|
-
if (r.skippedDuplicate)
|
|
465
|
-
skippedDuplicateCount += 1;
|
|
466
|
-
if (r.skippedConflict)
|
|
467
|
-
skippedConflictCount += 1;
|
|
468
|
-
if (r.skippedUnapproved)
|
|
469
|
-
skippedUnapprovedCount += 1;
|
|
470
|
-
}
|
|
471
|
-
if (scannedCount > 0) {
|
|
472
|
-
const averageScore = Math.round(totalScore / scannedCount);
|
|
473
|
-
deps.stdout("\nDrift report summary:\n");
|
|
474
|
-
deps.stdout(` Files analyzed : ${scannedCount}\n`);
|
|
475
|
-
deps.stdout(` Avg score : ${averageScore}\n`);
|
|
476
|
-
deps.stdout(` Findings : ${totalFindings}\n`);
|
|
477
|
-
deps.stdout(` High severity : ${highSeverityFindings}\n`);
|
|
478
|
-
if (healthReport) {
|
|
479
|
-
deps.stdout(` Gov. health : ${healthReport.grade} (${healthReport.score}/100)\n`);
|
|
480
|
-
}
|
|
481
|
-
if (options.importToProject) {
|
|
482
|
-
deps.stdout(` Imported : ${importedCount}\n`);
|
|
483
|
-
deps.stdout(` Skipped (dup) : ${skippedDuplicateCount}\n`);
|
|
484
|
-
deps.stdout(` Skipped (conf) : ${skippedConflictCount}\n`);
|
|
485
|
-
deps.stdout(` Skipped (approval) : ${skippedUnapprovedCount}\n`);
|
|
486
|
-
}
|
|
487
|
-
else {
|
|
488
|
-
deps.stdout(" Imported : 0 (use --import-to-project to apply updates to SaaS specs/decisions)\n");
|
|
489
|
-
}
|
|
490
|
-
}
|
|
491
|
-
else {
|
|
492
|
-
deps.stdout("No readable files analyzed.\n");
|
|
493
|
-
}
|
|
494
|
-
return true;
|
|
495
|
-
}
|
|
496
|
-
async function runAcpEngine(params) {
|
|
497
|
-
const { projectId, options, organizationId, authToken, changedFileSet, hasBaselineCommit, commitContext, deps } = params;
|
|
498
|
-
deps.stdout("\nRunning ACP repo analysis for missing/conflicting specs...\n");
|
|
499
|
-
deps.stdout("\nScanning repository for analysis...\n");
|
|
500
|
-
const discoveredAll = (0, common_1.discoverFiles)(options.repoPath);
|
|
501
|
-
const discovered = hasBaselineCommit
|
|
502
|
-
? discoveredAll.filter((file) => changedFileSet.has(file.relativePath))
|
|
503
|
-
: discoveredAll;
|
|
504
|
-
if (discovered.length > 0) {
|
|
505
|
-
printDiscoveredSummary(discovered, deps.stdout);
|
|
506
|
-
}
|
|
507
|
-
if (discovered.length === 0) {
|
|
508
|
-
deps.stdout("No changed files matched discovery filters for ACP analysis.\n");
|
|
509
|
-
return true;
|
|
510
|
-
}
|
|
511
|
-
if (options.dryRun) {
|
|
512
|
-
deps.stdout("\n[dry-run] Skipping ACP analysis.\n");
|
|
513
|
-
return true;
|
|
514
|
-
}
|
|
515
|
-
deps.stdout("\nResolving AI agent...\n");
|
|
516
|
-
const prompt = buildSyncAnalysisPrompt(projectId, options.repoPath, discovered, commitContext, organizationId);
|
|
517
|
-
const result = await (0, common_1.runAnalysisWithAgent)({
|
|
518
|
-
apiUrl: options.apiUrl,
|
|
519
|
-
agentName: options.agent,
|
|
520
|
-
prompt,
|
|
521
|
-
repoPath: options.repoPath,
|
|
522
|
-
mcpUrl: options.mcpUrl,
|
|
523
|
-
authToken,
|
|
524
|
-
organizationId,
|
|
525
|
-
requiredTools: [
|
|
526
|
-
"spekn_spec_list",
|
|
527
|
-
"spekn_spec_update",
|
|
528
|
-
"spekn_decision_update",
|
|
529
|
-
],
|
|
530
|
-
stdout: deps.stdout,
|
|
531
|
-
stderr: deps.stderr,
|
|
532
|
-
debug: options.debug,
|
|
533
|
-
acpTimeoutMs: options.acpTimeoutMs,
|
|
534
|
-
});
|
|
535
|
-
if (result.error) {
|
|
536
|
-
const errorMessage = typeof result.error === "string"
|
|
537
|
-
? result.error
|
|
538
|
-
: (() => {
|
|
539
|
-
try {
|
|
540
|
-
return JSON.stringify(result.error);
|
|
541
|
-
}
|
|
542
|
-
catch {
|
|
543
|
-
return String(result.error);
|
|
544
|
-
}
|
|
545
|
-
})();
|
|
546
|
-
deps.stderr(`\nAgent error: ${errorMessage}\n`);
|
|
547
|
-
return false;
|
|
548
|
-
}
|
|
549
|
-
const reviewDecision = await requestReviewNotesApproval({
|
|
550
|
-
analysisText: result.text,
|
|
551
|
-
stderr: deps.stderr,
|
|
552
|
-
});
|
|
553
|
-
if (reviewDecision === "cancel") {
|
|
554
|
-
deps.stderr("Sync cancelled by reviewer due to ACP low-confidence review notes.\n");
|
|
555
|
-
return false;
|
|
556
|
-
}
|
|
557
|
-
deps.stdout("\nSync ACP analysis complete.\n");
|
|
558
|
-
return true;
|
|
559
|
-
}
|
|
560
|
-
/**
 * Prints the `repo sync` usage/help text.
 *
 * @param {(text: string) => void} stderr - Writer for the help output. Help is
 *   sent to stderr (not stdout), presumably so stdout stays reserved for
 *   machine-readable command output — TODO confirm against sibling commands.
 */
function printHelp(stderr) {
    stderr(`
repo sync - Sync git repository metadata with Spekn

USAGE:
  spekn repo sync --project-id <uuid> [options]

OPTIONS:
  --project-id <uuid>       Project ID that owns the repository (optional if .spekn/context is present)
  --analyze                 Run ingestion drift analysis after syncing metadata (default: true)
  --no-analyze              Skip ingestion drift analysis
  --import-to-project       Import analyzed files with novel content into project specs/decisions
  --max-files <n>           Maximum files to analyze during drift scan (default: 50)
  --analysis-engine <mode>  Analysis mode: ingestion | acp | both (default: both)
  --agent <name>            ACP agent for prompt analysis (codex, claude, opencode, ...)
  --acp-timeout <ms>        ACP session timeout in milliseconds (0 = infinite, default: 0)
  --path <dir>              Repository root path (default: current directory)
  --api-url <url>           API base URL (default: SPEKN_API_URL or https://app.spekn.com)
  --help                    Show this help message

ENVIRONMENT:
  SPEKN_API_URL             API base URL
  SPEKN_AUTH_TOKEN          Bearer token for authentication
  SPEKN_ORGANIZATION_ID     Organization ID header
  ACP_AGENT_TIMEOUT_MS      ACP session timeout in milliseconds (default: 0 = infinite)

DESCRIPTION:
  Reads the 'origin' remote URL from the current git repository, looks up the
  matching registered repository in the project, and updates its name and
  default branch to match the local git state.

  With analysis enabled (default), repo sync runs:
  - ingestion drift scan (local files vs SaaS specs), and
  - ACP prompt analysis for missing/conflicting specs and decisions.
  Use --analysis-engine to limit to one mode.

EXAMPLES:
  spekn repo sync --project-id 11111111-1111-4111-8111-111111111111
  spekn repo sync --project-id 11111111-1111-4111-8111-111111111111 --analyze
  spekn repo sync --project-id 11111111-1111-4111-8111-111111111111 --import-to-project
`);
}
|
|
602
|
-
/**
 * Parses repo-sync-specific CLI flags on top of the shared common flags.
 *
 * Shared flags (e.g. --path, --api-url, --help) are consumed first by
 * `common_1.parseCommonFlag`; anything it does not recognize is matched
 * against the sync-specific flags below. Unknown arguments are silently
 * skipped rather than rejected.
 *
 * Fix/improvement: the validation logic for `--max-files value` vs
 * `--max-files=value` (and likewise for `--analysis-engine`) was duplicated
 * verbatim; it is now factored into local helpers so both spellings cannot
 * drift apart. Behavior is unchanged.
 *
 * @param {string[]} args - Raw CLI arguments (already stripped of node/script).
 * @returns Finalized options object from `common_1.finalizeOptions`.
 */
function parseArgs(args) {
    const opts = {
        ...(0, common_1.commonDefaults)(true),
        importToProject: false,
        maxFiles: 50,
        analysisEngine: "both",
    };
    // Accept only positive finite numbers; floor and cap at 500 files.
    // Invalid values are ignored (the default of 50 stands).
    const applyMaxFiles = (raw) => {
        const parsed = Number(raw);
        if (Number.isFinite(parsed) && parsed > 0) {
            opts.maxFiles = Math.min(500, Math.floor(parsed));
        }
    };
    // Accept only the three supported engine modes (case-insensitive);
    // anything else is ignored and the default "both" stands.
    const applyAnalysisEngine = (raw) => {
        const value = raw.toLowerCase();
        if (value === "ingestion" || value === "acp" || value === "both") {
            opts.analysisEngine = value;
        }
    };
    for (let i = 0; i < args.length;) {
        const consumed = (0, common_1.parseCommonFlag)(args, i, opts);
        if (consumed > 0) {
            i += consumed;
            continue;
        }
        const arg = args[i];
        if (arg === "--import-to-project") {
            opts.importToProject = true;
            i += 1;
            continue;
        }
        // Two-token form: --max-files <n>
        if (arg === "--max-files" && args[i + 1]) {
            applyMaxFiles(args[i + 1]);
            i += 2;
            continue;
        }
        // Inline form: --max-files=<n>
        if (arg?.startsWith("--max-files=")) {
            applyMaxFiles(arg.slice("--max-files=".length));
            i += 1;
            continue;
        }
        // Two-token form: --analysis-engine <mode>
        if (arg === "--analysis-engine" && args[i + 1]) {
            applyAnalysisEngine(args[i + 1]);
            i += 2;
            continue;
        }
        // Inline form: --analysis-engine=<mode>
        if (arg?.startsWith("--analysis-engine=")) {
            applyAnalysisEngine(arg.slice("--analysis-engine=".length));
            i += 1;
            continue;
        }
        i += 1; // skip unknown argument
    }
    return (0, common_1.finalizeOptions)(opts);
}
|
|
657
|
-
// ── Main ────────────────────────────────────────────────────────────
|
|
658
|
-
/**
 * Entry point for `spekn repo sync`.
 *
 * Phase 1 syncs git metadata (repo name, default branch) for the registered
 * repository matching the local 'origin' remote. Phase 2 optionally runs
 * drift analysis (ingestion engine, ACP engine, or both) scoped to files
 * changed since the last successful sync checkpoint.
 *
 * @param {string[]} args - CLI arguments after the subcommand.
 * @param {object} deps - Injected I/O dependencies (stdout/stderr writers,
 *   git helpers); defaults to `common_1.defaultDeps` for production use.
 * @returns {Promise<number>} Process exit code: 0 on success, 1 on failure.
 */
async function runRepoSyncCli(args, deps = common_1.defaultDeps) {
    try {
        const options = parseArgs(args);
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: "info",
            message: "Starting repo sync",
            details: {
                repoPath: options.repoPath,
                analyze: options.analyze,
                apiUrl: options.apiUrl,
            },
        });
        // Resolves credentials and project context; may fall back to the
        // .spekn/context file when --project-id was not given (per help text).
        const { authToken, organizationId, projectId } = await (0, common_1.resolveAuth)(deps, {
            projectId: options.projectId,
            repoPath: options.repoPath,
        });
        // ── Phase 1: Sync repository metadata ──────────────────────────
        const git = (0, common_1.readGitMetadata)(options.repoPath, deps);
        if (!git)
            return 1; // readGitMetadata is expected to have reported the error itself — TODO confirm
        deps.stdout(`Syncing repository "${git.name}" (${git.remoteUrl})\n`);
        deps.stdout(` Default branch : ${git.defaultBranch}\n`);
        deps.stdout(` Project : ${projectId}\n`);
        const client = (0, common_1.createApiClient)(options.apiUrl, authToken, organizationId);
        // Find matching repository by exact remote-URL equality among the
        // first 100 registered repos (no pagination beyond offset 0).
        const repos =
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        await client.gitRepository.list.query({
            projectId,
            limit: 100,
            offset: 0,
        });
        const match = repos.find((r) => r.repositoryUrl === git.remoteUrl);
        if (!match) {
            deps.stderr(`Error: No registered repository found for URL "${git.remoteUrl}" in project ${projectId}.\n` +
                "Use 'spekn repo register' to register this repository first.\n");
            return 1;
        }
        // Push local git name/default-branch onto the registered repo record.
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        await client.gitRepository.update.mutate({
            projectId,
            id: match.id,
            data: { name: git.name, defaultBranch: git.defaultBranch },
        });
        // Persist project/org context locally so later commands can omit --project-id.
        (0, project_context_1.persistProjectContextWithoutRepoPath)(options.repoPath, {
            projectId,
            organizationId: organizationId || undefined,
        });
        deps.stdout(`Repository synced successfully. ID: ${match.id}\n`);
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: "info",
            message: "Repository metadata synced",
            details: { projectId, repositoryId: match.id },
        });
        // Determine the analysis scope: everything changed between the last
        // checkpointed HEAD commit and the current HEAD. No checkpoint means
        // a full-repository scan.
        const headCommit = readHeadCommit(options.repoPath, deps);
        const previousState = loadRepoSyncState(options.repoPath, projectId);
        const lastSyncCommit = previousState.lastHeadCommit;
        const hasBaselineCommit = typeof lastSyncCommit === "string" && lastSyncCommit.length > 0;
        const changedFilesSinceLast = hasBaselineCommit && headCommit && lastSyncCommit !== headCommit
            ? readChangedFilesSince(options.repoPath, lastSyncCommit, headCommit, deps)
            : [];
        const changedFileSet = new Set(changedFilesSinceLast);
        // Human/agent-readable summary of the commit window, embedded in the ACP prompt.
        const commitContext = hasBaselineCommit
            ? headCommit && lastSyncCommit !== headCommit
                ? `LAST_SYNC_COMMIT: ${lastSyncCommit}\nCURRENT_HEAD_COMMIT: ${headCommit}\nCHANGED_FILES_COUNT: ${changedFilesSinceLast.length}`
                : `LAST_SYNC_COMMIT: ${lastSyncCommit}\nCURRENT_HEAD_COMMIT: ${headCommit ?? "unknown"}\nCHANGED_FILES_COUNT: 0`
            : `LAST_SYNC_COMMIT: (none)\nCURRENT_HEAD_COMMIT: ${headCommit ?? "unknown"}\nCHANGED_FILES_COUNT: full-scan`;
        if (hasBaselineCommit) {
            deps.stdout(`Change scope: ${lastSyncCommit}..${headCommit ?? "HEAD"} (${changedFilesSinceLast.length} file(s))\n`);
        }
        else {
            deps.stdout("No previous sync checkpoint found. Running full repository scan.\n");
        }
        // ── Phase 2: Analysis (optional) ─────────────────────────────────
        // --no-analyze: checkpoint the current HEAD and finish successfully.
        if (!options.analyze) {
            if (headCommit) {
                saveRepoSyncState(options.repoPath, projectId, organizationId, {
                    lastHeadCommit: headCommit,
                    syncedAt: new Date().toISOString(),
                });
            }
            return 0;
        }
        // Nothing changed since the last checkpoint: analysis would be a no-op.
        if (hasBaselineCommit && changedFilesSinceLast.length === 0) {
            deps.stdout("No changes detected since last sync commit. Skipping analysis.\n");
            if (headCommit) {
                saveRepoSyncState(options.repoPath, projectId, organizationId, {
                    lastHeadCommit: headCommit,
                    syncedAt: new Date().toISOString(),
                });
            }
            return 0;
        }
        const engine = options.analysisEngine;
        let ingestionOk = true;
        let acpOk = true;
        // Coverage gate: if code changed but no spec/decision docs did,
        // ask the reviewer (via the interaction controller) whether to proceed.
        if (hasBaselineCommit && changedFilesSinceLast.length > 0) {
            const changedCodeFiles = changedFilesSinceLast.filter(looksLikeCodeFile);
            const changedSpecDecisionFiles = changedFilesSinceLast.filter(looksLikeSpecOrDecisionUpdate);
            deps.stdout(`Changed scope review: code=${changedCodeFiles.length}, spec/decision-docs=${changedSpecDecisionFiles.length}\n`);
            if (changedCodeFiles.length > 0 &&
                changedSpecDecisionFiles.length === 0) {
                deps.stderr("[warn] Code changed since last sync but no spec/decision documentation files changed.\n");
                const selected = await (0, interaction_1.requestSelectionFromController)({
                    title: "Spec/decision coverage check",
                    message: "Code changed since last sync but no spec/decision docs changed. Continue sync anyway?",
                    options: [
                        { value: "continue", label: "Continue sync" },
                        { value: "cancel", label: "Cancel sync for manual review" },
                    ],
                    allowSkip: true,
                    timeoutMs: 45_000,
                });
                // Treat both an explicit cancel and a skipped/timed-out prompt as a cancel.
                if (selected === "cancel" || selected === "skip") {
                    deps.stderr("Sync cancelled by reviewer due to missing spec/decision updates.\n");
                    return 1;
                }
            }
        }
        const ingestionParams = {
            client,
            projectId,
            options,
            changedFileSet,
            hasBaselineCommit,
            deps,
        };
        // ACP analysis needs both an auth token and an organization ID;
        // without them it is skipped with a warning rather than failing hard.
        const canRunAcp = Boolean(organizationId && authToken);
        if ((engine === "acp" || engine === "both") && !canRunAcp) {
            if (!organizationId) {
                deps.stderr("Warning: No organization ID. Skipping ACP analysis. Re-run 'spekn auth login' or set SPEKN_ORGANIZATION_ID.\n");
            }
            else {
                deps.stderr("Warning: Not authenticated. Skipping ACP analysis. Run 'spekn auth login' first.\n");
            }
            acpOk = false;
        }
        const acpParams = (engine === "acp" || engine === "both") && canRunAcp
            ? {
                projectId,
                options: options,
                organizationId: organizationId,
                authToken: authToken,
                changedFileSet,
                hasBaselineCommit,
                commitContext,
                deps,
            }
            : null;
        // "both" runs the two engines concurrently; otherwise run the one selected.
        if (engine === "both" && acpParams) {
            const [ingestionResult, acpResult] = await Promise.all([
                runIngestionEngine(ingestionParams),
                runAcpEngine({ ...acpParams, options: { ...acpParams.options, acpTimeoutMs: options.acpTimeoutMs } }),
            ]);
            ingestionOk = ingestionResult;
            acpOk = acpResult;
        }
        else if (engine === "ingestion" || (engine === "both" && !acpParams)) {
            ingestionOk = await runIngestionEngine(ingestionParams);
        }
        else if (engine === "acp" && acpParams) {
            acpOk = await runAcpEngine({ ...acpParams, options: { ...acpParams.options, acpTimeoutMs: options.acpTimeoutMs } });
        }
        // In "both" mode either engine succeeding counts as overall success;
        // in single-engine mode only that engine's result matters.
        const analysisOk = options.analysisEngine === "both"
            ? ingestionOk || acpOk
            : options.analysisEngine === "ingestion"
                ? ingestionOk
                : acpOk;
        if (options.analysisEngine === "both" && ingestionOk && !acpOk) {
            deps.stderr("Warning: ACP analysis failed, but ingestion sync succeeded. Treating sync as completed with warnings.\n");
        }
        const exitCode = analysisOk ? 0 : 1;
        // Only advance the sync checkpoint on success, so a failed analysis
        // is retried over the same commit range next time.
        if (exitCode === 0 && headCommit) {
            saveRepoSyncState(options.repoPath, projectId, organizationId, {
                lastHeadCommit: headCommit,
                syncedAt: new Date().toISOString(),
            });
        }
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: exitCode === 0 ? "info" : "error",
            message: "Repo sync completed",
            details: {
                exitCode,
                analyzed: options.analyze,
                projectId,
                analysisEngine: options.analysisEngine,
                ingestionOk,
                acpOk,
            },
        });
        return exitCode;
    }
    catch (error) {
        // --help is signalled via an exception from the common flag parser.
        if (error instanceof common_1.HelpRequestedError) {
            printHelp(deps.stderr);
            return 0;
        }
        deps.stderr(`Error: ${error instanceof Error ? error.message : String(error)}\n`);
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: "error",
            message: error instanceof Error ? error.message : String(error),
        });
        return 1;
    }
}
|
|
867
|
-
/**
 * Process-level entrypoint: runs the repo-sync CLI against the real argv
 * and terminates the process with the command's exit code.
 */
async function main() {
    const code = await runRepoSyncCli(process.argv.slice(2));
    process.exit(code);
}
// Self-execute only when run directly (`node main.js`), not when required
// as a module by another entrypoint or test.
if (require.main === module) {
    void main();
}
|