@spekn/cli 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159) hide show
  1. package/dist/__tests__/export-cli.test.d.ts +1 -0
  2. package/dist/__tests__/export-cli.test.js +70 -0
  3. package/dist/__tests__/tui-args-policy.test.d.ts +1 -0
  4. package/dist/__tests__/tui-args-policy.test.js +50 -0
  5. package/dist/acp-S2MHZOAD.mjs +23 -0
  6. package/dist/acp-UCCI44JY.mjs +25 -0
  7. package/dist/auth/credentials-store.d.ts +2 -0
  8. package/dist/auth/credentials-store.js +5 -0
  9. package/dist/auth/device-flow.d.ts +36 -0
  10. package/dist/auth/device-flow.js +189 -0
  11. package/dist/auth/jwt.d.ts +1 -0
  12. package/dist/auth/jwt.js +6 -0
  13. package/dist/auth/session.d.ts +67 -0
  14. package/dist/auth/session.js +86 -0
  15. package/dist/auth-login.d.ts +34 -0
  16. package/dist/auth-login.js +202 -0
  17. package/dist/auth-logout.d.ts +25 -0
  18. package/dist/auth-logout.js +115 -0
  19. package/dist/auth-status.d.ts +24 -0
  20. package/dist/auth-status.js +109 -0
  21. package/dist/backlog-generate.d.ts +11 -0
  22. package/dist/backlog-generate.js +308 -0
  23. package/dist/backlog-health.d.ts +11 -0
  24. package/dist/backlog-health.js +287 -0
  25. package/dist/bridge-login.d.ts +40 -0
  26. package/dist/bridge-login.js +277 -0
  27. package/dist/chunk-3PAYRI4G.mjs +2428 -0
  28. package/dist/chunk-M4CS3A25.mjs +2426 -0
  29. package/dist/commands/auth/login.d.ts +30 -0
  30. package/dist/commands/auth/login.js +164 -0
  31. package/dist/commands/auth/logout.d.ts +25 -0
  32. package/dist/commands/auth/logout.js +115 -0
  33. package/dist/commands/auth/status.d.ts +24 -0
  34. package/dist/commands/auth/status.js +109 -0
  35. package/dist/commands/backlog/generate.d.ts +11 -0
  36. package/dist/commands/backlog/generate.js +308 -0
  37. package/dist/commands/backlog/health.d.ts +11 -0
  38. package/dist/commands/backlog/health.js +287 -0
  39. package/dist/commands/bridge/login.d.ts +36 -0
  40. package/dist/commands/bridge/login.js +258 -0
  41. package/dist/commands/export.d.ts +35 -0
  42. package/dist/commands/export.js +485 -0
  43. package/dist/commands/marketplace-export.d.ts +21 -0
  44. package/dist/commands/marketplace-export.js +214 -0
  45. package/dist/commands/project-clean.d.ts +1 -0
  46. package/dist/commands/project-clean.js +126 -0
  47. package/dist/commands/repo/common.d.ts +105 -0
  48. package/dist/commands/repo/common.js +775 -0
  49. package/dist/commands/repo/detach.d.ts +2 -0
  50. package/dist/commands/repo/detach.js +120 -0
  51. package/dist/commands/repo/register.d.ts +21 -0
  52. package/dist/commands/repo/register.js +175 -0
  53. package/dist/commands/repo/sync.d.ts +22 -0
  54. package/dist/commands/repo/sync.js +873 -0
  55. package/dist/commands/skills-import-local.d.ts +16 -0
  56. package/dist/commands/skills-import-local.js +352 -0
  57. package/dist/commands/spec/drift-check.d.ts +3 -0
  58. package/dist/commands/spec/drift-check.js +186 -0
  59. package/dist/commands/spec/frontmatter.d.ts +11 -0
  60. package/dist/commands/spec/frontmatter.js +219 -0
  61. package/dist/commands/spec/lint.d.ts +11 -0
  62. package/dist/commands/spec/lint.js +499 -0
  63. package/dist/commands/spec/parse.d.ts +11 -0
  64. package/dist/commands/spec/parse.js +162 -0
  65. package/dist/export.d.ts +35 -0
  66. package/dist/export.js +485 -0
  67. package/dist/index.d.ts +11 -0
  68. package/dist/index.js +21 -0
  69. package/dist/main.d.ts +1 -0
  70. package/dist/main.js +115280 -0
  71. package/dist/marketplace-export.d.ts +21 -0
  72. package/dist/marketplace-export.js +214 -0
  73. package/dist/project-clean.d.ts +1 -0
  74. package/dist/project-clean.js +126 -0
  75. package/dist/project-context.d.ts +99 -0
  76. package/dist/project-context.js +376 -0
  77. package/dist/repo-common.d.ts +101 -0
  78. package/dist/repo-common.js +671 -0
  79. package/dist/repo-detach.d.ts +2 -0
  80. package/dist/repo-detach.js +102 -0
  81. package/dist/repo-ingest.d.ts +29 -0
  82. package/dist/repo-ingest.js +305 -0
  83. package/dist/repo-register.d.ts +21 -0
  84. package/dist/repo-register.js +175 -0
  85. package/dist/repo-sync.d.ts +16 -0
  86. package/dist/repo-sync.js +152 -0
  87. package/dist/resources/prompt-loader.d.ts +1 -0
  88. package/dist/resources/prompt-loader.js +62 -0
  89. package/dist/resources/prompts/README.md +21 -0
  90. package/dist/resources/prompts/prompts/repo-analysis.prompt.md +126 -0
  91. package/dist/resources/prompts/repo-analysis.prompt.md +151 -0
  92. package/dist/resources/prompts/repo-sync-analysis.prompt.md +85 -0
  93. package/dist/skills-import-local.d.ts +16 -0
  94. package/dist/skills-import-local.js +352 -0
  95. package/dist/spec-drift-check.d.ts +3 -0
  96. package/dist/spec-drift-check.js +186 -0
  97. package/dist/spec-frontmatter.d.ts +11 -0
  98. package/dist/spec-frontmatter.js +219 -0
  99. package/dist/spec-lint.d.ts +11 -0
  100. package/dist/spec-lint.js +499 -0
  101. package/dist/spec-parse.d.ts +11 -0
  102. package/dist/spec-parse.js +162 -0
  103. package/dist/stubs/dotenv.d.ts +5 -0
  104. package/dist/stubs/dotenv.js +6 -0
  105. package/dist/stubs/typeorm.d.ts +22 -0
  106. package/dist/stubs/typeorm.js +28 -0
  107. package/dist/tui/app.d.ts +7 -0
  108. package/dist/tui/app.js +122 -0
  109. package/dist/tui/args.d.ts +8 -0
  110. package/dist/tui/args.js +57 -0
  111. package/dist/tui/capabilities/policy.d.ts +7 -0
  112. package/dist/tui/capabilities/policy.js +64 -0
  113. package/dist/tui/components/frame.d.ts +8 -0
  114. package/dist/tui/components/frame.js +8 -0
  115. package/dist/tui/components/status-bar.d.ts +8 -0
  116. package/dist/tui/components/status-bar.js +8 -0
  117. package/dist/tui/index.d.ts +2 -0
  118. package/dist/tui/index.js +23 -0
  119. package/dist/tui/index.mjs +7563 -0
  120. package/dist/tui/keymap/use-global-keymap.d.ts +19 -0
  121. package/dist/tui/keymap/use-global-keymap.js +82 -0
  122. package/dist/tui/navigation/nav-items.d.ts +3 -0
  123. package/dist/tui/navigation/nav-items.js +18 -0
  124. package/dist/tui/screens/bridge.d.ts +8 -0
  125. package/dist/tui/screens/bridge.js +19 -0
  126. package/dist/tui/screens/decisions.d.ts +5 -0
  127. package/dist/tui/screens/decisions.js +28 -0
  128. package/dist/tui/screens/export.d.ts +5 -0
  129. package/dist/tui/screens/export.js +16 -0
  130. package/dist/tui/screens/home.d.ts +5 -0
  131. package/dist/tui/screens/home.js +33 -0
  132. package/dist/tui/screens/locked.d.ts +5 -0
  133. package/dist/tui/screens/locked.js +9 -0
  134. package/dist/tui/screens/specs.d.ts +5 -0
  135. package/dist/tui/screens/specs.js +31 -0
  136. package/dist/tui/services/client.d.ts +1 -0
  137. package/dist/tui/services/client.js +18 -0
  138. package/dist/tui/services/context-service.d.ts +19 -0
  139. package/dist/tui/services/context-service.js +246 -0
  140. package/dist/tui/shared-enums.d.ts +16 -0
  141. package/dist/tui/shared-enums.js +19 -0
  142. package/dist/tui/state/use-app-state.d.ts +35 -0
  143. package/dist/tui/state/use-app-state.js +177 -0
  144. package/dist/tui/types.d.ts +77 -0
  145. package/dist/tui/types.js +2 -0
  146. package/dist/tui-bundle.d.ts +1 -0
  147. package/dist/tui-bundle.js +5 -0
  148. package/dist/tui-entry.mjs +1407 -0
  149. package/dist/utils/cli-runtime.d.ts +5 -0
  150. package/dist/utils/cli-runtime.js +22 -0
  151. package/dist/utils/help-error.d.ts +7 -0
  152. package/dist/utils/help-error.js +14 -0
  153. package/dist/utils/interaction.d.ts +19 -0
  154. package/dist/utils/interaction.js +93 -0
  155. package/dist/utils/structured-log.d.ts +7 -0
  156. package/dist/utils/structured-log.js +112 -0
  157. package/dist/utils/trpc-url.d.ts +4 -0
  158. package/dist/utils/trpc-url.js +15 -0
  159. package/package.json +59 -0
@@ -0,0 +1,152 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ /**
4
+ * repo-sync CLI command
5
+ *
6
+ * Syncs metadata for the current git repository with Spekn.
7
+ * Looks up the repo by its remote URL and updates name and default branch.
8
+ * Optionally runs AI analysis (same as repo register --analyze).
9
+ * Must be run from inside a local git clone.
10
+ *
11
+ * Usage: spekn repo sync --project-id <uuid> [--analyze] [--agent <name>] [--api-url <url>]
12
+ */
13
+ Object.defineProperty(exports, "__esModule", { value: true });
14
+ exports.runRepoSyncCli = runRepoSyncCli;
15
+ exports.main = main;
16
+ exports.parseArgs = parseArgs;
17
+ const repo_common_1 = require("./repo-common");
18
+ const structured_log_1 = require("./utils/structured-log");
19
+ // ── Help & Args ─────────────────────────────────────────────────────
20
/**
 * Writes the `repo sync` usage/help text to the provided writer.
 *
 * @param {(text: string) => void} stderr Line writer for help output
 *   (injected so tests can capture it; named for the stream it targets).
 */
function printHelp(stderr) {
    stderr(`
repo sync - Sync git repository metadata with Spekn

USAGE:
  spekn repo sync --project-id <uuid> [options]

OPTIONS:
  --project-id <uuid>  Project ID that owns the repository (optional if .spekn/context is present)
  --analyze            Run AI analysis after syncing metadata
  --agent <name>       Force a specific agent (claude, codex, opencode, etc.)
  --path <dir>         Repository root path (default: current directory)
  --dry-run            Discover files only, skip AI analysis
  --mcp-url <url>      MCP HTTP server URL (default: MCP_HTTP_URL or https://app.spekn.com/mcp)
  --api-url <url>      API base URL (default: SPEKN_API_URL or https://app.spekn.com)
  --debug              Show detailed debug output (tokens, HTTP exchanges)
  --help               Show this help message

ENVIRONMENT:
  SPEKN_API_URL          API base URL
  SPEKN_AUTH_TOKEN       Bearer token for authentication
  SPEKN_ORGANIZATION_ID  Organization ID header
  MCP_HTTP_URL           MCP HTTP server URL (default: https://app.spekn.com/mcp)
  AGENT_ARGS             Override agent CLI args (skips ACP registry resolution)

DESCRIPTION:
  Reads the 'origin' remote URL from the current git repository, looks up the
  matching registered repository in the project, and updates its name and
  default branch to match the local git state.

  With --analyze, also runs AI-powered analysis to discover specs, assess
  governance, and create improvement specifications (same as repo register).

EXAMPLES:
  spekn repo sync --project-id 11111111-1111-4111-8111-111111111111
  spekn repo sync --project-id 11111111-1111-4111-8111-111111111111 --analyze
  spekn repo sync --project-id 11111111-1111-4111-8111-111111111111 --analyze --agent claude
`);
}
59
/**
 * Parses raw CLI arguments into finalized repo-sync options.
 *
 * Delegates flag recognition to the shared repo-common parser; tokens it
 * does not recognize are silently skipped rather than rejected.
 *
 * @param {string[]} args Raw CLI arguments (after the command name).
 * @returns Finalized options object from repo-common.
 */
function parseArgs(args) {
    const opts = (0, repo_common_1.commonDefaults)(false);
    let index = 0;
    while (index < args.length) {
        const consumed = (0, repo_common_1.parseCommonFlag)(args, index, opts);
        // A recognized flag consumes one or more tokens; anything else
        // advances by exactly one (unknown tokens are ignored).
        index += consumed > 0 ? consumed : 1;
    }
    return (0, repo_common_1.finalizeOptions)(opts);
}
71
+ // ── Main ────────────────────────────────────────────────────────────
72
/**
 * Runs the `repo sync` command.
 *
 * Phase 1 reads git metadata from the local clone, finds the registered
 * repository with the same remote URL in the project, and updates its name
 * and default branch. Phase 2 (only with --analyze) runs the shared AI
 * analysis phase from repo-common.
 *
 * @param {string[]} args Raw CLI arguments (after the command name).
 * @param {object} deps Injected I/O dependencies (stdout/stderr writers,
 *   git/auth helpers); defaults to the real implementations.
 * @returns {Promise<number>} Process exit code: 0 on success, 1 on failure.
 */
async function runRepoSyncCli(args, deps = repo_common_1.defaultDeps) {
    try {
        const options = parseArgs(args);
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: "info",
            message: "Starting repo sync",
            details: { repoPath: options.repoPath, analyze: options.analyze, apiUrl: options.apiUrl },
        });
        const { authToken, organizationId, projectId } = await (0, repo_common_1.resolveAuth)(deps, {
            projectId: options.projectId,
            repoPath: options.repoPath,
        });
        // ── Phase 1: Sync repository metadata ──────────────────────────
        const git = (0, repo_common_1.readGitMetadata)(options.repoPath, deps);
        if (!git)
            return 1; // readGitMetadata reports its own error to the user
        deps.stdout(`Syncing repository "${git.name}" (${git.remoteUrl})\n`);
        deps.stdout(` Default branch : ${git.defaultBranch}\n`);
        deps.stdout(` Project : ${projectId}\n`);
        const client = (0, repo_common_1.createApiClient)(options.apiUrl, authToken, organizationId);
        // Find the matching repository by remote URL. The list endpoint is
        // paginated (max 100 items per page), so walk every page — a single
        // { limit: 100, offset: 0 } query would silently miss repositories
        // registered beyond the first 100.
        const repos = [];
        for (let offset = 0;;) {
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            const page = await client.gitRepository.list.query({
                projectId,
                limit: 100,
                offset,
            });
            repos.push(...page);
            if (page.length < 100)
                break; // short page => no more results
            offset += page.length;
        }
        const match = repos.find((r) => r.repositoryUrl === git.remoteUrl);
        if (!match) {
            deps.stderr(`Error: No registered repository found for URL "${git.remoteUrl}" in project ${projectId}.\n` +
                "Use 'spekn repo register' to register this repository first.\n");
            return 1;
        }
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        await client.gitRepository.update.mutate({
            projectId,
            id: match.id,
            data: { name: git.name, defaultBranch: git.defaultBranch },
        });
        deps.stdout(`Repository synced successfully. ID: ${match.id}\n`);
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: "info",
            message: "Repository metadata synced",
            details: { projectId, repositoryId: match.id },
        });
        // ── Phase 2: AI analysis (optional) ─────────────────────────────
        if (!options.analyze)
            return 0;
        const exitCode = await (0, repo_common_1.runAnalysisPhase)({ ...options, projectId }, authToken ?? "", deps, organizationId);
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: exitCode === 0 ? "info" : "error",
            message: "Repo sync completed",
            details: { exitCode, analyzed: options.analyze, projectId },
        });
        return exitCode;
    }
    catch (error) {
        // --help is signaled via an exception from the shared flag parser;
        // it is a successful exit, not an error.
        if (error instanceof repo_common_1.HelpRequestedError) {
            printHelp(deps.stderr);
            return 0;
        }
        deps.stderr(`Error: ${error instanceof Error ? error.message : String(error)}\n`);
        (0, structured_log_1.appendCliStructuredLog)({
            source: "cli.repo.sync",
            level: "error",
            message: error instanceof Error ? error.message : String(error),
        });
        return 1;
    }
}
146
/**
 * CLI entry point: runs repo sync against process argv and exits the
 * process with the resulting code.
 */
async function main() {
    process.exit(await runRepoSyncCli(process.argv.slice(2)));
}
// Run only when this file is executed directly, not when required as a module.
if (require.main === module) {
    void main();
}
@@ -0,0 +1 @@
1
+ export declare function loadPromptTemplate(name: string): string;
@@ -0,0 +1,62 @@
1
+ "use strict";
2
// TypeScript compiler-emitted module interop helpers (generated for
// `import * as ns from "..."` under CommonJS). Do not edit by hand.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.loadPromptTemplate = loadPromptTemplate;
37
+ const fs = __importStar(require("node:fs"));
38
+ const path = __importStar(require("node:path"));
39
+ const promptCache = new Map();
40
/**
 * Resolves a prompt template name to an absolute file path.
 *
 * Probes the bundled dist layouts first, then falls back to the source
 * tree (for running without a build), returning the first candidate that
 * is a regular file.
 *
 * @param {string} name Prompt file name, e.g. "repo-analysis.prompt.md".
 * @returns {string} Absolute path to the prompt file.
 * @throws {Error} When the prompt is not found at any candidate location.
 */
function resolvePromptPath(name) {
    const candidates = [
        path.resolve(__dirname, "resources", "prompts", name),
        path.resolve(__dirname, "prompts", name),
        path.resolve(__dirname, "..", "resources", "prompts", name),
        // Dev fallback: invoked from the repository root without a build.
        path.resolve(process.cwd(), "packages", "cli", "src", "resources", "prompts", name),
    ];
    for (const candidate of candidates) {
        // Single statSync instead of existsSync + statSync: one filesystem
        // hit and no check-then-use race between the two calls.
        const stats = fs.statSync(candidate, { throwIfNoEntry: false });
        if (stats !== undefined && stats.isFile()) {
            return candidate;
        }
    }
    throw new Error(`Prompt resource "${name}" not found. Looked in: ${candidates.join(", ")}`);
}
54
/**
 * Loads a prompt template by file name, with in-memory caching.
 *
 * @param {string} name Prompt file name, e.g. "repo-analysis.prompt.md".
 * @returns {string} Raw template contents (placeholders unreplaced).
 * @throws {Error} When the prompt cannot be located on disk.
 */
function loadPromptTemplate(name) {
    const cached = promptCache.get(name);
    // Compare against undefined explicitly: a cached empty-string template
    // is falsy, and a truthiness check would re-read it on every call.
    if (cached !== undefined)
        return cached;
    const promptPath = resolvePromptPath(name);
    const content = fs.readFileSync(promptPath, "utf-8");
    promptCache.set(name, content);
    return content;
}
@@ -0,0 +1,21 @@
1
+ # CLI Prompt Resources
2
+
3
+ This folder contains editable prompt templates used by the Spekn CLI.
4
+
5
+ ## Files
6
+
7
+ - `repo-analysis.prompt.md`: prompt used by `spekn repo register --analyze`.
8
+ This is the ingestion/bootstrap-oriented analysis prompt.
9
+ - `repo-sync-analysis.prompt.md`: prompt used by `spekn repo sync --analysis-engine acp|both`.
10
+ This prompt is sync-specific and focuses on drift/update decisions without duplicating existing specs.
11
+
12
+ ## Placeholders
13
+
14
+ `repo-analysis.prompt.md` supports token replacement:
15
+
16
+ - `{{PROJECT_ID}}`
17
+ - `{{ORG_INSTRUCTION}}`
18
+ - `{{REPO_PATH}}`
19
+ - `{{FILE_LIST}}`
20
+
21
+ Keep token names unchanged unless you also update the renderer in `packages/cli/src/commands/repo/common.ts`.
@@ -0,0 +1,126 @@
1
+ You are analyzing a repository to register it with a Spekn project.
2
+ You have been given Spekn MCP tools to create specifications and decisions.
3
+
4
+ PROJECT ID: {{PROJECT_ID}}{{ORG_INSTRUCTION}}
5
+ REPO PATH: {{REPO_PATH}}
6
+
7
+ DISCOVERED FILES:
8
+ {{FILE_LIST}}
9
+
10
+ STEP 0 — VERIFY TOOLS:
11
+ Before doing anything else, list the available MCP tools to confirm you have
12
+ access to spekn_spec_create, spekn_spec_list, and spekn_decision_create. If these tools are NOT available,
13
+ output "ERROR: Spekn MCP tools not available. Cannot proceed with analysis." and stop.
14
+
15
+ STEP 1 — READ AND UNDERSTAND THE REPOSITORY:
16
+ Read the governance files (CLAUDE.md, AGENTS.md, README.md), package manifests
17
+ (package.json, Cargo.toml, pyproject.toml, etc.), and key source directories
18
+ to understand what this project does, its tech stack, and architecture.
19
+
20
+ STEP 2 — CREATE MAIN PROJECT SPECIFICATION:
21
+ Use spekn_spec_create to create a main project overview spec:
22
+ - Title: "{repo name} — Project Overview" (e.g. "linux-mdm — Project Overview")
23
+ - Content MUST start with a YAML frontmatter block:
24
+ ---
25
+ title: "{repo name} — Project Overview"
26
+ type: intent
27
+ version: "1.0.0"
28
+ status: draft
29
+ author: "repo ingestor"
30
+ tags: [overview, project]
31
+ hints:
32
+ constraints: ["Key project invariants and boundaries"]
33
+ requirements: ["Core project goals and capabilities"]
34
+ technical: ["Tech stack, architecture patterns, build system"]
35
+ guidance: ["Development conventions and contribution guidelines"]
36
+ ---
37
+ - After the frontmatter, include a comprehensive explanation of what the project does,
38
+ tech stack, directory structure, build system, development conventions, and key decisions
39
+ - This becomes the primary context for the engineering manager agent
40
+
41
+ STEP 3 — IMPORT EXISTING SPECS:
42
+ For each file categorized as "spec" in the DISCOVERED FILES list:
43
+ - Read the file content
44
+ - Use spekn_spec_create to create a Specification record
45
+ - Use the first # heading as the title (must be unique, 3-100 chars)
46
+ - If the file already has a YAML frontmatter block (---), preserve it but ensure author is set:
47
+ - author: "repo ingestor" (add it when missing)
48
+ - If no frontmatter exists, prepend one with:
49
+ - type: "capability"
50
+ - version: "1.0.0"
51
+ - author: "repo ingestor"
52
+ - hints: populate from the file content (constraints, requirements, technical, guidance)
53
+ - Set version to "1.0.0"
54
+
55
+ For each file categorized as "decision" in the DISCOVERED FILES list:
56
+ - Read the file content
57
+ - First create/import its parent specification with spekn_spec_create if needed (type: "decision", author: "repo ingestor")
58
+ - Then perform ACP DECISION DETECTION before creating records:
59
+ - Extract one normalized decision object per explicit decision/ADR entry
60
+ - Ignore vague notes that do not state a concrete choice
61
+ - If one file contains multiple explicit ADR entries, create one decision record per entry
62
+ - Use this normalized structure for each extracted decision:
63
+ {
64
+ "title": "<3-100 chars, imperative/statement form>",
65
+ "description": "<what was decided, specific and concrete>",
66
+ "rationale": "<why this option was chosen>",
67
+ "alternatives": ["<rejected option 1>", "<rejected option 2>"]
68
+ }
69
+ - Formatting requirements:
70
+ - title must NOT include prefixes like "Decision:", "ADR-001:", markdown symbols, or file names
71
+ - description and rationale must be plain text (no markdown headings/bullets)
72
+ - alternatives must be plain option strings; use [] when none are explicit
73
+ - If rationale is missing, infer a concise rationale from nearby context (trade-offs/constraints)
74
+ - Create each record with spekn_decision_create using:
75
+ - projectId: PROJECT ID
76
+ - specificationId: parent decision specification ID
77
+ - title, description, rationale, alternatives from normalized object
78
+
79
+ STEP 4 — ASSESS GOVERNANCE AND CREATE IMPROVEMENT SPECS:
80
+ Assess governance quality — does the repo have CI/CD, tests, security guidelines,
81
+ contribution guidelines, documentation structure?
82
+ For each significant gap, use spekn_spec_create to create a draft improvement spec:
83
+ - Descriptive title (e.g. "CI/CD Pipeline", "Testing Strategy")
84
+ - Content MUST start with a YAML frontmatter block:
85
+ - type: "capability" for new features, "operational" for ops/process improvements
86
+ - status: "draft"
87
+ - author: "repo ingestor"
88
+ - tags: reflecting the gap area (e.g., [ci-cd, automation] or [testing, quality])
89
+ - hints: summarize what the spec covers in each context layer
90
+ - After frontmatter, describe what the repo SHOULD have (best practices)
91
+ - Focus on quality over quantity — only create specs for real gaps
92
+
93
+ STEP 5 — FEATURE COVERAGE AND PLAN GAP ANALYSIS:
94
+ Analyze feature implementation coverage against specs/FEATURE_MAP.md and related feature docs.
95
+ - Read specs/FEATURE_MAP.md and list all rows marked "Planned", "Draft", "Active", "~95%", "pending", or "deferred"
96
+ - For each such row, verify whether the repo has:
97
+ - implementation evidence in code (routers/services/components/tests)
98
+ - a corresponding feature document in specs/features/
99
+ - an implementation plan (IMPLEMENTATION_PLAN.md) when feature status implies planning/active build
100
+ - For missing or weakly-covered items, create improvement specs with spekn_spec_create:
101
+ - Title pattern: "{repo name} — Gap: <feature>"
102
+ - type: "capability" for product/runtime gaps, "operational" for process/governance gaps
103
+ - tags must include "feature-coverage" and "plan-gap"
104
+ - hints must explicitly include missing code surfaces and missing planning artifacts
105
+ - Do not duplicate if an equivalent gap spec already exists; update existing draft instead when possible
106
+
107
+ STEP 6 — SUMMARY:
108
+ Print a summary of what you created:
109
+ - Number of specs imported from existing files
110
+ - Number of decisions created from decision files
111
+ - Number of improvement specs created
112
+ - Overall governance assessment (strengths and gaps)
113
+ - Feature coverage summary (implemented vs partial vs missing)
114
+ - Missing plan features summary
115
+ - Include a "FEATURE_COVERAGE" section in this exact line format:
116
+ - FEATURE | <id-or-name> | <status> | <evidence-or-missing>
117
+ - Include a "MISSING_PLAN_FEATURES" section in this exact line format:
118
+ - PLAN_GAP | <id-or-name> | <missing-artifact> | <recommended-spec-title>
119
+ - Include a "DECISIONS_CREATED" section in this exact line format:
120
+ - DECISION | <source-file> | <title> | <specificationId>
121
+
122
+ RULES:
123
+ - ALWAYS use Spekn MCP create tools (spekn_spec_create / spekn_decision_create) — never just describe what you would create
124
+ - Each spec title must be globally unique — prefix with the repo/project name if needed
125
+ - Do NOT create specs for governance files (CLAUDE.md, AGENTS.md, .cursorrules) — those are context, not specs
126
+ - If a tool call fails, report the error and continue with the next item
@@ -0,0 +1,151 @@
1
+ You are analyzing a repository to register it with a Spekn project.
2
+ You have been given Spekn MCP tools to create specifications and decisions.
3
+
4
+ PROJECT ID: {{PROJECT_ID}}{{ORG_INSTRUCTION}}
5
+ REPO PATH: {{REPO_PATH}}
6
+
7
+ DISCOVERED FILES:
8
+ {{FILE_LIST}}
9
+
10
+ STEP 0 — VERIFY TOOLS:
11
+ Before doing anything else, list the available MCP tools to confirm you have
12
+ access to spekn_spec_create, spekn_spec_list, and spekn_decision_create. If these tools are NOT available,
13
+ output "ERROR: Spekn MCP tools not available. Cannot proceed with analysis." and stop.
14
+
15
+ STEP 1 — READ AND UNDERSTAND THE REPOSITORY:
16
+ Read the governance files (CLAUDE.md, AGENTS.md, README.md), package manifests
17
+ (package.json, Cargo.toml, pyproject.toml, etc.), and key source directories
18
+ to understand what this project does, its tech stack, and architecture.
19
+
20
+ STEP 2 — CREATE MAIN PROJECT SPECIFICATION:
21
+ Use spekn_spec_create to create a main project overview spec:
22
+ - Title: "Project Overview" — short, no repo name prefix
23
+ - Content MUST start with a YAML frontmatter block:
24
+ ---
25
+ title: "Project Overview"
26
+ type: intent
27
+ version: "1.0.0"
28
+ status: draft
29
+ author: "repo ingestor"
30
+ tags: [overview, project]
31
+ hints:
32
+ constraints: ["Key project invariants and boundaries"]
33
+ requirements: ["Core project goals and capabilities"]
34
+ technical: ["Tech stack, architecture patterns, build system"]
35
+ guidance: ["Development conventions and contribution guidelines"]
36
+ ---
37
+ - After the frontmatter, include a comprehensive explanation of what the project does,
38
+ tech stack, directory structure, build system, development conventions, and key decisions
39
+ - This becomes the primary context for the engineering manager agent
40
+
41
+ STEP 3 — IMPORT EXISTING SPECS:
42
+ For each file categorized as "spec" in the DISCOVERED FILES list:
43
+ - Read the file content
44
+ - Use spekn_spec_create to create a Specification record
45
+ - Use the first # heading as the title (must be unique, 3-100 chars)
46
+ - If the file already has a YAML frontmatter block (---), preserve it but ensure author is set:
47
+ - author: "repo ingestor" (add it when missing)
48
+ - If no frontmatter exists, prepend one with:
49
+ - type: "capability"
50
+ - version: "1.0.0"
51
+ - author: "repo ingestor"
52
+ - hints: populate from the file content (constraints, requirements, technical, guidance)
53
+ - Set version to "1.0.0"
54
+
55
+ For each file categorized as "decision" in the DISCOVERED FILES list:
56
+ - Read the file content
57
+ - First create/import its parent specification with spekn_spec_create if needed (type: "decision", author: "repo ingestor")
58
+ - Then perform ACP DECISION DETECTION before creating records:
59
+ - Extract one normalized decision object per explicit decision/ADR entry
60
+ - Ignore vague notes that do not state a concrete choice
61
+ - If one file contains multiple explicit ADR entries, create one decision record per entry
62
+ - Use this normalized structure for each extracted decision:
63
+ {
64
+ "title": "<3-100 chars, imperative/statement form>",
65
+ "description": "<what was decided, specific and concrete>",
66
+ "rationale": "<why this option was chosen>",
67
+ "alternatives": ["<rejected option 1>", "<rejected option 2>"]
68
+ }
69
+ - Formatting requirements:
70
+ - title must NOT include prefixes like "Decision:", "ADR-001:", markdown symbols, or file names
71
+ - description and rationale must be plain text (no markdown headings/bullets)
72
+ - alternatives must be plain option strings; use [] when none are explicit
73
+ - If rationale is missing, infer a concise rationale from nearby context (trade-offs/constraints)
74
+ - Create each record with spekn_decision_create using:
75
+ - projectId: PROJECT ID
76
+ - specificationId: parent decision specification ID
77
+ - title, description, rationale, alternatives from normalized object
78
+
79
+ STEP 4 — ANALYZE PDF DOCUMENTS:
80
+ If any files categorized as "pdf" appear in the DISCOVERED FILES list:
81
+ - Check whether you have a tool that can read or extract text from PDF files
82
+ (e.g. `read_file` with PDF support, a `pdf` skill, or any PDF extraction tool)
83
+ - If you DO have PDF capability:
84
+ - Read each PDF file to extract its text content
85
+ - Determine what the document covers (architecture diagrams, requirements, RFCs,
86
+ compliance docs, contracts, design documents, etc.)
87
+ - For each PDF that contains specification-worthy content, use spekn_spec_create:
88
+ - Title: short descriptive name derived from the PDF content (NOT the filename)
89
+ - Content MUST start with a YAML frontmatter block:
90
+ - type: choose the best fit ("capability", "architectural", "intent", "operational")
91
+ - status: "draft"
92
+ - author: "repo ingestor"
93
+ - tags: ["pdf-import", ...relevant tags]
94
+ - hints: summarize key content in each layer
95
+ - After frontmatter, include the extracted content reformatted as clean markdown
96
+ - For PDFs containing decision records or ADRs, create decisions following STEP 3 rules
97
+ - If you do NOT have PDF capability:
98
+ - Log: "PDF files found but no PDF reading tool available — skipping PDF analysis"
99
+ - List the PDF files that were skipped so the user knows
100
+ - Do NOT fail the entire analysis if PDF reading is unavailable
101
+
102
+ STEP 5 — ASSESS GOVERNANCE AND CREATE IMPROVEMENT SPECS:
103
+ Assess governance quality — does the repo have CI/CD, tests, security guidelines,
104
+ contribution guidelines, documentation structure?
105
+ For each significant gap, use spekn_spec_create to create a draft improvement spec:
106
+ - Descriptive title (e.g. "CI/CD Pipeline", "Testing Strategy")
107
+ - Content MUST start with a YAML frontmatter block:
108
+ - type: "capability" for new features, "operational" for ops/process improvements
109
+ - status: "draft"
110
+ - author: "repo ingestor"
111
+ - tags: reflecting the gap area (e.g., [ci-cd, automation] or [testing, quality])
112
+ - hints: summarize what the spec covers in each context layer
113
+ - After frontmatter, describe what the repo SHOULD have (best practices)
114
+ - Focus on quality over quantity — only create specs for real gaps
115
+
116
+ STEP 6 — FEATURE COVERAGE AND PLAN GAP ANALYSIS:
117
+ Analyze feature implementation coverage against specs/FEATURE_MAP.md and related feature docs.
118
+ - Read specs/FEATURE_MAP.md and list all rows marked "Planned", "Draft", "Active", "~95%", "pending", or "deferred"
119
+ - For each such row, verify whether the repo has:
120
+ - implementation evidence in code (routers/services/components/tests)
121
+ - a corresponding feature document in specs/features/
122
+ - an implementation plan (IMPLEMENTATION_PLAN.md) when feature status implies planning/active build
123
+ - For missing or weakly-covered items, create improvement specs with spekn_spec_create:
124
+ - Title: short descriptive name of the feature gap (e.g. "Flatpak Governance Enforcement", "Network Egress Policy")
125
+ - Do NOT prefix titles with the repo name, "Gap:", or any other prefix
126
+ - type: "capability" for product/runtime gaps, "operational" for process/governance gaps
127
+ - tags must include "feature-coverage" and "plan-gap"
128
+ - hints must explicitly include missing code surfaces and missing planning artifacts
129
+ - Do not duplicate if an equivalent gap spec already exists; update existing draft instead when possible
130
+
131
+ STEP 7 — SUMMARY:
132
+ Print a summary of what you created:
133
+ - Number of specs imported from existing files
134
+ - Number of specs imported from PDF documents (or skipped if no PDF tool)
135
+ - Number of decisions created from decision files
136
+ - Number of improvement specs created
137
+ - Overall governance assessment (strengths and gaps)
138
+ - Feature coverage summary (implemented vs partial vs missing)
139
+ - Missing plan features summary
140
+ - Include a "FEATURE_COVERAGE" section in this exact line format:
141
+ - FEATURE | <id-or-name> | <status> | <evidence-or-missing>
142
+ - Include a "MISSING_PLAN_FEATURES" section in this exact line format:
143
+ - PLAN_GAP | <id-or-name> | <missing-artifact> | <recommended-spec-title>
144
+ - Include a "DECISIONS_CREATED" section in this exact line format:
145
+ - DECISION | <source-file> | <title> | <specificationId>
146
+
147
+ RULES:
148
+ - ALWAYS use Spekn MCP create tools (spekn_spec_create / spekn_decision_create) — never just describe what you would create
149
+ - Each spec title must be unique within the project — use short descriptive titles, never prefix with repo name
150
+ - Do NOT create specs for governance files (CLAUDE.md, AGENTS.md, .cursorrules) — those are context, not specs
151
+ - If a tool call fails, report the error and continue with the next item
@@ -0,0 +1,85 @@
1
+ You are synchronizing an already-registered repository with a Spekn project.
2
+ Your role is to detect drift, decide what to update, and avoid duplicate specs.
3
+
4
+ PROJECT ID: {{PROJECT_ID}}{{ORG_INSTRUCTION}}
5
+ REPO PATH: {{REPO_PATH}}
6
+ COMMIT CONTEXT:
7
+ {{COMMIT_CONTEXT}}
8
+
9
+ DISCOVERED FILES:
10
+ {{FILE_LIST}}
11
+
12
+ STEP 0 — VERIFY TOOLS:
13
+ List available tools and confirm you have:
14
+ - spekn_spec_list
15
+ - spekn_spec_create
16
+ - spekn_spec_update
17
+ - spekn_decision_create
18
+ - spekn_decision_update
19
+ If missing critical tools, output an error and stop.
20
+
21
+ STEP 1 — LOAD CURRENT SaaS STATE FIRST:
22
+ Before touching anything:
23
+ 1. Call spekn_spec_list for the project using pagination.
24
+ - API limit is max 100 items per page.
25
+ - Use `limit <= 100` and increment `offset` until no more items are returned.
26
+ 2. Build a map of existing specs by:
27
+ - exact title
28
+ - normalized title (case/spacing/punctuation insensitive)
29
+ 3. Treat locked specs as canonical. Never create a duplicate just because content differs.
30
+
31
+ STEP 2 — FILTER NON-SOURCE EXPORT ARTIFACTS:
32
+ Files under `spekn/context/specs/**` and `spekn/context/decisions/**` are export artifacts.
33
+ Do NOT re-import these as new specs by default.
34
+ Only use them as evidence for drift, not as direct create input.
35
+
36
+ STEP 3 — FOR EACH DISCOVERED SOURCE FILE, DECIDE ACTION:
37
+ For each relevant source file, choose exactly one:
38
+ - SKIP: no meaningful drift
39
+ - UPDATE_EXISTING: matching spec exists and should be updated
40
+ - CREATE_NEW: truly new capability/intent/decision area with no equivalent existing spec
41
+
42
+ Decision policy:
43
+ - Prefer UPDATE_EXISTING over CREATE_NEW when a semantically equivalent spec exists.
44
+ - CREATE_NEW only when no reasonable existing target exists.
45
+ - Never create a "repo ingestor import ..." duplicate of an existing locked/canonical spec.
46
+ - If uncertain between update and create, choose update and explain rationale.
47
+ - Give extra weight to commit-range changes since LAST_SYNC_COMMIT (provided in the COMMIT CONTEXT above) when deciding drift.
48
+ - If code/protocol files changed, verify corresponding specs/decisions were updated; if not, propose targeted updates.
49
+
50
+ STEP 4 — APPLY CHANGES:
51
+ - Use spekn_spec_update for UPDATE_EXISTING actions.
52
+ - Use spekn_spec_create only for CREATE_NEW actions.
53
+ - Use decision create/update tools similarly with dedupe intent.
54
+ - Keep titles stable for existing specs; avoid generating near-duplicate titles.
55
+ - For UPDATE_EXISTING calls, include version/history intent:
56
+ - prefer `changeType: "patch"` for small clarifications
57
+ - use `changeType: "minor"` for new requirements/constraints
58
+ - set concise `changeDescription` referencing commit-range drift rationale
59
+
60
+ STEP 4.5 — ASK FOR APPROVAL WHEN CONFIDENCE IS LOW:
61
+ Before applying a low-confidence update/create (for example: weak evidence, ambiguous target mapping, or conflicting interpretation):
62
+ - trigger an interactive ACP question to the client (`session/prompt`) with explicit options:
63
+ - approve
64
+ - skip
65
+ - if approval is not explicit, skip the low-confidence change
66
+ - mention each prompted item again in REVIEW_NOTES
67
+
68
+ STEP 5 — OUTPUT SUMMARY:
69
+ Output:
70
+ 1. DRIFT_SUMMARY
71
+ - file, drift type, severity, target spec (if any)
72
+ 2. SYNC_ACTION_PLAN
73
+ - ACTION | SKIP/UPDATE_EXISTING/CREATE_NEW | file | target/new title | reason
74
+ 3. SYNC_APPLIED
75
+ - APPLIED | update/create/skip | entity type | id/title
76
+ 4. REVIEW_NOTES
77
+ - low-confidence updates that should be human-reviewed
78
+ - missing spec/decision coverage for changed code/protocol files
79
+
80
+ RULES:
81
+ - Do not mass-create specs.
82
+ - Minimize churn.
83
+ - Respect existing canonical spec set.
84
+ - Every tool call must include projectId and organizationId when required.
85
+ - Spec titles must be short and descriptive — never prefix with the repo name, "Gap:", or other prefixes.
@@ -0,0 +1,16 @@
1
+ /**
2
+ * skills import-local CLI command
3
+ *
4
+ * Imports SKILL.md files from a local directory into a Spekn project.
5
+ *
6
+ * Usage: spekn skills import-local <path> --project-id <uuid> [--namespace <ns>] [--api-url <url>]
7
+ */
8
+ import { CredentialsStore } from "./auth/credentials-store";
9
+ // Injectable dependencies so tests can capture output and stub credential storage.
+ interface Deps {
10
+ // Sink for normal CLI output (presumably wraps process.stdout.write — confirm in implementation).
+ stdout: (content: string) => void;
11
+ // Sink for error/diagnostic output.
+ stderr: (content: string) => void;
12
+ // Access to stored auth credentials used when calling the Spekn API.
+ credentialsStore: CredentialsStore;
13
+ }
14
+ // Runs the command; resolves to a process exit code (by convention 0 = success — confirm against implementation).
+ // `deps` is optional so production callers get real defaults while tests inject fakes.
+ export declare function runSkillsImportLocalCli(args: string[], deps?: Deps): Promise<number>;
15
+ // Entry point used when the command is invoked directly (see the usage comment above).
+ export declare function main(): Promise<void>;
16
+ export {};