@hyperdrive.bot/cli 1.0.7 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +307 -59
- package/dist/commands/account/list.d.ts +3 -0
- package/dist/commands/account/list.js +9 -2
- package/dist/commands/auth/logout.d.ts +11 -0
- package/dist/commands/auth/logout.js +86 -9
- package/dist/commands/git/connect.js +1 -0
- package/dist/commands/init.d.ts +1 -0
- package/dist/commands/init.js +20 -19
- package/dist/commands/jira/connect.d.ts +1 -0
- package/dist/commands/jira/connect.js +17 -6
- package/dist/commands/jira/hook/add.d.ts +17 -0
- package/dist/commands/jira/hook/add.js +147 -0
- package/dist/commands/jira/hook/list.d.ts +14 -0
- package/dist/commands/jira/hook/list.js +105 -0
- package/dist/commands/jira/hook/remove.d.ts +15 -0
- package/dist/commands/jira/hook/remove.js +119 -0
- package/dist/commands/jira/hook/toggle.d.ts +15 -0
- package/dist/commands/jira/hook/toggle.js +136 -0
- package/dist/commands/jira/status.js +11 -2
- package/dist/commands/project/init.d.ts +21 -0
- package/dist/commands/project/init.js +576 -0
- package/dist/commands/project/list.d.ts +10 -0
- package/dist/commands/project/list.js +119 -0
- package/dist/commands/project/status.d.ts +13 -0
- package/dist/commands/project/status.js +163 -0
- package/dist/commands/project/sync.d.ts +26 -0
- package/dist/commands/project/sync.js +406 -0
- package/dist/services/hyperdrive-sigv4.d.ts +125 -0
- package/dist/services/hyperdrive-sigv4.js +45 -0
- package/dist/services/tenant-service.d.ts +12 -0
- package/dist/services/tenant-service.js +44 -1
- package/dist/utils/account-flow.d.ts +2 -2
- package/dist/utils/account-flow.js +4 -4
- package/dist/utils/git-flow.d.ts +1 -0
- package/dist/utils/git-flow.js +2 -2
- package/dist/utils/hook-flow.d.ts +21 -0
- package/dist/utils/hook-flow.js +154 -0
- package/dist/utils/jira-flow.d.ts +2 -2
- package/dist/utils/jira-flow.js +4 -4
- package/oclif.manifest.json +591 -119
- package/package.json +5 -1

package/dist/commands/project/sync.js
@@ -0,0 +1,406 @@
+/**
+ * Project Sync Command
+ *
+ * Generates structured architecture summaries for repos in a project.
+ * For each repo: clone/pull → detect _bmad or invoke Claude → validate YAML → upload S3 → update DynamoDB.
+ */
+import { Args, Command, Flags } from '@oclif/core';
+import chalk from 'chalk';
+import { execFileSync, spawn } from 'child_process';
+import { existsSync, mkdtempSync, readdirSync, readFileSync, rmSync, statSync } from 'fs';
+import yaml from 'js-yaml';
+import ora from 'ora';
+import { tmpdir } from 'os';
+import { join } from 'path';
+import { HyperdriveSigV4Service } from '../../services/hyperdrive-sigv4.js';
+// ============================================================================
+// Architecture Summary YAML Schema & Validation (inline for CLI — no server deps)
+// ============================================================================
+const VALID_ENTITY_TYPES = ['client', 'company', 'delivery', 'initiative', 'module', 'service', 'system', 'tool'];
+const REQUIRED_SUMMARY_KEYS = ['repo', 'domains', 'patterns', 'modules', 'entity_registry', 'tech_stack'];
+const ARCHITECTURE_YAML_SCHEMA = `repo:
+  name: "<repo-name>"
+  description: "<one-line description>"
+
+domains:
+  - name: "<domain-name>"
+    modules: [<module1>, <module2>]
+    key_files: [<path1>, <path2>]
+
+patterns:
+  handler_pattern: "<glob pattern for handlers>"
+  service_pattern: "<glob pattern for services>"
+  module_config: "<glob pattern for module config>"
+  test_pattern: "<glob pattern for tests>"
+
+modules: [<module1>, <module2>, ...]
+
+entity_registry:
+  - { name: "<entity-name>", type: "<client|company|delivery|initiative|module|service|system|tool>", path: "<relative-path>" }
+
+tech_stack:
+  runtime: "<e.g. nodejs-22>"
+  framework: "<e.g. serverless-v4>"
+  language: "<e.g. typescript>"
+  database: "<e.g. dynamodb>"
+  infrastructure: "<e.g. aws-lambda>"`;
+const ARCHITECTURE_ANALYSIS_PROMPT = `Analyze this code repository and produce a structured architecture summary in YAML format. Output ONLY valid YAML — no markdown fences, no explanations, no commentary.
+
+Analyze the following:
+1. Directory structure and key files
+2. Functional domains (groups of related modules)
+3. Code patterns (handler paths, service paths, test paths, module config paths)
+4. List of modules/packages
+5. Entity registry (named entities with type: client|company|delivery|initiative|module|service|system|tool)
+6. Technology stack (runtime, framework, language, database, infrastructure)
+
+Required YAML schema:
+${ARCHITECTURE_YAML_SCHEMA}
+
+Now analyze the repository and produce the YAML summary.`;
+const VALID_REPO_NAME_REGEX = /^[a-zA-Z0-9][a-zA-Z0-9._-]*$/;
+const MAX_BMAD_DOC_SIZE = 100 * 1024; // 100KB cap for _bmad docs (SEC-002 mitigation)
+export default class ProjectSync extends Command {
+    static args = {
+        project: Args.string({
+            description: 'Project slug or ID',
+            required: true,
+        }),
+    };
+    static description = 'Generate architecture summaries for project repos via Claude analysis';
+    static examples = [
+        '<%= config.bin %> project sync my-project',
+        '<%= config.bin %> project sync my-project --repo api',
+        '<%= config.bin %> project sync my-project --json',
+    ];
+    static flags = {
+        domain: Flags.string({
+            char: 'd',
+            description: 'Tenant domain (for multi-domain setups)',
+        }),
+        json: Flags.boolean({
+            description: 'Output result as JSON',
+        }),
+        repo: Flags.string({
+            description: 'Sync only a specific repo by name',
+        }),
+    };
+    async run() {
+        const { args, flags } = await this.parse(ProjectSync);
+        // Authenticate
+        let service;
+        const authSpinner = ora('Checking authentication...').start();
+        try {
+            service = new HyperdriveSigV4Service(flags.domain);
+            authSpinner.succeed('Authenticated');
+        }
+        catch (error) {
+            authSpinner.fail('Not authenticated');
+            this.error(`${error.message}\n\nPlease authenticate first with: ${chalk.cyan('hd auth login')}`);
+        }
+        // Resolve project by slug
+        const projectSpinner = ora(`Resolving project ${chalk.cyan(args.project)}...`).start();
+        let project;
+        try {
+            const result = await service.moduleGet({ slug: args.project });
+            project = { name: result.name || args.project, projectId: result.projectId || args.project, slug: result.slug || args.project };
+            projectSpinner.succeed(`Project: ${chalk.cyan(project.name)} (${project.slug})`);
+        }
+        catch (error) {
+            projectSpinner.fail('Project not found');
+            this.error(`Could not find project "${args.project}": ${error.response?.data?.message || error.message}`);
+        }
+        // List repos
+        const repoSpinner = ora('Fetching repositories...').start();
+        let repos;
+        try {
+            repos = await service.projectListRepos(project.projectId);
+            repoSpinner.succeed(`Found ${repos.length} repo(s)`);
+        }
+        catch (error) {
+            repoSpinner.fail('Failed to fetch repos');
+            this.error(`Could not list repos: ${error.response?.data?.message || error.message}`);
+        }
+        // Filter by --repo flag
+        if (flags.repo) {
+            repos = repos.filter(r => r.name === flags.repo);
+            if (repos.length === 0) {
+                this.error(`No repo named "${flags.repo}" found in project "${project.slug}"`);
+            }
+        }
+        if (repos.length === 0) {
+            this.log(chalk.yellow('No repositories to sync.'));
+            return;
+        }
+        this.log('');
+        this.log(chalk.blue(`Syncing ${repos.length} repo(s)...`));
+        this.log('');
+        // Process each repo
+        const results = [];
+        for (let i = 0; i < repos.length; i++) {
+            const repo = repos[i];
+            const prefix = chalk.dim(`[${i + 1}/${repos.length}]`);
+            const result = await this.syncRepo(service, project, repo, prefix);
+            results.push(result);
+        }
+        // Summary
+        this.log('');
+        const succeeded = results.filter(r => r.success).length;
+        const failed = results.filter(r => !r.success).length;
+        if (flags.json) {
+            this.log(JSON.stringify({ failed, project: project.slug, results, succeeded, total: repos.length }, null, 2));
+            return;
+        }
+        if (failed === 0) {
+            this.log(chalk.green(`Synced ${succeeded}/${repos.length} repos (0 failed)`));
+        }
+        else {
+            this.log(chalk.yellow(`Synced ${succeeded}/${repos.length} repos (${failed} failed)`));
+            for (const r of results.filter(r => !r.success)) {
+                this.log(chalk.red(` ${r.name}: ${r.error}`));
+            }
+        }
+    }
+    async syncRepo(service, project, repo, prefix) {
+        const spinner = ora(`${prefix} ${chalk.cyan(repo.name)} — cloning...`).start();
+        let tmpDir = null;
+        try {
+            // Step 1: Validate repo name and clone
+            if (!VALID_REPO_NAME_REGEX.test(repo.name)) {
+                throw new Error(`Invalid repo name "${repo.name}" — must match ${VALID_REPO_NAME_REGEX}`);
+            }
+            tmpDir = mkdtempSync(join(tmpdir(), `hd-sync-${repo.name}-`));
+            const clonePath = join(tmpDir, repo.name);
+            try {
+                execFileSync('git', ['clone', '--depth', '1', '--branch', repo.defaultBranch, repo.gitRemote, clonePath], {
+                    stdio: 'pipe',
+                    timeout: 120_000, // 2 min clone timeout
+                });
+            }
+            catch (cloneError) {
+                throw new Error(`git clone failed: ${cloneError.stderr?.toString() || cloneError.message}`);
+            }
+            // Step 2: Detect _bmad or generate via Claude
+            spinner.text = `${prefix} ${chalk.cyan(repo.name)} — analyzing...`;
+            let yamlOutput = null;
+            let lastError = null;
+            const MAX_ATTEMPTS = 3;
+            // Check for _bmad docs
+            const bmadDocPath = this.detectBmadDocs(clonePath);
+            for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
+                try {
+                    if (bmadDocPath) {
+                        // Use _bmad doc with simpler restructure prompt
+                        const docStat = statSync(bmadDocPath);
+                        if (docStat.size > MAX_BMAD_DOC_SIZE) {
+                            throw new Error(`_bmad doc exceeds ${MAX_BMAD_DOC_SIZE / 1024}KB size limit (${Math.round(docStat.size / 1024)}KB) — skipping to full analysis`);
+                        }
+                        const docContent = readFileSync(bmadDocPath, 'utf-8');
+                        const prompt = `You are given an existing architecture documentation file. Convert it into the following YAML format. Output ONLY valid YAML, no markdown fences, no explanations.\n\nRequired YAML schema:\n${ARCHITECTURE_YAML_SCHEMA}\n\nHere is the architecture document to convert:\n\n<architecture-doc>\n${docContent}\n</architecture-doc>`;
+                        yamlOutput = await this.runClaude(prompt, clonePath);
+                    }
+                    else {
+                        // Full codebase analysis
+                        yamlOutput = await this.runClaude(ARCHITECTURE_ANALYSIS_PROMPT, clonePath);
+                    }
+                    // Validate
+                    this.validateYaml(yamlOutput);
+                    break; // Validation passed
+                }
+                catch (error) {
+                    lastError = error.message;
+                    this.log(chalk.dim(` ${repo.name} attempt ${attempt}/${MAX_ATTEMPTS} failed: ${lastError}`));
+                    yamlOutput = null;
+                    if (attempt === MAX_ATTEMPTS) {
+                        throw new Error(`Validation failed after ${MAX_ATTEMPTS} attempts: ${lastError}`);
+                    }
+                }
+            }
+            if (!yamlOutput) {
+                throw new Error(`Generation failed: ${lastError}`);
+            }
+            // Step 2b: Extract entity registry from validated YAML and merge with gut config
+            spinner.text = `${prefix} ${chalk.cyan(repo.name)} — extracting entities...`;
+            const parsedYaml = yaml.load(yamlOutput);
+            let entityRegistry = (parsedYaml.entity_registry || []);
+            // Check for .gut/config.json and merge
+            const gutEntities = this.readGutEntities(clonePath);
+            if (gutEntities.length > 0) {
+                entityRegistry = this.mergeEntityRegistries(entityRegistry, gutEntities);
+                this.log(chalk.dim(` ${repo.name}: merged ${entityRegistry.length} entities (${gutEntities.length} from gut)`));
+            }
+            else if (entityRegistry.length > 0) {
+                this.log(chalk.dim(` ${repo.name}: found ${entityRegistry.length} entities from analysis`));
+            }
+            // Step 3: Upload to S3 via API
+            spinner.text = `${prefix} ${chalk.cyan(repo.name)} — uploading...`;
+            // The API handles S3 upload and DynamoDB update — call updateRepo with the summary content
+            await service.projectUpdateRepo(project.projectId, repo.repoId, {
+                architectureSummary: yamlOutput,
+                lastSyncedAt: new Date().toISOString(),
+            });
+            // Step 4: Update entity registry via dedicated endpoint
+            if (entityRegistry.length > 0) {
+                try {
+                    await service.projectUpdateEntities(project.projectId, repo.repoId, entityRegistry);
+                }
+                catch (entityError) {
+                    this.log(chalk.yellow(` ${repo.name}: entity registry update failed: ${entityError.message}`));
+                }
+            }
+            spinner.succeed(`${prefix} ${chalk.cyan(repo.name)} — ${chalk.green('done')}`);
+            return { name: repo.name, success: true };
+        }
+        catch (error) {
+            spinner.fail(`${prefix} ${chalk.cyan(repo.name)} — ${chalk.red('failed')}`);
+            this.log(chalk.red(` Error: ${error.message}`));
+            return { error: error.message, name: repo.name, success: false };
+        }
+        finally {
+            // Cleanup temp directory
+            if (tmpDir && existsSync(tmpDir)) {
+                try {
+                    rmSync(tmpDir, { force: true, recursive: true });
+                }
+                catch {
+                    // Ignore cleanup errors
+                }
+            }
+        }
+    }
+    detectBmadDocs(repoPath) {
+        const bmadDir = join(repoPath, '_bmad');
+        if (!existsSync(bmadDir))
+            return null;
+        try {
+            if (!statSync(bmadDir).isDirectory())
+                return null;
+            const files = readdirSync(bmadDir);
+            const archPatterns = [/^architecture.*\.md$/i, /^arch-.*\.md$/i];
+            for (const file of files) {
+                for (const pattern of archPatterns) {
+                    if (pattern.test(file)) {
+                        return join(bmadDir, file);
+                    }
+                }
+            }
+        }
+        catch {
+            // Ignore fs errors
+        }
+        return null;
+    }
+    runClaude(prompt, cwd) {
+        return new Promise((resolve, reject) => {
+            const TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes
+            const child = spawn('claude', ['-p', prompt, '--output-format', 'text'], {
+                cwd,
+                stdio: ['pipe', 'pipe', 'pipe'],
+                timeout: TIMEOUT_MS,
+            });
+            let stdout = '';
+            let stderr = '';
+            child.stdout.on('data', (data) => {
+                stdout += data.toString();
+            });
+            child.stderr.on('data', (data) => {
+                stderr += data.toString();
+            });
+            child.on('error', (error) => {
+                reject(new Error(`Claude CLI failed to start: ${error.message}`));
+            });
+            child.on('close', (code) => {
+                if (code !== 0) {
+                    reject(new Error(`Claude CLI exited with code ${code}: ${stderr.slice(0, 500)}`));
+                    return;
+                }
+                // Strip markdown code fences if present
+                let output = stdout.trim();
+                if (output.startsWith('```yaml')) {
+                    output = output.replace(/^```yaml\n?/, '').replace(/\n?```$/, '');
+                }
+                else if (output.startsWith('```')) {
+                    output = output.replace(/^```\n?/, '').replace(/\n?```$/, '');
+                }
+                resolve(output);
+            });
+        });
+    }
+    validateYaml(yamlString) {
+        const parsed = yaml.load(yamlString);
+        if (!parsed || typeof parsed !== 'object') {
+            throw new Error('YAML must be a non-null object');
+        }
+        for (const key of REQUIRED_SUMMARY_KEYS) {
+            if (!(key in parsed)) {
+                throw new Error(`Missing required top-level key: '${key}'`);
+            }
+        }
+        const repo = parsed.repo;
+        if (!repo || typeof repo !== 'object' || !repo.name || !repo.description) {
+            throw new Error("'repo' must have 'name' and 'description' string fields");
+        }
+        if (!Array.isArray(parsed.domains)) {
+            throw new Error("'domains' must be an array");
+        }
+        if (!parsed.patterns || typeof parsed.patterns !== 'object') {
+            throw new Error("'patterns' must be an object");
+        }
+        if (!Array.isArray(parsed.modules)) {
+            throw new Error("'modules' must be an array");
+        }
+        if (!Array.isArray(parsed.entity_registry)) {
+            throw new Error("'entity_registry' must be an array");
+        }
+        for (let i = 0; i < parsed.entity_registry.length; i++) {
+            const entry = parsed.entity_registry[i];
+            if (!entry.name)
+                throw new Error(`entity_registry[${i}]: 'name' is required`);
+            if (!entry.type)
+                throw new Error(`entity_registry[${i}]: 'type' is required`);
+            if (!VALID_ENTITY_TYPES.includes(entry.type)) {
+                throw new Error(`entity_registry[${i}]: invalid type '${entry.type}'`);
+            }
+            if (!entry.path)
+                throw new Error(`entity_registry[${i}]: 'path' is required`);
+        }
+        const techStack = parsed.tech_stack;
+        if (!techStack || typeof techStack !== 'object') {
+            throw new Error("'tech_stack' must be an object");
+        }
+        for (const field of ['runtime', 'framework', 'language', 'database', 'infrastructure']) {
+            if (!techStack[field]) {
+                throw new Error(`tech_stack.${field} is required`);
+            }
+        }
+    }
+    readGutEntities(repoPath) {
+        const gutConfigPath = join(repoPath, '.gut', 'config.json');
+        if (!existsSync(gutConfigPath))
+            return [];
+        try {
+            const content = readFileSync(gutConfigPath, 'utf-8');
+            const config = JSON.parse(content);
+            if (!config.entities || !Array.isArray(config.entities))
+                return [];
+            return config.entities
+                .filter((e) => e && typeof e === 'object' && e.name && e.type && e.path &&
+                    VALID_ENTITY_TYPES.includes(e.type))
+                .map((e) => ({
+                    name: e.name,
+                    type: e.type,
+                    path: e.path,
+                    ...(e.repository && typeof e.repository === 'string' ? { repository: e.repository } : {}),
+                    ...(e.description && typeof e.description === 'string' ? { description: e.description } : {}),
+                }));
+        }
+        catch {
+            return [];
+        }
+    }
+    mergeEntityRegistries(claudeEntities, gutEntities) {
+        const gutNameSet = new Set(gutEntities.map(e => e.name.toLowerCase()));
+        const uniqueClaude = claudeEntities.filter(e => !gutNameSet.has(e.name.toLowerCase()));
+        return [...gutEntities, ...uniqueClaude];
+    }
+}
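
Note: as a quick reference for the validation rules above, here is a minimal summary (all values invented purely for illustration) that validateYaml should accept, since every key in REQUIRED_SUMMARY_KEYS is present, the single entity uses a type from VALID_ENTITY_TYPES, and all five tech_stack fields are set:

// Sketch only: the repo, domain, and module names below are illustrative.
const exampleSummary = `repo:
  name: "api"
  description: "Example service used only to illustrate the schema"

domains:
  - name: "billing"
    modules: [invoices]
    key_files: [src/handlers/invoice.ts]

patterns:
  handler_pattern: "src/handlers/**/*.ts"
  service_pattern: "src/services/**/*.ts"
  module_config: "modules/*/config.yml"
  test_pattern: "test/**/*.test.ts"

modules: [invoices]

entity_registry:
  - { name: "invoices", type: "module", path: "src/handlers/invoice.ts" }

tech_stack:
  runtime: "nodejs-22"
  framework: "serverless-v4"
  language: "typescript"
  database: "dynamodb"
  infrastructure: "aws-lambda"`;
// Passing this string to validateYaml() returns without throwing; dropping any
// top-level key, or using an entity type outside VALID_ENTITY_TYPES, would fail.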

package/dist/services/hyperdrive-sigv4.d.ts
@@ -136,6 +136,42 @@ interface GitAuthInitiateResponse {
     installUrl: string;
     state: string;
 }
+export type HookActionType = 'adhb-enrich' | 'ci-trigger' | 'slack-notify' | 'webhook';
+export interface SlackNotifyConfig {
+    channel: string;
+    template?: string;
+}
+export interface AdhbEnrichConfig {
+    priority?: 'high' | 'low' | 'normal';
+}
+export interface WebhookConfig {
+    headers?: Record<string, string>;
+    method: 'POST' | 'PUT';
+    url: string;
+}
+export interface CiTriggerConfig {
+    pipeline: string;
+    provider: 'github' | 'gitlab';
+    ref?: string;
+}
+export type HookActionConfig = AdhbEnrichConfig | CiTriggerConfig | SlackNotifyConfig | WebhookConfig;
+export interface HookResponse {
+    actionConfig: HookActionConfig;
+    actionType: HookActionType;
+    createdAt: string;
+    enabled: boolean;
+    hookId: string;
+    triggerStatus: string;
+    updatedAt: string;
+}
+export interface HookListResponse {
+    hooks: HookResponse[];
+}
+export interface HookCreateRequest {
+    actionConfig: HookActionConfig;
+    actionType: HookActionType;
+    triggerStatus: string;
+}
 /**
  * Hyperdrive API Service with AWS SigV4 authentication
  */

@@ -253,6 +289,28 @@ export declare class HyperdriveSigV4Service extends SigV4ApiClient {
         repos: GitRepoInfo[];
         totalCount: number;
     }>;
+    hookCreate(projectId: string, body: HookCreateRequest): Promise<HookResponse>;
+    hookDelete(projectId: string, hookId: string): Promise<{
+        message: string;
+    }>;
+    hookList(projectId: string): Promise<HookListResponse>;
+    hookUpdate(projectId: string, hookId: string, body: Partial<HookCreateRequest> & {
+        enabled?: boolean;
+    }): Promise<HookResponse>;
+    projectGetJiraStatuses(projectId: string): Promise<{
+        statuses: Array<{
+            id: string;
+            name: string;
+            category?: string;
+        }>;
+    }>;
+    jiraGetProjectStatuses(jiraProjectKey: string): Promise<{
+        statuses: Array<{
+            id: string;
+            name: string;
+            category?: string;
+        }>;
+    }>;
     jiraPreRegister(params: {
         jiraDomain: string;
     }): Promise<{
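
Note: a minimal sketch of how the new hook surface fits together, combining the types added above with the declared hook methods. The import path, project ID, trigger status, and webhook URL are placeholders, and it assumes the service constructor's tenant-domain argument is optional, as the CLI commands suggest:

import { HyperdriveSigV4Service } from './hyperdrive-sigv4.js'; // path is illustrative
import type { HookCreateRequest, WebhookConfig } from './hyperdrive-sigv4.js';

async function demoHooks(projectId: string): Promise<void> {
    const service = new HyperdriveSigV4Service(); // optionally pass a tenant domain

    // Build a typed webhook action that fires when an issue reaches a given status.
    const actionConfig: WebhookConfig = {
        method: 'POST',
        url: 'https://example.com/hooks/jira', // placeholder endpoint
        headers: { 'x-example-header': 'value' },
    };
    const body: HookCreateRequest = {
        actionConfig,
        actionType: 'webhook',
        triggerStatus: 'In Progress', // placeholder Jira status name
    };

    const hook = await service.hookCreate(projectId, body);               // create the hook
    await service.hookUpdate(projectId, hook.hookId, { enabled: false }); // toggle it off
    const { hooks } = await service.hookList(projectId);                  // list all hooks
    console.log(`${hooks.length} hook(s) configured`);
}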

@@ -268,6 +326,73 @@ export declare class HyperdriveSigV4Service extends SigV4ApiClient {
         };
         success: boolean;
     }>;
+    projectAddRepo(projectId: string, repo: {
+        defaultBranch: string;
+        gitProvider: string;
+        gitRemote: string;
+        name: string;
+    }): Promise<{
+        defaultBranch: string;
+        gitProvider: string;
+        gitRemote: string;
+        name: string;
+        repoId: string;
+    }>;
+    projectFindByJiraKey(jiraProjectKey: string): Promise<{
+        jiraProjectKey: string;
+        projectId: string;
+        slug: string;
+    }>;
+    projectGetJiraConfig(projectId: string): Promise<{
+        createdAt: string;
+        jiraProjectKey: string;
+        statusMapping: Record<string, string>;
+        updatedAt: string;
+    }>;
+    projectListRepos(projectId: string): Promise<Array<{
+        architectureSummary?: string | null;
+        architectureSummaryUpdatedAt?: string | null;
+        createdAt: string;
+        defaultBranch: string;
+        gitRemote: string;
+        name: string;
+        provider: string;
+        repoId: string;
+        updatedAt: string;
+    }>>;
+    projectSetJiraConfig(projectId: string, config: {
+        jiraProjectKey: string;
+        statusMapping: Record<string, string>;
+    }): Promise<{
+        jiraProjectKey: string;
+        projectId: string;
+        statusMapping: Record<string, string>;
+    }>;
+    projectUpdateRepo(projectId: string, repoId: string, updateData: {
+        architectureSummary?: string | null;
+        defaultBranch?: string;
+        lastSyncedAt?: string;
+    }): Promise<{
+        architectureSummary?: string | null;
+        defaultBranch: string;
+        gitRemote: string;
+        lastSyncedAt?: string;
+        provider: string;
+        repoId: string;
+    }>;
+    projectUpdateEntities(projectId: string, repoId: string, entities: Array<{
+        name: string;
+        type: string;
+        path: string;
+        repository?: string;
+        description?: string;
+    }>): Promise<Array<{
+        name: string;
+        type: string;
+        path: string;
+        repository?: string;
+        description?: string;
+    }>>;
     makeTestRequest(): Promise<Record<string, unknown>>;
     moduleAnalyze(slug: string): Promise<{
         jobId: string;

package/dist/services/hyperdrive-sigv4.js
@@ -102,12 +102,57 @@ export class HyperdriveSigV4Service extends SigV4ApiClient {
         return this.makeSignedRequest('GET', `/git/repos?${queryParams}`);
     }
     // ============================================================================
+    // Hook Methods
+    // ============================================================================
+    async hookCreate(projectId, body) {
+        return this.makeSignedRequest('POST', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks`, body);
+    }
+    async hookDelete(projectId, hookId) {
+        return this.makeSignedRequest('DELETE', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks/${encodeURIComponent(hookId)}`);
+    }
+    async hookList(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks`);
+    }
+    async hookUpdate(projectId, hookId, body) {
+        return this.makeSignedRequest('PATCH', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks/${encodeURIComponent(hookId)}`, body);
+    }
+    async projectGetJiraStatuses(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/jira/statuses`);
+    }
+    // ============================================================================
     // Jira Integration Methods
     // ============================================================================
+    async jiraGetProjectStatuses(jiraProjectKey) {
+        return this.makeSignedRequest('GET', `/hyperdrive/jira/projects/${encodeURIComponent(jiraProjectKey)}/statuses`);
+    }
     async jiraPreRegister(params) {
         return this.makeSignedRequest('POST', '/hyperdrive/jira/pre-register', params);
     }
     // ============================================================================
+    // Hyperdrive Project Methods
+    // ============================================================================
+    async projectAddRepo(projectId, repo) {
+        return this.makeSignedRequest('POST', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos`, repo);
+    }
+    async projectFindByJiraKey(jiraProjectKey) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/by-jira-key/${encodeURIComponent(jiraProjectKey)}`);
+    }
+    async projectGetJiraConfig(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/jira-config`);
+    }
+    async projectListRepos(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos`);
+    }
+    async projectSetJiraConfig(projectId, config) {
+        return this.makeSignedRequest('PUT', `/hyperdrive/projects/${encodeURIComponent(projectId)}/jira-config`, config);
+    }
+    async projectUpdateRepo(projectId, repoId, updateData) {
+        return this.makeSignedRequest('PATCH', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos/${encodeURIComponent(repoId)}`, updateData);
+    }
+    async projectUpdateEntities(projectId, repoId, entities) {
+        return this.makeSignedRequest('PUT', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos/${encodeURIComponent(repoId)}/entities`, entities);
+    }
+    // ============================================================================
     // Module Methods
     // ============================================================================
     async makeTestRequest() {

package/dist/services/tenant-service.d.ts
@@ -67,6 +67,18 @@ export declare class TenantService {
      * Save CLI configuration to file
      */
     saveConfig(config: CLIConfig): void;
+    /**
+     * Clear the default domain file
+     */
+    clearDefaultDomain(): void;
+    /**
+     * Remove a specific domain from config.json domains map
+     */
+    removeDomainConfig(domain: string): void;
+    /**
+     * Remove the entire config file and default-domain file
+     */
+    clearAllConfig(): void;
     /**
      * Set default domain
      */

package/dist/services/tenant-service.js
@@ -1,6 +1,6 @@
 import axios from 'axios';
 import chalk from 'chalk';
-import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
+import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from 'fs';
 import { homedir } from 'os';
 import { join } from 'path';
 /**

@@ -186,6 +186,49 @@ export class TenantService {
             console.error(chalk.red('Failed to save configuration:'), error);
         }
     }
+    /**
+     * Clear the default domain file
+     */
+    clearDefaultDomain() {
+        try {
+            if (existsSync(this.defaultDomainPath)) {
+                unlinkSync(this.defaultDomainPath);
+            }
+        }
+        catch (error) {
+            console.error(chalk.red('Failed to clear default domain:'), error);
+        }
+    }
+    /**
+     * Remove a specific domain from config.json domains map
+     */
+    removeDomainConfig(domain) {
+        const config = this.loadConfigFile();
+        if (!config)
+            return;
+        if (config.domains?.[domain]) {
+            delete config.domains[domain];
+        }
+        // If this was the legacy tenantDomain, clear it too
+        if (config.tenantDomain === domain) {
+            delete config.tenantDomain;
+        }
+        this.saveConfig(config);
+    }
+    /**
+     * Remove the entire config file and default-domain file
+     */
+    clearAllConfig() {
+        try {
+            if (existsSync(this.configPath)) {
+                unlinkSync(this.configPath);
+            }
+            this.clearDefaultDomain();
+        }
+        catch (error) {
+            console.error(chalk.red('Failed to clear config:'), error);
+        }
+    }
     /**
      * Set default domain
      */