@hyperdrive.bot/cli 1.0.7 → 1.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +298 -56
- package/dist/commands/account/list.d.ts +3 -0
- package/dist/commands/account/list.js +9 -2
- package/dist/commands/git/connect.js +1 -0
- package/dist/commands/init.d.ts +1 -0
- package/dist/commands/init.js +20 -19
- package/dist/commands/jira/connect.d.ts +1 -0
- package/dist/commands/jira/connect.js +17 -6
- package/dist/commands/jira/hook/add.d.ts +17 -0
- package/dist/commands/jira/hook/add.js +147 -0
- package/dist/commands/jira/hook/list.d.ts +14 -0
- package/dist/commands/jira/hook/list.js +105 -0
- package/dist/commands/jira/hook/remove.d.ts +15 -0
- package/dist/commands/jira/hook/remove.js +119 -0
- package/dist/commands/jira/hook/toggle.d.ts +15 -0
- package/dist/commands/jira/hook/toggle.js +136 -0
- package/dist/commands/project/init.d.ts +21 -0
- package/dist/commands/project/init.js +576 -0
- package/dist/commands/project/list.d.ts +10 -0
- package/dist/commands/project/list.js +119 -0
- package/dist/commands/project/status.d.ts +13 -0
- package/dist/commands/project/status.js +163 -0
- package/dist/commands/project/sync.d.ts +26 -0
- package/dist/commands/project/sync.js +388 -0
- package/dist/services/hyperdrive-sigv4.d.ts +125 -0
- package/dist/services/hyperdrive-sigv4.js +45 -0
- package/dist/utils/account-flow.d.ts +2 -2
- package/dist/utils/account-flow.js +4 -4
- package/dist/utils/git-flow.d.ts +1 -0
- package/dist/utils/git-flow.js +2 -2
- package/dist/utils/hook-flow.d.ts +21 -0
- package/dist/utils/hook-flow.js +154 -0
- package/dist/utils/jira-flow.d.ts +2 -2
- package/dist/utils/jira-flow.js +4 -4
- package/oclif.manifest.json +590 -128
- package/package.json +5 -1

package/dist/commands/project/sync.js (ADDED)
@@ -0,0 +1,388 @@
+/**
+ * Project Sync Command
+ *
+ * Generates structured architecture summaries for repos in a project.
+ * For each repo: clone/pull → detect _bmad or invoke Claude → validate YAML → upload S3 → update DynamoDB.
+ */
+import { Args, Command, Flags } from '@oclif/core';
+import chalk from 'chalk';
+import { execSync, spawn } from 'child_process';
+import { existsSync, mkdtempSync, readdirSync, readFileSync, rmSync, statSync } from 'fs';
+import yaml from 'js-yaml';
+import ora from 'ora';
+import { tmpdir } from 'os';
+import { join } from 'path';
+import { HyperdriveSigV4Service } from '../../services/hyperdrive-sigv4.js';
+// ============================================================================
+// Architecture Summary YAML Schema & Validation (inline for CLI — no server deps)
+// ============================================================================
+const VALID_ENTITY_TYPES = ['client', 'company', 'delivery', 'initiative', 'module', 'service', 'system', 'tool'];
+const REQUIRED_SUMMARY_KEYS = ['repo', 'domains', 'patterns', 'modules', 'entity_registry', 'tech_stack'];
+const ARCHITECTURE_YAML_SCHEMA = `repo:
+  name: "<repo-name>"
+  description: "<one-line description>"
+
+domains:
+  - name: "<domain-name>"
+    modules: [<module1>, <module2>]
+    key_files: [<path1>, <path2>]
+
+patterns:
+  handler_pattern: "<glob pattern for handlers>"
+  service_pattern: "<glob pattern for services>"
+  module_config: "<glob pattern for module config>"
+  test_pattern: "<glob pattern for tests>"
+
+modules: [<module1>, <module2>, ...]
+
+entity_registry:
+  - { name: "<entity-name>", type: "<client|company|delivery|initiative|module|service|system|tool>", path: "<relative-path>" }
+
+tech_stack:
+  runtime: "<e.g. nodejs-22>"
+  framework: "<e.g. serverless-v4>"
+  language: "<e.g. typescript>"
+  database: "<e.g. dynamodb>"
+  infrastructure: "<e.g. aws-lambda>"`;
+const ARCHITECTURE_ANALYSIS_PROMPT = `Analyze this code repository and produce a structured architecture summary in YAML format. Output ONLY valid YAML — no markdown fences, no explanations, no commentary.
+
+Analyze the following:
+1. Directory structure and key files
+2. Functional domains (groups of related modules)
+3. Code patterns (handler paths, service paths, test paths, module config paths)
+4. List of modules/packages
+5. Entity registry (named entities with type: client|company|delivery|initiative|module|service|system|tool)
+6. Technology stack (runtime, framework, language, database, infrastructure)
+
+Required YAML schema:
+${ARCHITECTURE_YAML_SCHEMA}
+
+Now analyze the repository and produce the YAML summary.`;
+export default class ProjectSync extends Command {
+    static args = {
+        project: Args.string({
+            description: 'Project slug or ID',
+            required: true,
+        }),
+    };
+    static description = 'Generate architecture summaries for project repos via Claude analysis';
+    static examples = [
+        '<%= config.bin %> project sync my-project',
+        '<%= config.bin %> project sync my-project --repo api',
+        '<%= config.bin %> project sync my-project --json',
+    ];
+    static flags = {
+        domain: Flags.string({
+            char: 'd',
+            description: 'Tenant domain (for multi-domain setups)',
+        }),
+        json: Flags.boolean({
+            description: 'Output result as JSON',
+        }),
+        repo: Flags.string({
+            description: 'Sync only a specific repo by name',
+        }),
+    };
+    async run() {
+        const { args, flags } = await this.parse(ProjectSync);
+        // Authenticate
+        let service;
+        const authSpinner = ora('Checking authentication...').start();
+        try {
+            service = new HyperdriveSigV4Service(flags.domain);
+            authSpinner.succeed('Authenticated');
+        }
+        catch (error) {
+            authSpinner.fail('Not authenticated');
+            this.error(`${error.message}\n\nPlease authenticate first with: ${chalk.cyan('hd auth login')}`);
+        }
+        // Resolve project by slug
+        const projectSpinner = ora(`Resolving project ${chalk.cyan(args.project)}...`).start();
+        let project;
+        try {
+            const result = await service.moduleGet({ slug: args.project });
+            project = { name: result.name || args.project, projectId: result.projectId || args.project, slug: result.slug || args.project };
+            projectSpinner.succeed(`Project: ${chalk.cyan(project.name)} (${project.slug})`);
+        }
+        catch (error) {
+            projectSpinner.fail('Project not found');
+            this.error(`Could not find project "${args.project}": ${error.response?.data?.message || error.message}`);
+        }
+        // List repos
+        const repoSpinner = ora('Fetching repositories...').start();
+        let repos;
+        try {
+            repos = await service.projectListRepos(project.projectId);
+            repoSpinner.succeed(`Found ${repos.length} repo(s)`);
+        }
+        catch (error) {
+            repoSpinner.fail('Failed to fetch repos');
+            this.error(`Could not list repos: ${error.response?.data?.message || error.message}`);
+        }
+        // Filter by --repo flag
+        if (flags.repo) {
+            repos = repos.filter(r => r.name === flags.repo);
+            if (repos.length === 0) {
+                this.error(`No repo named "${flags.repo}" found in project "${project.slug}"`);
+            }
+        }
+        if (repos.length === 0) {
+            this.log(chalk.yellow('No repositories to sync.'));
+            return;
+        }
+        this.log('');
+        this.log(chalk.blue(`Syncing ${repos.length} repo(s)...`));
+        this.log('');
+        // Process each repo
+        const results = [];
+        for (let i = 0; i < repos.length; i++) {
+            const repo = repos[i];
+            const prefix = chalk.dim(`[${i + 1}/${repos.length}]`);
+            const result = await this.syncRepo(service, project, repo, prefix);
+            results.push(result);
+        }
+        // Summary
+        this.log('');
+        const succeeded = results.filter(r => r.success).length;
+        const failed = results.filter(r => !r.success).length;
+        if (flags.json) {
+            this.log(JSON.stringify({ failed, project: project.slug, results, succeeded, total: repos.length }, null, 2));
+            return;
+        }
+        if (failed === 0) {
+            this.log(chalk.green(`Synced ${succeeded}/${repos.length} repos (0 failed)`));
+        }
+        else {
+            this.log(chalk.yellow(`Synced ${succeeded}/${repos.length} repos (${failed} failed)`));
+            for (const r of results.filter(r => !r.success)) {
+                this.log(chalk.red(` ${r.name}: ${r.error}`));
+            }
+        }
+    }
+    async syncRepo(service, project, repo, prefix) {
+        const spinner = ora(`${prefix} ${chalk.cyan(repo.name)} — cloning...`).start();
+        let tmpDir = null;
+        try {
+            // Step 1: Clone
+            tmpDir = mkdtempSync(join(tmpdir(), `hd-sync-${repo.name}-`));
+            const clonePath = join(tmpDir, repo.name);
+            try {
+                execSync(`git clone --depth 1 --branch ${repo.defaultBranch} ${repo.gitRemote} ${clonePath}`, {
+                    stdio: 'pipe',
+                    timeout: 120_000, // 2 min clone timeout
+                });
+            }
+            catch (cloneError) {
+                throw new Error(`git clone failed: ${cloneError.stderr?.toString() || cloneError.message}`);
+            }
+            // Step 2: Detect _bmad or generate via Claude
+            spinner.text = `${prefix} ${chalk.cyan(repo.name)} — analyzing...`;
+            let yamlOutput = null;
+            let lastError = null;
+            const MAX_ATTEMPTS = 3;
+            // Check for _bmad docs
+            const bmadDocPath = this.detectBmadDocs(clonePath);
+            for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
+                try {
+                    if (bmadDocPath) {
+                        // Use _bmad doc with simpler restructure prompt
+                        const docContent = readFileSync(bmadDocPath, 'utf-8');
+                        const prompt = `You are given an existing architecture documentation file. Convert it into the following YAML format. Output ONLY valid YAML, no markdown fences, no explanations.\n\nRequired YAML schema:\n${ARCHITECTURE_YAML_SCHEMA}\n\nHere is the architecture document to convert:\n\n${docContent}`;
+                        yamlOutput = await this.runClaude(prompt, clonePath);
+                    }
+                    else {
+                        // Full codebase analysis
+                        yamlOutput = await this.runClaude(ARCHITECTURE_ANALYSIS_PROMPT, clonePath);
+                    }
+                    // Validate
+                    this.validateYaml(yamlOutput);
+                    break; // Validation passed
+                }
+                catch (error) {
+                    lastError = error.message;
+                    this.log(chalk.dim(` ${repo.name} attempt ${attempt}/${MAX_ATTEMPTS} failed: ${lastError}`));
+                    yamlOutput = null;
+                    if (attempt === MAX_ATTEMPTS) {
+                        throw new Error(`Validation failed after ${MAX_ATTEMPTS} attempts: ${lastError}`);
+                    }
+                }
+            }
+            if (!yamlOutput) {
+                throw new Error(`Generation failed: ${lastError}`);
+            }
+            // Step 2b: Extract entity registry from validated YAML and merge with gut config
+            spinner.text = `${prefix} ${chalk.cyan(repo.name)} — extracting entities...`;
+            const parsedYaml = yaml.load(yamlOutput);
+            let entityRegistry = (parsedYaml.entity_registry || []);
+            // Check for .gut/config.json and merge
+            const gutEntities = this.readGutEntities(clonePath);
+            if (gutEntities.length > 0) {
+                entityRegistry = this.mergeEntityRegistries(entityRegistry, gutEntities);
+                this.log(chalk.dim(` ${repo.name}: merged ${entityRegistry.length} entities (${gutEntities.length} from gut)`));
+            }
+            else if (entityRegistry.length > 0) {
+                this.log(chalk.dim(` ${repo.name}: found ${entityRegistry.length} entities from analysis`));
+            }
+            // Step 3: Upload to S3 via API
+            spinner.text = `${prefix} ${chalk.cyan(repo.name)} — uploading...`;
+            // The API handles S3 upload and DynamoDB update — call updateRepo with the summary content
+            await service.projectUpdateRepo(project.projectId, repo.repoId, {
+                architectureSummary: yamlOutput,
+                lastSyncedAt: new Date().toISOString(),
+            });
+            // Step 4: Update entity registry via dedicated endpoint
+            if (entityRegistry.length > 0) {
+                try {
+                    await service.projectUpdateEntities(project.projectId, repo.repoId, entityRegistry);
+                }
+                catch (entityError) {
+                    this.log(chalk.yellow(` ${repo.name}: entity registry update failed: ${entityError.message}`));
+                }
+            }
+            spinner.succeed(`${prefix} ${chalk.cyan(repo.name)} — ${chalk.green('done')}`);
+            return { name: repo.name, success: true };
+        }
+        catch (error) {
+            spinner.fail(`${prefix} ${chalk.cyan(repo.name)} — ${chalk.red('failed')}`);
+            this.log(chalk.red(` Error: ${error.message}`));
+            return { error: error.message, name: repo.name, success: false };
+        }
+        finally {
+            // Cleanup temp directory
+            if (tmpDir && existsSync(tmpDir)) {
+                try {
+                    rmSync(tmpDir, { force: true, recursive: true });
+                }
+                catch {
+                    // Ignore cleanup errors
+                }
+            }
+        }
+    }
+    detectBmadDocs(repoPath) {
+        const bmadDir = join(repoPath, '_bmad');
+        if (!existsSync(bmadDir))
+            return null;
+        try {
+            if (!statSync(bmadDir).isDirectory())
+                return null;
+            const files = readdirSync(bmadDir);
+            const archPatterns = [/^architecture.*\.md$/i, /^arch-.*\.md$/i];
+            for (const file of files) {
+                for (const pattern of archPatterns) {
+                    if (pattern.test(file)) {
+                        return join(bmadDir, file);
+                    }
+                }
+            }
+        }
+        catch {
+            // Ignore fs errors
+        }
+        return null;
+    }
+    runClaude(prompt, cwd) {
+        return new Promise((resolve, reject) => {
+            const TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes
+            const child = spawn('claude', ['-p', prompt, '--output-format', 'text'], {
+                cwd,
+                stdio: ['pipe', 'pipe', 'pipe'],
+                timeout: TIMEOUT_MS,
+            });
+            let stdout = '';
+            let stderr = '';
+            child.stdout.on('data', (data) => {
+                stdout += data.toString();
+            });
+            child.stderr.on('data', (data) => {
+                stderr += data.toString();
+            });
+            child.on('error', (error) => {
+                reject(new Error(`Claude CLI failed to start: ${error.message}`));
+            });
+            child.on('close', (code) => {
+                if (code !== 0) {
+                    reject(new Error(`Claude CLI exited with code ${code}: ${stderr.slice(0, 500)}`));
+                    return;
+                }
+                // Strip markdown code fences if present
+                let output = stdout.trim();
+                if (output.startsWith('```yaml')) {
+                    output = output.replace(/^```yaml\n?/, '').replace(/\n?```$/, '');
+                }
+                else if (output.startsWith('```')) {
+                    output = output.replace(/^```\n?/, '').replace(/\n?```$/, '');
+                }
+                resolve(output);
+            });
+        });
+    }
+    validateYaml(yamlString) {
+        const parsed = yaml.load(yamlString);
+        if (!parsed || typeof parsed !== 'object') {
+            throw new Error('YAML must be a non-null object');
+        }
+        for (const key of REQUIRED_SUMMARY_KEYS) {
+            if (!(key in parsed)) {
+                throw new Error(`Missing required top-level key: '${key}'`);
+            }
+        }
+        const repo = parsed.repo;
+        if (!repo || typeof repo !== 'object' || !repo.name || !repo.description) {
+            throw new Error("'repo' must have 'name' and 'description' string fields");
+        }
+        if (!Array.isArray(parsed.entity_registry)) {
+            throw new Error("'entity_registry' must be an array");
+        }
+        for (let i = 0; i < parsed.entity_registry.length; i++) {
+            const entry = parsed.entity_registry[i];
+            if (!entry.name)
+                throw new Error(`entity_registry[${i}]: 'name' is required`);
+            if (!entry.type)
+                throw new Error(`entity_registry[${i}]: 'type' is required`);
+            if (!VALID_ENTITY_TYPES.includes(entry.type)) {
+                throw new Error(`entity_registry[${i}]: invalid type '${entry.type}'`);
+            }
+            if (!entry.path)
+                throw new Error(`entity_registry[${i}]: 'path' is required`);
+        }
+        const techStack = parsed.tech_stack;
+        if (!techStack || typeof techStack !== 'object') {
+            throw new Error("'tech_stack' must be an object");
+        }
+        for (const field of ['runtime', 'framework', 'language', 'database', 'infrastructure']) {
+            if (!techStack[field]) {
+                throw new Error(`tech_stack.${field} is required`);
+            }
+        }
+    }
+    readGutEntities(repoPath) {
+        const gutConfigPath = join(repoPath, '.gut', 'config.json');
+        if (!existsSync(gutConfigPath))
+            return [];
+        try {
+            const content = readFileSync(gutConfigPath, 'utf-8');
+            const config = JSON.parse(content);
+            if (!config.entities || !Array.isArray(config.entities))
+                return [];
+            return config.entities
+                .filter((e) => e && typeof e === 'object' && e.name && e.type && e.path &&
+                VALID_ENTITY_TYPES.includes(e.type))
+                .map((e) => ({
+                name: e.name,
+                type: e.type,
+                path: e.path,
+                ...(e.repository && typeof e.repository === 'string' ? { repository: e.repository } : {}),
+                ...(e.description && typeof e.description === 'string' ? { description: e.description } : {}),
+            }));
+        }
+        catch {
+            return [];
+        }
+    }
+    mergeEntityRegistries(claudeEntities, gutEntities) {
+        const gutNameSet = new Set(gutEntities.map(e => e.name.toLowerCase()));
+        const uniqueClaude = claudeEntities.filter(e => !gutNameSet.has(e.name.toLowerCase()));
+        return [...gutEntities, ...uniqueClaude];
+    }
+}
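
For orientation, the smallest document that passes the `validateYaml` check above looks roughly like this. This is a sketch only: every name and value below is invented, and in the real command the YAML string comes from Claude's output rather than being built by hand.

```ts
import yaml from 'js-yaml';

// Hypothetical minimal architecture summary; keys follow REQUIRED_SUMMARY_KEYS
// and the entity type comes from VALID_ENTITY_TYPES in sync.js above.
const summary = {
  repo: { name: 'api', description: 'Placeholder one-line description' },
  domains: [{ name: 'billing', modules: ['invoices'], key_files: ['src/invoices/handler.ts'] }],
  patterns: {
    handler_pattern: 'src/**/handler.ts',
    service_pattern: 'src/**/service.ts',
    module_config: 'src/**/module.yml',
    test_pattern: 'test/**/*.test.ts',
  },
  modules: ['invoices'],
  entity_registry: [{ name: 'invoices', type: 'module', path: 'src/invoices' }],
  tech_stack: {
    runtime: 'nodejs-22',
    framework: 'serverless-v4',
    language: 'typescript',
    database: 'dynamodb',
    infrastructure: 'aws-lambda',
  },
};

// The command validates the YAML string form, so round-trip through js-yaml.
console.log(yaml.dump(summary));
```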

package/dist/services/hyperdrive-sigv4.d.ts (CHANGED)
@@ -136,6 +136,42 @@ interface GitAuthInitiateResponse {
     installUrl: string;
     state: string;
 }
+export type HookActionType = 'adhb-enrich' | 'ci-trigger' | 'slack-notify' | 'webhook';
+export interface SlackNotifyConfig {
+    channel: string;
+    template?: string;
+}
+export interface AdhbEnrichConfig {
+    priority?: 'high' | 'low' | 'normal';
+}
+export interface WebhookConfig {
+    headers?: Record<string, string>;
+    method: 'POST' | 'PUT';
+    url: string;
+}
+export interface CiTriggerConfig {
+    pipeline: string;
+    provider: 'github' | 'gitlab';
+    ref?: string;
+}
+export type HookActionConfig = AdhbEnrichConfig | CiTriggerConfig | SlackNotifyConfig | WebhookConfig;
+export interface HookResponse {
+    actionConfig: HookActionConfig;
+    actionType: HookActionType;
+    createdAt: string;
+    enabled: boolean;
+    hookId: string;
+    triggerStatus: string;
+    updatedAt: string;
+}
+export interface HookListResponse {
+    hooks: HookResponse[];
+}
+export interface HookCreateRequest {
+    actionConfig: HookActionConfig;
+    actionType: HookActionType;
+    triggerStatus: string;
+}
 /**
  * Hyperdrive API Service with AWS SigV4 authentication
  */
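
As a quick illustration of how these types compose, a slack-notify hook payload could be built like this. It is a sketch, not package documentation: the channel, template and trigger status are placeholders, and the deep import path is assumed only to keep the snippet self-contained.

```ts
import type { HookCreateRequest, SlackNotifyConfig } from '@hyperdrive.bot/cli/dist/services/hyperdrive-sigv4.js';

// Placeholder values; actionType and actionConfig must describe the same action.
const slack: SlackNotifyConfig = { channel: '#deploys', template: '{{issueKey}} moved to {{status}}' };
const request: HookCreateRequest = {
  actionType: 'slack-notify',
  actionConfig: slack,
  triggerStatus: 'In Review',
};
```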
@@ -253,6 +289,28 @@ export declare class HyperdriveSigV4Service extends SigV4ApiClient {
         repos: GitRepoInfo[];
         totalCount: number;
     }>;
+    hookCreate(projectId: string, body: HookCreateRequest): Promise<HookResponse>;
+    hookDelete(projectId: string, hookId: string): Promise<{
+        message: string;
+    }>;
+    hookList(projectId: string): Promise<HookListResponse>;
+    hookUpdate(projectId: string, hookId: string, body: Partial<HookCreateRequest> & {
+        enabled?: boolean;
+    }): Promise<HookResponse>;
+    projectGetJiraStatuses(projectId: string): Promise<{
+        statuses: Array<{
+            id: string;
+            name: string;
+            category?: string;
+        }>;
+    }>;
+    jiraGetProjectStatuses(jiraProjectKey: string): Promise<{
+        statuses: Array<{
+            id: string;
+            name: string;
+            category?: string;
+        }>;
+    }>;
     jiraPreRegister(params: {
         jiraDomain: string;
     }): Promise<{
@@ -268,6 +326,73 @@ export declare class HyperdriveSigV4Service extends SigV4ApiClient {
         };
         success: boolean;
     }>;
+    projectAddRepo(projectId: string, repo: {
+        defaultBranch: string;
+        gitProvider: string;
+        gitRemote: string;
+        name: string;
+    }): Promise<{
+        defaultBranch: string;
+        gitProvider: string;
+        gitRemote: string;
+        name: string;
+        repoId: string;
+    }>;
+    projectFindByJiraKey(jiraProjectKey: string): Promise<{
+        jiraProjectKey: string;
+        projectId: string;
+        slug: string;
+    }>;
+    projectGetJiraConfig(projectId: string): Promise<{
+        createdAt: string;
+        jiraProjectKey: string;
+        statusMapping: Record<string, string>;
+        updatedAt: string;
+    }>;
+    projectListRepos(projectId: string): Promise<Array<{
+        architectureSummary?: string | null;
+        architectureSummaryUpdatedAt?: string | null;
+        createdAt: string;
+        defaultBranch: string;
+        gitRemote: string;
+        name: string;
+        provider: string;
+        repoId: string;
+        updatedAt: string;
+    }>>;
+    projectSetJiraConfig(projectId: string, config: {
+        jiraProjectKey: string;
+        statusMapping: Record<string, string>;
+    }): Promise<{
+        jiraProjectKey: string;
+        projectId: string;
+        statusMapping: Record<string, string>;
+    }>;
+    projectUpdateRepo(projectId: string, repoId: string, updateData: {
+        architectureSummary?: string | null;
+        defaultBranch?: string;
+        lastSyncedAt?: string;
+    }): Promise<{
+        architectureSummary?: string | null;
+        defaultBranch: string;
+        gitRemote: string;
+        lastSyncedAt?: string;
+        provider: string;
+        repoId: string;
+    }>;
+    projectUpdateEntities(projectId: string, repoId: string, entities: Array<{
+        name: string;
+        type: string;
+        path: string;
+        repository?: string;
+        description?: string;
+    }>): Promise<Array<{
+        name: string;
+        type: string;
+        path: string;
+        repository?: string;
+        description?: string;
+    }>>;
     makeTestRequest(): Promise<Record<string, unknown>>;
     moduleAnalyze(slug: string): Promise<{
         jobId: string;
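
A small sketch of how the new project/repo methods chain together, assuming an already-authenticated service instance. The project ID, remote URL and deep import path are placeholders, not values from the package.

```ts
import { HyperdriveSigV4Service } from '@hyperdrive.bot/cli/dist/services/hyperdrive-sigv4.js';

// Placeholder IDs/URLs; shapes follow the declarations above.
const service = new HyperdriveSigV4Service();
const added = await service.projectAddRepo('proj-123', {
  name: 'api',
  gitProvider: 'github',
  gitRemote: 'https://github.com/example/api.git',
  defaultBranch: 'main',
});
const repos = await service.projectListRepos('proj-123');
console.log(`Repo ${added.repoId} registered; project now has ${repos.length} repo(s)`);
```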

package/dist/services/hyperdrive-sigv4.js (CHANGED)
@@ -102,12 +102,57 @@ export class HyperdriveSigV4Service extends SigV4ApiClient {
         return this.makeSignedRequest('GET', `/git/repos?${queryParams}`);
     }
     // ============================================================================
+    // Hook Methods
+    // ============================================================================
+    async hookCreate(projectId, body) {
+        return this.makeSignedRequest('POST', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks`, body);
+    }
+    async hookDelete(projectId, hookId) {
+        return this.makeSignedRequest('DELETE', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks/${encodeURIComponent(hookId)}`);
+    }
+    async hookList(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks`);
+    }
+    async hookUpdate(projectId, hookId, body) {
+        return this.makeSignedRequest('PATCH', `/hyperdrive/projects/${encodeURIComponent(projectId)}/hooks/${encodeURIComponent(hookId)}`, body);
+    }
+    async projectGetJiraStatuses(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/jira/statuses`);
+    }
+    // ============================================================================
     // Jira Integration Methods
     // ============================================================================
+    async jiraGetProjectStatuses(jiraProjectKey) {
+        return this.makeSignedRequest('GET', `/hyperdrive/jira/projects/${encodeURIComponent(jiraProjectKey)}/statuses`);
+    }
     async jiraPreRegister(params) {
         return this.makeSignedRequest('POST', '/hyperdrive/jira/pre-register', params);
     }
     // ============================================================================
+    // Hyperdrive Project Methods
+    // ============================================================================
+    async projectAddRepo(projectId, repo) {
+        return this.makeSignedRequest('POST', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos`, repo);
+    }
+    async projectFindByJiraKey(jiraProjectKey) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/by-jira-key/${encodeURIComponent(jiraProjectKey)}`);
+    }
+    async projectGetJiraConfig(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/jira-config`);
+    }
+    async projectListRepos(projectId) {
+        return this.makeSignedRequest('GET', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos`);
+    }
+    async projectSetJiraConfig(projectId, config) {
+        return this.makeSignedRequest('PUT', `/hyperdrive/projects/${encodeURIComponent(projectId)}/jira-config`, config);
+    }
+    async projectUpdateRepo(projectId, repoId, updateData) {
+        return this.makeSignedRequest('PATCH', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos/${encodeURIComponent(repoId)}`, updateData);
+    }
+    async projectUpdateEntities(projectId, repoId, entities) {
+        return this.makeSignedRequest('PUT', `/hyperdrive/projects/${encodeURIComponent(projectId)}/repos/${encodeURIComponent(repoId)}/entities`, entities);
+    }
+    // ============================================================================
     // Module Methods
     // ============================================================================
     async makeTestRequest() {
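
Putting the hook endpoints together, a full create/list/disable/delete round trip might look like the sketch below. The project ID and webhook URL are placeholders and the deep import path is assumed; each call maps onto one of the signed requests above.

```ts
import { HyperdriveSigV4Service } from '@hyperdrive.bot/cli/dist/services/hyperdrive-sigv4.js';

// Placeholder project ID and webhook target.
const service = new HyperdriveSigV4Service();
const hook = await service.hookCreate('proj-123', {
  actionType: 'webhook',
  actionConfig: { method: 'POST', url: 'https://example.com/hyperdrive-hook' },
  triggerStatus: 'Done',
});
const { hooks } = await service.hookList('proj-123');
await service.hookUpdate('proj-123', hook.hookId, { enabled: false });
await service.hookDelete('proj-123', hook.hookId);
console.log(`Project had ${hooks.length} hook(s) after creating ${hook.hookId}`);
```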

package/dist/utils/account-flow.d.ts (CHANGED)
@@ -42,7 +42,7 @@ export declare function promptAccountDetails(existingData?: Partial<AccountAddDa
  * @param accountData - Account details to register
  * @returns AccountResult indicating success or failure
  */
-export declare function registerAccount(accountData: AccountAddData): Promise<AccountResult>;
+export declare function registerAccount(accountData: AccountAddData, tenantDomain?: string): Promise<AccountResult>;
 /**
  * Execute the AWS account add flow
  *
@@ -68,7 +68,7 @@ export declare function promptOpenCloudFormation(quickCreateUrl: string): Promis
 /**
  * Wait for role verification with polling
  */
-export declare function waitForRoleVerification(accountId: string, onProgress?: (message: string) => void): Promise<{
+export declare function waitForRoleVerification(accountId: string, onProgress?: (message: string) => void, tenantDomain?: string): Promise<{
     message?: string;
     verified: boolean;
 }>;
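
Both helpers gain an optional trailing tenantDomain argument, which they forward to HyperdriveSigV4Service (see the implementation hunks below). A hedged usage sketch follows; the account ID, domain and import path are placeholders, and the shape of AccountAddData is not shown in this diff, so it is derived from the function signature rather than spelled out.

```ts
import { registerAccount, waitForRoleVerification } from '@hyperdrive.bot/cli/dist/utils/account-flow.js';

// Placeholder values; the new optional tenantDomain argument is the only 1.0.8 change here.
const accountData = { accountId: '123456789012' } as Parameters<typeof registerAccount>[0];
const result = await registerAccount(accountData, 'acme.example.com');
console.log(result);

const { verified, message } = await waitForRoleVerification('123456789012', (m) => console.log(m), 'acme.example.com');
console.log(verified ? 'Role verified' : message ?? 'Still waiting');
```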

package/dist/utils/account-flow.js (CHANGED)
@@ -97,10 +97,10 @@ export async function promptAccountDetails(existingData) {
  * @param accountData - Account details to register
  * @returns AccountResult indicating success or failure
  */
-export async function registerAccount(accountData) {
+export async function registerAccount(accountData, tenantDomain) {
     try {
         // Initialize API service
-        const service = new HyperdriveSigV4Service();
+        const service = new HyperdriveSigV4Service(tenantDomain);
         // Make API call to add account
         const result = await service.accountAdd({
             accountId: accountData.accountId,
@@ -190,8 +190,8 @@ const VERIFY_TIMEOUT = 300000; // 5 minutes
 /**
  * Wait for role verification with polling
  */
-export async function waitForRoleVerification(accountId, onProgress) {
-    const service = new HyperdriveSigV4Service();
+export async function waitForRoleVerification(accountId, onProgress, tenantDomain) {
+    const service = new HyperdriveSigV4Service(tenantDomain);
     const startTime = Date.now();
     let attemptCount = 0;
     while (Date.now() - startTime < VERIFY_TIMEOUT) {

package/dist/utils/git-flow.d.ts (CHANGED)
package/dist/utils/git-flow.js (CHANGED)
@@ -159,7 +159,7 @@ export function waitForCallback(expectedState, port = 8765, timeout = 5 * 60 * 1
  * @returns GitConnectResult indicating success or failure
  */
 export async function executeGitConnect(options = {}) {
-    const { callbackPort = 8765, logger = () => { }, // No-op by default
+    const { callbackPort = 8765, domain, logger = () => { }, // No-op by default
     provider: initialProvider, timeout = 5 * 60 * 1000, } = options;
     try {
         // Step 1: Determine provider (prompt if not specified)
@@ -175,7 +175,7 @@ export async function executeGitConnect(options = {}) {
             provider = selected;
         }
         // Step 2: Initialize API service
-        const service = new HyperdriveSigV4Service();
+        const service = new HyperdriveSigV4Service(domain);
         // Step 3: Initiate OAuth flow via API
         const authResponse = await service.gitAuthInitiate(provider);
         logger(`Opening browser for ${provider === 'github' ? 'GitHub' : 'GitLab'} authorization...`);
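
The git connect flow now accepts a domain option alongside the options it already supported; a sketch with placeholder values and an assumed import path:

```ts
import { executeGitConnect } from '@hyperdrive.bot/cli/dist/utils/git-flow.js';

// Placeholder tenant domain; provider, logger, callbackPort and timeout existed before 1.0.8.
const result = await executeGitConnect({
  provider: 'github',
  domain: 'acme.example.com',
  logger: (msg) => console.log(msg),
});
console.log(result);
```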

package/dist/utils/hook-flow.d.ts (ADDED)
@@ -0,0 +1,21 @@
+import type { HookActionType, HookResponse } from '../services/hyperdrive-sigv4.js';
+/**
+ * Prompt user to select a trigger status from available Jira statuses
+ */
+export declare function promptTriggerStatus(statuses: string[]): Promise<string>;
+/**
+ * Prompt user to select an action type
+ */
+export declare function promptActionType(): Promise<HookActionType>;
+/**
+ * Prompt for action-specific configuration based on action type
+ */
+export declare function promptActionConfig(actionType: HookActionType): Promise<Record<string, unknown>>;
+/**
+ * Prompt user to select a hook from a list
+ */
+export declare function promptSelectHook(hooks: HookResponse[]): Promise<HookResponse>;
+/**
+ * Prompt user to confirm hook deletion
+ */
+export declare function promptConfirmDelete(hook: HookResponse): Promise<boolean>;
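
These prompt helpers presumably back the new `jira hook` commands; the following is a hedged sketch of how they could be wired to hookCreate, not the command's actual implementation. The project ID and status list are placeholders, the import paths are assumed, and promptActionConfig's loose Record return type is cast to the config union.

```ts
import { promptTriggerStatus, promptActionType, promptActionConfig } from '@hyperdrive.bot/cli/dist/utils/hook-flow.js';
import { HyperdriveSigV4Service, type HookActionConfig } from '@hyperdrive.bot/cli/dist/services/hyperdrive-sigv4.js';

// Placeholder project ID and Jira statuses.
const service = new HyperdriveSigV4Service();
const triggerStatus = await promptTriggerStatus(['To Do', 'In Progress', 'Done']);
const actionType = await promptActionType();
const actionConfig = (await promptActionConfig(actionType)) as HookActionConfig;
const hook = await service.hookCreate('proj-123', { actionType, actionConfig, triggerStatus });
console.log(`Created hook ${hook.hookId}`);
```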