@eldrforge/kodrdriv 0.0.17 → 0.0.19
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +1 -1
- package/debug-test.txt +1 -0
- package/dist/main.js +3709 -23
- package/dist/main.js.map +1 -1
- package/dist/src/prompt/personas/you.md +55 -0
- package/package.json +7 -6
- package/test.txt +1 -0
- package/dist/arguments.js +0 -661
- package/dist/arguments.js.map +0 -1
- package/dist/audio/devices.js +0 -284
- package/dist/audio/devices.js.map +0 -1
- package/dist/audio/index.js +0 -31
- package/dist/audio/index.js.map +0 -1
- package/dist/audio/processor.js +0 -766
- package/dist/audio/processor.js.map +0 -1
- package/dist/audio/types.js +0 -16
- package/dist/audio/types.js.map +0 -1
- package/dist/audio/validation.js +0 -35
- package/dist/audio/validation.js.map +0 -1
- package/dist/commands/audio-commit.js +0 -91
- package/dist/commands/audio-commit.js.map +0 -1
- package/dist/commands/audio-review.js +0 -113
- package/dist/commands/audio-review.js.map +0 -1
- package/dist/commands/clean.js +0 -36
- package/dist/commands/clean.js.map +0 -1
- package/dist/commands/commit.js +0 -117
- package/dist/commands/commit.js.map +0 -1
- package/dist/commands/link.js +0 -184
- package/dist/commands/link.js.map +0 -1
- package/dist/commands/publish.js +0 -301
- package/dist/commands/publish.js.map +0 -1
- package/dist/commands/release.js +0 -75
- package/dist/commands/release.js.map +0 -1
- package/dist/commands/review.js +0 -152
- package/dist/commands/review.js.map +0 -1
- package/dist/commands/select-audio.js +0 -265
- package/dist/commands/select-audio.js.map +0 -1
- package/dist/commands/unlink.js +0 -180
- package/dist/commands/unlink.js.map +0 -1
- package/dist/constants.js +0 -162
- package/dist/constants.js.map +0 -1
- package/dist/content/diff.js +0 -220
- package/dist/content/diff.js.map +0 -1
- package/dist/content/issues.js +0 -256
- package/dist/content/issues.js.map +0 -1
- package/dist/content/log.js +0 -53
- package/dist/content/log.js.map +0 -1
- package/dist/content/releaseNotes.js +0 -90
- package/dist/content/releaseNotes.js.map +0 -1
- package/dist/error/ExitError.js +0 -9
- package/dist/error/ExitError.js.map +0 -1
- package/dist/logging.js +0 -58
- package/dist/logging.js.map +0 -1
- package/dist/prompt/personas/committer.md +0 -29
- package/dist/prompt/personas/reviewer.md +0 -29
- package/dist/prompt/personas/you.md +0 -11
- package/dist/prompt/prompts.js +0 -160
- package/dist/prompt/prompts.js.map +0 -1
- package/dist/types.js +0 -87
- package/dist/types.js.map +0 -1
- package/dist/util/child.js +0 -23
- package/dist/util/child.js.map +0 -1
- package/dist/util/general.js +0 -93
- package/dist/util/general.js.map +0 -1
- package/dist/util/github.js +0 -197
- package/dist/util/github.js.map +0 -1
- package/dist/util/openai.js +0 -150
- package/dist/util/openai.js.map +0 -1
- package/dist/util/stdin.js +0 -61
- package/dist/util/stdin.js.map +0 -1
- package/dist/util/storage.js +0 -149
- package/dist/util/storage.js.map +0 -1
- /package/dist/{prompt → src/prompt}/instructions/commit.md +0 -0
- /package/dist/{prompt → src/prompt}/instructions/release.md +0 -0
- /package/dist/{prompt → src/prompt}/instructions/review.md +0 -0
- /package/dist/{prompt → src/prompt}/personas/releaser.md +0 -0
package/dist/main.js
CHANGED
@@ -1,21 +1,3705 @@
 #!/usr/bin/env node
 import * as Cardigantime from '@theunwalked/cardigantime';
 import 'dotenv/config';
-import {
-import {
-import
-import
-import
-import
-import {
-import
-import {
-import
-import {
-import
-import {
-import {
+import { Command } from 'commander';
+import { z } from 'zod';
+import os from 'os';
+import path from 'path';
+import winston from 'winston';
+import * as fs from 'fs';
+import { glob } from 'glob';
+import crypto from 'crypto';
+import { quick, Formatter, cook, configureTemplates } from '@riotprompt/riotprompt';
+import shellescape from 'shell-escape';
+import { exec, spawnSync } from 'child_process';
+import util from 'util';
+import { fileURLToPath } from 'url';
+import { OpenAI } from 'openai';
+import { processAudio, selectAndConfigureAudioDevice } from '@theunwalked/unplayable';
+import { Octokit } from '@octokit/rest';
+import fs$1 from 'fs/promises';
+import yaml from 'js-yaml';
 
+const VERSION = '0.0.19 (HEAD/d4fa693 T:v0.0.19 2025-07-04 20:16:23 -0700) linux x64 v22.16.0';
+const PROGRAM_NAME = 'kodrdriv';
+const DEFAULT_OVERRIDES = false;
+const DATE_FORMAT_YEAR_MONTH_DAY_HOURS_MINUTES_SECONDS_MILLISECONDS = 'YYYY-MM-DD-HHmmss.SSS';
+const DEFAULT_VERBOSE = false;
+const DEFAULT_DRY_RUN = false;
+const DEFAULT_DEBUG = false;
+const DEFAULT_MODEL = 'gpt-4o-mini';
+const DEFAULT_OUTPUT_DIRECTORY = 'output/kodrdriv';
+const DEFAULT_CONTEXT_DIRECTORIES = [];
+const DEFAULT_CONFIG_DIR = '.kodrdriv';
+const DEFAULT_PREFERENCES_DIRECTORY = path.join(os.homedir(), '.kodrdriv');
+const DEFAULT_FROM_COMMIT_ALIAS = 'origin/HEAD';
+const DEFAULT_TO_COMMIT_ALIAS = 'HEAD';
+const DEFAULT_ADD = false;
+const DEFAULT_CACHED = false;
+const DEFAULT_SENDIT_MODE = false;
+const DEFAULT_MESSAGE_LIMIT = 50;
+const DEFAULT_MERGE_METHOD = 'squash';
+const DEFAULT_EXCLUDED_PATTERNS = [
+    'node_modules',
+    'pnpm-lock.yaml',
+    'package-lock.json',
+    'yarn.lock',
+    'bun.lockb',
+    'composer.lock',
+    'Cargo.lock',
+    'Gemfile.lock',
+    'dist',
+    'build',
+    'out',
+    '.next',
+    '.nuxt',
+    'coverage',
+    '.vscode',
+    '.idea',
+    '.DS_Store',
+    '.git',
+    '.gitignore',
+    'logs',
+    'tmp',
+    '.cache',
+    '*.log',
+    '.env',
+    '.env.*',
+    '*.pem',
+    '*.crt',
+    '*.key',
+    '*.sqlite',
+    '*.db',
+    '*.zip',
+    '*.tar',
+    '*.gz',
+    '*.exe',
+    '*.bin'
+];
+const COMMAND_COMMIT = 'commit';
+const COMMAND_AUDIO_COMMIT = 'audio-commit';
+const COMMAND_SELECT_AUDIO = 'select-audio';
+const COMMAND_RELEASE = 'release';
+const COMMAND_REVIEW = 'review';
+const COMMAND_AUDIO_REVIEW = 'audio-review';
+const COMMAND_PUBLISH = 'publish';
+const COMMAND_LINK = 'link';
+const COMMAND_UNLINK = 'unlink';
+const COMMAND_CLEAN = 'clean';
+const COMMAND_CHECK_CONFIG = 'check-config';
+const COMMAND_INIT_CONFIG = 'init-config';
+const ALLOWED_COMMANDS = [
+    COMMAND_COMMIT,
+    COMMAND_AUDIO_COMMIT,
+    COMMAND_SELECT_AUDIO,
+    COMMAND_RELEASE,
+    COMMAND_REVIEW,
+    COMMAND_AUDIO_REVIEW,
+    COMMAND_PUBLISH,
+    COMMAND_LINK,
+    COMMAND_UNLINK,
+    COMMAND_CLEAN
+];
+const DEFAULT_COMMAND = COMMAND_COMMIT;
+const DEFAULT_INSTRUCTIONS_DIR = `instructions`;
+const DEFAULT_PERSONA_DIR = `personas`;
+const DEFAULT_INSTRUCTIONS_COMMIT_FILE = `${DEFAULT_INSTRUCTIONS_DIR}/commit.md`;
+const DEFAULT_INSTRUCTIONS_RELEASE_FILE = `${DEFAULT_INSTRUCTIONS_DIR}/release.md`;
+const DEFAULT_INSTRUCTIONS_REVIEW_FILE = `${DEFAULT_INSTRUCTIONS_DIR}/review.md`;
+const DEFAULT_PERSONA_RELEASER_FILE = `${DEFAULT_PERSONA_DIR}/releaser.md`;
+const DEFAULT_PERSONA_YOU_FILE = `${DEFAULT_PERSONA_DIR}/you.md`;
+// Define defaults in one place
+const KODRDRIV_DEFAULTS = {
+    dryRun: DEFAULT_DRY_RUN,
+    verbose: DEFAULT_VERBOSE,
+    debug: DEFAULT_DEBUG,
+    overrides: DEFAULT_OVERRIDES,
+    model: DEFAULT_MODEL,
+    contextDirectories: DEFAULT_CONTEXT_DIRECTORIES,
+    commandName: DEFAULT_COMMAND,
+    configDirectory: DEFAULT_CONFIG_DIR,
+    outputDirectory: DEFAULT_OUTPUT_DIRECTORY,
+    preferencesDirectory: DEFAULT_PREFERENCES_DIRECTORY,
+    commit: {
+        add: DEFAULT_ADD,
+        cached: DEFAULT_CACHED,
+        sendit: DEFAULT_SENDIT_MODE,
+        messageLimit: DEFAULT_MESSAGE_LIMIT
+    },
+    release: {
+        from: DEFAULT_FROM_COMMIT_ALIAS,
+        to: DEFAULT_TO_COMMIT_ALIAS,
+        messageLimit: DEFAULT_MESSAGE_LIMIT
+    },
+    audioCommit: {
+        maxRecordingTime: 300,
+        audioDevice: undefined
+    },
+    review: {
+        includeCommitHistory: true,
+        includeRecentDiffs: true,
+        includeReleaseNotes: false,
+        includeGithubIssues: true,
+        commitHistoryLimit: 10,
+        diffHistoryLimit: 5,
+        releaseNotesLimit: 3,
+        githubIssuesLimit: 20,
+        sendit: DEFAULT_SENDIT_MODE
+    },
+    audioReview: {
+        includeCommitHistory: true,
+        includeRecentDiffs: true,
+        includeReleaseNotes: false,
+        includeGithubIssues: true,
+        commitHistoryLimit: 10,
+        diffHistoryLimit: 5,
+        releaseNotesLimit: 3,
+        githubIssuesLimit: 20,
+        sendit: DEFAULT_SENDIT_MODE,
+        maxRecordingTime: 300,
+        audioDevice: undefined
+    },
+    publish: {
+        mergeMethod: DEFAULT_MERGE_METHOD,
+        requiredEnvVars: [
+            'GITHUB_TOKEN',
+            'OPENAI_API_KEY'
+        ],
+        linkWorkspacePackages: true,
+        unlinkWorkspacePackages: true
+    },
+    link: {
+        scopeRoots: {},
+        workspaceFile: 'pnpm-workspace.yaml',
+        dryRun: false
+    },
+    excludedPatterns: DEFAULT_EXCLUDED_PATTERNS
+};
+
+// Track if debug directory has been ensured for this session
+let debugDirectoryEnsured = false;
+const ensureDebugDirectory = ()=>{
+    if (debugDirectoryEnsured) return;
+    const debugDir = path.join(DEFAULT_OUTPUT_DIRECTORY, 'debug');
+    try {
+        fs.mkdirSync(debugDir, {
+            recursive: true
+        });
+        debugDirectoryEnsured = true;
+    } catch (error) {
+        // eslint-disable-next-line no-console
+        console.error(`Failed to create debug directory ${debugDir}:`, error);
+    }
+};
+const generateDebugLogFilename = ()=>{
+    const now = new Date();
+    const timestamp = now.toISOString().replace(/[-:]/g, '').replace(/\./g, '').replace('T', '-').replace('Z', '');
+    return `${timestamp}-debug.log`;
+};
+const createTransports = (level)=>{
+    const transports = [];
+    // Always add console transport for info level and above
+    if (level === 'info') {
+        transports.push(new winston.transports.Console({
+            format: winston.format.combine(winston.format.colorize(), winston.format.printf(({ level, message })=>{
+                return `${level}: ${message}`;
+            }))
+        }));
+    } else {
+        // For debug/verbose levels, add console transport that shows info and above
+        transports.push(new winston.transports.Console({
+            level: 'info',
+            format: winston.format.combine(winston.format.colorize(), winston.format.printf(({ timestamp, level, message, ...meta })=>{
+                // For info level messages, use simpler format without timestamp
+                if (level.includes('info')) {
+                    return String(message);
+                }
+                const metaStr = Object.keys(meta).length ? ` ${JSON.stringify(meta)}` : '';
+                return `${timestamp} ${level}: ${String(message)}${metaStr}`;
+            }))
+        }));
+        // Add file transport for debug levels (debug and silly)
+        if (level === 'debug' || level === 'silly') {
+            ensureDebugDirectory();
+            const debugLogPath = path.join(DEFAULT_OUTPUT_DIRECTORY, 'debug', generateDebugLogFilename());
+            transports.push(new winston.transports.File({
+                filename: debugLogPath,
+                level: 'debug',
+                format: winston.format.combine(winston.format.timestamp({
+                    format: DATE_FORMAT_YEAR_MONTH_DAY_HOURS_MINUTES_SECONDS_MILLISECONDS
+                }), winston.format.errors({
+                    stack: true
+                }), winston.format.splat(), winston.format.printf(({ timestamp, level, message, ...meta })=>{
+                    const metaStr = Object.keys(meta).length ? ` ${JSON.stringify(meta)}` : '';
+                    return `${timestamp} ${level}: ${message}${metaStr}`;
+                }))
+            }));
+        }
+    }
+    return transports;
+};
+const createFormat = (level)=>{
+    if (level === 'info') {
+        return winston.format.combine(winston.format.errors({
+            stack: true
+        }), winston.format.splat());
+    }
+    return winston.format.combine(winston.format.timestamp({
+        format: DATE_FORMAT_YEAR_MONTH_DAY_HOURS_MINUTES_SECONDS_MILLISECONDS
+    }), winston.format.errors({
+        stack: true
+    }), winston.format.splat(), winston.format.json());
+};
+// Create the logger instance once
+const logger = winston.createLogger({
+    level: 'info',
+    format: createFormat('info'),
+    defaultMeta: {
+        service: PROGRAM_NAME
+    },
+    transports: createTransports('info')
+});
+const setLogLevel = (level)=>{
+    // Reconfigure the existing logger instead of creating a new one
+    logger.configure({
+        level,
+        format: createFormat(level),
+        defaultMeta: {
+            service: PROGRAM_NAME
+        },
+        transports: createTransports(level)
+    });
+};
+const getLogger = ()=>logger;
+
+// eslint-disable-next-line no-restricted-imports
+const create$2 = (params)=>{
+    // eslint-disable-next-line no-console
+    const log = params.log || console.log;
+    const exists = async (path)=>{
+        try {
+            await fs.promises.stat(path);
+            return true;
+        // eslint-disable-next-line @typescript-eslint/no-unused-vars
+        } catch (error) {
+            return false;
+        }
+    };
+    const isDirectory = async (path)=>{
+        const stats = await fs.promises.stat(path);
+        if (!stats.isDirectory()) {
+            log(`${path} is not a directory`);
+            return false;
+        }
+        return true;
+    };
+    const isFile = async (path)=>{
+        const stats = await fs.promises.stat(path);
+        if (!stats.isFile()) {
+            log(`${path} is not a file`);
+            return false;
+        }
+        return true;
+    };
+    const isReadable = async (path)=>{
+        try {
+            await fs.promises.access(path, fs.constants.R_OK);
+        } catch (error) {
+            log(`${path} is not readable: %s %s`, error.message, error.stack);
+            return false;
+        }
+        return true;
+    };
+    const isWritable = async (path)=>{
+        try {
+            await fs.promises.access(path, fs.constants.W_OK);
+        } catch (error) {
+            log(`${path} is not writable: %s %s`, error.message, error.stack);
+            return false;
+        }
+        return true;
+    };
+    const isFileReadable = async (path)=>{
+        return await exists(path) && await isFile(path) && await isReadable(path);
+    };
+    const isDirectoryWritable = async (path)=>{
+        return await exists(path) && await isDirectory(path) && await isWritable(path);
+    };
+    const isDirectoryReadable = async (path)=>{
+        return await exists(path) && await isDirectory(path) && await isReadable(path);
+    };
+    const createDirectory = async (path)=>{
+        try {
+            await fs.promises.mkdir(path, {
+                recursive: true
+            });
+        } catch (mkdirError) {
+            throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);
+        }
+    };
+    const ensureDirectory = async (path)=>{
+        if (!await exists(path)) {
+            await createDirectory(path);
+        }
+    };
+    const removeDirectory = async (path)=>{
+        try {
+            if (await exists(path)) {
+                await fs.promises.rm(path, {
+                    recursive: true,
+                    force: true
+                });
+            }
+        } catch (rmError) {
+            throw new Error(`Failed to remove directory ${path}: ${rmError.message} ${rmError.stack}`);
+        }
+    };
+    const readFile = async (path, encoding)=>{
+        return await fs.promises.readFile(path, {
+            encoding: encoding
+        });
+    };
+    const writeFile = async (path, data, encoding)=>{
+        await fs.promises.writeFile(path, data, {
+            encoding: encoding
+        });
+    };
+    const rename = async (oldPath, newPath)=>{
+        await fs.promises.rename(oldPath, newPath);
+    };
+    const forEachFileIn = async (directory, callback, options = {
+        pattern: '*.*'
+    })=>{
+        try {
+            const files = await glob(options.pattern, {
+                cwd: directory,
+                nodir: true
+            });
+            for (const file of files){
+                await callback(path.join(directory, file));
+            }
+        } catch (err) {
+            throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);
+        }
+    };
+    const readStream = async (path)=>{
+        return fs.createReadStream(path);
+    };
+    const hashFile = async (path, length)=>{
+        const file = await readFile(path, 'utf8');
+        return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);
+    };
+    const listFiles = async (directory)=>{
+        return await fs.promises.readdir(directory);
+    };
+    return {
+        exists,
+        isDirectory,
+        isFile,
+        isReadable,
+        isWritable,
+        isFileReadable,
+        isDirectoryWritable,
+        isDirectoryReadable,
+        createDirectory,
+        ensureDirectory,
+        readFile,
+        readStream,
+        writeFile,
+        rename,
+        forEachFileIn,
+        hashFile,
+        listFiles,
+        removeDirectory
+    };
+};
+
+// Function to read from STDIN if available
+async function readStdin() {
+    // In test environment, allow mocking to work by skipping TTY check
+    if (process.env.NODE_ENV === 'test' || process.env.VITEST === 'true') {
+        return new Promise((resolve)=>{
+            let input = '';
+            let hasData = false;
+            const timeout = setTimeout(()=>{
+                if (!hasData) {
+                    resolve(null);
+                }
+            }, 10); // Very short timeout for tests
+            process.stdin.setEncoding('utf8');
+            process.stdin.on('data', (chunk)=>{
+                hasData = true;
+                clearTimeout(timeout);
+                input += chunk;
+            });
+            process.stdin.on('end', ()=>{
+                resolve(input.trim() || null);
+            });
+            process.stdin.on('error', ()=>{
+                clearTimeout(timeout);
+                resolve(null);
+            });
+            process.stdin.resume();
+        });
+    }
+    return new Promise((resolve)=>{
+        // Check if stdin is TTY (interactive terminal)
+        if (process.stdin.isTTY) {
+            resolve(null);
+            return;
+        }
+        let input = '';
+        let hasData = false;
+        const timeout = setTimeout(()=>{
+            if (!hasData) {
+                resolve(null);
+            }
+        }, 100); // Short timeout to detect if data is available
+        process.stdin.setEncoding('utf8');
+        process.stdin.on('data', (chunk)=>{
+            hasData = true;
+            clearTimeout(timeout);
+            input += chunk;
+        });
+        process.stdin.on('end', ()=>{
+            resolve(input.trim() || null);
+        });
+        process.stdin.on('error', ()=>{
+            clearTimeout(timeout);
+            resolve(null);
+        });
+        // If no data comes in quickly, assume no stdin
+        process.stdin.resume();
+    });
+}
+
+z.object({
+    dryRun: z.boolean().optional(),
+    verbose: z.boolean().optional(),
+    debug: z.boolean().optional(),
+    overrides: z.boolean().optional(),
+    checkConfig: z.boolean().optional(),
+    initConfig: z.boolean().optional(),
+    model: z.string().optional(),
+    contextDirectories: z.array(z.string()).optional(),
+    configDir: z.string().optional(),
+    outputDir: z.string().optional(),
+    preferencesDir: z.string().optional(),
+    cached: z.boolean().optional(),
+    add: z.boolean().optional(),
+    sendit: z.boolean().optional(),
+    from: z.string().optional(),
+    to: z.string().optional(),
+    excludedPatterns: z.array(z.string()).optional(),
+    context: z.string().optional(),
+    note: z.string().optional(),
+    direction: z.string().optional(),
+    messageLimit: z.number().optional(),
+    mergeMethod: z.enum([
+        'merge',
+        'squash',
+        'rebase'
+    ]).optional(),
+    scopeRoots: z.string().optional(),
+    workspaceFile: z.string().optional(),
+    includeCommitHistory: z.boolean().optional(),
+    includeRecentDiffs: z.boolean().optional(),
+    includeReleaseNotes: z.boolean().optional(),
+    includeGithubIssues: z.boolean().optional(),
+    commitHistoryLimit: z.number().optional(),
+    diffHistoryLimit: z.number().optional(),
+    releaseNotesLimit: z.number().optional(),
+    githubIssuesLimit: z.number().optional(),
+    file: z.string().optional(),
+    keepTemp: z.boolean().optional()
+});
+// Function to transform flat CLI args into nested Config structure
+const transformCliArgs = (finalCliArgs)=>{
+    const transformedCliArgs = {};
+    // Direct mappings from Input to Config
+    if (finalCliArgs.dryRun !== undefined) transformedCliArgs.dryRun = finalCliArgs.dryRun;
+    if (finalCliArgs.verbose !== undefined) transformedCliArgs.verbose = finalCliArgs.verbose;
+    if (finalCliArgs.debug !== undefined) transformedCliArgs.debug = finalCliArgs.debug;
+    if (finalCliArgs.overrides !== undefined) transformedCliArgs.overrides = finalCliArgs.overrides;
+    if (finalCliArgs.model !== undefined) transformedCliArgs.model = finalCliArgs.model;
+    if (finalCliArgs.contextDirectories !== undefined) transformedCliArgs.contextDirectories = finalCliArgs.contextDirectories;
+    // Map configDir (CLI) to configDirectory (Cardigantime standard)
+    if (finalCliArgs.configDir !== undefined) transformedCliArgs.configDirectory = finalCliArgs.configDir;
+    // Map outputDir (CLI) to outputDirectory (Config standard)
+    if (finalCliArgs.outputDir !== undefined) transformedCliArgs.outputDirectory = finalCliArgs.outputDir;
+    // Map preferencesDir (CLI) to preferencesDirectory (Config standard)
+    if (finalCliArgs.preferencesDir !== undefined) transformedCliArgs.preferencesDirectory = finalCliArgs.preferencesDir;
+    // Nested mappings for 'commit' options
+    if (finalCliArgs.cached !== undefined || finalCliArgs.sendit !== undefined || finalCliArgs.add !== undefined) {
+        transformedCliArgs.commit = {};
+        if (finalCliArgs.add !== undefined) transformedCliArgs.commit.add = finalCliArgs.add;
+        if (finalCliArgs.cached !== undefined) transformedCliArgs.commit.cached = finalCliArgs.cached;
+        if (finalCliArgs.sendit !== undefined) transformedCliArgs.commit.sendit = finalCliArgs.sendit;
+        if (finalCliArgs.messageLimit !== undefined) transformedCliArgs.commit.messageLimit = finalCliArgs.messageLimit;
+        if (finalCliArgs.context !== undefined) transformedCliArgs.commit.context = finalCliArgs.context;
+        if (finalCliArgs.direction !== undefined) transformedCliArgs.commit.direction = finalCliArgs.direction;
+    }
+    // Nested mappings for 'audioCommit' options
+    if (finalCliArgs.file !== undefined || finalCliArgs.keepTemp !== undefined) {
+        transformedCliArgs.audioCommit = {};
+        if (finalCliArgs.file !== undefined) transformedCliArgs.audioCommit.file = finalCliArgs.file;
+        if (finalCliArgs.keepTemp !== undefined) transformedCliArgs.audioCommit.keepTemp = finalCliArgs.keepTemp;
+    }
+    // Nested mappings for 'release' options
+    if (finalCliArgs.from !== undefined || finalCliArgs.to !== undefined) {
+        transformedCliArgs.release = {};
+        if (finalCliArgs.from !== undefined) transformedCliArgs.release.from = finalCliArgs.from;
+        if (finalCliArgs.to !== undefined) transformedCliArgs.release.to = finalCliArgs.to;
+        if (finalCliArgs.context !== undefined) transformedCliArgs.release.context = finalCliArgs.context;
+        if (finalCliArgs.messageLimit !== undefined) transformedCliArgs.release.messageLimit = finalCliArgs.messageLimit;
+    }
+    // Nested mappings for 'publish' options
+    if (finalCliArgs.mergeMethod !== undefined) {
+        transformedCliArgs.publish = {};
+        if (finalCliArgs.mergeMethod !== undefined) transformedCliArgs.publish.mergeMethod = finalCliArgs.mergeMethod;
+    }
+    // Nested mappings for 'link' and 'unlink' options (both use the same configuration)
+    if (finalCliArgs.scopeRoots !== undefined || finalCliArgs.workspaceFile !== undefined) {
+        transformedCliArgs.link = {};
+        if (finalCliArgs.scopeRoots !== undefined) {
+            try {
+                transformedCliArgs.link.scopeRoots = JSON.parse(finalCliArgs.scopeRoots);
+            } catch (error) {
+                throw new Error(`Invalid JSON for scope-roots: ${finalCliArgs.scopeRoots}`);
+            }
+        }
+        if (finalCliArgs.workspaceFile !== undefined) transformedCliArgs.link.workspaceFile = finalCliArgs.workspaceFile;
+    }
+    // Nested mappings for 'audio-review' options
+    if (finalCliArgs.includeCommitHistory !== undefined || finalCliArgs.includeRecentDiffs !== undefined || finalCliArgs.includeReleaseNotes !== undefined || finalCliArgs.includeGithubIssues !== undefined || finalCliArgs.commitHistoryLimit !== undefined || finalCliArgs.diffHistoryLimit !== undefined || finalCliArgs.releaseNotesLimit !== undefined || finalCliArgs.githubIssuesLimit !== undefined || finalCliArgs.file !== undefined || finalCliArgs.keepTemp !== undefined) {
+        transformedCliArgs.audioReview = {};
+        if (finalCliArgs.includeCommitHistory !== undefined) transformedCliArgs.audioReview.includeCommitHistory = finalCliArgs.includeCommitHistory;
+        if (finalCliArgs.includeRecentDiffs !== undefined) transformedCliArgs.audioReview.includeRecentDiffs = finalCliArgs.includeRecentDiffs;
+        if (finalCliArgs.includeReleaseNotes !== undefined) transformedCliArgs.audioReview.includeReleaseNotes = finalCliArgs.includeReleaseNotes;
+        if (finalCliArgs.includeGithubIssues !== undefined) transformedCliArgs.audioReview.includeGithubIssues = finalCliArgs.includeGithubIssues;
+        if (finalCliArgs.commitHistoryLimit !== undefined) transformedCliArgs.audioReview.commitHistoryLimit = finalCliArgs.commitHistoryLimit;
+        if (finalCliArgs.diffHistoryLimit !== undefined) transformedCliArgs.audioReview.diffHistoryLimit = finalCliArgs.diffHistoryLimit;
+        if (finalCliArgs.releaseNotesLimit !== undefined) transformedCliArgs.audioReview.releaseNotesLimit = finalCliArgs.releaseNotesLimit;
+        if (finalCliArgs.githubIssuesLimit !== undefined) transformedCliArgs.audioReview.githubIssuesLimit = finalCliArgs.githubIssuesLimit;
+        if (finalCliArgs.context !== undefined) transformedCliArgs.audioReview.context = finalCliArgs.context;
+        if (finalCliArgs.sendit !== undefined) transformedCliArgs.audioReview.sendit = finalCliArgs.sendit;
+        if (finalCliArgs.file !== undefined) transformedCliArgs.audioReview.file = finalCliArgs.file;
+        if (finalCliArgs.keepTemp !== undefined) transformedCliArgs.audioReview.keepTemp = finalCliArgs.keepTemp;
+    }
+    // Nested mappings for 'review' options
+    if (finalCliArgs.includeCommitHistory !== undefined || finalCliArgs.includeRecentDiffs !== undefined || finalCliArgs.includeReleaseNotes !== undefined || finalCliArgs.includeGithubIssues !== undefined || finalCliArgs.commitHistoryLimit !== undefined || finalCliArgs.diffHistoryLimit !== undefined || finalCliArgs.releaseNotesLimit !== undefined || finalCliArgs.githubIssuesLimit !== undefined || finalCliArgs.context !== undefined || finalCliArgs.sendit !== undefined || finalCliArgs.note !== undefined) {
+        transformedCliArgs.review = {};
+        if (finalCliArgs.note !== undefined) transformedCliArgs.review.note = finalCliArgs.note;
+        // Include optional review configuration options if specified
+        if (finalCliArgs.includeCommitHistory !== undefined) transformedCliArgs.review.includeCommitHistory = finalCliArgs.includeCommitHistory;
+        if (finalCliArgs.includeRecentDiffs !== undefined) transformedCliArgs.review.includeRecentDiffs = finalCliArgs.includeRecentDiffs;
+        if (finalCliArgs.includeReleaseNotes !== undefined) transformedCliArgs.review.includeReleaseNotes = finalCliArgs.includeReleaseNotes;
+        if (finalCliArgs.includeGithubIssues !== undefined) transformedCliArgs.review.includeGithubIssues = finalCliArgs.includeGithubIssues;
+        if (finalCliArgs.commitHistoryLimit !== undefined) transformedCliArgs.review.commitHistoryLimit = finalCliArgs.commitHistoryLimit;
+        if (finalCliArgs.diffHistoryLimit !== undefined) transformedCliArgs.review.diffHistoryLimit = finalCliArgs.diffHistoryLimit;
+        if (finalCliArgs.releaseNotesLimit !== undefined) transformedCliArgs.review.releaseNotesLimit = finalCliArgs.releaseNotesLimit;
+        if (finalCliArgs.githubIssuesLimit !== undefined) transformedCliArgs.review.githubIssuesLimit = finalCliArgs.githubIssuesLimit;
+        if (finalCliArgs.context !== undefined) transformedCliArgs.review.context = finalCliArgs.context;
+        if (finalCliArgs.sendit !== undefined) transformedCliArgs.review.sendit = finalCliArgs.sendit;
+    }
+    if (finalCliArgs.excludedPatterns !== undefined) transformedCliArgs.excludedPatterns = finalCliArgs.excludedPatterns;
+    // Note: openaiApiKey is handled separately via environment variable only
+    return transformedCliArgs;
+};
+// Update configure signature to accept cardigantime
+const configure = async (cardigantime)=>{
+    var _config_contextDirectories, _config_link;
+    const logger = getLogger();
+    let program = new Command();
+    // Configure program basics
+    program.name(PROGRAM_NAME).summary('Create Intelligent Release Notes or Change Logs from Git').description('Create Intelligent Release Notes or Change Logs from Git').version(VERSION);
+    // Let cardigantime add its arguments first
+    program = await cardigantime.configure(program);
+    // Check if --check-config is in process.argv early
+    if (process.argv.includes('--check-config')) {
+        // For check-config, use CardiganTime's built-in checkConfig method
+        program.parse();
+        const cliArgs = program.opts();
+        // Transform the flat CLI args
+        const transformedCliArgs = transformCliArgs(cliArgs);
+        // Use CardiganTime's built-in checkConfig method which displays
+        // hierarchical configuration information in a well-formatted way
+        await cardigantime.checkConfig(transformedCliArgs);
+        // Return minimal config for consistency, but main processing is done
+        const config = await validateAndProcessOptions({});
+        const secureConfig = await validateAndProcessSecureOptions();
+        const commandConfig = {
+            commandName: 'check-config'
+        };
+        return [
+            config,
+            secureConfig,
+            commandConfig
+        ];
+    }
+    // Check if --init-config is in process.argv early
+    if (process.argv.includes('--init-config')) {
+        // For init-config, use CardiganTime's built-in generateConfig method
+        program.parse();
+        const cliArgs = program.opts();
+        // Transform the flat CLI args
+        const transformedCliArgs = transformCliArgs(cliArgs);
+        // Use CardiganTime's built-in generateConfig method
+        await cardigantime.generateConfig(transformedCliArgs.configDirectory || KODRDRIV_DEFAULTS.configDirectory);
+        // Return minimal config for consistency, but main processing is done
+        const config = await validateAndProcessOptions({});
+        const secureConfig = await validateAndProcessSecureOptions();
+        const commandConfig = {
+            commandName: 'init-config'
+        };
+        return [
+            config,
+            secureConfig,
+            commandConfig
+        ];
+    }
+    // Get CLI arguments using the new function
+    const [finalCliArgs, commandConfig] = await getCliConfig(program);
+    logger.silly('Loaded Command Line Options: %s', JSON.stringify(finalCliArgs, null, 2));
+    // Transform the flat CLI args using the new function
+    const transformedCliArgs = transformCliArgs(finalCliArgs);
+    logger.silly('Transformed CLI Args for merging: %s', JSON.stringify(transformedCliArgs, null, 2));
+    // Get values from config file using Cardigantime's hierarchical configuration
+    const fileValues = await cardigantime.read(transformedCliArgs);
+    // Merge configurations: Defaults -> File -> CLI
+    // Properly merge the link section to preserve scope roots from config file
+    const mergedLink = {
+        ...KODRDRIV_DEFAULTS.link,
+        ...fileValues.link,
+        ...transformedCliArgs.link
+    };
+    const partialConfig = {
+        ...KODRDRIV_DEFAULTS,
+        ...fileValues,
+        ...transformedCliArgs,
+        link: mergedLink
+    }; // Cast to Partial<Config> initially
+    // Specific validation and processing after merge
+    const config = await validateAndProcessOptions(partialConfig);
+    // Log effective configuration summary at verbose level
+    logger.verbose('Configuration complete. Effective settings:');
+    logger.verbose(` Command: ${commandConfig.commandName}`);
+    logger.verbose(` Model: ${config.model}`);
+    logger.verbose(` Dry run: ${config.dryRun}`);
+    logger.verbose(` Debug: ${config.debug}`);
+    logger.verbose(` Verbose: ${config.verbose}`);
+    logger.verbose(` Config directory: ${config.configDirectory}`);
+    logger.verbose(` Output directory: ${config.outputDirectory}`);
+    logger.verbose(` Context directories: ${((_config_contextDirectories = config.contextDirectories) === null || _config_contextDirectories === void 0 ? void 0 : _config_contextDirectories.join(', ')) || 'none'}`);
+    if (config.excludedPatterns && config.excludedPatterns.length > 0) {
+        logger.verbose(` Excluded patterns: ${config.excludedPatterns.join(', ')}`);
+    }
+    if (Object.keys(((_config_link = config.link) === null || _config_link === void 0 ? void 0 : _config_link.scopeRoots) || {}).length > 0) {
+        logger.verbose(` Link scope roots: ${Object.keys(config.link.scopeRoots).join(', ')}`);
+    }
+    logger.silly('Final configuration: %s', JSON.stringify(config, null, 2));
+    const secureConfig = await validateAndProcessSecureOptions();
+    return [
+        config,
+        secureConfig,
+        commandConfig
+    ];
+};
+// Function to handle CLI argument parsing and processing
+async function getCliConfig(program) {
+    const addSharedOptions = (command)=>{
+        command.option('--dry-run', 'perform a dry run without saving files') // Removed default, will be handled by merging
+        .option('--verbose', 'enable verbose logging').option('--debug', 'enable debug logging').option('--overrides', 'enable overrides').option('--model <model>', 'OpenAI model to use').option('-d, --context-directories [contextDirectories...]', 'directories to scan for context').option('--config-dir <configDir>', 'configuration directory') // Keep config-dir for specifying location
+        .option('--output-dir <outputDir>', 'output directory for generated files').option('--preferences-dir <preferencesDir>', 'preferences directory for personal settings').option('--excluded-paths [excludedPatterns...]', 'paths to exclude from the diff').option('--keep-temp', 'keep temporary recording files');
+    };
+    // Add global options to the main program
+    // (cardigantime already adds most global options like --verbose, --debug, --config-dir)
+    // Add subcommands
+    const commitCommand = program.command('commit').argument('[direction]', 'direction or guidance for the commit message').description('Generate commit notes').option('--context <context>', 'context for the commit message').option('--cached', 'use cached diff').option('--add', 'add all changes before committing').option('--sendit', 'Commit with the message generated. No review.').option('--message-limit <messageLimit>', 'limit the number of messages to generate');
+    // Add shared options to commit command
+    addSharedOptions(commitCommand);
+    // Customize help output for commit command
+    commitCommand.configureHelp({
+        formatHelp: (cmd, helper)=>{
+            const nameAndVersion = `${helper.commandUsage(cmd)}\n\n${helper.commandDescription(cmd)}\n`;
+            const commitOptions = [
+                [
+                    '--context <context>',
+                    'context for the commit message'
+                ]
+            ];
+            const behavioralOptions = [
+                [
+                    '--cached',
+                    'use cached diff'
+                ],
+                [
+                    '--add',
+                    'add all changes before committing'
+                ],
+                [
+                    '--sendit',
+                    'Commit with the message generated. No review.'
+                ],
+                [
+                    '--message-limit <messageLimit>',
+                    'limit the number of messages to generate'
+                ]
+            ];
+            const globalOptions = [
+                [
+                    '--dry-run',
+                    'perform a dry run without saving files'
+                ],
+                [
+                    '--verbose',
+                    'enable verbose logging'
+                ],
+                [
+                    '--debug',
+                    'enable debug logging'
+                ],
+                [
+                    '--overrides',
+                    'enable overrides'
+                ],
+                [
+                    '--model <model>',
+                    'OpenAI model to use'
+                ],
+                [
+                    '-d, --context-directories [contextDirectories...]',
+                    'directories to scan for context'
+                ],
+                [
+                    '--config-dir <configDir>',
+                    'configuration directory'
+                ],
+                [
+                    '--excluded-paths [excludedPatterns...]',
+                    'paths to exclude from the diff'
+                ],
+                [
+                    '-h, --help',
+                    'display help for command'
+                ]
+            ];
+            const formatOptionsSection = (title, options)=>{
+                const maxWidth = Math.max(...options.map(([flag])=>flag.length));
+                return `${title}:\n` + options.map(([flag, desc])=>` ${flag.padEnd(maxWidth + 2)} ${desc}`).join('\n') + '\n';
+            };
+            return nameAndVersion + '\n' + formatOptionsSection('Commit Message Options', commitOptions) + '\n' + formatOptionsSection('Behavioral Options', behavioralOptions) + '\n' + formatOptionsSection('Global Options', globalOptions) + '\n' + 'Environment Variables:\n' + ' OPENAI_API_KEY OpenAI API key (required)\n';
+        }
+    });
+    const audioCommitCommand = program.command('audio-commit').option('--cached', 'use cached diff').option('--add', 'add all changes before committing').option('--sendit', 'Commit with the message generated. No review.').option('--direction <direction>', 'direction or guidance for the commit message').option('--message-limit <messageLimit>', 'limit the number of messages to generate').option('--file <file>', 'audio file path').description('Record audio to provide context, then generate and optionally commit with AI-generated message');
+    addSharedOptions(audioCommitCommand);
+    const releaseCommand = program.command('release').option('--from <from>', 'branch to generate release notes from').option('--to <to>', 'branch to generate release notes to').option('--context <context>', 'context for the commit message').description('Generate release notes');
+    addSharedOptions(releaseCommand);
+    const publishCommand = program.command('publish').option('--merge-method <method>', 'method to merge PR (merge, squash, rebase)', 'squash').description('Publish a release');
+    addSharedOptions(publishCommand);
+    const linkCommand = program.command('link').option('--scope-roots <scopeRoots>', 'JSON mapping of scopes to root directories (e.g., \'{"@company": "../"}\')').option('--workspace-file <workspaceFile>', 'path to workspace file', 'pnpm-workspace.yaml').description('Manage pnpm workspace links for local development');
+    addSharedOptions(linkCommand);
+    const unlinkCommand = program.command('unlink').option('--scope-roots <scopeRoots>', 'JSON mapping of scopes to root directories (e.g., \'{"@company": "../"}\')').option('--workspace-file <workspaceFile>', 'path to workspace file', 'pnpm-workspace.yaml').description('Remove pnpm workspace links and rebuild dependencies');
+    addSharedOptions(unlinkCommand);
+    const audioReviewCommand = program.command('audio-review').option('--include-commit-history', 'include recent commit log messages in context (default: true)').option('--no-include-commit-history', 'exclude commit log messages from context').option('--include-recent-diffs', 'include recent commit diffs in context (default: true)').option('--no-include-recent-diffs', 'exclude recent diffs from context').option('--include-release-notes', 'include recent release notes in context (default: false)').option('--no-include-release-notes', 'exclude release notes from context').option('--include-github-issues', 'include open GitHub issues in context (default: true)').option('--no-include-github-issues', 'exclude GitHub issues from context').option('--commit-history-limit <limit>', 'number of recent commits to include', parseInt).option('--diff-history-limit <limit>', 'number of recent commit diffs to include', parseInt).option('--release-notes-limit <limit>', 'number of recent release notes to include', parseInt).option('--github-issues-limit <limit>', 'number of open GitHub issues to include (max 20)', parseInt).option('--context <context>', 'additional context for the audio review').option('--file <file>', 'audio file path').description('Record audio, transcribe with Whisper, and analyze for project issues using AI');
+    addSharedOptions(audioReviewCommand);
+    const reviewCommand = program.command('review').argument('[note]', 'review note to analyze for project issues').option('--include-commit-history', 'include recent commit log messages in context (default: true)').option('--no-include-commit-history', 'exclude commit log messages from context').option('--include-recent-diffs', 'include recent commit diffs in context (default: true)').option('--no-include-recent-diffs', 'exclude recent diffs from context').option('--include-release-notes', 'include recent release notes in context (default: false)').option('--no-include-release-notes', 'exclude release notes from context').option('--include-github-issues', 'include open GitHub issues in context (default: true)').option('--no-include-github-issues', 'exclude GitHub issues from context').option('--commit-history-limit <limit>', 'number of recent commits to include', parseInt).option('--diff-history-limit <limit>', 'number of recent commit diffs to include', parseInt).option('--release-notes-limit <limit>', 'number of recent release notes to include', parseInt).option('--github-issues-limit <limit>', 'number of open GitHub issues to include (max 20)', parseInt).option('--context <context>', 'additional context for the review').option('--sendit', 'Create GitHub issues automatically without confirmation').description('Analyze review note for project issues using AI');
+    addSharedOptions(reviewCommand);
+    // Customize help output for review command
+    reviewCommand.configureHelp({
+        formatHelp: (cmd, helper)=>{
+            const nameAndVersion = `kodrdriv review [note] [options]\n\nAnalyze review note for project issues using AI\n`;
+            const argumentsSection = [
+                [
+                    'note',
+                    'review note to analyze for project issues (can also be piped via STDIN)'
+                ]
+            ];
+            const reviewOptions = [
+                [
+                    '--context <context>',
+                    'additional context for the review'
+                ]
+            ];
+            const gitContextOptions = [
+                [
+                    '--include-commit-history',
+                    'include recent commit log messages in context (default: true)'
+                ],
+                [
+                    '--no-include-commit-history',
+                    'exclude commit log messages from context'
+                ],
+                [
+                    '--include-recent-diffs',
+                    'include recent commit diffs in context (default: true)'
+                ],
+                [
+                    '--no-include-recent-diffs',
+                    'exclude recent diffs from context'
+                ],
+                [
+                    '--include-release-notes',
+                    'include recent release notes in context (default: false)'
+                ],
+                [
+                    '--no-include-release-notes',
+                    'exclude release notes from context'
+                ],
+                [
+                    '--include-github-issues',
+                    'include open GitHub issues in context (default: true)'
+                ],
+                [
+                    '--no-include-github-issues',
+                    'exclude GitHub issues from context'
+                ],
+                [
+                    '--commit-history-limit <limit>',
+                    'number of recent commits to include'
+                ],
+                [
+                    '--diff-history-limit <limit>',
+                    'number of recent commit diffs to include'
+                ],
+                [
+                    '--release-notes-limit <limit>',
+                    'number of recent release notes to include'
+                ],
+                [
+                    '--github-issues-limit <limit>',
+                    'number of open GitHub issues to include (max 20)'
+                ]
+            ];
+            const behavioralOptions = [
+                [
+                    '--sendit',
+                    'Create GitHub issues automatically without confirmation'
+                ]
+            ];
+            const globalOptions = [
+                [
+                    '--dry-run',
+                    'perform a dry run without saving files'
+                ],
+                [
+                    '--verbose',
+                    'enable verbose logging'
+                ],
+                [
+                    '--debug',
+                    'enable debug logging'
+                ],
+                [
+                    '--overrides',
+                    'enable overrides'
+                ],
+                [
+                    '--model <model>',
+                    'OpenAI model to use'
+                ],
+                [
+                    '-d, --context-directories [contextDirectories...]',
+                    'directories to scan for context'
+                ],
+                [
+                    '--config-dir <configDir>',
+                    'configuration directory'
+                ],
+                [
+                    '--output-dir <outputDir>',
+                    'output directory for generated files'
+                ],
+                [
+                    '--excluded-paths [excludedPatterns...]',
+                    'paths to exclude from the diff'
+                ],
+                [
+                    '-h, --help',
+                    'display help for command'
+                ]
+            ];
+            const formatOptionsSection = (title, options)=>{
+                const maxWidth = Math.max(...options.map(([flag])=>flag.length));
+                return `${title}:\n` + options.map(([flag, desc])=>` ${flag.padEnd(maxWidth + 2)} ${desc}`).join('\n') + '\n';
+            };
+            return nameAndVersion + '\n' + formatOptionsSection('Arguments', argumentsSection) + '\n' + formatOptionsSection('Options', reviewOptions) + '\n' + formatOptionsSection('Git Context Parameters', gitContextOptions) + '\n' + formatOptionsSection('Behavioral Options', behavioralOptions) + '\n' + formatOptionsSection('Global Options', globalOptions) + '\n' + 'Environment Variables:\n' + ' OPENAI_API_KEY OpenAI API key (required)\n';
+        }
+    });
+    const cleanCommand = program.command('clean').description('Remove the output directory and all generated files');
+    addSharedOptions(cleanCommand);
+    const selectAudioCommand = program.command('select-audio').description('Interactively select and save audio device for recording');
+    addSharedOptions(selectAudioCommand);
+    program.parse();
+    const cliArgs = program.opts(); // Get all opts initially
+    // Determine which command is being run
+    let commandName = DEFAULT_COMMAND;
+    let commandOptions = {}; // Store specific command options
+    if (program.args.length > 0) {
+        commandName = program.args[0];
+        validateCommand(commandName);
+    }
+    // Only proceed with command-specific options if validation passed
+    if (ALLOWED_COMMANDS.includes(commandName)) {
+        if (commandName === 'commit' && commitCommand.opts) {
+            commandOptions = commitCommand.opts();
+            // Handle positional argument for direction
+            const args = commitCommand.args;
+            if (args && args.length > 0 && args[0]) {
+                commandOptions.direction = args[0];
+            }
+            // Check for STDIN input for direction (takes precedence over positional argument)
+            const stdinInput = await readStdin();
+            if (stdinInput) {
+                commandOptions.direction = stdinInput;
+            }
+        } else if (commandName === 'audio-commit' && audioCommitCommand.opts) {
+            commandOptions = audioCommitCommand.opts();
+        } else if (commandName === 'release' && releaseCommand.opts) {
+            commandOptions = releaseCommand.opts();
+        } else if (commandName === 'publish' && publishCommand.opts) {
+            commandOptions = publishCommand.opts();
+        } else if (commandName === 'link' && linkCommand.opts) {
+            commandOptions = linkCommand.opts();
+        } else if (commandName === 'unlink' && unlinkCommand.opts) {
+            commandOptions = unlinkCommand.opts();
+        } else if (commandName === 'audio-review' && audioReviewCommand.opts) {
+            commandOptions = audioReviewCommand.opts();
+        } else if (commandName === 'review' && reviewCommand.opts) {
+            commandOptions = reviewCommand.opts();
+            // Handle positional argument for note
+            const args = reviewCommand.args;
+            if (args && args.length > 0 && args[0]) {
+                commandOptions.note = args[0];
+            }
+            // Check for STDIN input for note (takes precedence over positional argument)
+            const stdinInput = await readStdin();
+            if (stdinInput) {
+                commandOptions.note = stdinInput;
+            }
+        } else if (commandName === 'clean' && cleanCommand.opts) {
+            commandOptions = cleanCommand.opts();
+        } else if (commandName === 'select-audio' && selectAudioCommand.opts) {
+            commandOptions = selectAudioCommand.opts();
+        }
+    }
+    // Include command name in CLI args for merging
+    const finalCliArgs = {
+        ...cliArgs,
+        ...commandOptions
+    };
+    const commandConfig = {
+        commandName
+    };
+    return [
+        finalCliArgs,
+        commandConfig
+    ];
+}
+async function validateAndProcessSecureOptions() {
+    // For check-config and init-config commands, we don't want to throw an error for missing API key
+    const isCheckConfig = process.argv.includes('--check-config');
+    const isInitConfig = process.argv.includes('--init-config');
+    if (!process.env.OPENAI_API_KEY && !isCheckConfig && !isInitConfig) {
+        throw new Error('OpenAI API key is required. Please set the OPENAI_API_KEY environment variable.');
+    }
+    // Prefer CLI key if provided, otherwise use env var (might be undefined for check-config/init-config)
+    const openaiApiKey = process.env.OPENAI_API_KEY;
+    const secureConfig = {
+        openaiApiKey: openaiApiKey
+    };
+    return secureConfig;
+}
+// Renamed validation function to reflect its broader role
+async function validateAndProcessOptions(options) {
+    var _options_commit, _options_commit1, _options_commit2, _options_commit3, _options_commit4, _options_commit5, _options_audioCommit, _options_audioCommit1, _options_audioCommit2, _options_audioCommit3, _options_release, _options_release1, _options_release2, _options_release3, _options_audioReview, _options_audioReview1, _options_audioReview2, _options_audioReview3, _options_audioReview4, _options_audioReview5, _options_audioReview6, _options_audioReview7, _options_audioReview8, _options_audioReview9, _options_audioReview10, _options_audioReview11, _options_audioReview12, _options_audioReview13, _options_review, _options_review1, _options_review2, _options_review3, _options_review4, _options_review5, _options_review6, _options_review7, _options_review8, _options_review9, _options_review10, _options_publish, _options_publish1, _options_publish2, _options_publish3, _options_publish4, _options_link, _options_link1, _options_link2;
+    const contextDirectories = await validateContextDirectories(options.contextDirectories || KODRDRIV_DEFAULTS.contextDirectories);
+    const configDir = options.configDirectory || KODRDRIV_DEFAULTS.configDirectory;
+    var _options_dryRun, _options_verbose, _options_debug, _options_overrides, _options_model, _options_outputDirectory, _options_preferencesDirectory, _options_discoveredConfigDirs, _options_resolvedConfigDirs, _options_commit_add, _options_commit_cached, _options_commit_sendit, _options_commit_messageLimit, _options_audioCommit_maxRecordingTime, _options_audioCommit_audioDevice, _options_release_from, _options_release_to, _options_release_messageLimit, _options_audioReview_includeCommitHistory, _options_audioReview_includeRecentDiffs, _options_audioReview_includeReleaseNotes, _options_audioReview_includeGithubIssues, _options_audioReview_commitHistoryLimit, _options_audioReview_diffHistoryLimit, _options_audioReview_releaseNotesLimit, _options_audioReview_githubIssuesLimit, _options_audioReview_sendit, _options_audioReview_maxRecordingTime, _options_audioReview_audioDevice, _options_review_includeCommitHistory, _options_review_includeRecentDiffs, _options_review_includeReleaseNotes, _options_review_includeGithubIssues, _options_review_commitHistoryLimit, _options_review_diffHistoryLimit, _options_review_releaseNotesLimit, _options_review_githubIssuesLimit, _options_review_sendit, _options_publish_mergeMethod, _options_publish_requiredEnvVars, _options_publish_linkWorkspacePackages, _options_publish_unlinkWorkspacePackages, _options_link_scopeRoots, _options_link_workspaceFile, _options_link_dryRun, _options_excludedPatterns;
+    // Skip config directory validation since Cardigantime handles hierarchical lookup
+    // Ensure all required fields are present and have correct types after merging
+    const finalConfig = {
+        dryRun: (_options_dryRun = options.dryRun) !== null && _options_dryRun !== void 0 ? _options_dryRun : KODRDRIV_DEFAULTS.dryRun,
+        verbose: (_options_verbose = options.verbose) !== null && _options_verbose !== void 0 ? _options_verbose : KODRDRIV_DEFAULTS.verbose,
+        debug: (_options_debug = options.debug) !== null && _options_debug !== void 0 ? _options_debug : KODRDRIV_DEFAULTS.debug,
+        overrides: (_options_overrides = options.overrides) !== null && _options_overrides !== void 0 ? _options_overrides : KODRDRIV_DEFAULTS.overrides,
+        model: (_options_model = options.model) !== null && _options_model !== void 0 ? _options_model : KODRDRIV_DEFAULTS.model,
+        contextDirectories: contextDirectories,
+        configDirectory: configDir,
+        outputDirectory: (_options_outputDirectory = options.outputDirectory) !== null && _options_outputDirectory !== void 0 ? _options_outputDirectory : KODRDRIV_DEFAULTS.outputDirectory,
+        preferencesDirectory: (_options_preferencesDirectory = options.preferencesDirectory) !== null && _options_preferencesDirectory !== void 0 ? _options_preferencesDirectory : KODRDRIV_DEFAULTS.preferencesDirectory,
+        // Cardigantime-specific properties (from fileValues or defaults)
+        discoveredConfigDirs: (_options_discoveredConfigDirs = options.discoveredConfigDirs) !== null && _options_discoveredConfigDirs !== void 0 ? _options_discoveredConfigDirs : [],
+        resolvedConfigDirs: (_options_resolvedConfigDirs = options.resolvedConfigDirs) !== null && _options_resolvedConfigDirs !== void 0 ? _options_resolvedConfigDirs : [],
|
|
1034
|
+
// Command-specific options with defaults
|
|
1035
|
+
commit: {
|
|
1036
|
+
add: (_options_commit_add = (_options_commit = options.commit) === null || _options_commit === void 0 ? void 0 : _options_commit.add) !== null && _options_commit_add !== void 0 ? _options_commit_add : KODRDRIV_DEFAULTS.commit.add,
|
|
1037
|
+
cached: (_options_commit_cached = (_options_commit1 = options.commit) === null || _options_commit1 === void 0 ? void 0 : _options_commit1.cached) !== null && _options_commit_cached !== void 0 ? _options_commit_cached : KODRDRIV_DEFAULTS.commit.cached,
|
|
1038
|
+
sendit: (_options_commit_sendit = (_options_commit2 = options.commit) === null || _options_commit2 === void 0 ? void 0 : _options_commit2.sendit) !== null && _options_commit_sendit !== void 0 ? _options_commit_sendit : KODRDRIV_DEFAULTS.commit.sendit,
|
|
1039
|
+
messageLimit: (_options_commit_messageLimit = (_options_commit3 = options.commit) === null || _options_commit3 === void 0 ? void 0 : _options_commit3.messageLimit) !== null && _options_commit_messageLimit !== void 0 ? _options_commit_messageLimit : KODRDRIV_DEFAULTS.commit.messageLimit,
|
|
1040
|
+
context: (_options_commit4 = options.commit) === null || _options_commit4 === void 0 ? void 0 : _options_commit4.context,
|
|
1041
|
+
direction: (_options_commit5 = options.commit) === null || _options_commit5 === void 0 ? void 0 : _options_commit5.direction
|
|
1042
|
+
},
|
|
1043
|
+
audioCommit: {
|
|
1044
|
+
maxRecordingTime: (_options_audioCommit_maxRecordingTime = (_options_audioCommit = options.audioCommit) === null || _options_audioCommit === void 0 ? void 0 : _options_audioCommit.maxRecordingTime) !== null && _options_audioCommit_maxRecordingTime !== void 0 ? _options_audioCommit_maxRecordingTime : KODRDRIV_DEFAULTS.audioCommit.maxRecordingTime,
|
|
1045
|
+
audioDevice: (_options_audioCommit_audioDevice = (_options_audioCommit1 = options.audioCommit) === null || _options_audioCommit1 === void 0 ? void 0 : _options_audioCommit1.audioDevice) !== null && _options_audioCommit_audioDevice !== void 0 ? _options_audioCommit_audioDevice : KODRDRIV_DEFAULTS.audioCommit.audioDevice,
|
|
1046
|
+
file: (_options_audioCommit2 = options.audioCommit) === null || _options_audioCommit2 === void 0 ? void 0 : _options_audioCommit2.file,
|
|
1047
|
+
keepTemp: (_options_audioCommit3 = options.audioCommit) === null || _options_audioCommit3 === void 0 ? void 0 : _options_audioCommit3.keepTemp
|
|
1048
|
+
},
|
|
1049
|
+
release: {
|
|
1050
|
+
from: (_options_release_from = (_options_release = options.release) === null || _options_release === void 0 ? void 0 : _options_release.from) !== null && _options_release_from !== void 0 ? _options_release_from : KODRDRIV_DEFAULTS.release.from,
|
|
1051
|
+
to: (_options_release_to = (_options_release1 = options.release) === null || _options_release1 === void 0 ? void 0 : _options_release1.to) !== null && _options_release_to !== void 0 ? _options_release_to : KODRDRIV_DEFAULTS.release.to,
|
|
1052
|
+
messageLimit: (_options_release_messageLimit = (_options_release2 = options.release) === null || _options_release2 === void 0 ? void 0 : _options_release2.messageLimit) !== null && _options_release_messageLimit !== void 0 ? _options_release_messageLimit : KODRDRIV_DEFAULTS.release.messageLimit,
|
|
1053
|
+
context: (_options_release3 = options.release) === null || _options_release3 === void 0 ? void 0 : _options_release3.context
|
|
1054
|
+
},
|
|
1055
|
+
audioReview: {
|
|
1056
|
+
includeCommitHistory: (_options_audioReview_includeCommitHistory = (_options_audioReview = options.audioReview) === null || _options_audioReview === void 0 ? void 0 : _options_audioReview.includeCommitHistory) !== null && _options_audioReview_includeCommitHistory !== void 0 ? _options_audioReview_includeCommitHistory : KODRDRIV_DEFAULTS.audioReview.includeCommitHistory,
|
|
1057
|
+
includeRecentDiffs: (_options_audioReview_includeRecentDiffs = (_options_audioReview1 = options.audioReview) === null || _options_audioReview1 === void 0 ? void 0 : _options_audioReview1.includeRecentDiffs) !== null && _options_audioReview_includeRecentDiffs !== void 0 ? _options_audioReview_includeRecentDiffs : KODRDRIV_DEFAULTS.audioReview.includeRecentDiffs,
|
|
1058
|
+
includeReleaseNotes: (_options_audioReview_includeReleaseNotes = (_options_audioReview2 = options.audioReview) === null || _options_audioReview2 === void 0 ? void 0 : _options_audioReview2.includeReleaseNotes) !== null && _options_audioReview_includeReleaseNotes !== void 0 ? _options_audioReview_includeReleaseNotes : KODRDRIV_DEFAULTS.audioReview.includeReleaseNotes,
|
|
1059
|
+
includeGithubIssues: (_options_audioReview_includeGithubIssues = (_options_audioReview3 = options.audioReview) === null || _options_audioReview3 === void 0 ? void 0 : _options_audioReview3.includeGithubIssues) !== null && _options_audioReview_includeGithubIssues !== void 0 ? _options_audioReview_includeGithubIssues : KODRDRIV_DEFAULTS.audioReview.includeGithubIssues,
|
|
1060
|
+
commitHistoryLimit: (_options_audioReview_commitHistoryLimit = (_options_audioReview4 = options.audioReview) === null || _options_audioReview4 === void 0 ? void 0 : _options_audioReview4.commitHistoryLimit) !== null && _options_audioReview_commitHistoryLimit !== void 0 ? _options_audioReview_commitHistoryLimit : KODRDRIV_DEFAULTS.audioReview.commitHistoryLimit,
|
|
1061
|
+
diffHistoryLimit: (_options_audioReview_diffHistoryLimit = (_options_audioReview5 = options.audioReview) === null || _options_audioReview5 === void 0 ? void 0 : _options_audioReview5.diffHistoryLimit) !== null && _options_audioReview_diffHistoryLimit !== void 0 ? _options_audioReview_diffHistoryLimit : KODRDRIV_DEFAULTS.audioReview.diffHistoryLimit,
|
|
1062
|
+
releaseNotesLimit: (_options_audioReview_releaseNotesLimit = (_options_audioReview6 = options.audioReview) === null || _options_audioReview6 === void 0 ? void 0 : _options_audioReview6.releaseNotesLimit) !== null && _options_audioReview_releaseNotesLimit !== void 0 ? _options_audioReview_releaseNotesLimit : KODRDRIV_DEFAULTS.audioReview.releaseNotesLimit,
|
|
1063
|
+
githubIssuesLimit: (_options_audioReview_githubIssuesLimit = (_options_audioReview7 = options.audioReview) === null || _options_audioReview7 === void 0 ? void 0 : _options_audioReview7.githubIssuesLimit) !== null && _options_audioReview_githubIssuesLimit !== void 0 ? _options_audioReview_githubIssuesLimit : KODRDRIV_DEFAULTS.audioReview.githubIssuesLimit,
|
|
1064
|
+
context: (_options_audioReview8 = options.audioReview) === null || _options_audioReview8 === void 0 ? void 0 : _options_audioReview8.context,
|
|
1065
|
+
sendit: (_options_audioReview_sendit = (_options_audioReview9 = options.audioReview) === null || _options_audioReview9 === void 0 ? void 0 : _options_audioReview9.sendit) !== null && _options_audioReview_sendit !== void 0 ? _options_audioReview_sendit : KODRDRIV_DEFAULTS.audioReview.sendit,
|
|
1066
|
+
maxRecordingTime: (_options_audioReview_maxRecordingTime = (_options_audioReview10 = options.audioReview) === null || _options_audioReview10 === void 0 ? void 0 : _options_audioReview10.maxRecordingTime) !== null && _options_audioReview_maxRecordingTime !== void 0 ? _options_audioReview_maxRecordingTime : KODRDRIV_DEFAULTS.audioReview.maxRecordingTime,
|
|
1067
|
+
audioDevice: (_options_audioReview_audioDevice = (_options_audioReview11 = options.audioReview) === null || _options_audioReview11 === void 0 ? void 0 : _options_audioReview11.audioDevice) !== null && _options_audioReview_audioDevice !== void 0 ? _options_audioReview_audioDevice : KODRDRIV_DEFAULTS.audioReview.audioDevice,
|
|
1068
|
+
file: (_options_audioReview12 = options.audioReview) === null || _options_audioReview12 === void 0 ? void 0 : _options_audioReview12.file,
|
|
1069
|
+
keepTemp: (_options_audioReview13 = options.audioReview) === null || _options_audioReview13 === void 0 ? void 0 : _options_audioReview13.keepTemp
|
|
1070
|
+
},
|
|
1071
|
+
review: {
|
|
1072
|
+
includeCommitHistory: (_options_review_includeCommitHistory = (_options_review = options.review) === null || _options_review === void 0 ? void 0 : _options_review.includeCommitHistory) !== null && _options_review_includeCommitHistory !== void 0 ? _options_review_includeCommitHistory : KODRDRIV_DEFAULTS.review.includeCommitHistory,
|
|
1073
|
+
includeRecentDiffs: (_options_review_includeRecentDiffs = (_options_review1 = options.review) === null || _options_review1 === void 0 ? void 0 : _options_review1.includeRecentDiffs) !== null && _options_review_includeRecentDiffs !== void 0 ? _options_review_includeRecentDiffs : KODRDRIV_DEFAULTS.review.includeRecentDiffs,
|
|
1074
|
+
includeReleaseNotes: (_options_review_includeReleaseNotes = (_options_review2 = options.review) === null || _options_review2 === void 0 ? void 0 : _options_review2.includeReleaseNotes) !== null && _options_review_includeReleaseNotes !== void 0 ? _options_review_includeReleaseNotes : KODRDRIV_DEFAULTS.review.includeReleaseNotes,
|
|
1075
|
+
includeGithubIssues: (_options_review_includeGithubIssues = (_options_review3 = options.review) === null || _options_review3 === void 0 ? void 0 : _options_review3.includeGithubIssues) !== null && _options_review_includeGithubIssues !== void 0 ? _options_review_includeGithubIssues : KODRDRIV_DEFAULTS.review.includeGithubIssues,
|
|
1076
|
+
commitHistoryLimit: (_options_review_commitHistoryLimit = (_options_review4 = options.review) === null || _options_review4 === void 0 ? void 0 : _options_review4.commitHistoryLimit) !== null && _options_review_commitHistoryLimit !== void 0 ? _options_review_commitHistoryLimit : KODRDRIV_DEFAULTS.review.commitHistoryLimit,
|
|
1077
|
+
diffHistoryLimit: (_options_review_diffHistoryLimit = (_options_review5 = options.review) === null || _options_review5 === void 0 ? void 0 : _options_review5.diffHistoryLimit) !== null && _options_review_diffHistoryLimit !== void 0 ? _options_review_diffHistoryLimit : KODRDRIV_DEFAULTS.review.diffHistoryLimit,
|
|
1078
|
+
releaseNotesLimit: (_options_review_releaseNotesLimit = (_options_review6 = options.review) === null || _options_review6 === void 0 ? void 0 : _options_review6.releaseNotesLimit) !== null && _options_review_releaseNotesLimit !== void 0 ? _options_review_releaseNotesLimit : KODRDRIV_DEFAULTS.review.releaseNotesLimit,
|
|
1079
|
+
githubIssuesLimit: (_options_review_githubIssuesLimit = (_options_review7 = options.review) === null || _options_review7 === void 0 ? void 0 : _options_review7.githubIssuesLimit) !== null && _options_review_githubIssuesLimit !== void 0 ? _options_review_githubIssuesLimit : KODRDRIV_DEFAULTS.review.githubIssuesLimit,
|
|
1080
|
+
context: (_options_review8 = options.review) === null || _options_review8 === void 0 ? void 0 : _options_review8.context,
|
|
1081
|
+
sendit: (_options_review_sendit = (_options_review9 = options.review) === null || _options_review9 === void 0 ? void 0 : _options_review9.sendit) !== null && _options_review_sendit !== void 0 ? _options_review_sendit : KODRDRIV_DEFAULTS.review.sendit,
|
|
1082
|
+
note: (_options_review10 = options.review) === null || _options_review10 === void 0 ? void 0 : _options_review10.note
|
|
1083
|
+
},
|
|
1084
|
+
publish: {
|
|
1085
|
+
mergeMethod: (_options_publish_mergeMethod = (_options_publish = options.publish) === null || _options_publish === void 0 ? void 0 : _options_publish.mergeMethod) !== null && _options_publish_mergeMethod !== void 0 ? _options_publish_mergeMethod : KODRDRIV_DEFAULTS.publish.mergeMethod,
|
|
1086
|
+
dependencyUpdatePatterns: (_options_publish1 = options.publish) === null || _options_publish1 === void 0 ? void 0 : _options_publish1.dependencyUpdatePatterns,
|
|
1087
|
+
requiredEnvVars: (_options_publish_requiredEnvVars = (_options_publish2 = options.publish) === null || _options_publish2 === void 0 ? void 0 : _options_publish2.requiredEnvVars) !== null && _options_publish_requiredEnvVars !== void 0 ? _options_publish_requiredEnvVars : KODRDRIV_DEFAULTS.publish.requiredEnvVars,
|
|
1088
|
+
linkWorkspacePackages: (_options_publish_linkWorkspacePackages = (_options_publish3 = options.publish) === null || _options_publish3 === void 0 ? void 0 : _options_publish3.linkWorkspacePackages) !== null && _options_publish_linkWorkspacePackages !== void 0 ? _options_publish_linkWorkspacePackages : KODRDRIV_DEFAULTS.publish.linkWorkspacePackages,
|
|
1089
|
+
unlinkWorkspacePackages: (_options_publish_unlinkWorkspacePackages = (_options_publish4 = options.publish) === null || _options_publish4 === void 0 ? void 0 : _options_publish4.unlinkWorkspacePackages) !== null && _options_publish_unlinkWorkspacePackages !== void 0 ? _options_publish_unlinkWorkspacePackages : KODRDRIV_DEFAULTS.publish.unlinkWorkspacePackages
|
|
1090
|
+
},
|
|
1091
|
+
link: {
|
|
1092
|
+
scopeRoots: (_options_link_scopeRoots = (_options_link = options.link) === null || _options_link === void 0 ? void 0 : _options_link.scopeRoots) !== null && _options_link_scopeRoots !== void 0 ? _options_link_scopeRoots : KODRDRIV_DEFAULTS.link.scopeRoots,
|
|
1093
|
+
workspaceFile: (_options_link_workspaceFile = (_options_link1 = options.link) === null || _options_link1 === void 0 ? void 0 : _options_link1.workspaceFile) !== null && _options_link_workspaceFile !== void 0 ? _options_link_workspaceFile : KODRDRIV_DEFAULTS.link.workspaceFile,
|
|
1094
|
+
dryRun: (_options_link_dryRun = (_options_link2 = options.link) === null || _options_link2 === void 0 ? void 0 : _options_link2.dryRun) !== null && _options_link_dryRun !== void 0 ? _options_link_dryRun : KODRDRIV_DEFAULTS.link.dryRun
|
|
1095
|
+
},
|
|
1096
|
+
excludedPatterns: (_options_excludedPatterns = options.excludedPatterns) !== null && _options_excludedPatterns !== void 0 ? _options_excludedPatterns : KODRDRIV_DEFAULTS.excludedPatterns
|
|
1097
|
+
};
|
|
1098
|
+
// Final validation against the MainConfig shape (optional, cardigantime might handle it)
|
|
1099
|
+
// You could potentially use ConfigShape.parse(finalConfig) here if needed
|
|
1100
|
+
return finalConfig;
|
|
1101
|
+
}
|
|
1102
|
+
// Export for testing
function validateCommand(commandName) {
    if (!ALLOWED_COMMANDS.includes(commandName)) {
        throw new Error(`Invalid command: ${commandName}, allowed commands: ${ALLOWED_COMMANDS.join(', ')}`);
    }
    return commandName;
}
// Export for testing
async function validateContextDirectories(contextDirectories) {
    const logger = getLogger();
    const storage = create$2({
        log: logger.info
    });
    // Filter out directories that don't exist
    const validDirectories = [];
    for (const dir of contextDirectories){
        try {
            if (await storage.isDirectoryReadable(dir)) {
                validDirectories.push(dir);
            } else {
                logger.warn(`Directory not readable: ${dir}`);
            }
        } catch (error) {
            logger.warn(`Error validating directory ${dir}: ${error.message}`);
        }
    }
    return validDirectories;
}

class ExitError extends Error {
    constructor(message){
        super(message);
        this.name = 'ExitError';
    }
}

async function run(command, options = {}) {
    const execPromise = util.promisify(exec);
    return execPromise(command, options);
}
async function runWithDryRunSupport(command, isDryRun, options = {}) {
    const logger = getLogger();
    if (isDryRun) {
        logger.info(`DRY RUN: Would execute command: ${command}`);
        return {
            stdout: '',
            stderr: ''
        };
    }
    return run(command, options);
}
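runWithDryRunSupport short-circuits to an empty result when dry-run is active; a standalone sketch of the same pattern built directly on Node's promisified exec (console stands in for the bundle's logger, and the command is just an example):

import { exec } from 'node:child_process';
import { promisify } from 'node:util';

const execPromise = promisify(exec);

async function runWithDryRun(command, isDryRun) {
    if (isDryRun) {
        // Nothing executes; the caller still gets the usual shape back.
        console.log(`DRY RUN: Would execute command: ${command}`);
        return { stdout: '', stderr: '' };
    }
    return execPromise(command);
}

async function main() {
    const { stdout } = await runWithDryRun('git --version', false);
    console.log(stdout.trim());
    await runWithDryRun('git push', true); // only logs, never runs
}
main();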
|
|
1153
|
+
|
|
1154
|
+
// Enhanced exclusion patterns specifically for review context
|
|
1155
|
+
// These focus on excluding large files, binaries, and content that doesn't help with issue analysis
|
|
1156
|
+
const getReviewExcludedPatterns = (basePatterns)=>{
|
|
1157
|
+
const reviewSpecificExclusions = [
|
|
1158
|
+
// Lock files and dependency files (often massive)
|
|
1159
|
+
"*lock*",
|
|
1160
|
+
"*.lock",
|
|
1161
|
+
"pnpm-lock.yaml",
|
|
1162
|
+
"package-lock.json",
|
|
1163
|
+
"yarn.lock",
|
|
1164
|
+
"bun.lockb",
|
|
1165
|
+
"composer.lock",
|
|
1166
|
+
"Cargo.lock",
|
|
1167
|
+
"Gemfile.lock",
|
|
1168
|
+
"Pipfile.lock",
|
|
1169
|
+
"poetry.lock",
|
|
1170
|
+
// Image files (binary and large)
|
|
1171
|
+
"*.png",
|
|
1172
|
+
"*.jpg",
|
|
1173
|
+
"*.jpeg",
|
|
1174
|
+
"*.gif",
|
|
1175
|
+
"*.bmp",
|
|
1176
|
+
"*.tiff",
|
|
1177
|
+
"*.webp",
|
|
1178
|
+
"*.svg",
|
|
1179
|
+
"*.ico",
|
|
1180
|
+
"*.icns",
|
|
1181
|
+
// Video and audio files
|
|
1182
|
+
"*.mp4",
|
|
1183
|
+
"*.avi",
|
|
1184
|
+
"*.mov",
|
|
1185
|
+
"*.wmv",
|
|
1186
|
+
"*.flv",
|
|
1187
|
+
"*.mp3",
|
|
1188
|
+
"*.wav",
|
|
1189
|
+
"*.flac",
|
|
1190
|
+
// Archives and compressed files
|
|
1191
|
+
"*.zip",
|
|
1192
|
+
"*.tar",
|
|
1193
|
+
"*.tar.gz",
|
|
1194
|
+
"*.tgz",
|
|
1195
|
+
"*.rar",
|
|
1196
|
+
"*.7z",
|
|
1197
|
+
"*.bz2",
|
|
1198
|
+
"*.xz",
|
|
1199
|
+
// Binary executables and libraries
|
|
1200
|
+
"*.exe",
|
|
1201
|
+
"*.dll",
|
|
1202
|
+
"*.so",
|
|
1203
|
+
"*.dylib",
|
|
1204
|
+
"*.bin",
|
|
1205
|
+
"*.app",
|
|
1206
|
+
// Database files
|
|
1207
|
+
"*.db",
|
|
1208
|
+
"*.sqlite",
|
|
1209
|
+
"*.sqlite3",
|
|
1210
|
+
"*.mdb",
|
|
1211
|
+
// Large generated files
|
|
1212
|
+
"*.map",
|
|
1213
|
+
"*.min.js",
|
|
1214
|
+
"*.min.css",
|
|
1215
|
+
"bundle.*",
|
|
1216
|
+
"vendor.*",
|
|
1217
|
+
// Documentation that's often large
|
|
1218
|
+
"*.pdf",
|
|
1219
|
+
"*.doc",
|
|
1220
|
+
"*.docx",
|
|
1221
|
+
"*.ppt",
|
|
1222
|
+
"*.pptx",
|
|
1223
|
+
// IDE and OS generated files
|
|
1224
|
+
".DS_Store",
|
|
1225
|
+
"Thumbs.db",
|
|
1226
|
+
"*.swp",
|
|
1227
|
+
"*.tmp",
|
|
1228
|
+
// Certificate and key files
|
|
1229
|
+
"*.pem",
|
|
1230
|
+
"*.crt",
|
|
1231
|
+
"*.key",
|
|
1232
|
+
"*.p12",
|
|
1233
|
+
"*.pfx",
|
|
1234
|
+
// Large config/data files that are often auto-generated
|
|
1235
|
+
"tsconfig.tsbuildinfo",
|
|
1236
|
+
"*.cache",
|
|
1237
|
+
".eslintcache"
|
|
1238
|
+
];
|
|
1239
|
+
// Combine base patterns with review specific exclusions, removing duplicates
|
|
1240
|
+
const combinedPatterns = [
|
|
1241
|
+
...new Set([
|
|
1242
|
+
...basePatterns,
|
|
1243
|
+
...reviewSpecificExclusions
|
|
1244
|
+
])
|
|
1245
|
+
];
|
|
1246
|
+
return combinedPatterns;
|
|
1247
|
+
};
|
|
1248
|
+
// Function to truncate overly large diff content while preserving structure
const truncateLargeDiff = (diffContent, maxLength = 5000)=>{
    if (diffContent.length <= maxLength) {
        return diffContent;
    }
    const lines = diffContent.split('\n');
    const truncatedLines = [];
    let currentLength = 0;
    let truncated = false;
    for (const line of lines){
        if (currentLength + line.length + 1 > maxLength) {
            truncated = true;
            break;
        }
        truncatedLines.push(line);
        currentLength += line.length + 1; // +1 for newline
    }
    if (truncated) {
        truncatedLines.push('');
        truncatedLines.push(`... [TRUNCATED: Original diff was ${diffContent.length} characters, showing first ${currentLength}] ...`);
    }
    return truncatedLines.join('\n');
};
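A worked example of the truncation rule, assuming truncateLargeDiff were callable from outside the bundle (it is module-internal, so the call is illustrative):

// Ten lines of six characters each: 69 characters once joined with newlines.
const sample = Array.from({ length: 10 }, (_, i) => `line ${i}`).join('\n');

// With maxLength = 30 the first four whole lines fit (7 chars each counting the
// newline, 28 total); the rest collapse into a blank line plus the marker
//   ... [TRUNCATED: Original diff was 69 characters, showing first 28] ...
const truncated = truncateLargeDiff(sample, 30);
console.log(truncated.split('\n').length); // 6 (four kept lines, blank line, marker)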
|
|
1271
|
+
const create$1 = async (options)=>{
|
|
1272
|
+
const logger = getLogger();
|
|
1273
|
+
async function get() {
|
|
1274
|
+
try {
|
|
1275
|
+
logger.verbose('Gathering change information from Git');
|
|
1276
|
+
try {
|
|
1277
|
+
logger.debug('Executing git diff');
|
|
1278
|
+
const excludeString = options.excludedPatterns.map((p)=>`':(exclude)${p}'`).join(' ');
|
|
1279
|
+
let range = '';
|
|
1280
|
+
if (options.from && options.to) {
|
|
1281
|
+
range = `${options.from}..${options.to}`;
|
|
1282
|
+
} else if (options.from) {
|
|
1283
|
+
range = `${options.from}`;
|
|
1284
|
+
} else if (options.to) {
|
|
1285
|
+
range = `${options.to}`;
|
|
1286
|
+
}
|
|
1287
|
+
let command = '';
|
|
1288
|
+
if (options.cached) {
|
|
1289
|
+
command = `git diff --cached${range ? ' ' + range : ''} -- . ${excludeString}`;
|
|
1290
|
+
} else {
|
|
1291
|
+
command = `git diff${range ? ' ' + range : ''} -- . ${excludeString}`;
|
|
1292
|
+
}
|
|
1293
|
+
            const { stdout, stderr } = await run(command);
            if (stderr) {
                logger.warn('Git diff produced stderr: %s', stderr);
            }
            logger.debug('Git diff output: %s', stdout);
            return stdout;
        } catch (error) {
            logger.error('Failed to execute git diff: %s', error.message);
|
|
1301
|
+
throw error;
|
|
1302
|
+
}
|
|
1303
|
+
} catch (error) {
|
|
1304
|
+
logger.error('Error occurred during gather change phase: %s %s', error.message, error.stack);
|
|
1305
|
+
throw new ExitError('Error occurred during gather change phase');
|
|
1306
|
+
}
|
|
1307
|
+
}
|
|
1308
|
+
return {
|
|
1309
|
+
get
|
|
1310
|
+
};
|
|
1311
|
+
};
|
|
1312
|
+
const hasStagedChanges = async ()=>{
    const logger = getLogger();
    try {
        logger.debug('Checking for staged changes');
        const { stderr } = await run('git diff --cached --quiet');
        if (stderr) {
            logger.warn('Git diff produced stderr: %s', stderr);
        }
        // If there are staged changes, git diff --cached --quiet will return non-zero
        // So if we get here without an error, there are no staged changes
        return false;
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    } catch (error) {
        // If we get an error, it means there are staged changes
        return true;
    }
};
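The detection above relies on git's exit status rather than its output: git diff --cached --quiet exits 0 when the index is clean and non-zero when something is staged, which is why the catch branch reports staged changes. A standalone check of that behaviour using execFile directly (run it inside a git repository):

import { execFile } from 'node:child_process';

// Exit code 0 -> nothing staged; exit code 1 -> staged changes present.
execFile('git', ['diff', '--cached', '--quiet'], (error) => {
    if (!error) {
        console.log('index is clean');
    } else if (error.code === 1) {
        console.log('staged changes present');
    } else {
        console.error('git failed:', error.message);
    }
});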
|
|
1329
|
+
// High-level function to get recent diffs formatted for review context
|
|
1330
|
+
const getRecentDiffsForReview = async (options)=>{
|
|
1331
|
+
const logger = getLogger();
|
|
1332
|
+
const diffLimit = options.limit || 5;
|
|
1333
|
+
// Get enhanced exclusion patterns for review context
|
|
1334
|
+
const reviewExcluded = getReviewExcludedPatterns(options.baseExcludedPatterns);
|
|
1335
|
+
logger.debug('Using %d exclusion patterns for diff context (including %d review specific)', reviewExcluded.length, reviewExcluded.length - options.baseExcludedPatterns.length);
|
|
1336
|
+
logger.debug('Sample exclusions: %s', reviewExcluded.slice(0, 10).join(', ') + (reviewExcluded.length > 10 ? '...' : ''));
|
|
1337
|
+
const diffSections = [];
|
|
1338
|
+
// Get recent commits and their diffs
|
|
1339
|
+
for(let i = 0; i < diffLimit; i++){
|
|
1340
|
+
try {
|
|
1341
|
+
const diffRange = i === 0 ? 'HEAD~1' : `HEAD~${i + 1}..HEAD~${i}`;
|
|
1342
|
+
const diff = await create$1({
|
|
1343
|
+
from: `HEAD~${i + 1}`,
|
|
1344
|
+
to: `HEAD~${i}`,
|
|
1345
|
+
excludedPatterns: reviewExcluded
|
|
1346
|
+
});
|
|
1347
|
+
const diffContent = await diff.get();
|
|
1348
|
+
if (diffContent.trim()) {
|
|
1349
|
+
const truncatedDiff = truncateLargeDiff(diffContent);
|
|
1350
|
+
diffSections.push(`[Recent Diff ${i + 1} (${diffRange})]\n${truncatedDiff}`);
|
|
1351
|
+
if (truncatedDiff.length < diffContent.length) {
|
|
1352
|
+
logger.debug('Added diff %d to context (%d characters, truncated from %d)', i + 1, truncatedDiff.length, diffContent.length);
|
|
1353
|
+
} else {
|
|
1354
|
+
logger.debug('Added diff %d to context (%d characters)', i + 1, diffContent.length);
|
|
1355
|
+
}
|
|
1356
|
+
} else {
|
|
1357
|
+
logger.debug('Diff %d was empty after exclusions', i + 1);
|
|
1358
|
+
}
|
|
1359
|
+
} catch (error) {
|
|
1360
|
+
logger.debug('Could not fetch diff %d: %s', i + 1, error.message);
|
|
1361
|
+
break; // Stop if we can't fetch more diffs
|
|
1362
|
+
}
|
|
1363
|
+
}
|
|
1364
|
+
return diffSections.length > 0 ? '\n\n' + diffSections.join('\n\n') : '';
|
|
1365
|
+
};
|
|
1366
|
+
|
|
1367
|
+
const create = async (options)=>{
|
|
1368
|
+
const logger = getLogger();
|
|
1369
|
+
async function get() {
|
|
1370
|
+
try {
|
|
1371
|
+
logger.verbose('Gathering change information from Git');
|
|
1372
|
+
try {
|
|
1373
|
+
logger.debug('Executing git log');
|
|
1374
|
+
// Build git log range
|
|
1375
|
+
let range = '';
|
|
1376
|
+
let extraArgs = '';
|
|
1377
|
+
// If currentBranchOnly, show only commits unique to HEAD vs. to-branch (or main/master if not provided)
|
|
1378
|
+
if (options.currentBranchOnly) {
|
|
1379
|
+
const toBranch = options.to || 'main'; // Default to 'main' if not provided
|
|
1380
|
+
range = `${toBranch}..HEAD`;
|
|
1381
|
+
} else if (options.from && options.to) {
|
|
1382
|
+
range = `${options.from}..${options.to}`;
|
|
1383
|
+
} else if (options.from) {
|
|
1384
|
+
range = `${options.from}`;
|
|
1385
|
+
} else if (options.to) {
|
|
1386
|
+
range = `${options.to}`;
|
|
1387
|
+
} // else, no range: show all
|
|
1388
|
+
if (options.limit && options.limit > 0) {
|
|
1389
|
+
extraArgs += ` -n ${options.limit}`;
|
|
1390
|
+
}
|
|
1391
|
+
const gitLogCmd = `git log${range ? ' ' + range : ''}${extraArgs}`;
|
|
1392
|
+
logger.debug('Git log command: %s', gitLogCmd);
|
|
1393
|
+
const { stdout, stderr } = await run(gitLogCmd);
|
|
1394
|
+
if (stderr) {
|
|
1395
|
+
logger.warn('Git log produced stderr: %s', stderr);
|
|
1396
|
+
}
|
|
1397
|
+
logger.debug('Git log output: %s', stdout);
|
|
1398
|
+
return stdout;
|
|
1399
|
+
} catch (error) {
|
|
1400
|
+
logger.error('Failed to execute git log: %s', error.message);
|
|
1401
|
+
throw error;
|
|
1402
|
+
}
|
|
1403
|
+
} catch (error) {
|
|
1404
|
+
logger.error('Error occurred during gather change phase: %s %s', error.message, error.stack);
|
|
1405
|
+
throw new ExitError('Error occurred during gather change phase');
|
|
1406
|
+
}
|
|
1407
|
+
}
|
|
1408
|
+
return {
|
|
1409
|
+
get
|
|
1410
|
+
};
|
|
1411
|
+
};
|
|
1412
|
+
|
|
1413
|
+
const __filename$2 = fileURLToPath(import.meta.url);
|
|
1414
|
+
const __dirname$2 = path.dirname(__filename$2);
|
|
1415
|
+
/**
|
|
1416
|
+
* Build a commit prompt using RiotPrompt Recipes.
|
|
1417
|
+
*
|
|
1418
|
+
* @param runConfig The runtime configuration provided by the CLI
|
|
1419
|
+
* @param content Mandatory content inputs (e.g. diff)
|
|
1420
|
+
* @param ctx Optional contextual inputs configured by the user
|
|
1421
|
+
*/ const createPrompt$2 = async ({ overridePaths, overrides }, { diffContent, userDirection }, { logContext, context, directories } = {})=>{
|
|
1422
|
+
// Use the new quick.commit recipe - much simpler!
|
|
1423
|
+
// Adjust basePath for single-file build
|
|
1424
|
+
const basePath = path.resolve(__dirname$2, 'src', 'prompt');
|
|
1425
|
+
return quick.commit(diffContent, {
|
|
1426
|
+
basePath,
|
|
1427
|
+
overridePaths: overridePaths || [],
|
|
1428
|
+
overrides: overrides || false,
|
|
1429
|
+
userDirection,
|
|
1430
|
+
context,
|
|
1431
|
+
directories
|
|
1432
|
+
});
|
|
1433
|
+
};
|
|
1434
|
+
|
|
1435
|
+
// Recursive implementation of JSON.stringify
|
|
1436
|
+
const stringifyJSON = function(obj, options = {
|
|
1437
|
+
depth: 0
|
|
1438
|
+
}) {
|
|
1439
|
+
if (options.depth > 10) {
|
|
1440
|
+
return '{"error": "Maximum depth reached"}';
|
|
1441
|
+
}
|
|
1442
|
+
const arrOfKeyVals = [];
|
|
1443
|
+
const arrVals = [];
|
|
1444
|
+
let objKeys = [];
|
|
1445
|
+
/*********CHECK FOR PRIMITIVE TYPES**********/ if (typeof obj === 'number' || typeof obj === 'boolean' || obj === null) return '' + obj;
|
|
1446
|
+
else if (typeof obj === 'string') return '"' + obj + '"';
|
|
1447
|
+
else if (Array.isArray(obj)) {
|
|
1448
|
+
//check for empty array
|
|
1449
|
+
if (obj[0] === undefined) return '[]';
|
|
1450
|
+
else {
|
|
1451
|
+
obj.forEach(function(el) {
|
|
1452
|
+
arrVals.push(stringifyJSON(el, {
|
|
1453
|
+
depth: options.depth + 1
|
|
1454
|
+
}));
|
|
1455
|
+
});
|
|
1456
|
+
return '[' + arrVals + ']';
|
|
1457
|
+
}
|
|
1458
|
+
} else if (obj instanceof Object) {
|
|
1459
|
+
//get object keys
|
|
1460
|
+
objKeys = Object.keys(obj);
|
|
1461
|
+
//set key output;
|
|
1462
|
+
objKeys.forEach(function(key) {
|
|
1463
|
+
const keyOut = '"' + key + '":';
|
|
1464
|
+
const keyValOut = obj[key];
|
|
1465
|
+
//skip functions and undefined properties
|
|
1466
|
+
if (keyValOut instanceof Function || keyValOut === undefined) arrOfKeyVals.push('');
|
|
1467
|
+
else if (typeof keyValOut === 'string') arrOfKeyVals.push(keyOut + '"' + keyValOut + '"');
|
|
1468
|
+
else if (typeof keyValOut === 'boolean' || typeof keyValOut === 'number' || keyValOut === null) arrOfKeyVals.push(keyOut + keyValOut);
|
|
1469
|
+
else if (keyValOut instanceof Object) {
|
|
1470
|
+
arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut, {
|
|
1471
|
+
depth: options.depth + 1
|
|
1472
|
+
}));
|
|
1473
|
+
}
|
|
1474
|
+
});
|
|
1475
|
+
return '{' + arrOfKeyVals + '}';
|
|
1476
|
+
}
|
|
1477
|
+
return '';
|
|
1478
|
+
};
|
|
1479
|
+
const incrementPatchVersion = (version)=>{
    const parts = version.split('.');
    if (parts.length !== 3) {
        throw new Error(`Invalid version string: ${version}`);
    }
    const patch = parseInt(parts[2], 10);
    if (isNaN(patch)) {
        throw new Error(`Invalid patch version: ${parts[2]}`);
    }
    parts[2] = (patch + 1).toString();
    return parts.join('.');
};
const getOutputPath = (outputDirectory, filename)=>{
    return path.join(outputDirectory, filename);
};
const getTimestampedFilename = (baseName, extension = '.json')=>{
    const now = new Date();
    // Format as YYMMdd-HHmm (e.g., 250701-1030)
    const yy = now.getFullYear().toString().slice(-2);
    const mm = (now.getMonth() + 1).toString().padStart(2, '0');
    const dd = now.getDate().toString().padStart(2, '0');
    const hh = now.getHours().toString().padStart(2, '0');
    const min = now.getMinutes().toString().padStart(2, '0');
    const timestamp = `${yy}${mm}${dd}-${hh}${min}`;
    return `${timestamp}-${baseName}${extension}`;
};
const getTimestampedRequestFilename = (baseName)=>{
    return getTimestampedFilename(baseName, '.request.json');
};
const getTimestampedResponseFilename = (baseName)=>{
    return getTimestampedFilename(baseName, '.response.json');
};
const getTimestampedCommitFilename = ()=>{
    return getTimestampedFilename('commit-message', '.md');
};
const getTimestampedReleaseNotesFilename = ()=>{
    return getTimestampedFilename('release-notes', '.md');
};
const getTimestampedAudioFilename = ()=>{
    return getTimestampedFilename('audio-recording', '.wav');
};
const getTimestampedReviewFilename = ()=>{
    return getTimestampedFilename('review-analysis', '.md');
};
const getTimestampedReviewNotesFilename = ()=>{
    return getTimestampedFilename('review-notes', '.md');
};
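For a local clock reading of 2025-07-01 10:30 the format above yields 250701-1030, so the helpers produce names such as 250701-1030-commit-message.md; a standalone check of the same formatting on a fixed date:

// Standalone check of the YYMMdd-HHmm stamp for a fixed local time.
const d = new Date(2025, 6, 1, 10, 30); // 2025-07-01 10:30 local time
const stamp = [
    d.getFullYear().toString().slice(-2),
    (d.getMonth() + 1).toString().padStart(2, '0'),
    d.getDate().toString().padStart(2, '0')
].join('') + '-' +
    d.getHours().toString().padStart(2, '0') +
    d.getMinutes().toString().padStart(2, '0');
console.log(`${stamp}-commit-message.md`); // 250701-1030-commit-message.md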
|
|
1526
|
+
|
|
1527
|
+
class OpenAIError extends Error {
|
|
1528
|
+
constructor(message){
|
|
1529
|
+
super(message);
|
|
1530
|
+
this.name = 'OpenAIError';
|
|
1531
|
+
}
|
|
1532
|
+
}
|
|
1533
|
+
async function createCompletion(messages, options = {
|
|
1534
|
+
model: "gpt-4o-mini"
|
|
1535
|
+
}) {
|
|
1536
|
+
const logger = getLogger();
|
|
1537
|
+
const storage = create$2({
|
|
1538
|
+
log: logger.debug
|
|
1539
|
+
});
|
|
1540
|
+
let openai = null;
|
|
1541
|
+
try {
|
|
1542
|
+
var _completion_choices__message_content, _completion_choices__message, _completion_choices_;
|
|
1543
|
+
const apiKey = process.env.OPENAI_API_KEY;
|
|
1544
|
+
if (!apiKey) {
|
|
1545
|
+
throw new OpenAIError('OPENAI_API_KEY environment variable is not set');
|
|
1546
|
+
}
|
|
1547
|
+
// Create the client which we'll close in the finally block.
|
|
1548
|
+
openai = new OpenAI({
|
|
1549
|
+
apiKey: apiKey
|
|
1550
|
+
});
|
|
1551
|
+
logger.debug('Sending prompt to OpenAI: %j', messages);
|
|
1552
|
+
// Save request debug file if enabled
|
|
1553
|
+
if (options.debug && (options.debugRequestFile || options.debugFile)) {
|
|
1554
|
+
const requestData = {
|
|
1555
|
+
model: options.model || "gpt-4o-mini",
|
|
1556
|
+
messages,
|
|
1557
|
+
max_completion_tokens: 10000,
|
|
1558
|
+
response_format: options.responseFormat
|
|
1559
|
+
};
|
|
1560
|
+
const debugFile = options.debugRequestFile || options.debugFile;
|
|
1561
|
+
await storage.writeFile(debugFile, JSON.stringify(requestData, null, 2), 'utf8');
|
|
1562
|
+
logger.debug('Wrote request debug file to %s', debugFile);
|
|
1563
|
+
}
|
|
1564
|
+
const completion = await openai.chat.completions.create({
|
|
1565
|
+
model: options.model || "gpt-4o-mini",
|
|
1566
|
+
messages,
|
|
1567
|
+
max_completion_tokens: 10000,
|
|
1568
|
+
response_format: options.responseFormat
|
|
1569
|
+
});
|
|
1570
|
+
// Save response debug file if enabled
|
|
1571
|
+
if (options.debug && (options.debugResponseFile || options.debugFile)) {
|
|
1572
|
+
const debugFile = options.debugResponseFile || options.debugFile;
|
|
1573
|
+
await storage.writeFile(debugFile, JSON.stringify(completion, null, 2), 'utf8');
|
|
1574
|
+
logger.debug('Wrote response debug file to %s', debugFile);
|
|
1575
|
+
}
|
|
1576
|
+
const response = (_completion_choices_ = completion.choices[0]) === null || _completion_choices_ === void 0 ? void 0 : (_completion_choices__message = _completion_choices_.message) === null || _completion_choices__message === void 0 ? void 0 : (_completion_choices__message_content = _completion_choices__message.content) === null || _completion_choices__message_content === void 0 ? void 0 : _completion_choices__message_content.trim();
|
|
1577
|
+
if (!response) {
|
|
1578
|
+
throw new OpenAIError('No response received from OpenAI');
|
|
1579
|
+
}
|
|
1580
|
+
logger.debug('Received response from OpenAI: %s...', response.substring(0, 30));
|
|
1581
|
+
if (options.responseFormat) {
|
|
1582
|
+
return JSON.parse(response);
|
|
1583
|
+
} else {
|
|
1584
|
+
return response;
|
|
1585
|
+
}
|
|
1586
|
+
} catch (error) {
|
|
1587
|
+
logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);
|
|
1588
|
+
throw new OpenAIError(`Failed to create completion: ${error.message}`);
|
|
1589
|
+
} finally{
|
|
1590
|
+
// Ensure we close the OpenAI client to release underlying keep-alive sockets
|
|
1591
|
+
try {
|
|
1592
|
+
// openai.close() returns a promise; awaiting ensures proper cleanup
|
|
1593
|
+
// but if it throws we silently ignore as it's best-effort.
|
|
1594
|
+
if (openai && typeof openai.close === 'function') {
|
|
1595
|
+
await openai.close();
|
|
1596
|
+
}
|
|
1597
|
+
} catch (closeErr) {
|
|
1598
|
+
logger.debug('Failed to close OpenAI client: %s', closeErr.message);
|
|
1599
|
+
}
|
|
1600
|
+
}
|
|
1601
|
+
}
|
|
1602
|
+
async function transcribeAudio(filePath, options = {
|
|
1603
|
+
model: "whisper-1"
|
|
1604
|
+
}) {
|
|
1605
|
+
const logger = getLogger();
|
|
1606
|
+
const storage = create$2({
|
|
1607
|
+
log: logger.debug
|
|
1608
|
+
});
|
|
1609
|
+
let openai = null;
|
|
1610
|
+
let audioStream = null;
|
|
1611
|
+
try {
|
|
1612
|
+
const apiKey = process.env.OPENAI_API_KEY;
|
|
1613
|
+
if (!apiKey) {
|
|
1614
|
+
throw new OpenAIError('OPENAI_API_KEY environment variable is not set');
|
|
1615
|
+
}
|
|
1616
|
+
openai = new OpenAI({
|
|
1617
|
+
apiKey: apiKey
|
|
1618
|
+
});
|
|
1619
|
+
logger.debug('Transcribing audio file: %s', filePath);
|
|
1620
|
+
// Save request debug file if enabled
|
|
1621
|
+
if (options.debug && (options.debugRequestFile || options.debugFile)) {
|
|
1622
|
+
const requestData = {
|
|
1623
|
+
model: options.model || "whisper-1",
|
|
1624
|
+
file: filePath,
|
|
1625
|
+
response_format: "json"
|
|
1626
|
+
};
|
|
1627
|
+
const debugFile = options.debugRequestFile || options.debugFile;
|
|
1628
|
+
await storage.writeFile(debugFile, JSON.stringify(requestData, null, 2), 'utf8');
|
|
1629
|
+
logger.debug('Wrote request debug file to %s', debugFile);
|
|
1630
|
+
}
|
|
1631
|
+
audioStream = await storage.readStream(filePath);
|
|
1632
|
+
const transcription = await openai.audio.transcriptions.create({
|
|
1633
|
+
model: options.model || "whisper-1",
|
|
1634
|
+
file: audioStream,
|
|
1635
|
+
response_format: "json"
|
|
1636
|
+
});
|
|
1637
|
+
// Save response debug file if enabled
|
|
1638
|
+
if (options.debug && (options.debugResponseFile || options.debugFile)) {
|
|
1639
|
+
const debugFile = options.debugResponseFile || options.debugFile;
|
|
1640
|
+
await storage.writeFile(debugFile, JSON.stringify(transcription, null, 2), 'utf8');
|
|
1641
|
+
logger.debug('Wrote response debug file to %s', debugFile);
|
|
1642
|
+
}
|
|
1643
|
+
const response = transcription;
|
|
1644
|
+
if (!response) {
|
|
1645
|
+
throw new OpenAIError('No transcription received from OpenAI');
|
|
1646
|
+
}
|
|
1647
|
+
logger.debug('Received transcription from OpenAI: %s', response);
|
|
1648
|
+
return response;
|
|
1649
|
+
} catch (error) {
|
|
1650
|
+
logger.error('Error transcribing audio file: %s %s', error.message, error.stack);
|
|
1651
|
+
throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);
|
|
1652
|
+
} finally{
|
|
1653
|
+
// Ensure the audio stream is properly closed to release file handles
|
|
1654
|
+
try {
|
|
1655
|
+
if (audioStream) {
|
|
1656
|
+
audioStream.close();
|
|
1657
|
+
}
|
|
1658
|
+
} catch (streamErr) {
|
|
1659
|
+
logger.debug('Failed to close audio read stream: %s', streamErr.message);
|
|
1660
|
+
}
|
|
1661
|
+
try {
|
|
1662
|
+
if (openai && typeof openai.close === 'function') {
|
|
1663
|
+
await openai.close();
|
|
1664
|
+
}
|
|
1665
|
+
} catch (closeErr) {
|
|
1666
|
+
logger.debug('Failed to close OpenAI client: %s', closeErr.message);
|
|
1667
|
+
}
|
|
1668
|
+
}
|
|
1669
|
+
}
|
|
1670
|
+
|
|
1671
|
+
const execute$9 = async (runConfig)=>{
|
|
1672
|
+
var _runConfig_commit, _runConfig_commit1, _runConfig_commit2, _runConfig_commit3, _runConfig_commit4, _runConfig_commit5, _runConfig_commit6, _runConfig_commit7;
|
|
1673
|
+
const logger = getLogger();
|
|
1674
|
+
const isDryRun = runConfig.dryRun || false;
|
|
1675
|
+
if ((_runConfig_commit = runConfig.commit) === null || _runConfig_commit === void 0 ? void 0 : _runConfig_commit.add) {
|
|
1676
|
+
if (isDryRun) {
|
|
1677
|
+
logger.info('DRY RUN: Would add all changes to the index with: git add -A');
|
|
1678
|
+
} else {
|
|
1679
|
+
logger.verbose('Adding all changes to the index...');
|
|
1680
|
+
await run('git add -A');
|
|
1681
|
+
}
|
|
1682
|
+
}
|
|
1683
|
+
let diffContent = '';
|
|
1684
|
+
let cached = (_runConfig_commit1 = runConfig.commit) === null || _runConfig_commit1 === void 0 ? void 0 : _runConfig_commit1.cached;
|
|
1685
|
+
// If `add` is used, we should always look at staged changes.
|
|
1686
|
+
if ((_runConfig_commit2 = runConfig.commit) === null || _runConfig_commit2 === void 0 ? void 0 : _runConfig_commit2.add) {
|
|
1687
|
+
cached = true;
|
|
1688
|
+
} else if (cached === undefined) {
|
|
1689
|
+
// If cached is undefined, check whether anything is staged; otherwise, use the supplied setting.
|
|
1690
|
+
cached = await hasStagedChanges();
|
|
1691
|
+
}
|
|
1692
|
+
// Fix: Exit early if sendit is true but no changes are staged
|
|
1693
|
+
if (((_runConfig_commit3 = runConfig.commit) === null || _runConfig_commit3 === void 0 ? void 0 : _runConfig_commit3.sendit) && !cached && !isDryRun) {
|
|
1694
|
+
logger.warn('SendIt mode enabled, but no changes to commit.');
|
|
1695
|
+
process.exit(1);
|
|
1696
|
+
}
|
|
1697
|
+
var _runConfig_excludedPatterns;
|
|
1698
|
+
const options = {
|
|
1699
|
+
cached,
|
|
1700
|
+
excludedPatterns: (_runConfig_excludedPatterns = runConfig.excludedPatterns) !== null && _runConfig_excludedPatterns !== void 0 ? _runConfig_excludedPatterns : DEFAULT_EXCLUDED_PATTERNS
|
|
1701
|
+
};
|
|
1702
|
+
const diff = await create$1(options);
|
|
1703
|
+
diffContent = await diff.get();
|
|
1704
|
+
const logOptions = {
|
|
1705
|
+
limit: (_runConfig_commit4 = runConfig.commit) === null || _runConfig_commit4 === void 0 ? void 0 : _runConfig_commit4.messageLimit
|
|
1706
|
+
};
|
|
1707
|
+
const log = await create(logOptions);
|
|
1708
|
+
const logContext = await log.get();
|
|
1709
|
+
const promptConfig = {
|
|
1710
|
+
overridePaths: runConfig.discoveredConfigDirs || [],
|
|
1711
|
+
overrides: runConfig.overrides || false
|
|
1712
|
+
};
|
|
1713
|
+
const promptContent = {
|
|
1714
|
+
diffContent,
|
|
1715
|
+
userDirection: (_runConfig_commit5 = runConfig.commit) === null || _runConfig_commit5 === void 0 ? void 0 : _runConfig_commit5.direction
|
|
1716
|
+
};
|
|
1717
|
+
const promptContext = {
|
|
1718
|
+
logContext,
|
|
1719
|
+
context: (_runConfig_commit6 = runConfig.commit) === null || _runConfig_commit6 === void 0 ? void 0 : _runConfig_commit6.context,
|
|
1720
|
+
directories: runConfig.contextDirectories
|
|
1721
|
+
};
|
|
1722
|
+
const prompt = await createPrompt$2(promptConfig, promptContent, promptContext);
|
|
1723
|
+
if (runConfig.debug) {
|
|
1724
|
+
const formattedPrompt = Formatter.create({
|
|
1725
|
+
logger
|
|
1726
|
+
}).formatPrompt("gpt-4o-mini", prompt);
|
|
1727
|
+
logger.silly('Formatted Prompt: %s', stringifyJSON(formattedPrompt));
|
|
1728
|
+
}
|
|
1729
|
+
const request = Formatter.create({
|
|
1730
|
+
logger
|
|
1731
|
+
}).formatPrompt(runConfig.model, prompt);
|
|
1732
|
+
// Always ensure output directory exists for request/response files
|
|
1733
|
+
const outputDirectory = runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
|
|
1734
|
+
const storage = create$2({
|
|
1735
|
+
log: logger.info
|
|
1736
|
+
});
|
|
1737
|
+
await storage.ensureDirectory(outputDirectory);
|
|
1738
|
+
const summary = await createCompletion(request.messages, {
|
|
1739
|
+
model: runConfig.model,
|
|
1740
|
+
debug: runConfig.debug,
|
|
1741
|
+
debugRequestFile: getOutputPath(runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY, getTimestampedRequestFilename('commit')),
|
|
1742
|
+
debugResponseFile: getOutputPath(runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY, getTimestampedResponseFilename('commit'))
|
|
1743
|
+
});
|
|
1744
|
+
// Save timestamped copy of commit message to output directory
|
|
1745
|
+
try {
|
|
1746
|
+
const timestampedFilename = getTimestampedCommitFilename();
|
|
1747
|
+
const outputPath = getOutputPath(outputDirectory, timestampedFilename);
|
|
1748
|
+
await storage.writeFile(outputPath, summary, 'utf-8');
|
|
1749
|
+
logger.debug('Saved timestamped commit message: %s', outputPath);
|
|
1750
|
+
} catch (error) {
|
|
1751
|
+
logger.warn('Failed to save timestamped commit message: %s', error.message);
|
|
1752
|
+
}
|
|
1753
|
+
if ((_runConfig_commit7 = runConfig.commit) === null || _runConfig_commit7 === void 0 ? void 0 : _runConfig_commit7.sendit) {
|
|
1754
|
+
if (!cached && !isDryRun) {
|
|
1755
|
+
logger.error('SendIt mode enabled, but no changes to commit. Message: \n\n%s\n\n', summary);
|
|
1756
|
+
process.exit(1);
|
|
1757
|
+
}
|
|
1758
|
+
if (isDryRun) {
|
|
1759
|
+
logger.info('DRY RUN: Would commit with message: \n\n%s\n\n', summary);
|
|
1760
|
+
logger.info('DRY RUN: Would execute: git commit -m <generated-message>');
|
|
1761
|
+
} else {
|
|
1762
|
+
logger.info('SendIt mode enabled. Committing with message: \n\n%s\n\n', summary);
|
|
1763
|
+
try {
|
|
1764
|
+
const escapedSummary = shellescape([
|
|
1765
|
+
summary
|
|
1766
|
+
]);
|
|
1767
|
+
await run(`git commit -m ${escapedSummary}`);
|
|
1768
|
+
logger.info('Commit successful!');
|
|
1769
|
+
} catch (error) {
|
|
1770
|
+
logger.error('Failed to commit:', error);
|
|
1771
|
+
process.exit(1);
|
|
1772
|
+
}
|
|
1773
|
+
}
|
|
1774
|
+
} else if (isDryRun) {
|
|
1775
|
+
logger.info('DRY RUN: Generated commit message: \n\n%s\n\n', summary);
|
|
1776
|
+
}
|
|
1777
|
+
return summary;
|
|
1778
|
+
};
|
|
1779
|
+
|
|
1780
|
+
const execute$8 = async (runConfig)=>{
|
|
1781
|
+
var _runConfig_commit;
|
|
1782
|
+
const logger = getLogger();
|
|
1783
|
+
const isDryRun = runConfig.dryRun || false;
|
|
1784
|
+
if (isDryRun) {
|
|
1785
|
+
var _runConfig_audioCommit, _runConfig_commit1;
|
|
1786
|
+
if ((_runConfig_audioCommit = runConfig.audioCommit) === null || _runConfig_audioCommit === void 0 ? void 0 : _runConfig_audioCommit.file) {
|
|
1787
|
+
logger.info('DRY RUN: Would process audio file: %s', runConfig.audioCommit.file);
|
|
1788
|
+
logger.info('DRY RUN: Would transcribe audio and use as context for commit message generation');
|
|
1789
|
+
} else {
|
|
1790
|
+
logger.info('DRY RUN: Would start audio recording for commit context');
|
|
1791
|
+
logger.info('DRY RUN: Would transcribe audio and use as context for commit message generation');
|
|
1792
|
+
}
|
|
1793
|
+
logger.info('DRY RUN: Would then delegate to regular commit command');
|
|
1794
|
+
// In dry run, just call the regular commit command with empty audio context
|
|
1795
|
+
return execute$9({
|
|
1796
|
+
...runConfig,
|
|
1797
|
+
commit: {
|
|
1798
|
+
...runConfig.commit,
|
|
1799
|
+
direction: ((_runConfig_commit1 = runConfig.commit) === null || _runConfig_commit1 === void 0 ? void 0 : _runConfig_commit1.direction) || ''
|
|
1800
|
+
}
|
|
1801
|
+
});
|
|
1802
|
+
}
|
|
1803
|
+
let audioContext;
|
|
1804
|
+
try {
|
|
1805
|
+
var _runConfig_audioCommit1, _runConfig_audioCommit2, _runConfig_audioCommit3, _runConfig_audioCommit4;
|
|
1806
|
+
// Step 1: Record audio using unplayable with new key handling
|
|
1807
|
+
logger.info('🎙️ Starting audio recording for commit context...');
|
|
1808
|
+
if (!((_runConfig_audioCommit1 = runConfig.audioCommit) === null || _runConfig_audioCommit1 === void 0 ? void 0 : _runConfig_audioCommit1.file)) {
|
|
1809
|
+
logger.info('Press ENTER to stop recording or C to cancel');
|
|
1810
|
+
}
|
|
1811
|
+
// Use processAudio with proper configuration
|
|
1812
|
+
const audioResult = await processAudio({
|
|
1813
|
+
file: (_runConfig_audioCommit2 = runConfig.audioCommit) === null || _runConfig_audioCommit2 === void 0 ? void 0 : _runConfig_audioCommit2.file,
|
|
1814
|
+
maxRecordingTime: (_runConfig_audioCommit3 = runConfig.audioCommit) === null || _runConfig_audioCommit3 === void 0 ? void 0 : _runConfig_audioCommit3.maxRecordingTime,
|
|
1815
|
+
outputDirectory: runConfig.outputDirectory || 'output',
|
|
1816
|
+
debug: runConfig.debug
|
|
1817
|
+
});
|
|
1818
|
+
// Check if recording was cancelled
|
|
1819
|
+
if (audioResult.cancelled) {
|
|
1820
|
+
logger.info('❌ Audio commit cancelled by user');
|
|
1821
|
+
process.exit(0);
|
|
1822
|
+
}
|
|
1823
|
+
// Step 2: Get the audio file path from the result
|
|
1824
|
+
let audioFilePath;
|
|
1825
|
+
if ((_runConfig_audioCommit4 = runConfig.audioCommit) === null || _runConfig_audioCommit4 === void 0 ? void 0 : _runConfig_audioCommit4.file) {
|
|
1826
|
+
// Use the provided file path
|
|
1827
|
+
audioFilePath = runConfig.audioCommit.file;
|
|
1828
|
+
} else if (audioResult.audioFilePath) {
|
|
1829
|
+
// Use the file path returned by processAudio
|
|
1830
|
+
audioFilePath = audioResult.audioFilePath;
|
|
1831
|
+
} else {
|
|
1832
|
+
// Fallback to generated filename (this should rarely happen now)
|
|
1833
|
+
const outputDir = runConfig.outputDirectory || 'output';
|
|
1834
|
+
audioFilePath = path.join(outputDir, getTimestampedAudioFilename());
|
|
1835
|
+
logger.warn('Using generated filename for recorded audio: %s', audioFilePath);
|
|
1836
|
+
logger.warn('Note: This may not match the actual file created by unplayable');
|
|
1837
|
+
}
|
|
1838
|
+
// Step 3: Use kodrdriv's transcription functionality
|
|
1839
|
+
logger.info('🤖 Transcribing audio locally using OpenAI Whisper...');
|
|
1840
|
+
const transcription = await transcribeAudio(audioFilePath, {
|
|
1841
|
+
model: "whisper-1",
|
|
1842
|
+
debug: runConfig.debug
|
|
1843
|
+
});
|
|
1844
|
+
audioContext = transcription.text;
|
|
1845
|
+
if (!audioContext.trim()) {
|
|
1846
|
+
logger.warn('No audio content was transcribed. Proceeding without audio context.');
|
|
1847
|
+
audioContext = '';
|
|
1848
|
+
} else {
|
|
1849
|
+
logger.info('📝 Successfully transcribed audio using kodrdriv');
|
|
1850
|
+
logger.debug('Transcribed text: %s', audioContext);
|
|
1851
|
+
}
|
|
1852
|
+
} catch (error) {
|
|
1853
|
+
logger.error('Audio processing failed: %s', error.message);
|
|
1854
|
+
logger.info('Proceeding with commit generation without audio context...');
|
|
1855
|
+
audioContext = '';
|
|
1856
|
+
}
|
|
1857
|
+
// Now delegate to the regular commit command with the audio context
|
|
1858
|
+
logger.info('🤖 Generating commit message using audio context...');
|
|
1859
|
+
const result = await execute$9({
|
|
1860
|
+
...runConfig,
|
|
1861
|
+
commit: {
|
|
1862
|
+
...runConfig.commit,
|
|
1863
|
+
direction: audioContext.trim() || ((_runConfig_commit = runConfig.commit) === null || _runConfig_commit === void 0 ? void 0 : _runConfig_commit.direction) || ''
|
|
1864
|
+
}
|
|
1865
|
+
});
|
|
1866
|
+
return result;
|
|
1867
|
+
};
|
|
1868
|
+
|
|
1869
|
+
const __filename$1 = fileURLToPath(import.meta.url);
|
|
1870
|
+
const __dirname$1 = path.dirname(__filename$1);
|
|
1871
|
+
const createPrompt$1 = async ({ overridePaths, overrides }, { notes }, { logContext, diffContext, releaseNotesContext, issuesContext, context, directories } = {})=>{
|
|
1872
|
+
// Prepare content array for the recipe
|
|
1873
|
+
const content = [
|
|
1874
|
+
{
|
|
1875
|
+
content: notes,
|
|
1876
|
+
title: 'Review Notes',
|
|
1877
|
+
weight: 1.0
|
|
1878
|
+
}
|
|
1879
|
+
];
|
|
1880
|
+
// Prepare context array for the recipe
|
|
1881
|
+
const contextArray = [];
|
|
1882
|
+
if (logContext) {
|
|
1883
|
+
contextArray.push({
|
|
1884
|
+
content: logContext,
|
|
1885
|
+
title: 'Log Context',
|
|
1886
|
+
weight: 0.5
|
|
1887
|
+
});
|
|
1888
|
+
}
|
|
1889
|
+
if (diffContext) {
|
|
1890
|
+
contextArray.push({
|
|
1891
|
+
content: diffContext,
|
|
1892
|
+
title: 'Diff Context',
|
|
1893
|
+
weight: 0.5
|
|
1894
|
+
});
|
|
1895
|
+
}
|
|
1896
|
+
if (releaseNotesContext) {
|
|
1897
|
+
contextArray.push({
|
|
1898
|
+
content: releaseNotesContext,
|
|
1899
|
+
title: 'Release Notes Context',
|
|
1900
|
+
weight: 0.5
|
|
1901
|
+
});
|
|
1902
|
+
}
|
|
1903
|
+
if (issuesContext) {
|
|
1904
|
+
contextArray.push({
|
|
1905
|
+
content: issuesContext,
|
|
1906
|
+
title: 'Issues Context',
|
|
1907
|
+
weight: 0.5
|
|
1908
|
+
});
|
|
1909
|
+
}
|
|
1910
|
+
if (context) {
|
|
1911
|
+
contextArray.push({
|
|
1912
|
+
content: context,
|
|
1913
|
+
title: 'User Context',
|
|
1914
|
+
weight: 1.0
|
|
1915
|
+
});
|
|
1916
|
+
}
|
|
1917
|
+
if (directories === null || directories === void 0 ? void 0 : directories.length) {
|
|
1918
|
+
contextArray.push({
|
|
1919
|
+
directories,
|
|
1920
|
+
weight: 0.5
|
|
1921
|
+
});
|
|
1922
|
+
}
|
|
1923
|
+
// Use the new cook recipe with template
|
|
1924
|
+
// Adjust basePath for single-file build
|
|
1925
|
+
const basePath = path.resolve(__dirname$1, 'src', 'prompt');
|
|
1926
|
+
return cook({
|
|
1927
|
+
basePath,
|
|
1928
|
+
overridePaths: overridePaths || [],
|
|
1929
|
+
overrides: overrides || false,
|
|
1930
|
+
template: 'review',
|
|
1931
|
+
content,
|
|
1932
|
+
context: contextArray
|
|
1933
|
+
});
|
|
1934
|
+
};
|
|
1935
|
+
|
|
1936
|
+
const getOctokit = ()=>{
    const logger = getLogger();
    const token = process.env.GITHUB_TOKEN;
    if (!token) {
        logger.error('GITHUB_TOKEN environment variable is not set.');
        throw new Error('GITHUB_TOKEN is not set.');
    }
    return new Octokit({
        auth: token
    });
};
const getCurrentBranchName = async ()=>{
    const { stdout } = await run('git rev-parse --abbrev-ref HEAD');
    return stdout.trim();
};
const getRepoDetails = async ()=>{
    const { stdout } = await run('git remote get-url origin');
    const url = stdout.trim();
    // git@github.com:owner/repo.git or https://github.com/owner/repo.git
    const match = url.match(/github\.com[/:]([\w-]+)\/([\w.-]+)\.git/);
    if (!match) {
        throw new Error(`Could not parse repository owner and name from origin URL: "${url}". Expected format: git@github.com:owner/repo.git or https://github.com/owner/repo.git`);
    }
    return {
        owner: match[1],
        repo: match[2]
    };
};
|
|
1964
|
+
const createPullRequest = async (title, body, head, base = 'main')=>{
|
|
1965
|
+
const octokit = getOctokit();
|
|
1966
|
+
const { owner, repo } = await getRepoDetails();
|
|
1967
|
+
const response = await octokit.pulls.create({
|
|
1968
|
+
owner,
|
|
1969
|
+
repo,
|
|
1970
|
+
title,
|
|
1971
|
+
body,
|
|
1972
|
+
head,
|
|
1973
|
+
base
|
|
1974
|
+
});
|
|
1975
|
+
return response.data;
|
|
1976
|
+
};
|
|
1977
|
+
const findOpenPullRequestByHeadRef = async (head)=>{
|
|
1978
|
+
const octokit = getOctokit();
|
|
1979
|
+
const { owner, repo } = await getRepoDetails();
|
|
1980
|
+
const logger = getLogger();
|
|
1981
|
+
try {
|
|
1982
|
+
logger.debug(`Searching for open pull requests with head: ${owner}:${head} in ${owner}/${repo}`);
|
|
1983
|
+
const response = await octokit.pulls.list({
|
|
1984
|
+
owner,
|
|
1985
|
+
repo,
|
|
1986
|
+
state: 'open',
|
|
1987
|
+
head: `${owner}:${head}`
|
|
1988
|
+
});
|
|
1989
|
+
logger.debug(`Found ${response.data.length} open pull requests`);
|
|
1990
|
+
var _response_data_;
|
|
1991
|
+
return (_response_data_ = response.data[0]) !== null && _response_data_ !== void 0 ? _response_data_ : null;
|
|
1992
|
+
} catch (error) {
|
|
1993
|
+
logger.error(`Failed to find open pull requests: ${error.message}`);
|
|
1994
|
+
if (error.status === 404) {
|
|
1995
|
+
logger.error(`Repository ${owner}/${repo} not found or access denied. Please check your GITHUB_TOKEN permissions.`);
|
|
1996
|
+
}
|
|
1997
|
+
throw error;
|
|
1998
|
+
}
|
|
1999
|
+
};
|
|
2000
|
+
const delay = (ms)=>new Promise((resolve)=>setTimeout(resolve, ms));
|
|
2001
|
+
const waitForPullRequestChecks = async (prNumber)=>{
|
|
2002
|
+
const octokit = getOctokit();
|
|
2003
|
+
const { owner, repo } = await getRepoDetails();
|
|
2004
|
+
const logger = getLogger();
|
|
2005
|
+
while(true){
|
|
2006
|
+
const pr = await octokit.pulls.get({
|
|
2007
|
+
owner,
|
|
2008
|
+
repo,
|
|
2009
|
+
pull_number: prNumber
|
|
2010
|
+
});
|
|
2011
|
+
const checkRunsResponse = await octokit.checks.listForRef({
|
|
2012
|
+
owner,
|
|
2013
|
+
repo,
|
|
2014
|
+
ref: pr.data.head.sha
|
|
2015
|
+
});
|
|
2016
|
+
const checkRuns = checkRunsResponse.data.check_runs;
|
|
2017
|
+
if (checkRuns.length === 0) {
|
|
2018
|
+
logger.info(`PR #${prNumber}: No checks found. Waiting...`);
|
|
2019
|
+
await delay(10000);
|
|
2020
|
+
continue;
|
|
2021
|
+
}
|
|
2022
|
+
const failingChecks = checkRuns.filter((cr)=>cr.conclusion && [
|
|
2023
|
+
'failure',
|
|
2024
|
+
'timed_out',
|
|
2025
|
+
'cancelled'
|
|
2026
|
+
].includes(cr.conclusion));
|
|
2027
|
+
if (failingChecks.length > 0) {
|
|
2028
|
+
logger.error(`PR #${prNumber} has failing checks:`);
|
|
2029
|
+
for (const check of failingChecks){
|
|
2030
|
+
logger.error(`- ${check.name}: ${check.conclusion}`);
|
|
2031
|
+
}
|
|
2032
|
+
throw new Error(`PR #${prNumber} checks failed.`);
|
|
2033
|
+
}
|
|
2034
|
+
const allChecksCompleted = checkRuns.every((cr)=>cr.status === 'completed');
|
|
2035
|
+
if (allChecksCompleted) {
|
|
2036
|
+
logger.info(`All checks for PR #${prNumber} have completed successfully.`);
|
|
2037
|
+
return;
|
|
2038
|
+
}
|
|
2039
|
+
const completedCount = checkRuns.filter((cr)=>cr.status === 'completed').length;
|
|
2040
|
+
logger.info(`PR #${prNumber} checks: ${completedCount}/${checkRuns.length} completed. Waiting...`);
|
|
2041
|
+
await delay(10000); // wait 10 seconds
|
|
2042
|
+
}
|
|
2043
|
+
};
|
|
2044
|
+
const mergePullRequest = async (prNumber, mergeMethod = 'squash')=>{
|
|
2045
|
+
const octokit = getOctokit();
|
|
2046
|
+
const { owner, repo } = await getRepoDetails();
|
|
2047
|
+
const logger = getLogger();
|
|
2048
|
+
logger.info(`Merging PR #${prNumber} using ${mergeMethod} method...`);
|
|
2049
|
+
const pr = await octokit.pulls.get({
|
|
2050
|
+
owner,
|
|
2051
|
+
repo,
|
|
2052
|
+
pull_number: prNumber
|
|
2053
|
+
});
|
|
2054
|
+
const headBranch = pr.data.head.ref;
|
|
2055
|
+
await octokit.pulls.merge({
|
|
2056
|
+
owner,
|
|
2057
|
+
repo,
|
|
2058
|
+
pull_number: prNumber,
|
|
2059
|
+
merge_method: mergeMethod
|
|
2060
|
+
});
|
|
2061
|
+
logger.info(`PR #${prNumber} merged using ${mergeMethod} method.`);
|
|
2062
|
+
logger.info(`Deleting branch ${headBranch}...`);
|
|
2063
|
+
await octokit.git.deleteRef({
|
|
2064
|
+
owner,
|
|
2065
|
+
repo,
|
|
2066
|
+
ref: `heads/${headBranch}`
|
|
2067
|
+
});
|
|
2068
|
+
logger.info(`Branch ${headBranch} deleted.`);
|
|
2069
|
+
};
|
|
2070
|
+
const createRelease = async (tagName, title, notes)=>{
|
|
2071
|
+
const octokit = getOctokit();
|
|
2072
|
+
const { owner, repo } = await getRepoDetails();
|
|
2073
|
+
const logger = getLogger();
|
|
2074
|
+
logger.info(`Creating release for tag ${tagName}...`);
|
|
2075
|
+
await octokit.repos.createRelease({
|
|
2076
|
+
owner,
|
|
2077
|
+
repo,
|
|
2078
|
+
tag_name: tagName,
|
|
2079
|
+
name: title,
|
|
2080
|
+
body: notes
|
|
2081
|
+
});
|
|
2082
|
+
logger.info(`Release ${tagName} created.`);
|
|
2083
|
+
};
|
|
2084
|
+
const getOpenIssues = async (limit = 20)=>{
|
|
2085
|
+
const octokit = getOctokit();
|
|
2086
|
+
const { owner, repo } = await getRepoDetails();
|
|
2087
|
+
const logger = getLogger();
|
|
2088
|
+
try {
|
|
2089
|
+
logger.debug(`Fetching up to ${limit} open GitHub issues...`);
|
|
2090
|
+
const response = await octokit.issues.listForRepo({
|
|
2091
|
+
owner,
|
|
2092
|
+
repo,
|
|
2093
|
+
state: 'open',
|
|
2094
|
+
per_page: Math.min(limit, 100),
|
|
2095
|
+
sort: 'updated',
|
|
2096
|
+
direction: 'desc'
|
|
2097
|
+
});
|
|
2098
|
+
const issues = response.data.filter((issue)=>!issue.pull_request); // Filter out PRs
|
|
2099
|
+
if (issues.length === 0) {
|
|
2100
|
+
logger.debug('No open issues found');
|
|
2101
|
+
return '';
|
|
2102
|
+
}
|
|
2103
|
+
const issueStrings = issues.slice(0, limit).map((issue)=>{
|
|
2104
|
+
var _issue_body;
|
|
2105
|
+
const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
|
|
2106
|
+
return [
|
|
2107
|
+
`Issue #${issue.number}: ${issue.title}`,
|
|
2108
|
+
`Labels: ${labels || 'none'}`,
|
|
2109
|
+
`Created: ${issue.created_at}`,
|
|
2110
|
+
`Updated: ${issue.updated_at}`,
|
|
2111
|
+
`Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 500)) || 'No description'}${issue.body && issue.body.length > 500 ? '...' : ''}`,
|
|
2112
|
+
'---'
|
|
2113
|
+
].join('\n');
|
|
2114
|
+
});
|
|
2115
|
+
logger.debug(`Fetched ${issues.length} open issues`);
|
|
2116
|
+
return issueStrings.join('\n\n');
|
|
2117
|
+
} catch (error) {
|
|
2118
|
+
logger.warn('Failed to fetch GitHub issues: %s', error.message);
|
|
2119
|
+
return '';
|
|
2120
|
+
}
|
|
2121
|
+
};
|
|
2122
|
+
const createIssue = async (title, body, labels)=>{
|
|
2123
|
+
const octokit = getOctokit();
|
|
2124
|
+
const { owner, repo } = await getRepoDetails();
|
|
2125
|
+
const response = await octokit.issues.create({
|
|
2126
|
+
owner,
|
|
2127
|
+
repo,
|
|
2128
|
+
title,
|
|
2129
|
+
body,
|
|
2130
|
+
labels: labels || []
|
|
2131
|
+
});
|
|
2132
|
+
return {
|
|
2133
|
+
number: response.data.number,
|
|
2134
|
+
html_url: response.data.html_url
|
|
2135
|
+
};
|
|
2136
|
+
};
|
|
2137
|
+
|
|
2138
|
+
// Function to truncate overly large content while preserving structure
|
|
2139
|
+
const truncateContent = (content, maxLength = 3000)=>{
|
|
2140
|
+
if (content.length <= maxLength) {
|
|
2141
|
+
return content;
|
|
2142
|
+
}
|
|
2143
|
+
const lines = content.split('\n');
|
|
2144
|
+
const truncatedLines = [];
|
|
2145
|
+
let currentLength = 0;
|
|
2146
|
+
for (const line of lines){
|
|
2147
|
+
if (currentLength + line.length + 1 > maxLength) {
|
|
2148
|
+
break;
|
|
2149
|
+
}
|
|
2150
|
+
truncatedLines.push(line);
|
|
2151
|
+
currentLength += line.length + 1; // +1 for newline
|
|
2152
|
+
}
|
|
2153
|
+
truncatedLines.push('');
|
|
2154
|
+
truncatedLines.push(`... [TRUNCATED: Original content was ${content.length} characters, showing first ${currentLength}] ...`);
|
|
2155
|
+
return truncatedLines.join('\n');
|
|
2156
|
+
};
|
|
2157
|
+
// Function to fetch recent releases from GitHub API
|
|
2158
|
+
const findRecentReleaseNotes = async (limit)=>{
|
|
2159
|
+
const logger = getLogger();
|
|
2160
|
+
const releaseNotes = [];
|
|
2161
|
+
if (limit <= 0) {
|
|
2162
|
+
return releaseNotes;
|
|
2163
|
+
}
|
|
2164
|
+
try {
|
|
2165
|
+
const octokit = getOctokit();
|
|
2166
|
+
const { owner, repo } = await getRepoDetails();
|
|
2167
|
+
logger.debug(`Fetching up to ${limit} recent releases from GitHub...`);
|
|
2168
|
+
const response = await octokit.repos.listReleases({
|
|
2169
|
+
owner,
|
|
2170
|
+
repo,
|
|
2171
|
+
per_page: Math.min(limit, 100)
|
|
2172
|
+
});
|
|
2173
|
+
const releases = response.data;
|
|
2174
|
+
if (releases.length === 0) {
|
|
2175
|
+
logger.debug('No releases found in GitHub repository');
|
|
2176
|
+
return releaseNotes;
|
|
2177
|
+
}
|
|
2178
|
+
for (const release of releases.slice(0, limit)){
|
|
2179
|
+
const releaseContent = [
|
|
2180
|
+
`# ${release.name || release.tag_name}`,
|
|
2181
|
+
`**Tag:** ${release.tag_name}`,
|
|
2182
|
+
`**Published:** ${release.published_at}`,
|
|
2183
|
+
release.prerelease ? '**Type:** Pre-release' : '**Type:** Release',
|
|
2184
|
+
release.draft ? '**Status:** Draft' : '**Status:** Published',
|
|
2185
|
+
'',
|
|
2186
|
+
release.body || 'No release notes provided'
|
|
2187
|
+
].join('\n');
|
|
2188
|
+
const truncatedContent = truncateContent(releaseContent);
|
|
2189
|
+
releaseNotes.push(`=== GitHub Release: ${release.tag_name} ===\n${truncatedContent}`);
|
|
2190
|
+
if (truncatedContent.length < releaseContent.length) {
|
|
2191
|
+
logger.debug(`Found release ${release.tag_name} (%d characters, truncated from %d)`, truncatedContent.length, releaseContent.length);
|
|
2192
|
+
} else {
|
|
2193
|
+
logger.debug(`Found release ${release.tag_name} (%d characters)`, releaseContent.length);
|
|
2194
|
+
}
|
|
2195
|
+
}
|
|
2196
|
+
logger.debug(`Fetched ${releaseNotes.length} releases from GitHub`);
|
|
2197
|
+
} catch (error) {
|
|
2198
|
+
logger.warn('Error fetching releases from GitHub API: %s', error.message);
|
|
2199
|
+
// If we have a GitHub API error, we could fall back to checking for local release notes
|
|
2200
|
+
// This maintains some backward compatibility
|
|
2201
|
+
logger.debug('Falling back to local RELEASE_NOTES.md file...');
|
|
2202
|
+
try {
|
|
2203
|
+
const fs = await import('fs/promises');
|
|
2204
|
+
const content = await fs.readFile('RELEASE_NOTES.md', 'utf-8');
|
|
2205
|
+
if (content.trim()) {
|
|
2206
|
+
const truncatedContent = truncateContent(content);
|
|
2207
|
+
releaseNotes.push(`=== Local RELEASE_NOTES.md ===\n${truncatedContent}`);
|
|
2208
|
+
logger.debug(`Found local release notes (%d characters)`, content.length);
|
|
2209
|
+
}
|
|
2210
|
+
} catch {
|
|
2211
|
+
// No local file either, return empty array
|
|
2212
|
+
logger.debug('No local RELEASE_NOTES.md file found either');
|
|
2213
|
+
}
|
|
2214
|
+
}
|
|
2215
|
+
return releaseNotes.slice(0, limit);
|
|
2216
|
+
};
|
|
2217
|
+
const get$1 = async (options = {})=>{
|
|
2218
|
+
const { limit = 3 } = options;
|
|
2219
|
+
const releaseNotes = await findRecentReleaseNotes(limit);
|
|
2220
|
+
return releaseNotes.join('\n\n');
|
|
2221
|
+
};
|
|
2222
|
+
|
|
2223
|
+
// Get GitHub issues content
|
|
2224
|
+
const get = async (options = {})=>{
|
|
2225
|
+
const logger = getLogger();
|
|
2226
|
+
const { limit = 20 } = options;
|
|
2227
|
+
try {
|
|
2228
|
+
logger.debug('Fetching open GitHub issues...');
|
|
2229
|
+
const issuesLimit = Math.min(limit, 20); // Cap at 20
|
|
2230
|
+
const githubIssues = await getOpenIssues(issuesLimit);
|
|
2231
|
+
if (githubIssues.trim()) {
|
|
2232
|
+
logger.debug('Added GitHub issues to context (%d characters)', githubIssues.length);
|
|
2233
|
+
return githubIssues;
|
|
2234
|
+
} else {
|
|
2235
|
+
logger.debug('No open GitHub issues found');
|
|
2236
|
+
return '';
|
|
2237
|
+
}
|
|
2238
|
+
} catch (error) {
|
|
2239
|
+
logger.warn('Failed to fetch GitHub issues: %s', error.message);
|
|
2240
|
+
return '';
|
|
2241
|
+
}
|
|
2242
|
+
};
|
|
2243
|
+
// Helper function to get user choice interactively
|
|
2244
|
+
async function getUserChoice(prompt, choices) {
|
|
2245
|
+
const logger = getLogger();
|
|
2246
|
+
logger.info(prompt);
|
|
2247
|
+
choices.forEach((choice)=>{
|
|
2248
|
+
logger.info(` [${choice.key}] ${choice.label}`);
|
|
2249
|
+
});
|
|
2250
|
+
logger.info('');
|
|
2251
|
+
return new Promise((resolve)=>{
|
|
2252
|
+
// Ensure stdin is referenced so the process doesn't exit while waiting for input
|
|
2253
|
+
if (typeof process.stdin.ref === 'function') {
|
|
2254
|
+
process.stdin.ref();
|
|
2255
|
+
}
|
|
2256
|
+
process.stdin.setRawMode(true);
|
|
2257
|
+
process.stdin.resume();
|
|
2258
|
+
process.stdin.on('data', (key)=>{
|
|
2259
|
+
const keyStr = key.toString().toLowerCase();
|
|
2260
|
+
const choice = choices.find((c)=>c.key === keyStr);
|
|
2261
|
+
if (choice) {
|
|
2262
|
+
process.stdin.setRawMode(false);
|
|
2263
|
+
process.stdin.pause();
|
|
2264
|
+
// Detach stdin again now that we're done
|
|
2265
|
+
if (typeof process.stdin.unref === 'function') {
|
|
2266
|
+
process.stdin.unref();
|
|
2267
|
+
}
|
|
2268
|
+
logger.info(`Selected: ${choice.label}\n`);
|
|
2269
|
+
resolve(choice.key);
|
|
2270
|
+
}
|
|
2271
|
+
});
|
|
2272
|
+
});
|
|
2273
|
+
}
|
|
2274
|
+
// Helper function to edit issue interactively
|
|
2275
|
+
async function editIssueInteractively(issue) {
|
|
2276
|
+
const logger = getLogger();
|
|
2277
|
+
const readline = await import('readline');
|
|
2278
|
+
// Ensure stdin is referenced during readline interaction
|
|
2279
|
+
if (typeof process.stdin.ref === 'function') {
|
|
2280
|
+
process.stdin.ref();
|
|
2281
|
+
}
|
|
2282
|
+
const rl = readline.createInterface({
|
|
2283
|
+
input: process.stdin,
|
|
2284
|
+
output: process.stdout
|
|
2285
|
+
});
|
|
2286
|
+
const question = (prompt)=>{
|
|
2287
|
+
return new Promise((resolve)=>{
|
|
2288
|
+
rl.question(prompt, resolve);
|
|
2289
|
+
});
|
|
2290
|
+
};
|
|
2291
|
+
try {
|
|
2292
|
+
logger.info('📝 Edit issue details (press Enter to keep current value):');
|
|
2293
|
+
const newTitle = await question(`Title [${issue.title}]: `);
|
|
2294
|
+
const newDescription = await question(`Description [${issue.description}]: `);
|
|
2295
|
+
const newPriority = await question(`Priority (low/medium/high) [${issue.priority}]: `);
|
|
2296
|
+
const newCategory = await question(`Category (ui/content/functionality/accessibility/performance/other) [${issue.category}]: `);
|
|
2297
|
+
const updatedIssue = {
|
|
2298
|
+
title: newTitle.trim() || issue.title,
|
|
2299
|
+
description: newDescription.trim() || issue.description,
|
|
2300
|
+
priority: newPriority.trim() || issue.priority,
|
|
2301
|
+
category: newCategory.trim() || issue.category,
|
|
2302
|
+
suggestions: issue.suggestions
|
|
2303
|
+
};
|
|
2304
|
+
logger.info('✅ Issue updated successfully');
|
|
2305
|
+
return updatedIssue;
|
|
2306
|
+
} finally{
|
|
2307
|
+
rl.close();
|
|
2308
|
+
// Detach stdin after interactive edit completes
|
|
2309
|
+
if (typeof process.stdin.unref === 'function') {
|
|
2310
|
+
process.stdin.unref();
|
|
2311
|
+
}
|
|
2312
|
+
}
|
|
2313
|
+
}
|
|
2314
|
+
// Helper function to format issue body for GitHub
|
|
2315
|
+
function formatIssueBody(issue) {
|
|
2316
|
+
let body = `## Description\n\n${issue.description}\n\n`;
|
|
2317
|
+
body += `## Details\n\n`;
|
|
2318
|
+
body += `- **Priority:** ${issue.priority}\n`;
|
|
2319
|
+
body += `- **Category:** ${issue.category}\n`;
|
|
2320
|
+
body += `- **Source:** Review\n\n`;
|
|
2321
|
+
if (issue.suggestions && issue.suggestions.length > 0) {
|
|
2322
|
+
body += `## Suggestions\n\n`;
|
|
2323
|
+
issue.suggestions.forEach((suggestion)=>{
|
|
2324
|
+
body += `- ${suggestion}\n`;
|
|
2325
|
+
});
|
|
2326
|
+
body += '\n';
|
|
2327
|
+
}
|
|
2328
|
+
body += `---\n\n`;
|
|
2329
|
+
body += `*This issue was automatically created from a review session.*`;
|
|
2330
|
+
return body;
|
|
2331
|
+
}
|
|
2332
|
+
// Helper function to format results with created GitHub issues
|
|
2333
|
+
function formatReviewResultsWithIssues(result, createdIssues) {
|
|
2334
|
+
let output = `📝 Review Results\n\n`;
|
|
2335
|
+
output += `📋 Summary: ${result.summary}\n`;
|
|
2336
|
+
output += `📊 Total Issues Found: ${result.totalIssues}\n`;
|
|
2337
|
+
output += `🚀 GitHub Issues Created: ${createdIssues.length}\n\n`;
|
|
2338
|
+
if (result.issues && result.issues.length > 0) {
|
|
2339
|
+
output += `📝 Issues Identified:\n\n`;
|
|
2340
|
+
result.issues.forEach((issue, index)=>{
|
|
2341
|
+
const priorityEmoji = issue.priority === 'high' ? '🔴' : issue.priority === 'medium' ? '🟡' : '🟢';
|
|
2342
|
+
const categoryEmoji = issue.category === 'ui' ? '🎨' : issue.category === 'content' ? '📝' : issue.category === 'functionality' ? '⚙️' : issue.category === 'accessibility' ? '♿' : issue.category === 'performance' ? '⚡' : '🔧';
|
|
2343
|
+
output += `${index + 1}. ${priorityEmoji} ${issue.title}\n`;
|
|
2344
|
+
output += ` ${categoryEmoji} Category: ${issue.category} | Priority: ${issue.priority}\n`;
|
|
2345
|
+
output += ` 📖 Description: ${issue.description}\n`;
|
|
2346
|
+
// Check if this issue was created as a GitHub issue
|
|
2347
|
+
const createdIssue = createdIssues.find((ci)=>ci.issue === issue);
|
|
2348
|
+
if (createdIssue) {
|
|
2349
|
+
output += ` 🔗 GitHub Issue: #${createdIssue.number} - ${createdIssue.githubUrl}\n`;
|
|
2350
|
+
}
|
|
2351
|
+
if (issue.suggestions && issue.suggestions.length > 0) {
|
|
2352
|
+
output += ` 💡 Suggestions:\n`;
|
|
2353
|
+
issue.suggestions.forEach((suggestion)=>{
|
|
2354
|
+
output += ` • ${suggestion}\n`;
|
|
2355
|
+
});
|
|
2356
|
+
}
|
|
2357
|
+
output += `\n`;
|
|
2358
|
+
});
|
|
2359
|
+
} else {
|
|
2360
|
+
output += `✅ No specific issues identified from the review.\n\n`;
|
|
2361
|
+
}
|
|
2362
|
+
if (createdIssues.length > 0) {
|
|
2363
|
+
output += `\n🎯 Created GitHub Issues:\n`;
|
|
2364
|
+
createdIssues.forEach((createdIssue)=>{
|
|
2365
|
+
output += `• #${createdIssue.number}: ${createdIssue.issue.title} - ${createdIssue.githubUrl}\n`;
|
|
2366
|
+
});
|
|
2367
|
+
output += `\n`;
|
|
2368
|
+
}
|
|
2369
|
+
output += `🚀 Next Steps: Review the created GitHub issues and prioritize them in your development workflow.`;
|
|
2370
|
+
return output;
|
|
2371
|
+
}
|
|
2372
|
+
function formatReviewResults(result) {
|
|
2373
|
+
let output = `📝 Review Results\n\n`;
|
|
2374
|
+
output += `📋 Summary: ${result.summary}\n`;
|
|
2375
|
+
output += `📊 Total Issues Found: ${result.totalIssues}\n\n`;
|
|
2376
|
+
if (result.issues && result.issues.length > 0) {
|
|
2377
|
+
output += `📝 Issues Identified:\n\n`;
|
|
2378
|
+
result.issues.forEach((issue, index)=>{
|
|
2379
|
+
const priorityEmoji = issue.priority === 'high' ? '🔴' : issue.priority === 'medium' ? '🟡' : '🟢';
|
|
2380
|
+
const categoryEmoji = issue.category === 'ui' ? '🎨' : issue.category === 'content' ? '📝' : issue.category === 'functionality' ? '⚙️' : issue.category === 'accessibility' ? '♿' : issue.category === 'performance' ? '⚡' : '🔧';
|
|
2381
|
+
output += `${index + 1}. ${priorityEmoji} ${issue.title}\n`;
|
|
2382
|
+
output += ` ${categoryEmoji} Category: ${issue.category} | Priority: ${issue.priority}\n`;
|
|
2383
|
+
output += ` 📖 Description: ${issue.description}\n`;
|
|
2384
|
+
if (issue.suggestions && issue.suggestions.length > 0) {
|
|
2385
|
+
output += ` 💡 Suggestions:\n`;
|
|
2386
|
+
issue.suggestions.forEach((suggestion)=>{
|
|
2387
|
+
output += ` • ${suggestion}\n`;
|
|
2388
|
+
});
|
|
2389
|
+
}
|
|
2390
|
+
output += `\n`;
|
|
2391
|
+
});
|
|
2392
|
+
} else {
|
|
2393
|
+
output += `✅ No specific issues identified from the review.\n\n`;
|
|
2394
|
+
}
|
|
2395
|
+
output += `🚀 Next Steps: Review the identified issues and prioritize them for your development workflow.`;
|
|
2396
|
+
return output;
|
|
2397
|
+
}
|
|
2398
|
+
// Handle GitHub issue creation workflow
|
|
2399
|
+
const handleIssueCreation = async (result, senditMode = false)=>{
|
|
2400
|
+
const logger = getLogger();
|
|
2401
|
+
const createdIssues = [];
|
|
2402
|
+
if (!result.issues || result.issues.length === 0) {
|
|
2403
|
+
return formatReviewResults(result);
|
|
2404
|
+
}
|
|
2405
|
+
logger.info(`🔍 Found ${result.issues.length} issues to potentially create as GitHub issues`);
|
|
2406
|
+
for(let i = 0; i < result.issues.length; i++){
|
|
2407
|
+
const issue = result.issues[i];
|
|
2408
|
+
let shouldCreateIssue = senditMode;
|
|
2409
|
+
if (!senditMode) {
|
|
2410
|
+
// Interactive confirmation for each issue
|
|
2411
|
+
logger.info(`\n📋 Issue ${i + 1} of ${result.issues.length}:`);
|
|
2412
|
+
logger.info(` Title: ${issue.title}`);
|
|
2413
|
+
logger.info(` Priority: ${issue.priority} | Category: ${issue.category}`);
|
|
2414
|
+
logger.info(` Description: ${issue.description}`);
|
|
2415
|
+
if (issue.suggestions && issue.suggestions.length > 0) {
|
|
2416
|
+
logger.info(` Suggestions: ${issue.suggestions.join(', ')}`);
|
|
2417
|
+
}
|
|
2418
|
+
// Get user choice
|
|
2419
|
+
const choice = await getUserChoice('\nWhat would you like to do with this issue?', [
|
|
2420
|
+
{
|
|
2421
|
+
key: 'c',
|
|
2422
|
+
label: 'Create GitHub issue'
|
|
2423
|
+
},
|
|
2424
|
+
{
|
|
2425
|
+
key: 's',
|
|
2426
|
+
label: 'Skip this issue'
|
|
2427
|
+
},
|
|
2428
|
+
{
|
|
2429
|
+
key: 'e',
|
|
2430
|
+
label: 'Edit issue details'
|
|
2431
|
+
}
|
|
2432
|
+
]);
|
|
2433
|
+
if (choice === 'c') {
|
|
2434
|
+
shouldCreateIssue = true;
|
|
2435
|
+
} else if (choice === 'e') {
|
|
2436
|
+
// Allow user to edit the issue
|
|
2437
|
+
const editedIssue = await editIssueInteractively(issue);
|
|
2438
|
+
result.issues[i] = editedIssue;
|
|
2439
|
+
shouldCreateIssue = true;
|
|
2440
|
+
}
|
|
2441
|
+
// If choice is 's', shouldCreateIssue remains false
|
|
2442
|
+
}
|
|
2443
|
+
if (shouldCreateIssue) {
|
|
2444
|
+
try {
|
|
2445
|
+
logger.info(`🚀 Creating GitHub issue: "${issue.title}"`);
|
|
2446
|
+
// Format issue body with additional details
|
|
2447
|
+
const issueBody = formatIssueBody(issue);
|
|
2448
|
+
// Create labels based on priority and category
|
|
2449
|
+
const labels = [
|
|
2450
|
+
`priority-${issue.priority}`,
|
|
2451
|
+
`category-${issue.category}`,
|
|
2452
|
+
'review'
|
|
2453
|
+
];
|
|
2454
|
+
const createdIssue = await createIssue(issue.title, issueBody, labels);
|
|
2455
|
+
createdIssues.push({
|
|
2456
|
+
issue,
|
|
2457
|
+
githubUrl: createdIssue.html_url,
|
|
2458
|
+
number: createdIssue.number
|
|
2459
|
+
});
|
|
2460
|
+
logger.info(`✅ Created GitHub issue #${createdIssue.number}: ${createdIssue.html_url}`);
|
|
2461
|
+
} catch (error) {
|
|
2462
|
+
logger.error(`❌ Failed to create GitHub issue for "${issue.title}": ${error.message}`);
|
|
2463
|
+
}
|
|
2464
|
+
}
|
|
2465
|
+
}
|
|
2466
|
+
// Return formatted results
|
|
2467
|
+
if (createdIssues.length > 0) {
|
|
2468
|
+
return formatReviewResultsWithIssues(result, createdIssues);
|
|
2469
|
+
} else {
|
|
2470
|
+
return formatReviewResults(result);
|
|
2471
|
+
}
|
|
2472
|
+
};
|
|
2473
|
+
|
|
2474
|
+
const execute$7 = async (runConfig)=>{
|
|
2475
|
+
var _runConfig_review, _runConfig_review1, _runConfig_review2, _runConfig_review3, _runConfig_review4, _runConfig_review5, _runConfig_review6, _runConfig_review7, _runConfig_review8, _runConfig_review9, _runConfig_review10, _runConfig_review11, _runConfig_review12, _runConfig_review13, _runConfig_review_context, _runConfig_review14, _runConfig_review15, _analysisResult_issues, _runConfig_review16;
|
|
2476
|
+
const logger = getLogger();
|
|
2477
|
+
const isDryRun = runConfig.dryRun || false;
|
|
2478
|
+
// Show configuration even in dry-run mode
|
|
2479
|
+
logger.debug('Review context configuration:');
|
|
2480
|
+
logger.debug(' Include commit history: %s', (_runConfig_review = runConfig.review) === null || _runConfig_review === void 0 ? void 0 : _runConfig_review.includeCommitHistory);
|
|
2481
|
+
logger.debug(' Include recent diffs: %s', (_runConfig_review1 = runConfig.review) === null || _runConfig_review1 === void 0 ? void 0 : _runConfig_review1.includeRecentDiffs);
|
|
2482
|
+
logger.debug(' Include release notes: %s', (_runConfig_review2 = runConfig.review) === null || _runConfig_review2 === void 0 ? void 0 : _runConfig_review2.includeReleaseNotes);
|
|
2483
|
+
logger.debug(' Include GitHub issues: %s', (_runConfig_review3 = runConfig.review) === null || _runConfig_review3 === void 0 ? void 0 : _runConfig_review3.includeGithubIssues);
|
|
2484
|
+
logger.debug(' Commit history limit: %d', (_runConfig_review4 = runConfig.review) === null || _runConfig_review4 === void 0 ? void 0 : _runConfig_review4.commitHistoryLimit);
|
|
2485
|
+
logger.debug(' Diff history limit: %d', (_runConfig_review5 = runConfig.review) === null || _runConfig_review5 === void 0 ? void 0 : _runConfig_review5.diffHistoryLimit);
|
|
2486
|
+
logger.debug(' Release notes limit: %d', (_runConfig_review6 = runConfig.review) === null || _runConfig_review6 === void 0 ? void 0 : _runConfig_review6.releaseNotesLimit);
|
|
2487
|
+
logger.debug(' GitHub issues limit: %d', (_runConfig_review7 = runConfig.review) === null || _runConfig_review7 === void 0 ? void 0 : _runConfig_review7.githubIssuesLimit);
|
|
2488
|
+
logger.debug(' Sendit mode (auto-create issues): %s', (_runConfig_review8 = runConfig.review) === null || _runConfig_review8 === void 0 ? void 0 : _runConfig_review8.sendit);
|
|
2489
|
+
if (isDryRun) {
|
|
2490
|
+
var _runConfig_review17, _runConfig_review18;
|
|
2491
|
+
logger.info('DRY RUN: Would analyze provided note for review');
|
|
2492
|
+
logger.info('DRY RUN: Would gather additional context based on configuration above');
|
|
2493
|
+
logger.info('DRY RUN: Would analyze note and identify issues');
|
|
2494
|
+
if ((_runConfig_review17 = runConfig.review) === null || _runConfig_review17 === void 0 ? void 0 : _runConfig_review17.sendit) {
|
|
2495
|
+
logger.info('DRY RUN: Would automatically create GitHub issues (sendit mode enabled)');
|
|
2496
|
+
} else {
|
|
2497
|
+
logger.info('DRY RUN: Would prompt for confirmation before creating GitHub issues');
|
|
2498
|
+
}
|
|
2499
|
+
// Show what exclusion patterns would be used in dry-run mode
|
|
2500
|
+
if ((_runConfig_review18 = runConfig.review) === null || _runConfig_review18 === void 0 ? void 0 : _runConfig_review18.includeRecentDiffs) {
|
|
2501
|
+
var _runConfig_excludedPatterns;
|
|
2502
|
+
const basePatterns = (_runConfig_excludedPatterns = runConfig.excludedPatterns) !== null && _runConfig_excludedPatterns !== void 0 ? _runConfig_excludedPatterns : DEFAULT_EXCLUDED_PATTERNS;
|
|
2503
|
+
const reviewExcluded = getReviewExcludedPatterns(basePatterns);
|
|
2504
|
+
logger.info('DRY RUN: Would use %d exclusion patterns for diff context', reviewExcluded.length);
|
|
2505
|
+
logger.debug('DRY RUN: Sample exclusions: %s', reviewExcluded.slice(0, 15).join(', ') + (reviewExcluded.length > 15 ? '...' : ''));
|
|
2506
|
+
}
|
|
2507
|
+
return 'DRY RUN: Review command would analyze note, gather context, and create GitHub issues';
|
|
2508
|
+
}
|
|
2509
|
+
// Get the review note from configuration
|
|
2510
|
+
let reviewNote = (_runConfig_review9 = runConfig.review) === null || _runConfig_review9 === void 0 ? void 0 : _runConfig_review9.note;
|
|
2511
|
+
// If no review note was provided via CLI arg or STDIN, open the user's editor to capture it.
|
|
2512
|
+
if (!reviewNote || !reviewNote.trim()) {
|
|
2513
|
+
const editor = process.env.EDITOR || process.env.VISUAL || 'vi';
|
|
2514
|
+
// Create a temporary file for the user to edit.
|
|
2515
|
+
const tmpDir = os.tmpdir();
|
|
2516
|
+
const tmpFilePath = path.join(tmpDir, `kodrdriv_review_${Date.now()}.md`);
|
|
2517
|
+
// Pre-populate the file with a helpful header so users know what to do.
|
|
2518
|
+
const templateContent = [
|
|
2519
|
+
'# Kodrdriv Review Note',
|
|
2520
|
+
'',
|
|
2521
|
+
'# Please enter your review note below. Lines starting with "#" will be ignored.',
|
|
2522
|
+
'# Save and close the editor when you are done.',
|
|
2523
|
+
'',
|
|
2524
|
+
''
|
|
2525
|
+
].join('\n');
|
|
2526
|
+
await fs$1.writeFile(tmpFilePath, templateContent, 'utf8');
|
|
2527
|
+
logger.info(`No review note provided – opening ${editor} to capture input...`);
|
|
2528
|
+
// Open the editor synchronously so execution resumes after the user closes it.
|
|
2529
|
+
const result = spawnSync(editor, [
|
|
2530
|
+
tmpFilePath
|
|
2531
|
+
], {
|
|
2532
|
+
stdio: 'inherit'
|
|
2533
|
+
});
|
|
2534
|
+
if (result.error) {
|
|
2535
|
+
throw new Error(`Failed to launch editor '${editor}': ${result.error.message}`);
|
|
2536
|
+
}
|
|
2537
|
+
// Read the file back in, stripping comment lines and whitespace.
|
|
2538
|
+
const fileContent = (await fs$1.readFile(tmpFilePath, 'utf8')).split('\n').filter((line)=>!line.trim().startsWith('#')).join('\n').trim();
|
|
2539
|
+
// Clean up the temporary file (best-effort – ignore errors).
|
|
2540
|
+
try {
|
|
2541
|
+
await fs$1.unlink(tmpFilePath);
|
|
2542
|
+
} catch {
|
|
2543
|
+
/* ignore */ }
|
|
2544
|
+
if (!fileContent) {
|
|
2545
|
+
throw new Error('Review note is empty – aborting. Provide a note as an argument, via STDIN, or through the editor.');
|
|
2546
|
+
}
|
|
2547
|
+
reviewNote = fileContent;
|
|
2548
|
+
// If the original runConfig.review object exists, update it so downstream code has the note.
|
|
2549
|
+
if (runConfig.review) {
|
|
2550
|
+
runConfig.review.note = reviewNote;
|
|
2551
|
+
}
|
|
2552
|
+
}
|
|
2553
|
+
logger.info('📝 Starting review analysis...');
|
|
2554
|
+
logger.debug('Review note: %s', reviewNote);
|
|
2555
|
+
logger.debug('Review note length: %d characters', reviewNote.length);
|
|
2556
|
+
// Gather additional context based on configuration
|
|
2557
|
+
let logContext = '';
|
|
2558
|
+
let diffContext = '';
|
|
2559
|
+
let releaseNotesContext = '';
|
|
2560
|
+
let issuesContext = '';
|
|
2561
|
+
// Fetch commit history if enabled
|
|
2562
|
+
if ((_runConfig_review10 = runConfig.review) === null || _runConfig_review10 === void 0 ? void 0 : _runConfig_review10.includeCommitHistory) {
|
|
2563
|
+
try {
|
|
2564
|
+
logger.debug('Fetching recent commit history...');
|
|
2565
|
+
const log = await create({
|
|
2566
|
+
limit: runConfig.review.commitHistoryLimit
|
|
2567
|
+
});
|
|
2568
|
+
const logContent = await log.get();
|
|
2569
|
+
if (logContent.trim()) {
|
|
2570
|
+
logContext += `\n\n[Recent Commit History]\n${logContent}`;
|
|
2571
|
+
logger.debug('Added commit history to context (%d characters)', logContent.length);
|
|
2572
|
+
}
|
|
2573
|
+
} catch (error) {
|
|
2574
|
+
logger.warn('Failed to fetch commit history: %s', error.message);
|
|
2575
|
+
}
|
|
2576
|
+
}
|
|
2577
|
+
// Fetch recent diffs if enabled
|
|
2578
|
+
if ((_runConfig_review11 = runConfig.review) === null || _runConfig_review11 === void 0 ? void 0 : _runConfig_review11.includeRecentDiffs) {
|
|
2579
|
+
try {
|
|
2580
|
+
logger.debug('Fetching recent commit diffs...');
|
|
2581
|
+
var _runConfig_excludedPatterns1;
|
|
2582
|
+
const basePatterns = (_runConfig_excludedPatterns1 = runConfig.excludedPatterns) !== null && _runConfig_excludedPatterns1 !== void 0 ? _runConfig_excludedPatterns1 : DEFAULT_EXCLUDED_PATTERNS;
|
|
2583
|
+
const recentDiffs = await getRecentDiffsForReview({
|
|
2584
|
+
limit: runConfig.review.diffHistoryLimit,
|
|
2585
|
+
baseExcludedPatterns: basePatterns
|
|
2586
|
+
});
|
|
2587
|
+
diffContext += recentDiffs;
|
|
2588
|
+
} catch (error) {
|
|
2589
|
+
logger.warn('Failed to fetch recent diffs: %s', error.message);
|
|
2590
|
+
}
|
|
2591
|
+
}
|
|
2592
|
+
// Fetch release notes if enabled
|
|
2593
|
+
if ((_runConfig_review12 = runConfig.review) === null || _runConfig_review12 === void 0 ? void 0 : _runConfig_review12.includeReleaseNotes) {
|
|
2594
|
+
try {
|
|
2595
|
+
logger.debug('Fetching recent release notes from GitHub...');
|
|
2596
|
+
const releaseNotesContent = await get$1({
|
|
2597
|
+
limit: runConfig.review.releaseNotesLimit || 3
|
|
2598
|
+
});
|
|
2599
|
+
if (releaseNotesContent.trim()) {
|
|
2600
|
+
releaseNotesContext += `\n\n[Recent Release Notes]\n${releaseNotesContent}`;
|
|
2601
|
+
logger.debug('Added release notes to context (%d characters)', releaseNotesContent.length);
|
|
2602
|
+
}
|
|
2603
|
+
} catch (error) {
|
|
2604
|
+
logger.warn('Failed to fetch release notes: %s', error.message);
|
|
2605
|
+
}
|
|
2606
|
+
}
|
|
2607
|
+
// Fetch GitHub issues if enabled
|
|
2608
|
+
if ((_runConfig_review13 = runConfig.review) === null || _runConfig_review13 === void 0 ? void 0 : _runConfig_review13.includeGithubIssues) {
|
|
2609
|
+
try {
|
|
2610
|
+
logger.debug('Fetching open GitHub issues...');
|
|
2611
|
+
issuesContext = await get({
|
|
2612
|
+
limit: runConfig.review.githubIssuesLimit || 20
|
|
2613
|
+
});
|
|
2614
|
+
logger.debug('Added GitHub issues to context (%d characters)', issuesContext.length);
|
|
2615
|
+
} catch (error) {
|
|
2616
|
+
logger.warn('Failed to fetch GitHub issues: %s', error.message);
|
|
2617
|
+
}
|
|
2618
|
+
}
|
|
2619
|
+
// Analyze review note for issues using OpenAI
|
|
2620
|
+
logger.info('🤖 Analyzing review note for project issues...');
|
|
2621
|
+
logger.debug('Context summary:');
|
|
2622
|
+
logger.debug(' - Review note: %d chars', reviewNote.length);
|
|
2623
|
+
logger.debug(' - Log context: %d chars', logContext.length);
|
|
2624
|
+
logger.debug(' - Diff context: %d chars', diffContext.length);
|
|
2625
|
+
logger.debug(' - Release notes context: %d chars', releaseNotesContext.length);
|
|
2626
|
+
logger.debug(' - Issues context: %d chars', issuesContext.length);
|
|
2627
|
+
logger.debug(' - User context: %d chars', ((_runConfig_review14 = runConfig.review) === null || _runConfig_review14 === void 0 ? void 0 : (_runConfig_review_context = _runConfig_review14.context) === null || _runConfig_review_context === void 0 ? void 0 : _runConfig_review_context.length) || 0);
|
|
2628
|
+
const promptConfig = {
|
|
2629
|
+
overridePaths: runConfig.discoveredConfigDirs || [],
|
|
2630
|
+
overrides: runConfig.overrides || false
|
|
2631
|
+
};
|
|
2632
|
+
const promptContent = {
|
|
2633
|
+
notes: reviewNote
|
|
2634
|
+
};
|
|
2635
|
+
const promptContext = {
|
|
2636
|
+
context: (_runConfig_review15 = runConfig.review) === null || _runConfig_review15 === void 0 ? void 0 : _runConfig_review15.context,
|
|
2637
|
+
logContext,
|
|
2638
|
+
diffContext,
|
|
2639
|
+
releaseNotesContext,
|
|
2640
|
+
issuesContext
|
|
2641
|
+
};
|
|
2642
|
+
const prompt = await createPrompt$1(promptConfig, promptContent, promptContext);
|
|
2643
|
+
const outputDirectory = runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
|
|
2644
|
+
const storage = create$2({
|
|
2645
|
+
log: logger.info
|
|
2646
|
+
});
|
|
2647
|
+
await storage.ensureDirectory(outputDirectory);
|
|
2648
|
+
// Save timestamped copy of review notes and context to output directory
|
|
2649
|
+
try {
|
|
2650
|
+
var _runConfig_review_context1, _runConfig_review19;
|
|
2651
|
+
// Save the original review note
|
|
2652
|
+
const reviewNotesFilename = getTimestampedReviewNotesFilename();
|
|
2653
|
+
const reviewNotesPath = getOutputPath(outputDirectory, reviewNotesFilename);
|
|
2654
|
+
let reviewNotesContent = `# Review Notes\n\n${reviewNote}\n\n`;
|
|
2655
|
+
// Add all context sections if they exist
|
|
2656
|
+
if (logContext.trim()) {
|
|
2657
|
+
reviewNotesContent += `# Commit History Context\n\n${logContext}\n\n`;
|
|
2658
|
+
}
|
|
2659
|
+
if (diffContext.trim()) {
|
|
2660
|
+
reviewNotesContent += `# Recent Diffs Context\n\n${diffContext}\n\n`;
|
|
2661
|
+
}
|
|
2662
|
+
if (releaseNotesContext.trim()) {
|
|
2663
|
+
reviewNotesContent += `# Release Notes Context\n\n${releaseNotesContext}\n\n`;
|
|
2664
|
+
}
|
|
2665
|
+
if (issuesContext.trim()) {
|
|
2666
|
+
reviewNotesContent += `# GitHub Issues Context\n\n${issuesContext}\n\n`;
|
|
2667
|
+
}
|
|
2668
|
+
if ((_runConfig_review19 = runConfig.review) === null || _runConfig_review19 === void 0 ? void 0 : (_runConfig_review_context1 = _runConfig_review19.context) === null || _runConfig_review_context1 === void 0 ? void 0 : _runConfig_review_context1.trim()) {
|
|
2669
|
+
reviewNotesContent += `# User Context\n\n${runConfig.review.context}\n\n`;
|
|
2670
|
+
}
|
|
2671
|
+
await storage.writeFile(reviewNotesPath, reviewNotesContent, 'utf-8');
|
|
2672
|
+
logger.debug('Saved timestamped review notes and context: %s', reviewNotesPath);
|
|
2673
|
+
} catch (error) {
|
|
2674
|
+
logger.warn('Failed to save timestamped review notes: %s', error.message);
|
|
2675
|
+
}
|
|
2676
|
+
const request = Formatter.create({
|
|
2677
|
+
logger
|
|
2678
|
+
}).formatPrompt(runConfig.model, prompt);
|
|
2679
|
+
const analysisResult = await createCompletion(request.messages, {
|
|
2680
|
+
model: runConfig.model,
|
|
2681
|
+
responseFormat: {
|
|
2682
|
+
type: 'json_object'
|
|
2683
|
+
},
|
|
2684
|
+
debug: runConfig.debug,
|
|
2685
|
+
debugRequestFile: getOutputPath(outputDirectory, getTimestampedRequestFilename('review-analysis')),
|
|
2686
|
+
debugResponseFile: getOutputPath(outputDirectory, getTimestampedResponseFilename('review-analysis'))
|
|
2687
|
+
});
|
|
2688
|
+
logger.info('✅ Analysis completed');
|
|
2689
|
+
logger.debug('Analysis result summary: %s', analysisResult.summary);
|
|
2690
|
+
logger.debug('Total issues found: %d', analysisResult.totalIssues);
|
|
2691
|
+
logger.debug('Issues array length: %d', ((_analysisResult_issues = analysisResult.issues) === null || _analysisResult_issues === void 0 ? void 0 : _analysisResult_issues.length) || 0);
|
|
2692
|
+
if (analysisResult.issues && analysisResult.issues.length > 0) {
|
|
2693
|
+
analysisResult.issues.forEach((issue, index)=>{
|
|
2694
|
+
logger.debug(' Issue %d: [%s] %s', index + 1, issue.priority, issue.title);
|
|
2695
|
+
});
|
|
2696
|
+
}
|
|
2697
|
+
// Save timestamped copy of analysis result to output directory
|
|
2698
|
+
try {
|
|
2699
|
+
const reviewFilename = getTimestampedReviewFilename();
|
|
2700
|
+
const reviewPath = getOutputPath(outputDirectory, reviewFilename);
|
|
2701
|
+
// Format the analysis result as markdown
|
|
2702
|
+
const reviewContent = `# Review Analysis Result\n\n` + `## Summary\n${analysisResult.summary}\n\n` + `## Total Issues Found\n${analysisResult.totalIssues}\n\n` + `## Issues\n\n${JSON.stringify(analysisResult.issues, null, 2)}\n\n` + `---\n\n*Analysis completed at ${new Date().toISOString()}*`;
|
|
2703
|
+
await storage.writeFile(reviewPath, reviewContent, 'utf-8');
|
|
2704
|
+
logger.debug('Saved timestamped review analysis: %s', reviewPath);
|
|
2705
|
+
} catch (error) {
|
|
2706
|
+
logger.warn('Failed to save timestamped review analysis: %s', error.message);
|
|
2707
|
+
}
|
|
2708
|
+
// Handle GitHub issue creation using the issues module
|
|
2709
|
+
const senditMode = ((_runConfig_review16 = runConfig.review) === null || _runConfig_review16 === void 0 ? void 0 : _runConfig_review16.sendit) || false;
|
|
2710
|
+
return await handleIssueCreation(analysisResult, senditMode);
|
|
2711
|
+
};
|
|
2712
|
+
|
|
2713
|
+
const execute$6 = async (runConfig)=>{
|
|
2714
|
+
var _runConfig_audioReview, _runConfig_audioReview1, _runConfig_audioReview2, _runConfig_audioReview3, _runConfig_audioReview4, _runConfig_audioReview5, _runConfig_audioReview6, _runConfig_audioReview7, _runConfig_audioReview8, _runConfig_audioReview9, _runConfig_review;
|
|
2715
|
+
const logger = getLogger();
|
|
2716
|
+
const isDryRun = runConfig.dryRun || false;
|
|
2717
|
+
if (isDryRun) {
|
|
2718
|
+
var _runConfig_audioReview10, _runConfig_review1;
|
|
2719
|
+
if ((_runConfig_audioReview10 = runConfig.audioReview) === null || _runConfig_audioReview10 === void 0 ? void 0 : _runConfig_audioReview10.file) {
|
|
2720
|
+
logger.info('DRY RUN: Would process audio file: %s', runConfig.audioReview.file);
|
|
2721
|
+
logger.info('DRY RUN: Would transcribe audio and use as context for review analysis');
|
|
2722
|
+
} else {
|
|
2723
|
+
logger.info('DRY RUN: Would start audio recording for review context');
|
|
2724
|
+
logger.info('DRY RUN: Would transcribe audio and use as context for review analysis');
|
|
2725
|
+
}
|
|
2726
|
+
logger.info('DRY RUN: Would then delegate to regular review command');
|
|
2727
|
+
// In dry run, just call the regular review command with empty note
|
|
2728
|
+
return execute$7({
|
|
2729
|
+
...runConfig,
|
|
2730
|
+
review: {
|
|
2731
|
+
...runConfig.review,
|
|
2732
|
+
note: ((_runConfig_review1 = runConfig.review) === null || _runConfig_review1 === void 0 ? void 0 : _runConfig_review1.note) || ''
|
|
2733
|
+
}
|
|
2734
|
+
});
|
|
2735
|
+
}
|
|
2736
|
+
let audioContext;
|
|
2737
|
+
try {
|
|
2738
|
+
var _runConfig_audioReview11, _runConfig_audioReview12, _runConfig_audioReview13, _runConfig_audioReview14;
|
|
2739
|
+
// Step 1: Record audio using unplayable (for audio file acquisition only)
|
|
2740
|
+
logger.info('🎙️ Starting audio recording for review context...');
|
|
2741
|
+
if (!((_runConfig_audioReview11 = runConfig.audioReview) === null || _runConfig_audioReview11 === void 0 ? void 0 : _runConfig_audioReview11.file)) {
|
|
2742
|
+
logger.info('Press Ctrl+C after you finish speaking to generate your review analysis');
|
|
2743
|
+
}
|
|
2744
|
+
// Use processAudio but ignore its transcription - we only want the audio file
|
|
2745
|
+
const audioResult = await processAudio({
|
|
2746
|
+
file: (_runConfig_audioReview12 = runConfig.audioReview) === null || _runConfig_audioReview12 === void 0 ? void 0 : _runConfig_audioReview12.file,
|
|
2747
|
+
maxRecordingTime: (_runConfig_audioReview13 = runConfig.audioReview) === null || _runConfig_audioReview13 === void 0 ? void 0 : _runConfig_audioReview13.maxRecordingTime
|
|
2748
|
+
});
|
|
2749
|
+
// Check if recording was cancelled
|
|
2750
|
+
if (audioResult.cancelled) {
|
|
2751
|
+
logger.info('❌ Audio review cancelled by user');
|
|
2752
|
+
process.exit(0);
|
|
2753
|
+
}
|
|
2754
|
+
// Step 2: Determine the audio file path
|
|
2755
|
+
let audioFilePath;
|
|
2756
|
+
if ((_runConfig_audioReview14 = runConfig.audioReview) === null || _runConfig_audioReview14 === void 0 ? void 0 : _runConfig_audioReview14.file) {
|
|
2757
|
+
// Use the provided file path
|
|
2758
|
+
audioFilePath = runConfig.audioReview.file;
|
|
2759
|
+
} else {
|
|
2760
|
+
// For recorded audio, we need to determine where unplayable saved the file
|
|
2761
|
+
// This is a temporary solution - ideally unplayable should return the file path
|
|
2762
|
+
const outputDir = runConfig.outputDirectory || 'output';
|
|
2763
|
+
audioFilePath = path.join(outputDir, getTimestampedAudioFilename());
|
|
2764
|
+
logger.warn('Using generated filename for recorded audio: %s', audioFilePath);
|
|
2765
|
+
logger.warn('Note: This may not match the actual file created by unplayable');
|
|
2766
|
+
}
|
|
2767
|
+
// Step 3: Use kodrdriv's transcription functionality instead of unplayable's
|
|
2768
|
+
logger.info('🤖 Transcribing audio locally using OpenAI Whisper...');
|
|
2769
|
+
logger.info('📝 Ignoring transcript from unplayable, using kodrdriv transcription');
|
|
2770
|
+
const transcription = await transcribeAudio(audioFilePath, {
|
|
2771
|
+
model: "whisper-1",
|
|
2772
|
+
debug: runConfig.debug
|
|
2773
|
+
});
|
|
2774
|
+
audioContext = transcription.text;
|
|
2775
|
+
if (!audioContext.trim()) {
|
|
2776
|
+
logger.warn('No audio content was transcribed. Proceeding without audio context.');
|
|
2777
|
+
audioContext = '';
|
|
2778
|
+
} else {
|
|
2779
|
+
logger.info('📝 Successfully transcribed audio using kodrdriv');
|
|
2780
|
+
logger.debug('Transcribed text: %s', audioContext);
|
|
2781
|
+
}
|
|
2782
|
+
} catch (error) {
|
|
2783
|
+
logger.error('Audio processing failed: %s', error.message);
|
|
2784
|
+
logger.info('Proceeding with review analysis without audio context...');
|
|
2785
|
+
audioContext = '';
|
|
2786
|
+
}
|
|
2787
|
+
// Now delegate to the regular review command with the audio context
|
|
2788
|
+
logger.info('🤖 Analyzing review using audio context...');
|
|
2789
|
+
const result = await execute$7({
|
|
2790
|
+
...runConfig,
|
|
2791
|
+
review: {
|
|
2792
|
+
// Map audioReview configuration to review configuration
|
|
2793
|
+
includeCommitHistory: (_runConfig_audioReview = runConfig.audioReview) === null || _runConfig_audioReview === void 0 ? void 0 : _runConfig_audioReview.includeCommitHistory,
|
|
2794
|
+
includeRecentDiffs: (_runConfig_audioReview1 = runConfig.audioReview) === null || _runConfig_audioReview1 === void 0 ? void 0 : _runConfig_audioReview1.includeRecentDiffs,
|
|
2795
|
+
includeReleaseNotes: (_runConfig_audioReview2 = runConfig.audioReview) === null || _runConfig_audioReview2 === void 0 ? void 0 : _runConfig_audioReview2.includeReleaseNotes,
|
|
2796
|
+
includeGithubIssues: (_runConfig_audioReview3 = runConfig.audioReview) === null || _runConfig_audioReview3 === void 0 ? void 0 : _runConfig_audioReview3.includeGithubIssues,
|
|
2797
|
+
commitHistoryLimit: (_runConfig_audioReview4 = runConfig.audioReview) === null || _runConfig_audioReview4 === void 0 ? void 0 : _runConfig_audioReview4.commitHistoryLimit,
|
|
2798
|
+
diffHistoryLimit: (_runConfig_audioReview5 = runConfig.audioReview) === null || _runConfig_audioReview5 === void 0 ? void 0 : _runConfig_audioReview5.diffHistoryLimit,
|
|
2799
|
+
releaseNotesLimit: (_runConfig_audioReview6 = runConfig.audioReview) === null || _runConfig_audioReview6 === void 0 ? void 0 : _runConfig_audioReview6.releaseNotesLimit,
|
|
2800
|
+
githubIssuesLimit: (_runConfig_audioReview7 = runConfig.audioReview) === null || _runConfig_audioReview7 === void 0 ? void 0 : _runConfig_audioReview7.githubIssuesLimit,
|
|
2801
|
+
sendit: (_runConfig_audioReview8 = runConfig.audioReview) === null || _runConfig_audioReview8 === void 0 ? void 0 : _runConfig_audioReview8.sendit,
|
|
2802
|
+
context: (_runConfig_audioReview9 = runConfig.audioReview) === null || _runConfig_audioReview9 === void 0 ? void 0 : _runConfig_audioReview9.context,
|
|
2803
|
+
// Use the transcribed audio as content
|
|
2804
|
+
note: audioContext.trim() || ((_runConfig_review = runConfig.review) === null || _runConfig_review === void 0 ? void 0 : _runConfig_review.note) || ''
|
|
2805
|
+
}
|
|
2806
|
+
});
|
|
2807
|
+
return result;
|
|
2808
|
+
};
|
|
2809
|
+
|
|
2810
|
+
const execute$5 = async (runConfig)=>{
|
|
2811
|
+
const logger = getLogger();
|
|
2812
|
+
const storage = create$2({
|
|
2813
|
+
log: logger.info
|
|
2814
|
+
});
|
|
2815
|
+
const isDryRun = runConfig.dryRun || false;
|
|
2816
|
+
const outputDirectory = runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
|
|
2817
|
+
logger.info(isDryRun ? `DRY RUN: Would remove output directory: ${outputDirectory}` : `Removing output directory: ${outputDirectory}`);
|
|
2818
|
+
if (isDryRun) {
|
|
2819
|
+
if (await storage.exists(outputDirectory)) {
|
|
2820
|
+
logger.info('DRY RUN: Output directory exists and would be removed');
|
|
2821
|
+
} else {
|
|
2822
|
+
logger.info('DRY RUN: Output directory does not exist, nothing to clean');
|
|
2823
|
+
}
|
|
2824
|
+
return;
|
|
2825
|
+
}
|
|
2826
|
+
try {
|
|
2827
|
+
if (await storage.exists(outputDirectory)) {
|
|
2828
|
+
await storage.removeDirectory(outputDirectory);
|
|
2829
|
+
logger.info(`Successfully removed output directory: ${outputDirectory}`);
|
|
2830
|
+
} else {
|
|
2831
|
+
logger.info(`Output directory does not exist: ${outputDirectory}`);
|
|
2832
|
+
}
|
|
2833
|
+
} catch (error) {
|
|
2834
|
+
logger.error(`Failed to clean output directory: ${error.message}`);
|
|
2835
|
+
throw error;
|
|
2836
|
+
}
|
|
2837
|
+
};
|
|
2838
|
+
|
|
2839
|
+
const scanDirectoryForPackages$1 = async (rootDir, storage)=>{
|
|
2840
|
+
const logger = getLogger();
|
|
2841
|
+
const packageMap = new Map(); // packageName -> relativePath
|
|
2842
|
+
const absoluteRootDir = path.resolve(process.cwd(), rootDir);
|
|
2843
|
+
logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);
|
|
2844
|
+
if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {
|
|
2845
|
+
logger.verbose(`Root directory does not exist or is not a directory: ${absoluteRootDir}`);
|
|
2846
|
+
return packageMap;
|
|
2847
|
+
}
|
|
2848
|
+
try {
|
|
2849
|
+
// Get all subdirectories in the root directory
|
|
2850
|
+
const items = await storage.listFiles(absoluteRootDir);
|
|
2851
|
+
for (const item of items){
|
|
2852
|
+
const itemPath = path.join(absoluteRootDir, item);
|
|
2853
|
+
if (await storage.isDirectory(itemPath)) {
|
|
2854
|
+
const packageJsonPath = path.join(itemPath, 'package.json');
|
|
2855
|
+
if (await storage.exists(packageJsonPath)) {
|
|
2856
|
+
try {
|
|
2857
|
+
const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
|
|
2858
|
+
const packageJson = JSON.parse(packageJsonContent);
|
|
2859
|
+
if (packageJson.name) {
|
|
2860
|
+
const relativePath = path.relative(process.cwd(), itemPath);
|
|
2861
|
+
packageMap.set(packageJson.name, relativePath);
|
|
2862
|
+
logger.debug(`Found package: ${packageJson.name} at ${relativePath}`);
|
|
2863
|
+
}
|
|
2864
|
+
} catch (error) {
|
|
2865
|
+
logger.debug(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
|
|
2866
|
+
}
|
|
2867
|
+
}
|
|
2868
|
+
}
|
|
2869
|
+
}
|
|
2870
|
+
} catch (error) {
|
|
2871
|
+
logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);
|
|
2872
|
+
}
|
|
2873
|
+
return packageMap;
|
|
2874
|
+
};
|
|
2875
|
+
const findPackagesByScope = async (dependencies, scopeRoots, storage)=>{
|
|
2876
|
+
const logger = getLogger();
|
|
2877
|
+
const workspacePackages = new Map();
|
|
2878
|
+
logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);
|
|
2879
|
+
// First, scan all scope roots to build a comprehensive map of available packages
|
|
2880
|
+
const allPackages = new Map(); // packageName -> relativePath
|
|
2881
|
+
for (const [scope, rootDir] of Object.entries(scopeRoots)){
|
|
2882
|
+
logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);
|
|
2883
|
+
const scopePackages = await scanDirectoryForPackages$1(rootDir, storage);
|
|
2884
|
+
// Add packages from this scope to the overall map
|
|
2885
|
+
for (const [packageName, packagePath] of scopePackages){
|
|
2886
|
+
if (packageName.startsWith(scope)) {
|
|
2887
|
+
allPackages.set(packageName, packagePath);
|
|
2888
|
+
logger.debug(`Registered package: ${packageName} -> ${packagePath}`);
|
|
2889
|
+
}
|
|
2890
|
+
}
|
|
2891
|
+
}
|
|
2892
|
+
// Now check each dependency against our discovered packages
|
|
2893
|
+
for (const [depName, depVersion] of Object.entries(dependencies)){
|
|
2894
|
+
logger.debug(`Processing dependency: ${depName}@${depVersion}`);
|
|
2895
|
+
if (allPackages.has(depName)) {
|
|
2896
|
+
const packagePath = allPackages.get(depName);
|
|
2897
|
+
workspacePackages.set(depName, packagePath);
|
|
2898
|
+
logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);
|
|
2899
|
+
}
|
|
2900
|
+
}
|
|
2901
|
+
return workspacePackages;
|
|
2902
|
+
};
|
|
2903
|
+
const readCurrentWorkspaceFile$1 = async (workspaceFilePath, storage)=>{
|
|
2904
|
+
if (await storage.exists(workspaceFilePath)) {
|
|
2905
|
+
try {
|
|
2906
|
+
const content = await storage.readFile(workspaceFilePath, 'utf-8');
|
|
2907
|
+
return yaml.load(content) || {};
|
|
2908
|
+
} catch (error) {
|
|
2909
|
+
throw new Error(`Failed to parse existing workspace file: ${error}`);
|
|
2910
|
+
}
|
|
2911
|
+
}
|
|
2912
|
+
return {};
|
|
2913
|
+
};
|
|
2914
|
+
const writeWorkspaceFile$1 = async (workspaceFilePath, config, storage)=>{
|
|
2915
|
+
const yamlContent = yaml.dump(config, {
|
|
2916
|
+
indent: 2,
|
|
2917
|
+
lineWidth: -1,
|
|
2918
|
+
noRefs: true,
|
|
2919
|
+
sortKeys: false
|
|
2920
|
+
});
|
|
2921
|
+
await storage.writeFile(workspaceFilePath, yamlContent, 'utf-8');
|
|
2922
|
+
};
|
|
2923
|
+
const execute$4 = async (runConfig)=>{
|
|
2924
|
+
var _runConfig_link, _runConfig_link1, _runConfig_link2;
|
|
2925
|
+
const logger = getLogger();
|
|
2926
|
+
const storage = create$2({
|
|
2927
|
+
log: logger.info
|
|
2928
|
+
});
|
|
2929
|
+
logger.verbose('Starting pnpm workspace link management using overrides...');
|
|
2930
|
+
// Read current package.json
|
|
2931
|
+
const packageJsonPath = path.join(process.cwd(), 'package.json');
|
|
2932
|
+
if (!await storage.exists(packageJsonPath)) {
|
|
2933
|
+
throw new Error('package.json not found in current directory.');
|
|
2934
|
+
}
|
|
2935
|
+
let packageJson;
|
|
2936
|
+
try {
|
|
2937
|
+
const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
|
|
2938
|
+
packageJson = JSON.parse(packageJsonContent);
+    } catch (error) {
+        throw new Error(`Failed to parse package.json: ${error}`);
+    }
+    logger.verbose(`Processing package: ${packageJson.name || 'unnamed'}`);
+    // Get configuration
+    const scopeRoots = ((_runConfig_link = runConfig.link) === null || _runConfig_link === void 0 ? void 0 : _runConfig_link.scopeRoots) || {};
+    const workspaceFileName = ((_runConfig_link1 = runConfig.link) === null || _runConfig_link1 === void 0 ? void 0 : _runConfig_link1.workspaceFile) || 'pnpm-workspace.yaml';
+    const isDryRun = runConfig.dryRun || ((_runConfig_link2 = runConfig.link) === null || _runConfig_link2 === void 0 ? void 0 : _runConfig_link2.dryRun) || false;
+    logger.silly('Extracted scopeRoots:', JSON.stringify(scopeRoots));
+    logger.debug('Extracted workspaceFileName:', workspaceFileName);
+    logger.debug('Extracted isDryRun:', isDryRun);
+    if (Object.keys(scopeRoots).length === 0) {
+        logger.verbose('No scope roots configured. Skipping link management.');
+        return 'No scope roots configured. Skipping link management.';
+    }
+    logger.silly(`Configured scope roots: ${JSON.stringify(scopeRoots)}`);
+    // Collect all dependencies
+    const allDependencies = {
+        ...packageJson.dependencies,
+        ...packageJson.devDependencies,
+        ...packageJson.peerDependencies
+    };
+    logger.verbose(`Found ${Object.keys(allDependencies).length} total dependencies`);
+    // Find matching sibling packages
+    const packagesToLink = await findPackagesByScope(allDependencies, scopeRoots, storage);
+    if (packagesToLink.size === 0) {
+        logger.verbose('No matching sibling packages found for linking.');
+        return 'No matching sibling packages found for linking.';
+    }
+    logger.verbose(`Found ${packagesToLink.size} packages to link: ${[
+        ...packagesToLink.keys()
+    ].join(', ')}`);
+    // Read existing workspace configuration
+    const workspaceFilePath = path.join(process.cwd(), workspaceFileName);
+    const workspaceConfig = await readCurrentWorkspaceFile$1(workspaceFilePath, storage);
+    // Create overrides
+    const newOverrides = {};
+    for (const [packageName, packagePath] of packagesToLink.entries()){
+        newOverrides[packageName] = `link:${packagePath}`;
+    }
+    const updatedOverrides = {
+        ...workspaceConfig.overrides || {},
+        ...newOverrides
+    };
+    const sortedOverrides = Object.keys(updatedOverrides).sort().reduce((obj, key)=>{
+        obj[key] = updatedOverrides[key];
+        return obj;
+    }, {});
+    const updatedConfig = {
+        ...workspaceConfig,
+        overrides: sortedOverrides
+    };
+    // Write the updated workspace file
+    if (isDryRun) {
+        logger.verbose('DRY RUN: Would write the following workspace configuration:');
+        logger.silly(yaml.dump(updatedConfig, {
+            indent: 2
+        }));
+    } else {
+        await writeWorkspaceFile$1(workspaceFilePath, updatedConfig, storage);
+        logger.verbose(`Updated ${workspaceFileName} with ${packagesToLink.size} linked packages in overrides.`);
+        // Rebuild pnpm lock file and node_modules
+        logger.verbose('Running pnpm install to apply links...');
+        try {
+            await run('pnpm install');
+            logger.verbose('Successfully applied links.');
+        } catch (error) {
+            logger.warn(`Failed to run pnpm install: ${error}. You may need to run 'pnpm install' manually.`);
+        }
+    }
+    const summary = `Successfully linked ${packagesToLink.size} sibling packages:\n${[
+        ...packagesToLink.entries()
+    ].map(([name, path])=>` - ${name}: link:${path}`).join('\n')}`;
+    return summary;
+};
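As an aside on the block above (not part of the published bundle): a minimal sketch of how the link command turns discovered sibling packages into pnpm `overrides` entries before `yaml.dump` writes the workspace file. The package name and relative path below are hypothetical.

```js
// Sketch only; not content of dist/main.js.
// Mirrors the loop above: each discovered sibling package becomes a
// pnpm override of the form "link:<relative path>".
const packagesToLink = new Map([
    ['@eldrforge/example-lib', '../example-lib'] // hypothetical sibling checkout
]);
const newOverrides = {};
for (const [packageName, packagePath] of packagesToLink.entries()) {
    newOverrides[packageName] = `link:${packagePath}`;
}
// newOverrides => { '@eldrforge/example-lib': 'link:../example-lib' }
```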
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+/**
+ * Build a release prompt using RiotPrompt Recipes.
+ */ const createPrompt = async ({ overrides, overridePaths }, { logContent, diffContent }, { releaseFocus, context, directories } = {})=>{
+    // Use the new quick.release recipe - much simpler!
+    // Adjust basePath for single-file build
+    const basePath = path.resolve(__dirname, 'src', 'prompt');
+    return quick.release(logContent, diffContent, {
+        basePath,
+        overridePaths: overridePaths || [],
+        overrides: overrides || false,
+        releaseFocus,
+        context,
+        directories
+    });
+};
+
+const execute$3 = async (runConfig)=>{
+    var _runConfig_release, _runConfig_release1, _runConfig_release2, _runConfig_release3, _runConfig_release4;
+    const logger = getLogger();
+    const isDryRun = runConfig.dryRun || false;
+    var _runConfig_release_from, _runConfig_release_to;
+    const log = await create({
+        from: (_runConfig_release_from = (_runConfig_release = runConfig.release) === null || _runConfig_release === void 0 ? void 0 : _runConfig_release.from) !== null && _runConfig_release_from !== void 0 ? _runConfig_release_from : DEFAULT_FROM_COMMIT_ALIAS,
+        to: (_runConfig_release_to = (_runConfig_release1 = runConfig.release) === null || _runConfig_release1 === void 0 ? void 0 : _runConfig_release1.to) !== null && _runConfig_release_to !== void 0 ? _runConfig_release_to : DEFAULT_TO_COMMIT_ALIAS
+    });
+    let logContent = '';
+    var _runConfig_release_from1, _runConfig_release_to1, _runConfig_excludedPatterns;
+    const diff = await create$1({
+        from: (_runConfig_release_from1 = (_runConfig_release2 = runConfig.release) === null || _runConfig_release2 === void 0 ? void 0 : _runConfig_release2.from) !== null && _runConfig_release_from1 !== void 0 ? _runConfig_release_from1 : DEFAULT_FROM_COMMIT_ALIAS,
+        to: (_runConfig_release_to1 = (_runConfig_release3 = runConfig.release) === null || _runConfig_release3 === void 0 ? void 0 : _runConfig_release3.to) !== null && _runConfig_release_to1 !== void 0 ? _runConfig_release_to1 : DEFAULT_TO_COMMIT_ALIAS,
+        excludedPatterns: (_runConfig_excludedPatterns = runConfig.excludedPatterns) !== null && _runConfig_excludedPatterns !== void 0 ? _runConfig_excludedPatterns : DEFAULT_EXCLUDED_PATTERNS
+    });
+    let diffContent = '';
+    diffContent = await diff.get();
+    logContent = await log.get();
+    const promptConfig = {
+        overridePaths: runConfig.discoveredConfigDirs || [],
+        overrides: runConfig.overrides || false
+    };
+    const promptContent = {
+        logContent,
+        diffContent
+    };
+    const promptContext = {
+        context: (_runConfig_release4 = runConfig.release) === null || _runConfig_release4 === void 0 ? void 0 : _runConfig_release4.context
+    };
+    const prompt = await createPrompt(promptConfig, promptContent, promptContext);
+    const request = Formatter.create({
+        logger
+    }).formatPrompt(runConfig.model, prompt);
+    // Always ensure output directory exists for request/response files
+    const outputDirectory = runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
+    const storage = create$2({
+        log: logger.info
+    });
+    await storage.ensureDirectory(outputDirectory);
+    const summary = await createCompletion(request.messages, {
+        model: runConfig.model,
+        responseFormat: {
+            type: 'json_object'
+        },
+        debug: runConfig.debug,
+        debugRequestFile: getOutputPath(outputDirectory, getTimestampedRequestFilename('release')),
+        debugResponseFile: getOutputPath(outputDirectory, getTimestampedResponseFilename('release'))
+    });
+    // Save timestamped copy of release notes to output directory
+    try {
+        const timestampedFilename = getTimestampedReleaseNotesFilename();
+        const outputPath = getOutputPath(outputDirectory, timestampedFilename);
+        // Format the release notes as markdown
+        const releaseSummary = summary;
+        const releaseNotesContent = `# ${releaseSummary.title}\n\n${releaseSummary.body}`;
+        await storage.writeFile(outputPath, releaseNotesContent, 'utf-8');
+        logger.debug('Saved timestamped release notes: %s', outputPath);
+    } catch (error) {
+        logger.warn('Failed to save timestamped release notes: %s', error.message);
+    }
+    if (isDryRun) {
+        logger.info('DRY RUN: Generated release summary:');
+        logger.info('Title: %s', summary.title);
+        logger.info('Body: %s', summary.body);
+    }
+    return summary;
+};
+
+const scanDirectoryForPackages = async (rootDir, storage)=>{
+    const logger = getLogger();
+    const packageMap = new Map(); // packageName -> relativePath
+    const absoluteRootDir = path.resolve(process.cwd(), rootDir);
+    logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);
+    if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {
+        logger.verbose(`Root directory does not exist or is not a directory: ${absoluteRootDir}`);
+        return packageMap;
+    }
+    try {
+        // Get all subdirectories in the root directory
+        const items = await storage.listFiles(absoluteRootDir);
+        for (const item of items){
+            const itemPath = path.join(absoluteRootDir, item);
+            if (await storage.isDirectory(itemPath)) {
+                const packageJsonPath = path.join(itemPath, 'package.json');
+                if (await storage.exists(packageJsonPath)) {
+                    try {
+                        const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
+                        const packageJson = JSON.parse(packageJsonContent);
+                        if (packageJson.name) {
+                            const relativePath = path.relative(process.cwd(), itemPath);
+                            packageMap.set(packageJson.name, relativePath);
+                            logger.debug(`Found package: ${packageJson.name} at ${relativePath}`);
+                        }
+                    } catch (error) {
+                        logger.debug(`Failed to parse package.json at ${packageJsonPath}: ${error}`);
+                    }
+                }
+            }
+        }
+    } catch (error) {
+        logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);
+    }
+    return packageMap;
+};
+const findPackagesToUnlink = async (scopeRoots, storage)=>{
+    const logger = getLogger();
+    const packagesToUnlink = [];
+    logger.silly(`Finding packages to unlink from scope roots: ${JSON.stringify(scopeRoots)}`);
+    // Scan all scope roots to build a comprehensive map of packages that should be unlinked
+    const allScopePackages = new Map(); // packageName -> relativePath
+    for (const [scope, rootDir] of Object.entries(scopeRoots)){
+        logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);
+        const scopePackages = await scanDirectoryForPackages(rootDir, storage);
+        // Add packages from this scope to the overall map
+        for (const [packageName, packagePath] of scopePackages){
+            if (packageName.startsWith(scope)) {
+                allScopePackages.set(packageName, packagePath);
+                packagesToUnlink.push(packagePath);
+                logger.debug(`Package to unlink: ${packageName} -> ${packagePath}`);
+            }
+        }
+    }
+    return packagesToUnlink;
+};
+const readCurrentWorkspaceFile = async (workspaceFilePath, storage)=>{
+    if (await storage.exists(workspaceFilePath)) {
+        try {
+            const content = await storage.readFile(workspaceFilePath, 'utf-8');
+            return yaml.load(content) || {};
+        } catch (error) {
+            throw new Error(`Failed to parse existing workspace file: ${error}`);
+        }
+    }
+    return {};
+};
+const writeWorkspaceFile = async (workspaceFilePath, config, storage)=>{
+    let yamlContent = yaml.dump(config, {
+        indent: 2,
+        lineWidth: -1,
+        noRefs: true,
+        sortKeys: false,
+        quotingType: "'",
+        forceQuotes: true
+    });
+    // Post-process to fix numeric values that shouldn't be quoted
+    yamlContent = yamlContent.replace(/: '(\d+(?:\.\d+)*)'/g, ': $1');
+    await storage.writeFile(workspaceFilePath, yamlContent, 'utf-8');
+};
+const execute$2 = async (runConfig)=>{
+    var _runConfig_link, _runConfig_link1, _runConfig_link2;
+    const logger = getLogger();
+    const storage = create$2({
+        log: logger.info
+    });
+    logger.verbose('Starting pnpm workspace unlink management...');
+    // Read current package.json
+    const packageJsonPath = path.join(process.cwd(), 'package.json');
+    if (!await storage.exists(packageJsonPath)) {
+        throw new Error('package.json not found in current directory.');
+    }
+    let packageJson;
+    try {
+        const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
+        packageJson = JSON.parse(packageJsonContent);
+    } catch (error) {
+        throw new Error(`Failed to parse package.json: ${error}`);
+    }
+    logger.verbose(`Processing package: ${packageJson.name || 'unnamed'}`);
+    // Get configuration
+    const scopeRoots = ((_runConfig_link = runConfig.link) === null || _runConfig_link === void 0 ? void 0 : _runConfig_link.scopeRoots) || {};
+    const workspaceFileName = ((_runConfig_link1 = runConfig.link) === null || _runConfig_link1 === void 0 ? void 0 : _runConfig_link1.workspaceFile) || 'pnpm-workspace.yaml';
+    const isDryRun = runConfig.dryRun || ((_runConfig_link2 = runConfig.link) === null || _runConfig_link2 === void 0 ? void 0 : _runConfig_link2.dryRun) || false;
+    logger.silly('Extracted scopeRoots:', JSON.stringify(scopeRoots));
+    logger.debug('Extracted workspaceFileName:', workspaceFileName);
+    logger.debug('Extracted isDryRun:', isDryRun);
+    if (Object.keys(scopeRoots).length === 0) {
+        logger.verbose('No scope roots configured. Skipping unlink management.');
+        return 'No scope roots configured. Skipping unlink management.';
+    }
+    logger.silly(`Configured scope roots: ${JSON.stringify(scopeRoots)}`);
+    // Find packages to unlink based on scope roots
+    const packagesToUnlinkPaths = await findPackagesToUnlink(scopeRoots, storage);
+    if (packagesToUnlinkPaths.length === 0) {
+        logger.verbose('No packages found matching scope roots for unlinking.');
+        return 'No packages found matching scope roots for unlinking.';
+    }
+    logger.verbose(`Found ${packagesToUnlinkPaths.length} packages that could be unlinked: ${packagesToUnlinkPaths.join(', ')}`);
+    // Read existing workspace configuration
+    const workspaceFilePath = path.join(process.cwd(), workspaceFileName);
+    const workspaceConfig = await readCurrentWorkspaceFile(workspaceFilePath, storage);
+    if (!workspaceConfig.overrides || Object.keys(workspaceConfig.overrides).length === 0) {
+        logger.verbose('No overrides found in workspace file. Nothing to do.');
+        return 'No overrides found in workspace file. Nothing to do.';
+    }
+    // Filter out packages that match our scope roots from overrides
+    const existingOverrides = workspaceConfig.overrides || {};
+    const remainingOverrides = {};
+    const actuallyRemovedPackages = [];
+    const packagesToUnlinkSet = new Set(packagesToUnlinkPaths.map((p)=>`link:${p}`));
+    for (const [pkgName, pkgLink] of Object.entries(existingOverrides)){
+        if (packagesToUnlinkSet.has(pkgLink)) {
+            actuallyRemovedPackages.push(pkgName);
+        } else {
+            remainingOverrides[pkgName] = pkgLink;
+        }
+    }
+    if (actuallyRemovedPackages.length === 0) {
+        logger.verbose('No linked packages found in workspace file that match scope roots.');
+        return 'No linked packages found in workspace file that match scope roots.';
+    }
+    const updatedConfig = {
+        ...workspaceConfig,
+        overrides: remainingOverrides
+    };
+    if (Object.keys(remainingOverrides).length === 0) {
+        delete updatedConfig.overrides;
+    }
+    // Write the updated workspace file
+    if (isDryRun) {
+        logger.verbose('DRY RUN: Would write the following workspace configuration:');
+        logger.silly(yaml.dump(updatedConfig, {
+            indent: 2
+        }));
+        logger.verbose(`DRY RUN: Would remove ${actuallyRemovedPackages.length} packages: ${actuallyRemovedPackages.join(', ')}`);
+    } else {
+        await writeWorkspaceFile(workspaceFilePath, updatedConfig, storage);
+        logger.verbose(`Updated ${workspaceFileName} - removed ${actuallyRemovedPackages.length} linked packages from overrides.`);
+        // Rebuild pnpm lock file and node_modules
+        logger.verbose('Rebuilding pnpm lock file and node_modules...');
+        try {
+            await run('pnpm install');
+            logger.verbose('Successfully rebuilt pnpm lock file and node_modules');
+        } catch (error) {
+            logger.warn(`Failed to rebuild dependencies: ${error}. You may need to run 'pnpm install' manually.`);
+        }
+    }
+    const summary = `Successfully unlinked ${actuallyRemovedPackages.length} sibling packages:\n${actuallyRemovedPackages.map((pkg)=>` - ${pkg}`).join('\n')}`;
+    return summary;
+};
+
+const scanNpmrcForEnvVars = async (storage)=>{
+    const npmrcPath = path.join(process.cwd(), '.npmrc');
+    const envVars = [];
+    if (await storage.exists(npmrcPath)) {
+        try {
+            const npmrcContent = await storage.readFile(npmrcPath, 'utf-8');
+            // Match environment variable patterns like ${VAR_NAME} or $VAR_NAME
+            const envVarMatches = npmrcContent.match(/\$\{([^}]+)\}|\$([A-Z_][A-Z0-9_]*)/g);
+            if (envVarMatches) {
+                for (const match of envVarMatches){
+                    // Extract variable name from ${VAR_NAME} or $VAR_NAME format
+                    const varName = match.replace(/\$\{|\}|\$/g, '');
+                    if (varName && !envVars.includes(varName)) {
+                        envVars.push(varName);
+                    }
+                }
+            }
+        // eslint-disable-next-line @typescript-eslint/no-unused-vars
+        } catch (error) {
+            // If we can't read .npmrc, that's okay - just continue
+        }
+    }
+    return envVars;
+};
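A brief aside (not part of the diff): a minimal sketch of the `.npmrc` scan above applied to a hypothetical `.npmrc` that references `NPM_TOKEN`; the publish prechecks that follow require every extracted variable to be set in the environment.

```js
// Sketch only; assumes a hypothetical .npmrc body, not content of dist/main.js.
const npmrcContent = '//registry.npmjs.org/:_authToken=${NPM_TOKEN}\nalways-auth=true\n';
const matches = npmrcContent.match(/\$\{([^}]+)\}|\$([A-Z_][A-Z0-9_]*)/g) || [];
// Strip the ${ } / $ wrappers, exactly as scanNpmrcForEnvVars does above.
const envVars = matches.map((m) => m.replace(/\$\{|\}|\$/g, ''));
console.log(envVars); // [ 'NPM_TOKEN' ]
```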
+const validateEnvironmentVariables = (requiredEnvVars, isDryRun)=>{
+    const logger = getLogger();
+    const missingEnvVars = [];
+    for (const envVar of requiredEnvVars){
+        if (!process.env[envVar]) {
+            missingEnvVars.push(envVar);
+        }
+    }
+    if (missingEnvVars.length > 0) {
+        if (isDryRun) {
+            logger.warn(`DRY RUN: Missing required environment variables: ${missingEnvVars.join(', ')}`);
+        } else {
+            logger.error(`Missing required environment variables: ${missingEnvVars.join(', ')}`);
+            throw new Error(`Missing required environment variables: ${missingEnvVars.join(', ')}. Please set these environment variables before running publish.`);
+        }
+    }
+};
+const runPrechecks = async (runConfig)=>{
+    var _runConfig_publish;
+    const logger = getLogger();
+    const storage = create$2({
+        log: logger.info
+    });
+    const isDryRun = runConfig.dryRun || false;
+    logger.info(isDryRun ? 'DRY RUN: Running prechecks...' : 'Running prechecks...');
+    // Check if we're in a git repository
+    try {
+        if (isDryRun) {
+            logger.info('DRY RUN: Would check git repository with: git rev-parse --git-dir');
+        } else {
+            await run('git rev-parse --git-dir');
+        }
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    } catch (error) {
+        if (!isDryRun) {
+            throw new Error('Not in a git repository. Please run this command from within a git repository.');
+        }
+    }
+    // Check for uncommitted changes
+    logger.info(isDryRun ? 'DRY RUN: Would check for uncommitted changes...' : 'Checking for uncommitted changes...');
+    try {
+        if (isDryRun) {
+            logger.info('DRY RUN: Would check git status with: git status --porcelain');
+        } else {
+            const { stdout } = await run('git status --porcelain');
+            if (stdout.trim()) {
+                throw new Error('Working directory has uncommitted changes. Please commit or stash your changes before running publish.');
+            }
+        }
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    } catch (error) {
+        if (!isDryRun) {
+            throw new Error('Failed to check git status. Please ensure you are in a valid git repository.');
+        }
+    }
+    // Check if we're on a release branch
+    logger.info(isDryRun ? 'DRY RUN: Would check current branch...' : 'Checking current branch...');
+    if (isDryRun) {
+        logger.info('DRY RUN: Would verify current branch is a release branch (starts with "release/")');
+    } else {
+        const currentBranch = await getCurrentBranchName();
+        if (!currentBranch.startsWith('release/')) {
+            throw new Error(`Current branch '${currentBranch}' is not a release branch. Please switch to a release branch (e.g., release/1.0.0) before running publish.`);
+        }
+    }
+    // Check if prepublishOnly script exists in package.json
+    logger.info(isDryRun ? 'DRY RUN: Would check for prepublishOnly script...' : 'Checking for prepublishOnly script...');
+    const packageJsonPath = path.join(process.cwd(), 'package.json');
+    if (!await storage.exists(packageJsonPath)) {
+        if (!isDryRun) {
+            throw new Error('package.json not found in current directory.');
+        } else {
+            logger.warn('DRY RUN: package.json not found in current directory.');
+        }
+    } else {
+        var _packageJson_scripts;
+        let packageJson;
+        try {
+            const packageJsonContents = await storage.readFile(packageJsonPath, 'utf-8');
+            packageJson = JSON.parse(packageJsonContents);
+        // eslint-disable-next-line @typescript-eslint/no-unused-vars
+        } catch (error) {
+            if (!isDryRun) {
+                throw new Error('Failed to parse package.json. Please ensure it contains valid JSON.');
+            } else {
+                logger.warn('DRY RUN: Failed to parse package.json. Please ensure it contains valid JSON.');
+            }
+        }
+        if (packageJson && !((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.prepublishOnly)) {
+            if (!isDryRun) {
+                throw new Error('prepublishOnly script is required in package.json but was not found. Please add a prepublishOnly script that runs your pre-flight checks (e.g., clean, lint, build, test).');
+            } else {
+                logger.warn('DRY RUN: prepublishOnly script is required in package.json but was not found.');
+            }
+        }
+    }
+    // Check required environment variables
+    logger.verbose(isDryRun ? 'DRY RUN: Would check required environment variables...' : 'Checking required environment variables...');
+    const coreRequiredEnvVars = ((_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.requiredEnvVars) || [];
+    const npmrcEnvVars = isDryRun ? [] : await scanNpmrcForEnvVars(storage); // Skip .npmrc scan in dry run
+    const allRequiredEnvVars = [
+        ...new Set([
+            ...coreRequiredEnvVars,
+            ...npmrcEnvVars
+        ])
+    ];
+    if (allRequiredEnvVars.length > 0) {
+        logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Required environment variables: ${allRequiredEnvVars.join(', ')}`);
+        validateEnvironmentVariables(allRequiredEnvVars, isDryRun);
+    } else {
+        logger.verbose(isDryRun ? 'DRY RUN: No required environment variables specified.' : 'No required environment variables specified.');
+    }
+    logger.info(isDryRun ? 'DRY RUN: All prechecks would pass.' : 'All prechecks passed.');
+};
+const execute$1 = async (runConfig)=>{
+    const logger = getLogger();
+    const storage = create$2({
+        log: logger.info
+    });
+    const isDryRun = runConfig.dryRun || false;
+    // Run prechecks before starting any work
+    await runPrechecks(runConfig);
+    logger.info(isDryRun ? 'DRY RUN: Would start release process...' : 'Starting release process...');
+    try {
+        var _runConfig_publish, _runConfig_publish1;
+        // Unlink all workspace packages before starting (if enabled)
+        const shouldUnlink = ((_runConfig_publish = runConfig.publish) === null || _runConfig_publish === void 0 ? void 0 : _runConfig_publish.unlinkWorkspacePackages) !== false; // default to true
+        if (shouldUnlink) {
+            logger.verbose(isDryRun ? 'DRY RUN: Would unlink workspace packages...' : 'Unlinking workspace packages...');
+            await execute$2(runConfig);
+        } else {
+            logger.verbose(isDryRun ? 'DRY RUN: Would skip unlink workspace packages (disabled in config).' : 'Skipping unlink workspace packages (disabled in config).');
+        }
+        let pr = null;
+        if (isDryRun) {
+            logger.info('DRY RUN: Would check for existing pull request');
+            logger.info('DRY RUN: Assuming no existing PR found for demo purposes');
+        } else {
+            const branchName = await getCurrentBranchName();
+            pr = await findOpenPullRequestByHeadRef(branchName);
+        }
+        if (pr) {
+            logger.info(`${isDryRun ? 'DRY RUN: ' : ''}Found existing pull request for branch: ${pr.html_url}`);
+        } else {
+            var _runConfig_publish2;
+            logger.info(isDryRun ? 'DRY RUN: No open pull request found, would start new release publishing process...' : 'No open pull request found, starting new release publishing process...');
+            // 1. Prepare for release
+            logger.verbose(isDryRun ? 'DRY RUN: Would prepare for release: switching from workspace to remote dependencies.' : 'Preparing for release: switching from workspace to remote dependencies.');
+            logger.verbose(isDryRun ? 'DRY RUN: Would update dependencies to latest versions from registry' : 'Updating dependencies to latest versions from registry');
+            const updatePatterns = (_runConfig_publish2 = runConfig.publish) === null || _runConfig_publish2 === void 0 ? void 0 : _runConfig_publish2.dependencyUpdatePatterns;
+            if (updatePatterns && updatePatterns.length > 0) {
+                logger.verbose(`${isDryRun ? 'DRY RUN: ' : ''}Updating dependencies matching patterns: ${updatePatterns.join(', ')}`);
+                const patternsArg = updatePatterns.join(' ');
+                await runWithDryRunSupport(`pnpm update --latest ${patternsArg}`, isDryRun);
+            } else {
+                logger.verbose(isDryRun ? 'DRY RUN: No dependency update patterns specified, would update all dependencies' : 'No dependency update patterns specified, updating all dependencies');
+                await runWithDryRunSupport('pnpm update --latest', isDryRun);
+            }
+            logger.verbose(isDryRun ? 'DRY RUN: Would stage changes for release commit' : 'Staging changes for release commit');
+            await runWithDryRunSupport('git add package.json pnpm-lock.yaml', isDryRun);
+            logger.info(isDryRun ? 'DRY RUN: Would run prepublishOnly script...' : 'Running prepublishOnly script...');
+            await runWithDryRunSupport('pnpm run prepublishOnly', isDryRun);
+            logger.verbose(isDryRun ? 'DRY RUN: Would check for staged changes...' : 'Checking for staged changes...');
+            if (isDryRun) {
+                logger.verbose('DRY RUN: Assuming staged changes exist for demo purposes');
+                logger.verbose('DRY RUN: Would create commit...');
+                await execute$9(runConfig);
+            } else {
+                if (await hasStagedChanges()) {
+                    logger.verbose('Staged changes found, creating commit...');
+                    await execute$9(runConfig);
+                } else {
+                    logger.verbose('No changes to commit, skipping commit.');
+                }
+            }
+            logger.info(isDryRun ? 'DRY RUN: Would bump version...' : 'Bumping version...');
+            await runWithDryRunSupport('pnpm version patch', isDryRun);
+            logger.info(isDryRun ? 'DRY RUN: Would generate release notes...' : 'Generating release notes...');
+            const releaseSummary = await execute$3(runConfig);
+            if (isDryRun) {
+                logger.info('DRY RUN: Would write release notes to RELEASE_NOTES.md and RELEASE_TITLE.md in output directory');
+            } else {
+                const outputDirectory = runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
+                await storage.ensureDirectory(outputDirectory);
+                const releaseNotesPath = getOutputPath(outputDirectory, 'RELEASE_NOTES.md');
+                const releaseTitlePath = getOutputPath(outputDirectory, 'RELEASE_TITLE.md');
+                await storage.writeFile(releaseNotesPath, releaseSummary.body, 'utf-8');
+                await storage.writeFile(releaseTitlePath, releaseSummary.title, 'utf-8');
+                logger.info(`Release notes and title generated and saved to ${releaseNotesPath} and ${releaseTitlePath}.`);
+            }
+            logger.info(isDryRun ? 'DRY RUN: Would push to origin...' : 'Pushing to origin...');
+            await runWithDryRunSupport('git push --follow-tags', isDryRun);
+            logger.info(isDryRun ? 'DRY RUN: Would create pull request...' : 'Creating pull request...');
+            if (isDryRun) {
+                logger.info('DRY RUN: Would get commit title and create PR with GitHub API');
+                pr = {
+                    number: 123,
+                    html_url: 'https://github.com/mock/repo/pull/123',
+                    labels: []
+                };
+            } else {
+                const { stdout: commitTitle } = await run('git log -1 --pretty=%B');
+                pr = await createPullRequest(commitTitle, 'Automated release PR.', await getCurrentBranchName());
+                if (!pr) {
+                    throw new Error('Failed to create pull request.');
+                }
+                logger.info(`Pull request created: ${pr.html_url}`);
+            }
+        }
+        logger.info(`${isDryRun ? 'DRY RUN: Would wait for' : 'Waiting for'} PR #${pr.number} checks to complete...`);
+        if (!isDryRun) {
+            await waitForPullRequestChecks(pr.number);
+        }
+        const mergeMethod = ((_runConfig_publish1 = runConfig.publish) === null || _runConfig_publish1 === void 0 ? void 0 : _runConfig_publish1.mergeMethod) || 'squash';
+        if (isDryRun) {
+            logger.info(`DRY RUN: Would merge PR #${pr.number} using ${mergeMethod} method`);
+        } else {
+            await mergePullRequest(pr.number, mergeMethod);
+        }
+        logger.info(isDryRun ? 'DRY RUN: Would checkout main branch...' : 'Checking out main branch...');
+        await runWithDryRunSupport('git checkout main', isDryRun);
+        await runWithDryRunSupport('git pull origin main', isDryRun);
+        logger.info(isDryRun ? 'DRY RUN: Would create GitHub release...' : 'Creating GitHub release...');
+        if (isDryRun) {
+            logger.info('DRY RUN: Would read package.json version and create GitHub release');
+        } else {
+            const packageJsonContents = await storage.readFile('package.json', 'utf-8');
+            const { version } = JSON.parse(packageJsonContents);
+            const tagName = `v${version}`;
+            const outputDirectory = runConfig.outputDirectory || DEFAULT_OUTPUT_DIRECTORY;
+            const releaseNotesPath = getOutputPath(outputDirectory, 'RELEASE_NOTES.md');
+            const releaseTitlePath = getOutputPath(outputDirectory, 'RELEASE_TITLE.md');
+            const releaseNotesContent = await storage.readFile(releaseNotesPath, 'utf-8');
+            const releaseTitle = await storage.readFile(releaseTitlePath, 'utf-8');
+            await createRelease(tagName, releaseTitle, releaseNotesContent);
+        }
+        logger.info(isDryRun ? 'DRY RUN: Would create new release branch...' : 'Creating new release branch...');
+        if (isDryRun) {
+            logger.info('DRY RUN: Would create next release branch (e.g., release/1.0.1) and push to origin');
+        } else {
+            const packageJsonContents = await storage.readFile('package.json', 'utf-8');
+            const { version } = JSON.parse(packageJsonContents);
+            const nextVersion = incrementPatchVersion(version);
+            const newBranchName = `release/${nextVersion}`;
+            await run(`git checkout -b ${newBranchName}`);
+            await run(`git push -u origin ${newBranchName}`);
+            logger.info(`Branch ${newBranchName} created and pushed to origin.`);
+        }
+        logger.info(isDryRun ? 'DRY RUN: Preparation would be complete.' : 'Preparation complete.');
+    } finally{
+        var _runConfig_publish3;
+        // Restore linked packages (if enabled)
+        const shouldLink = ((_runConfig_publish3 = runConfig.publish) === null || _runConfig_publish3 === void 0 ? void 0 : _runConfig_publish3.linkWorkspacePackages) !== false; // default to true
+        if (shouldLink) {
+            logger.verbose(isDryRun ? 'DRY RUN: Would restore linked packages...' : 'Restoring linked packages...');
+            await execute$4(runConfig);
+        } else {
+            logger.verbose(isDryRun ? 'DRY RUN: Would skip restore linked packages (disabled in config).' : 'Skipping restore linked packages (disabled in config).');
+        }
+    }
+};
+
+const getUnplayableConfigPath = ()=>{
+    return path.join(os.homedir(), '.unplayable', 'audio-device.json');
+};
+const execute = async (runConfig)=>{
+    const logger = getLogger();
+    const isDryRun = runConfig.dryRun || false;
+    if (isDryRun) {
+        logger.info('DRY RUN: Would start audio device selection process');
+        logger.info('DRY RUN: Would save selected device to %s', getUnplayableConfigPath());
+        return 'Audio device selection completed (dry run)';
+    }
+    try {
+        const preferencesDir = path.join(os.homedir(), '.unplayable');
+        const result = await selectAndConfigureAudioDevice(preferencesDir, logger, runConfig.debug);
+        return result;
+    } catch (error) {
+        logger.error('❌ Audio device selection failed: %s', error.message);
+        process.exit(1);
+    }
+};
+
+const ConfigSchema = z.object({
+    dryRun: z.boolean().optional(),
+    verbose: z.boolean().optional(),
+    debug: z.boolean().optional(),
+    overrides: z.boolean().optional(),
+    model: z.string().optional(),
+    contextDirectories: z.array(z.string()).optional(),
+    outputDirectory: z.string().optional(),
+    preferencesDirectory: z.string().optional(),
+    commit: z.object({
+        add: z.boolean().optional(),
+        cached: z.boolean().optional(),
+        sendit: z.boolean().optional(),
+        messageLimit: z.number().optional(),
+        context: z.string().optional(),
+        direction: z.string().optional()
+    }).optional(),
+    audioCommit: z.object({
+        maxRecordingTime: z.number().optional(),
+        audioDevice: z.string().optional(),
+        file: z.string().optional(),
+        keepTemp: z.boolean().optional()
+    }).optional(),
+    release: z.object({
+        from: z.string().optional(),
+        to: z.string().optional(),
+        messageLimit: z.number().optional(),
+        context: z.string().optional()
+    }).optional(),
+    review: z.object({
+        includeCommitHistory: z.boolean().optional(),
+        includeRecentDiffs: z.boolean().optional(),
+        includeReleaseNotes: z.boolean().optional(),
+        includeGithubIssues: z.boolean().optional(),
+        commitHistoryLimit: z.number().optional(),
+        diffHistoryLimit: z.number().optional(),
+        releaseNotesLimit: z.number().optional(),
+        githubIssuesLimit: z.number().optional(),
+        context: z.string().optional(),
+        sendit: z.boolean().optional(),
+        note: z.string().optional()
+    }).optional(),
+    audioReview: z.object({
+        includeCommitHistory: z.boolean().optional(),
+        includeRecentDiffs: z.boolean().optional(),
+        includeReleaseNotes: z.boolean().optional(),
+        includeGithubIssues: z.boolean().optional(),
+        commitHistoryLimit: z.number().optional(),
+        diffHistoryLimit: z.number().optional(),
+        releaseNotesLimit: z.number().optional(),
+        githubIssuesLimit: z.number().optional(),
+        context: z.string().optional(),
+        sendit: z.boolean().optional(),
+        maxRecordingTime: z.number().optional(),
+        audioDevice: z.string().optional(),
+        file: z.string().optional(),
+        keepTemp: z.boolean().optional()
+    }).optional(),
+    publish: z.object({
+        mergeMethod: z.enum([
+            'merge',
+            'squash',
+            'rebase'
+        ]).optional(),
+        dependencyUpdatePatterns: z.array(z.string()).optional(),
+        requiredEnvVars: z.array(z.string()).optional(),
+        linkWorkspacePackages: z.boolean().optional(),
+        unlinkWorkspacePackages: z.boolean().optional()
+    }).optional(),
+    link: z.object({
+        scopeRoots: z.record(z.string(), z.string()).optional(),
+        workspaceFile: z.string().optional(),
+        dryRun: z.boolean().optional()
+    }).optional(),
+    excludedPatterns: z.array(z.string()).optional()
+});
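An aside, not part of the diff: a minimal sketch of a runtime configuration that the `ConfigSchema` defined above accepts. Every field is optional, so a partial object like this parses cleanly; all values shown are hypothetical.

```js
// Sketch only; hypothetical values, not content of dist/main.js.
const exampleConfig = ConfigSchema.parse({
    dryRun: true,
    outputDirectory: './output',            // hypothetical output directory
    publish: {
        mergeMethod: 'squash',
        requiredEnvVars: ['NPM_TOKEN']      // hypothetical required variable
    },
    link: {
        scopeRoots: { '@eldrforge': '../' } // hypothetical scope -> sibling root
    }
});
```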
+z.object({
+    openaiApiKey: z.string().optional()
+});
+z.object({
+    commandName: z.string().optional()
+});
+
+// Configure RiotPrompt templates once for the entire application
+configureTemplates({
+    commit: {
+        persona: {
+            path: DEFAULT_PERSONA_YOU_FILE,
+            title: 'Developer Persona'
+        },
+        instructions: [
+            {
+                path: DEFAULT_INSTRUCTIONS_COMMIT_FILE,
+                title: 'Commit Instructions'
+            }
+        ]
+    },
+    release: {
+        persona: {
+            path: DEFAULT_PERSONA_RELEASER_FILE,
+            title: 'Release Manager Persona'
+        },
+        instructions: [
+            {
+                path: DEFAULT_INSTRUCTIONS_RELEASE_FILE,
+                title: 'Release Instructions'
+            }
+        ]
+    },
+    review: {
+        persona: {
+            path: DEFAULT_PERSONA_YOU_FILE,
+            title: 'Developer Persona'
+        },
+        instructions: [
+            {
+                path: DEFAULT_INSTRUCTIONS_REVIEW_FILE,
+                title: 'Review Instructions'
+            }
+        ]
+    }
+});
 /**
  * Configure early logging based on command line flags.
  *
@@ -36,7 +3720,9 @@ import { ConfigSchema } from './types.js';
 async function main() {
     // Configure logging early, before CardiganTime initialization
     configureEarlyLogging();
-
+    // Cast create to `any` to avoid excessive type instantiation issues in TS compiler
+    const createCardigantime = Cardigantime.create;
+    const cardigantime = createCardigantime({
         defaults: {
             configDirectory: DEFAULT_CONFIG_DIR,
             // Move pathResolution INSIDE defaults
@@ -90,27 +3776,27 @@ async function main() {
     }
     let summary = '';
     if (commandName === COMMAND_COMMIT) {
-        summary = await execute(runConfig);
+        summary = await execute$9(runConfig);
    } else if (commandName === COMMAND_AUDIO_COMMIT) {
-        summary = await execute$
+        summary = await execute$8(runConfig);
    } else if (commandName === COMMAND_RELEASE) {
-        const releaseSummary = await execute$
+        const releaseSummary = await execute$3(runConfig);
        summary = `${releaseSummary.title}\n\n${releaseSummary.body}`;
    } else if (commandName === COMMAND_PUBLISH) {
-        await execute$
+        await execute$1(runConfig);
    } else if (commandName === COMMAND_LINK) {
        summary = await execute$4(runConfig);
    } else if (commandName === COMMAND_UNLINK) {
-        summary = await execute$
+        summary = await execute$2(runConfig);
    } else if (commandName === COMMAND_AUDIO_REVIEW) {
        summary = await execute$6(runConfig);
    } else if (commandName === COMMAND_CLEAN) {
-        await execute$
+        await execute$5(runConfig);
        summary = 'Output directory cleaned successfully.';
    } else if (commandName === COMMAND_REVIEW) {
-        summary = await execute$
+        summary = await execute$7(runConfig);
    } else if (commandName === COMMAND_SELECT_AUDIO) {
-        await execute
+        await execute(runConfig);
        summary = 'Audio selection completed successfully.';
    }
    // eslint-disable-next-line no-console