@eldrforge/shared 0.1.1-dev.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +190 -0
- package/README.md +64 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +1092 -0
- package/dist/index.js.map +1 -0
- package/dist/src/dates.d.ts +57 -0
- package/dist/src/dates.d.ts.map +1 -0
- package/dist/src/errors/ArgumentError.d.ts +6 -0
- package/dist/src/errors/ArgumentError.d.ts.map +1 -0
- package/dist/src/errors/CancellationError.d.ts +4 -0
- package/dist/src/errors/CancellationError.d.ts.map +1 -0
- package/dist/src/errors/CommandErrors.d.ts +74 -0
- package/dist/src/errors/CommandErrors.d.ts.map +1 -0
- package/dist/src/errors/ExitError.d.ts +4 -0
- package/dist/src/errors/ExitError.d.ts.map +1 -0
- package/dist/src/errors/index.d.ts +8 -0
- package/dist/src/errors/index.d.ts.map +1 -0
- package/dist/src/general.d.ts +21 -0
- package/dist/src/general.d.ts.map +1 -0
- package/dist/src/index.d.ts +16 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/logger.d.ts +10 -0
- package/dist/src/logger.d.ts.map +1 -0
- package/dist/src/safety.d.ts +28 -0
- package/dist/src/safety.d.ts.map +1 -0
- package/dist/src/stdin.d.ts +3 -0
- package/dist/src/stdin.d.ts.map +1 -0
- package/dist/src/storage.d.ts +34 -0
- package/dist/src/storage.d.ts.map +1 -0
- package/dist/src/types.d.ts +20 -0
- package/dist/src/types.d.ts.map +1 -0
- package/dist/src/validation.d.ts +39 -0
- package/dist/src/validation.d.ts.map +1 -0
- package/package.json +85 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1092 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import { glob } from 'glob';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import crypto from 'crypto';
|
|
5
|
+
import dayjs from 'dayjs';
|
|
6
|
+
import timezone from 'dayjs/plugin/timezone.js';
|
|
7
|
+
import utc from 'dayjs/plugin/utc.js';
|
|
8
|
+
import moment from 'moment-timezone';
|
|
9
|
+
import { safeJsonParse as safeJsonParse$1, validatePackageJson } from '@eldrforge/git-tools';
|
|
10
|
+
|
|
11
|
+
/**
 * Logger utilities
 * Provides optional Winston integration with a console-backed fallback.
 */ let logger;
/**
 * Replace the active logger instance (e.g. with a Winston logger).
 */ function setLogger(newLogger) {
    logger = newLogger;
}
/**
 * Return the active logger; when none has been set, return a console-backed
 * stand-in exposing the same error/warn/info/debug surface.
 */ function getLogger() {
    if (logger) {
        return logger;
    }
    /* eslint-disable no-console */
    const consoleFallback = {
        error: (message, ...args)=>console.error(message, ...args),
        warn: (message, ...args)=>console.warn(message, ...args),
        info: (message, ...args)=>console.log(message, ...args),
        debug: (message, ...args)=>console.debug(message, ...args)
    };
    /* eslint-enable no-console */
    return consoleFallback;
}
|
|
34
|
+
|
|
35
|
+
// Transpiler-emitted helper: install `value` on `obj` under `key` and return
// `obj`. Keys not yet present are plain-assigned; existing keys are redefined
// via Object.defineProperty as enumerable/configurable/writable.
function _define_property$1(obj, key, value) {
    if (!(key in obj)) {
        obj[key] = value;
        return obj;
    }
    Object.defineProperty(obj, key, {
        value: value,
        enumerable: true,
        configurable: true,
        writable: true
    });
    return obj;
}
/**
 * Error raised for an invalid argument; carries the offending argument's
 * name alongside the message.
 */
class ArgumentError extends Error {
    /** Alias for the offending argument's name. */
    get argument() {
        return this.argumentName;
    }
    constructor(argumentName, message) {
        super(`${message}`);
        _define_property$1(this, "argumentName", void 0);
        this.name = 'ArgumentError';
        this.argumentName = argumentName;
    }
}
|
|
58
|
+
|
|
59
|
+
/**
 * Error used to signal that an in-flight operation was cancelled.
 */
class CancellationError extends Error {
    constructor(message) {
        super(message);
        this.name = 'CancellationError';
    }
}
|
|
65
|
+
|
|
66
|
+
// Transpiler-emitted helper (duplicate of _define_property$1): install
// `value` on `obj` under `key` and return `obj`.
function _define_property(obj, key, value) {
    const descriptor = {
        value: value,
        enumerable: true,
        configurable: true,
        writable: true
    };
    if (key in obj) {
        Object.defineProperty(obj, key, descriptor);
    } else {
        obj[key] = value;
    }
    return obj;
}
/**
 * Base class for all command-related errors
 *
 * @param {string} message - human-readable description
 * @param {string} code - machine-readable error code
 * @param {boolean} [recoverable=false] - whether the caller may retry
 * @param {Error} [cause] - underlying error, mirrored onto `cause`
 */ class CommandError extends Error {
    constructor(message, code, recoverable = false, cause) {
        super(message);
        _define_property(this, "code", void 0);
        _define_property(this, "recoverable", void 0);
        _define_property(this, "originalCause", void 0);
        this.name = 'CommandError';
        this.code = code;
        this.recoverable = recoverable;
        this.originalCause = cause;
        // Also set the standard cause property for compatibility
        if (cause) {
            this.cause = cause;
        }
    }
}
|
|
94
|
+
/**
 * Configuration-related errors (missing config, invalid settings, etc.)
 * Not recoverable; uses code 'CONFIG_ERROR'.
 */ class ConfigurationError extends CommandError {
    constructor(message, cause){
        super(message, 'CONFIG_ERROR', false, cause);
        this.name = 'ConfigurationError';
    }
}
|
|
102
|
+
/**
 * Validation errors (invalid arguments, missing required data, etc.)
 * Not recoverable; uses code 'VALIDATION_ERROR'.
 */ class ValidationError extends CommandError {
    constructor(message, cause){
        super(message, 'VALIDATION_ERROR', false, cause);
        this.name = 'ValidationError';
    }
}
|
|
110
|
+
/**
 * User cancellation errors (user cancelled operation)
 * Marked recoverable; uses code 'USER_CANCELLED'.
 */ class UserCancellationError extends CommandError {
    constructor(message = 'Operation cancelled by user'){
        super(message, 'USER_CANCELLED', true);
        this.name = 'UserCancellationError';
    }
}
|
|
118
|
+
/**
 * External dependency errors (Git, NPM, file system, etc.)
 * The message is prefixed with the dependency name; uses code
 * 'EXTERNAL_DEPENDENCY_ERROR' and is not recoverable.
 */ class ExternalDependencyError extends CommandError {
    constructor(message, dependency, cause){
        super(`${dependency}: ${message}`, 'EXTERNAL_DEPENDENCY_ERROR', false, cause);
        this.name = 'ExternalDependencyError';
    }
}
|
|
126
|
+
/**
 * File operation errors (read, write, permissions, etc.)
 * The message embeds the affected path; uses code 'FILE_OPERATION_ERROR'
 * and is not recoverable.
 */ class FileOperationError extends CommandError {
    constructor(message, filePath, cause){
        super(`File operation failed on ${filePath}: ${message}`, 'FILE_OPERATION_ERROR', false, cause);
        this.name = 'FileOperationError';
    }
}
|
|
134
|
+
/**
 * Pull request check failures with detailed information
 *
 * Recoverable CommandError (code 'PR_CHECK_FAILED') carrying the PR number,
 * the list of failed check objects, the PR URL, and the current branch name.
 */ class PullRequestCheckError extends CommandError {
    /**
     * Get specific instructions based on the type of failures
     *
     * Classifies `failedChecks` into test / lint / build buckets by matching
     * substrings in each check's name or output title, then returns a list of
     * human-readable recovery steps (one string per line).
     */ getRecoveryInstructions() {
        const instructions = [];
        const branchName = this.currentBranch || 'your current branch';
        // Analyze failure types for specific guidance
        // (the verbose null checks below are transpiled optional chaining on
        // check.output?.title)
        const testFailures = this.failedChecks.filter((check)=>{
            var _check_output_title, _check_output;
            return check.name.toLowerCase().includes('test') || check.name.toLowerCase().includes('ci') || ((_check_output = check.output) === null || _check_output === void 0 ? void 0 : (_check_output_title = _check_output.title) === null || _check_output_title === void 0 ? void 0 : _check_output_title.toLowerCase().includes('test'));
        });
        const lintFailures = this.failedChecks.filter((check)=>{
            var _check_output_title, _check_output;
            return check.name.toLowerCase().includes('lint') || check.name.toLowerCase().includes('style') || ((_check_output = check.output) === null || _check_output === void 0 ? void 0 : (_check_output_title = _check_output.title) === null || _check_output_title === void 0 ? void 0 : _check_output_title.toLowerCase().includes('lint'));
        });
        const buildFailures = this.failedChecks.filter((check)=>{
            var _check_output_title, _check_output;
            return check.name.toLowerCase().includes('build') || check.name.toLowerCase().includes('compile') || ((_check_output = check.output) === null || _check_output === void 0 ? void 0 : (_check_output_title = _check_output.title) === null || _check_output_title === void 0 ? void 0 : _check_output_title.toLowerCase().includes('build'));
        });
        instructions.push('🔧 To fix these failures:');
        instructions.push('');
        // Specific instructions based on failure types
        if (testFailures.length > 0) {
            instructions.push('📋 Test Failures:');
            instructions.push(' • Run tests locally: `npm test` or `yarn test`');
            instructions.push(' • Fix failing tests or update test expectations');
            instructions.push(' • Consider running specific test files if identified in the failure details');
            instructions.push('');
        }
        if (lintFailures.length > 0) {
            instructions.push('🎨 Linting/Style Failures:');
            instructions.push(' • Run linter locally: `npm run lint` or `yarn lint`');
            instructions.push(' • Auto-fix where possible: `npm run lint:fix` or `yarn lint:fix`');
            instructions.push(' • Check code formatting: `npm run format` or `yarn format`');
            instructions.push('');
        }
        if (buildFailures.length > 0) {
            instructions.push('🏗️ Build Failures:');
            instructions.push(' • Run build locally: `npm run build` or `yarn build`');
            instructions.push(' • Check for TypeScript errors: `npx tsc --noEmit`');
            instructions.push(' • Review dependency issues and import paths');
            instructions.push('');
        }
        // General workflow instructions
        instructions.push('📤 After fixing the issues:');
        instructions.push(` 1. Stage your changes: \`git add .\``);
        instructions.push(` 2. Commit your fixes: \`git commit -m "fix: resolve PR check failures"\``);
        instructions.push(` 3. Push to ${branchName}: \`git push origin ${branchName}\``);
        instructions.push(` 4. The PR checks will automatically re-run`);
        instructions.push('');
        instructions.push('🔄 Re-running this command:');
        instructions.push(' • The kodrdriv publish command will automatically detect the existing PR');
        instructions.push(' • Simply run the same command again after pushing your fixes');
        instructions.push(' • You can also manually trigger checks by pushing an empty commit:');
        instructions.push(` \`git commit --allow-empty -m "trigger checks" && git push origin ${branchName}\``);
        return instructions;
    }
    constructor(message, prNumber, failedChecks, prUrl, currentBranch){
        super(message, 'PR_CHECK_FAILED', true), _define_property(this, "prNumber", void 0), _define_property(this, "failedChecks", void 0), _define_property(this, "prUrl", void 0), _define_property(this, "currentBranch", void 0), this.prNumber = prNumber, this.failedChecks = failedChecks, this.prUrl = prUrl, this.currentBranch = currentBranch;
        this.name = 'PullRequestCheckError';
    }
}
|
|
198
|
+
|
|
199
|
+
/**
 * Error thrown to request process termination without calling
 * process.exit() directly.
 */
class ExitError extends Error {
    constructor(message) {
        super(message);
        this.name = 'ExitError';
    }
}
|
|
205
|
+
|
|
206
|
+
/**
 * Storage utility factory: returns an object of filesystem helpers built on
 * fs.promises (existence/type/permission checks, directory management,
 * file IO, glob iteration and content hashing).
 */ const create$1 = ()=>{
    const logger = getLogger();
    // True when stat() succeeds for the path (file OR directory).
    const exists = async (path)=>{
        try {
            await fs.promises.stat(path);
            return true;
        // eslint-disable-next-line @typescript-eslint/no-unused-vars
        } catch (error) {
            return false;
        }
    };
    // NOTE(review): stat() throws when the path does not exist, so callers
    // appear expected to check exists() first — confirm before relying on it.
    const isDirectory = async (path)=>{
        const stats = await fs.promises.stat(path);
        if (!stats.isDirectory()) {
            // Log at debug level since this is expected when scanning directories
            // that contain both files and directories
            return false;
        }
        return true;
    };
    const isFile = async (path)=>{
        const stats = await fs.promises.stat(path);
        if (!stats.isFile()) {
            // Log removed since this is expected when checking file types
            return false;
        }
        return true;
    };
    // access(R_OK): readable by the current process; failures logged at debug.
    const isReadable = async (path)=>{
        try {
            await fs.promises.access(path, fs.constants.R_OK);
        } catch (error) {
            logger.debug(`${path} is not readable: ${error.message}`);
            return false;
        }
        return true;
    };
    // access(W_OK): writable by the current process; failures logged at debug.
    const isWritable = async (path)=>{
        try {
            await fs.promises.access(path, fs.constants.W_OK);
        } catch (error) {
            logger.debug(`${path} is not writable: ${error.message}`);
            return false;
        }
        return true;
    };
    // Composite checks: existence + type + permission.
    const isFileReadable = async (path)=>{
        return await exists(path) && await isFile(path) && await isReadable(path);
    };
    const isDirectoryWritable = async (path)=>{
        return await exists(path) && await isDirectory(path) && await isWritable(path);
    };
    const isDirectoryReadable = async (path)=>{
        return await exists(path) && await isDirectory(path) && await isReadable(path);
    };
    // mkdir -p; wraps failure in a descriptive Error.
    const createDirectory = async (path)=>{
        try {
            await fs.promises.mkdir(path, {
                recursive: true
            });
        } catch (mkdirError) {
            throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);
        }
    };
    // mkdir -p that additionally diagnoses the case where a FILE blocks the
    // path (ENOTDIR) and reports which path component is the culprit.
    const ensureDirectory = async (path)=>{
        if (!await exists(path)) {
            // Before creating the directory, check if any parent directory is blocked by a file
            try {
                await fs.promises.mkdir(path, {
                    recursive: true
                });
            } catch (mkdirError) {
                // If mkdir fails with ENOTDIR, it means a parent directory is actually a file
                if (mkdirError.code === 'ENOTDIR') {
                    // Find which parent directory is the problem
                    const pathParts = path.split('/').filter((p)=>p !== '');
                    let currentPath = '';
                    for (const part of pathParts){
                        currentPath = currentPath ? `${currentPath}/${part}` : part;
                        if (await exists(currentPath) && !await isDirectory(currentPath)) {
                            throw new Error(`Cannot create directory at ${path}: a file exists at ${currentPath} blocking the path`);
                        }
                    }
                }
                // Re-throw the original error if it's not the file-blocking-path issue or we couldn't find the blocking file
                throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);
            }
        } else {
            // Path exists, but we need to check if it's actually a directory
            if (!await isDirectory(path)) {
                // Path exists but is not a directory (likely a file)
                throw new Error(`Cannot create directory at ${path}: a file already exists at this location`);
            }
        // If we reach here, the directory already exists, so nothing to do
        }
    };
    // rm -rf; silently succeeds when the path is already absent.
    const removeDirectory = async (path)=>{
        try {
            if (await exists(path)) {
                await fs.promises.rm(path, {
                    recursive: true,
                    force: true
                });
            }
        } catch (rmError) {
            throw new Error(`Failed to remove directory ${path}: ${rmError.message} ${rmError.stack}`);
        }
    };
    const readFile = async (path, encoding)=>{
        return await fs.promises.readFile(path, {
            encoding: encoding
        });
    };
    const writeFile = async (path, data, encoding)=>{
        await fs.promises.writeFile(path, data, {
            encoding: encoding
        });
    };
    const rename = async (oldPath, newPath)=>{
        await fs.promises.rename(oldPath, newPath);
    };
    // unlink; silently succeeds when the file is already absent.
    const deleteFile = async (path)=>{
        try {
            if (await exists(path)) {
                await fs.promises.unlink(path);
            }
        } catch (deleteError) {
            throw new Error(`Failed to delete file ${path}: ${deleteError.message} ${deleteError.stack}`);
        }
    };
    // Invoke `callback` sequentially for every file (no directories) matching
    // options.pattern (a glob) under `directory`.
    const forEachFileIn = async (directory, callback, options = {
        pattern: '*.*'
    })=>{
        try {
            const files = await glob(options.pattern, {
                cwd: directory,
                nodir: true
            });
            for (const file of files){
                await callback(path.join(directory, file));
            }
        } catch (err) {
            throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);
        }
    };
    const readStream = async (path)=>{
        return fs.createReadStream(path);
    };
    // sha256 of the file's UTF-8 content, truncated to `length` hex chars.
    const hashFile = async (path, length)=>{
        const file = await readFile(path, 'utf8');
        return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);
    };
    const listFiles = async (directory)=>{
        return await fs.promises.readdir(directory);
    };
    return {
        exists,
        isDirectory,
        isFile,
        isReadable,
        isWritable,
        isFileReadable,
        isDirectoryWritable,
        isDirectoryReadable,
        createDirectory,
        ensureDirectory,
        readFile,
        readStream,
        writeFile,
        rename,
        deleteFile,
        forEachFileIn,
        hashFile,
        listFiles,
        removeDirectory
    };
};
|
|
383
|
+
|
|
384
|
+
// Function to read from STDIN if available
// Resolves with the trimmed stdin text, or null when stdin is a TTY, empty,
// errors, or produces no data before the detection timeout. Never rejects.
async function readStdin() {
    // In test environment, allow mocking to work by skipping TTY check
    if (process.env.NODE_ENV === 'test' || process.env.VITEST === 'true') {
        return new Promise((resolve)=>{
            let input = '';
            let hasData = false;
            // Guards against double-resolution across timeout/end/error paths.
            let resolved = false;
            const timeout = setTimeout(()=>{
                if (!hasData && !resolved) {
                    resolved = true;
                    cleanup();
                    resolve(null);
                }
            }, 10); // Very short timeout for tests
            const onData = (chunk)=>{
                hasData = true;
                clearTimeout(timeout);
                input += chunk;
            };
            const onEnd = ()=>{
                if (!resolved) {
                    resolved = true;
                    cleanup();
                    // Empty/whitespace-only input is reported as null.
                    resolve(input.trim() || null);
                }
            };
            const onError = ()=>{
                if (!resolved) {
                    resolved = true;
                    clearTimeout(timeout);
                    cleanup();
                    resolve(null);
                }
            };
            // Detach all listeners and stop consuming stdin.
            const cleanup = ()=>{
                process.stdin.removeListener('data', onData);
                process.stdin.removeListener('end', onEnd);
                process.stdin.removeListener('error', onError);
                process.stdin.pause();
            };
            process.stdin.setEncoding('utf8');
            process.stdin.on('data', onData);
            process.stdin.on('end', onEnd);
            process.stdin.on('error', onError);
            process.stdin.resume();
        });
    }
    return new Promise((resolve)=>{
        // Check if stdin is TTY (interactive terminal)
        if (process.stdin.isTTY) {
            resolve(null);
            return;
        }
        let input = '';
        let hasData = false;
        // Guards against double-resolution across timeout/end/error paths.
        let resolved = false;
        const timeout = setTimeout(()=>{
            if (!hasData && !resolved) {
                resolved = true;
                cleanup();
                resolve(null);
            }
        }, 100); // Short timeout to detect if data is available
        const onData = (chunk)=>{
            hasData = true;
            clearTimeout(timeout);
            input += chunk;
        };
        const onEnd = ()=>{
            if (!resolved) {
                resolved = true;
                cleanup();
                // Empty/whitespace-only input is reported as null.
                resolve(input.trim() || null);
            }
        };
        const onError = ()=>{
            if (!resolved) {
                resolved = true;
                clearTimeout(timeout);
                cleanup();
                resolve(null);
            }
        };
        // Detach all listeners and stop consuming stdin.
        const cleanup = ()=>{
            process.stdin.removeListener('data', onData);
            process.stdin.removeListener('end', onEnd);
            process.stdin.removeListener('error', onError);
            process.stdin.pause();
        };
        process.stdin.setEncoding('utf8');
        process.stdin.on('data', onData);
        process.stdin.on('end', onEnd);
        process.stdin.on('error', onError);
        // If no data comes in quickly, assume no stdin
        process.stdin.resume();
    });
}
|
|
482
|
+
// Function to prompt user for confirmation (y/n)
// Prints `message`, then repeatedly prompts until the user enters
// y/yes (resolves true) or n/no (resolves false). Input matching is
// case-insensitive. Never rejects.
async function promptConfirmation(message) {
    // In test environment, return true by default
    if (process.env.NODE_ENV === 'test' || process.env.VITEST === 'true') {
        return true;
    }
    // Use process.stdout.write instead of console.log to avoid linter issues
    process.stdout.write(message + '\n');
    process.stdout.write('Please enter "y" for yes or "n" for no: ');
    return new Promise((resolve)=>{
        const handleInput = (chunk)=>{
            const input = chunk.toString().trim().toLowerCase();
            if (input === 'y' || input === 'yes') {
                cleanup();
                resolve(true);
            } else if (input === 'n' || input === 'no') {
                cleanup();
                resolve(false);
            } else {
                // Unrecognized answer: re-prompt without resolving.
                process.stdout.write('Please enter "y" for yes or "n" for no: ');
            // Continue listening for input
            }
        };
        // Detach the listener and stop consuming stdin once answered.
        const cleanup = ()=>{
            process.stdin.removeListener('data', handleInput);
            process.stdin.pause();
        };
        process.stdin.setEncoding('utf8');
        process.stdin.on('data', handleInput);
        process.stdin.resume();
    });
}
|
|
514
|
+
|
|
515
|
+
dayjs.extend(utc);
dayjs.extend(timezone);
/**
 * Factory for date utilities pinned to a single IANA timezone.
 * Every helper interprets, adjusts and formats dates in
 * `parameters.timezone` via dayjs (utc + timezone plugins).
 */ const create = (parameters)=>{
    const { timezone } = parameters;
    // Current instant as a Date (validated through date()).
    const now = ()=>{
        return date(undefined);
    };
    // Coerce `date` (or the current time when falsy) to a JS Date in the
    // configured timezone; throws on unparseable input.
    const date = (date)=>{
        let value;
        if (date) {
            value = dayjs.tz(date, timezone);
        } else {
            value = dayjs().tz(timezone);
        }
        if (!value.isValid()) {
            throw new Error(`Invalid date: ${date}`);
        }
        return value.toDate();
    };
    // Parse text against an explicit format string; throws on mismatch.
    const parse = (date, format)=>{
        const value = dayjs.tz(date, format, timezone);
        if (!value.isValid()) {
            throw new Error(`Invalid date: ${date}, expected format: ${format}`);
        }
        return value.toDate();
    };
    // Arithmetic helpers: each returns a NEW Date; inputs are not mutated.
    const addDays = (date, days)=>{
        return dayjs.tz(date, timezone).add(days, 'day').toDate();
    };
    const addMonths = (date, months)=>{
        return dayjs.tz(date, timezone).add(months, 'month').toDate();
    };
    const addYears = (date, years)=>{
        return dayjs.tz(date, timezone).add(years, 'year').toDate();
    };
    // Format `date` using a dayjs format string, in the configured timezone.
    const format = (date, format)=>{
        return dayjs.tz(date, timezone).format(format);
    };
    const subDays = (date, days)=>{
        return dayjs.tz(date, timezone).subtract(days, 'day').toDate();
    };
    const subMonths = (date, months)=>{
        return dayjs.tz(date, timezone).subtract(months, 'month').toDate();
    };
    const subYears = (date, years)=>{
        return dayjs.tz(date, timezone).subtract(years, 'year').toDate();
    };
    // Period-boundary helpers (evaluated in the configured timezone).
    const startOfMonth = (date)=>{
        return dayjs.tz(date, timezone).startOf('month').toDate();
    };
    const endOfMonth = (date)=>{
        return dayjs.tz(date, timezone).endOf('month').toDate();
    };
    const startOfYear = (date)=>{
        return dayjs.tz(date, timezone).startOf('year').toDate();
    };
    const endOfYear = (date)=>{
        return dayjs.tz(date, timezone).endOf('year').toDate();
    };
    // Ordering comparisons (strict before/after).
    const isBefore = (date, other)=>{
        return dayjs.tz(date, timezone).isBefore(dayjs.tz(other, timezone));
    };
    const isAfter = (date, other)=>{
        return dayjs.tz(date, timezone).isAfter(dayjs.tz(other, timezone));
    };
    return {
        now,
        date,
        parse,
        addDays,
        addMonths,
        addYears,
        format,
        subDays,
        subMonths,
        subYears,
        startOfMonth,
        endOfMonth,
        startOfYear,
        endOfYear,
        isBefore,
        isAfter
    };
};
|
|
599
|
+
/**
 * List every IANA timezone name recognized by moment-timezone.
 * @returns {string[]} timezone identifiers (e.g. "America/New_York")
 */ const validTimezones = function() {
    return moment.tz.names();
};
|
|
602
|
+
|
|
603
|
+
/**
|
|
604
|
+
* General utility functions
|
|
605
|
+
*/ // Utility function for deep merging two objects.
|
|
606
|
+
function deepMerge(target, source) {
|
|
607
|
+
for(const key in source){
|
|
608
|
+
if (Object.prototype.hasOwnProperty.call(source, key)) {
|
|
609
|
+
if (key === "__proto__" || key === "constructor") {
|
|
610
|
+
continue; // Skip prototype-polluting keys
|
|
611
|
+
}
|
|
612
|
+
if (source[key] && typeof source[key] === 'object' && !Array.isArray(source[key])) {
|
|
613
|
+
if (!target[key]) {
|
|
614
|
+
target[key] = {};
|
|
615
|
+
}
|
|
616
|
+
deepMerge(target[key], source[key]);
|
|
617
|
+
} else {
|
|
618
|
+
target[key] = source[key];
|
|
619
|
+
}
|
|
620
|
+
}
|
|
621
|
+
}
|
|
622
|
+
return target;
|
|
623
|
+
}
|
|
624
|
+
// Recursive implementation of JSON.stringify
/**
 * Serialize `obj` to JSON text without delegating structure handling to
 * JSON.stringify. Function-valued and undefined object properties are
 * skipped; recursion is capped at depth 10.
 *
 * Fixes over the previous revision:
 *  - string keys/values are escaped via JSON.stringify, so quotes,
 *    backslashes and control characters no longer yield invalid JSON;
 *  - skipped properties no longer leave empty entries that rendered as
 *    stray commas (e.g. `{"a":1,}`).
 *
 * @param {*} obj - value to serialize
 * @param {{depth: number}} [options] - internal recursion-depth counter
 * @returns {string} JSON text ('' for unserializable top-level values)
 */
const stringifyJSON = function(obj, options = {
    depth: 0
}) {
    if (options.depth > 10) {
        return '{"error": "Maximum depth reached"}';
    }
    /*********CHECK FOR PRIMITIVE TYPES**********/ if (typeof obj === 'number' || typeof obj === 'boolean' || obj === null) return '' + obj;
    else if (typeof obj === 'string') return JSON.stringify(obj); // proper escaping
    else if (Array.isArray(obj)) {
        //check for empty array
        if (obj[0] === undefined) return '[]';
        const arrVals = obj.map(function(el) {
            return stringifyJSON(el, {
                depth: options.depth + 1
            });
        });
        return '[' + arrVals + ']';
    } else if (obj instanceof Object) {
        const arrOfKeyVals = [];
        for (const key of Object.keys(obj)){
            const keyOut = JSON.stringify(key) + ':';
            const keyValOut = obj[key];
            //skip functions and undefined properties (no placeholder entry)
            if (keyValOut instanceof Function || keyValOut === undefined) continue;
            if (typeof keyValOut === 'string') arrOfKeyVals.push(keyOut + JSON.stringify(keyValOut));
            else if (typeof keyValOut === 'boolean' || typeof keyValOut === 'number' || keyValOut === null) arrOfKeyVals.push(keyOut + keyValOut);
            else if (keyValOut instanceof Object) {
                arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut, {
                    depth: options.depth + 1
                }));
            }
        }
        return '{' + arrOfKeyVals + '}';
    }
    return '';
};
|
|
668
|
+
// Version increment functions
/**
 * Increment the patch component of a semver-like version string.
 * Accepts an optional leading 'v' and tolerates pre-release suffixes
 * ("4.6.24-dev.0" -> "4.6.25"); any pre-release identifier is dropped.
 * A patch written with a leading dash (e.g. "1.2.-1") has its sign
 * discarded before incrementing, matching the established behavior.
 * @throws {Error} when fewer than three dot-separated parts are present
 *                 or the patch component is not numeric
 */
const incrementPatchVersion = (version)=>{
    // Remove 'v' prefix if present
    const cleanVersion = version.startsWith('v') ? version.slice(1) : version;
    const parts = cleanVersion.split('.');
    if (parts.length < 3) {
        throw new Error(`Invalid version string: ${version}`);
    }
    const patchPart = parts[2];
    let patchNumber;
    if (patchPart.startsWith('-')) {
        // "-1" or "-1-dev.0": the digits after the first dash are the patch.
        const segments = patchPart.split('-');
        if (segments.length < 2) {
            throw new Error(`Invalid version string: ${version}`);
        }
        patchNumber = parseInt(segments[1]);
    } else {
        // "24" or "24-dev.0": everything before the first dash is the patch.
        patchNumber = parseInt(patchPart.split('-')[0]);
    }
    if (isNaN(patchNumber)) {
        throw new Error(`Invalid patch number in version: ${version}`);
    }
    return `${parts[0]}.${parts[1]}.${patchNumber + 1}`;
};
|
|
707
|
+
/**
 * Increment the minor component of a semver-like version string (patch
 * resets to 0). Accepts an optional leading 'v'.
 * @throws {Error} on fewer than three parts or a non-numeric minor
 */
const incrementMinorVersion = (version)=>{
    // Remove 'v' prefix if present
    const cleanVersion = version.startsWith('v') ? version.slice(1) : version;
    const parts = cleanVersion.split('.');
    if (parts.length < 3) {
        throw new Error(`Invalid version string: ${version}`);
    }
    const [major, minorPart] = parts;
    const minor = parseInt(minorPart);
    if (isNaN(minor)) {
        throw new Error(`Invalid minor version in: ${version}`);
    }
    return `${major}.${minor + 1}.0`;
};
|
|
721
|
+
/**
 * Increment the major component of a semver-like version string (minor and
 * patch reset to 0). Accepts an optional leading 'v'.
 * @throws {Error} on fewer than three parts or a non-numeric major
 */
const incrementMajorVersion = (version)=>{
    // Remove 'v' prefix if present
    const cleanVersion = version.startsWith('v') ? version.slice(1) : version;
    const parts = cleanVersion.split('.');
    if (parts.length < 3) {
        throw new Error(`Invalid version string: ${version}`);
    }
    const major = parseInt(parts[0]);
    if (isNaN(major)) {
        throw new Error(`Invalid major version in: ${version}`);
    }
    return `${major + 1}.0.0`;
};
|
|
734
|
+
/**
 * Checks whether a string looks like a semver version: "x.y.z" with an
 * optional prerelease suffix and optional leading 'v'.
 *
 * @param {string} version - Candidate version string.
 * @returns {boolean} True when the string matches the expected shape.
 */
const validateVersionString = (version) => {
    const candidate = version.startsWith('v') ? version.slice(1) : version;
    const VERSION_PATTERN = /^\d+\.\d+\.\d+(-[a-zA-Z0-9.-]+)?$/;
    return VERSION_PATTERN.test(candidate);
};
|
|
739
|
+
/**
 * Resolves a release target into a concrete version string.
 * Accepts the keywords 'patch'/'minor'/'major' (which bump the current
 * version) or an explicit "x.y.z" version (optionally 'v'-prefixed).
 *
 * @param {string} currentVersion - The version being released from.
 * @param {string} targetVersion - Keyword or explicit version.
 * @returns {string} The computed version, without a 'v' prefix.
 * @throws {Error} When an explicit version fails validation.
 */
const calculateTargetVersion = (currentVersion, targetVersion) => {
    if (targetVersion === 'patch') {
        return incrementPatchVersion(currentVersion);
    }
    if (targetVersion === 'minor') {
        return incrementMinorVersion(currentVersion);
    }
    if (targetVersion === 'major') {
        return incrementMajorVersion(currentVersion);
    }
    // Anything else must be an explicit version string.
    if (!validateVersionString(targetVersion)) {
        throw new Error(`Invalid version format: ${targetVersion}. Expected format: "x.y.z" or one of: "patch", "minor", "major"`);
    }
    return targetVersion.startsWith('v') ? targetVersion.slice(1) : targetVersion;
};
|
|
755
|
+
/**
 * Produces the next prerelease version for the given tag.
 * - No prerelease present: appends "-<tag>.0".
 * - Same tag present: increments its trailing counter.
 * - Different tag present: switches to "<tag>.0".
 *
 * @param {string} version - Version such as "1.2.3" or "v1.2.3-dev.4".
 * @param {string} tag - Prerelease tag, e.g. "dev" or "test".
 * @returns {string} The next prerelease version (no 'v' prefix).
 * @throws {Error} If fewer than three dot-separated components are present.
 */
const incrementPrereleaseVersion = (version, tag) => {
    const normalized = version.startsWith('v') ? version.slice(1) : version;
    const segments = normalized.split('.');
    if (segments.length < 3) {
        throw new Error(`Invalid version string: ${version}`);
    }
    // Everything past "major.minor" may contain dots from the prerelease,
    // e.g. "3-dev.5" from "1.2.3-dev.5" — rejoin before splitting on '-'.
    const [major, minor, ...rest] = segments;
    const patchSection = rest.join('.');
    const [patch, ...prereleaseParts] = patchSection.split('-');
    const base = `${major}.${minor}.${patch}`;
    if (prereleaseParts.length === 0) {
        // No prerelease yet: start the requested tag at 0.
        return `${base}-${tag}.0`;
    }
    // Rejoin with '-' so tags containing dashes survive the split.
    const [currentTag, currentCounter] = prereleaseParts.join('-').split('.');
    if (currentTag !== tag) {
        // Switching tags resets the counter.
        return `${base}-${tag}.0`;
    }
    // Missing/non-numeric counters are treated as 0, matching the original.
    const counter = parseInt(currentCounter) || 0;
    return `${base}-${tag}.${counter + 1}`;
};
|
|
788
|
+
/**
 * Strips any prerelease suffix (and a leading 'v') from a version string,
 * yielding the plain "major.minor.patch" release form.
 *
 * @param {string} version - Version such as "v1.2.3-dev.4".
 * @returns {string} The bare release version, e.g. "1.2.3".
 * @throws {Error} If fewer than three dot-separated components are present.
 */
const convertToReleaseVersion = (version) => {
    const normalized = version.startsWith('v') ? version.slice(1) : version;
    const segments = normalized.split('.');
    if (segments.length < 3) {
        throw new Error(`Invalid version string: ${version}`);
    }
    const [major, minor, ...rest] = segments;
    // The patch may carry a prerelease (e.g. "3-dev" from "1.2.3-dev.4");
    // keep only the part before the first dash.
    const [patch] = rest.join('.').split('-');
    return `${major}.${minor}.${patch}`;
};
|
|
803
|
+
// Async utility functions
|
|
804
|
+
/**
 * Pauses execution for the given number of milliseconds.
 *
 * @param {number} ms - Delay duration in milliseconds.
 * @returns {Promise<void>} Resolves once the delay elapses.
 */
const sleep = (ms) => new Promise((done) => {
    setTimeout(done, ms);
});
|
|
807
|
+
/**
 * Executes an async operation, retrying failures with exponential backoff.
 * Waits initialDelay * 2^attempt between attempts; no wait follows the
 * final failed attempt.
 *
 * @param {() => Promise<*>} fn - Async operation to run.
 * @param {number} [maxRetries=3] - Total number of attempts.
 * @param {number} [initialDelay=1000] - Base backoff delay in milliseconds.
 * @returns {Promise<*>} Value from the first successful attempt.
 * @throws The error from the last attempt once all retries are exhausted.
 */
const retryWithBackoff = async (fn, maxRetries = 3, initialDelay = 1000) => {
    let latestError;
    for (let attempt = 0; attempt < maxRetries; attempt++) {
        try {
            return await fn();
        } catch (err) {
            latestError = err;
            const attemptsRemain = attempt < maxRetries - 1;
            if (attemptsRemain) {
                // Double the wait on every failed attempt.
                await sleep(initialDelay * 2 ** attempt);
            }
        }
    }
    throw latestError || new Error('Retry failed');
};
|
|
822
|
+
// Array utilities
|
|
823
|
+
/**
 * Returns a new array with duplicates removed, preserving first-seen order.
 *
 * @param {Array<*>} array - Input array (not mutated).
 * @returns {Array<*>} Deduplicated copy.
 */
const uniqueArray = (array) => [...new Set(array)];
|
|
826
|
+
/**
 * Groups array items into an object keyed by the stringified value of the
 * given property.
 *
 * @param {Array<Object>} array - Items to group (not mutated).
 * @param {string} key - Property name whose value selects the bucket.
 * @returns {Object} Mapping of String(item[key]) to the items in that bucket.
 */
const groupBy = (array, key) => {
    const grouped = {};
    for (const item of array) {
        const bucket = String(item[key]);
        if (grouped[bucket] === undefined) {
            grouped[bucket] = [];
        }
        grouped[bucket].push(item);
    }
    return grouped;
};
|
|
836
|
+
// String utilities
|
|
837
|
+
/**
 * Truncates a string to at most `maxLength` characters, appending `suffix`
 * when truncation occurs.
 *
 * @param {string} str - Input string.
 * @param {number} maxLength - Maximum length of the returned string.
 * @param {string} [suffix='...'] - Marker appended when the string is shortened.
 * @returns {string} The original string if it fits, otherwise a truncated copy
 *   never longer than maxLength.
 */
const truncateString = (str, maxLength, suffix = '...') => {
    if (str.length <= maxLength) {
        return str;
    }
    // Fix: previously, when maxLength < suffix.length, substring(0, negative)
    // collapsed to '' and the result was just the suffix — LONGER than
    // maxLength (e.g. truncateString('abcdef', 2) returned '...').
    // Hard-truncate without the suffix in that case.
    if (maxLength < suffix.length) {
        return str.substring(0, maxLength);
    }
    return str.substring(0, maxLength - suffix.length) + suffix;
};
|
|
843
|
+
// JSON utilities
|
|
844
|
+
/**
 * Parses JSON without throwing: on malformed input, returns the provided
 * fallback, or null when no fallback is given.
 *
 * @param {string} jsonString - JSON text to parse.
 * @param {*} [fallback] - Value returned when parsing fails.
 * @returns {*} Parsed value, the fallback, or null.
 */
const safeJsonParse = (jsonString, fallback) => {
    try {
        return JSON.parse(jsonString);
    } catch {
        if (fallback !== undefined) {
            return fallback;
        }
        return null;
    }
};
|
|
851
|
+
|
|
852
|
+
// Directory names skipped while scanning a workspace for package.json files:
// dependency caches, build/framework output, VCS metadata, and static asset
// folders. Matched by exact name at every depth of the scan.
const EXCLUDED_DIRECTORIES = [
    'node_modules',
    'dist',
    'build',
    'coverage',
    '.git',
    '.next',
    '.nuxt',
    'out',
    'public',
    'static',
    'assets'
];
|
|
865
|
+
/**
 * Recursively scans a directory tree for package.json files and returns the
 * parsed, validated manifests.
 *
 * Traversal is depth-limited (subtrees are abandoned once depth exceeds 5),
 * skips EXCLUDED_DIRECTORIES, and is best-effort: unreadable directories and
 * invalid manifests are logged at debug level and skipped rather than
 * aborting the scan.
 *
 * @param rootDir Directory where the scan starts; relativePath values in the
 *                result are computed against it ('.' for rootDir itself).
 * @param storage Storage abstraction exposing exists/isDirectory/listFiles/readFile.
 * @returns Promise resolving to an array of { path, packageJson, relativePath }.
 */ const findAllPackageJsonFiles = async (rootDir, storage)=>{
    const logger = getLogger();
    const packageJsonFiles = [];
    const scanDirectory = async (currentDir, depth = 0)=>{
        // Prevent infinite recursion and overly deep scanning
        if (depth > 5) {
            return;
        }
        try {
            if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {
                return;
            }
            const items = await storage.listFiles(currentDir);
            // Check for package.json in current directory
            if (items.includes('package.json')) {
                const packageJsonPath = path.join(currentDir, 'package.json');
                try {
                    const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
                    // NOTE(review): safeJsonParse$1's second argument appears to be a
                    // context path used for error reporting — confirm against its definition.
                    const parsed = safeJsonParse$1(packageJsonContent, packageJsonPath);
                    const packageJson = validatePackageJson(parsed, packageJsonPath);
                    const relativePath = path.relative(rootDir, currentDir);
                    packageJsonFiles.push({
                        path: packageJsonPath,
                        packageJson,
                        // path.relative yields '' when currentDir === rootDir
                        relativePath: relativePath || '.'
                    });
                    logger.debug(`Found package.json at: ${relativePath || '.'}`);
                } catch (error) {
                    // Invalid JSON or failed validation: skip this manifest, keep scanning.
                    logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);
                }
            }
            // Scan subdirectories, excluding build/generated directories
            for (const item of items){
                if (EXCLUDED_DIRECTORIES.includes(item)) {
                    continue;
                }
                const itemPath = path.join(currentDir, item);
                try {
                    if (await storage.isDirectory(itemPath)) {
                        await scanDirectory(itemPath, depth + 1);
                    }
                } catch (error) {
                    // Skip directories that can't be accessed
                    logger.debug(`Skipped directory ${itemPath}: ${error.message}`);
                    continue;
                }
            }
        } catch (error) {
            // Listing or stat failure on currentDir: abandon this subtree only.
            logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);
        }
    };
    await scanDirectory(rootDir);
    logger.debug(`Found ${packageJsonFiles.length} package.json file(s) in directory tree`);
    return packageJsonFiles;
};
|
|
920
|
+
/**
 * Scans the directory tree for package.json files whose dependencies,
 * devDependencies, or peerDependencies reference local paths via "file:".
 * Scan failures are logged at debug level and yield an empty result.
 *
 * @param storage Storage utility instance
 * @param rootDir Root directory to scan (defaults to current working directory)
 * @returns Array of issues found, empty array if no issues
 */ const checkForFileDependencies = async (storage, rootDir = process.cwd())=>{
    const logger = getLogger();
    const issues = [];
    try {
        const manifests = await findAllPackageJsonFiles(rootDir, storage);
        for (const { packageJson, relativePath } of manifests) {
            const offending = [];
            // Inspect each dependency section in a fixed, deterministic order.
            const sections = ['dependencies', 'devDependencies', 'peerDependencies'];
            for (const sectionName of sections) {
                const section = packageJson[sectionName];
                if (!section) {
                    continue;
                }
                for (const [depName, depVersion] of Object.entries(section)) {
                    if (depVersion.startsWith('file:')) {
                        offending.push({
                            name: depName,
                            version: depVersion,
                            dependencyType: sectionName
                        });
                    }
                }
            }
            if (offending.length > 0) {
                issues.push({
                    packagePath: relativePath,
                    dependencies: offending
                });
            }
        }
    } catch (error) {
        logger.debug(`Failed to check for file dependencies: ${error.message}`);
    }
    return issues;
};
|
|
972
|
+
/**
 * Emits warn-level log lines describing packages that still contain
 * "file:" dependencies. Produces no output when `issues` is empty.
 *
 * @param issues Array of file dependency issues
 * @param context Context for the warning (e.g., 'commit', 'link check')
 */ const logFileDependencyWarning = (issues, context = 'operation')=>{
    const logger = getLogger();
    if (issues.length === 0) {
        return;
    }
    logger.warn(`FILE_DEPS_WARNING: Found file: dependencies that should not be committed | Context: ${context} | Count: ${issues.length} | Impact: May cause build issues`);
    issues.forEach((issue) => {
        logger.warn(`FILE_DEPS_PACKAGE: Package with file dependencies | Package: ${issue.packagePath}`);
        issue.dependencies.forEach((dep) => {
            logger.warn(`FILE_DEPS_DETAIL: Dependency details | Name: ${dep.name} | Version: ${dep.version} | Type: ${dep.dependencyType}`);
        });
    });
    // Trailing blank line keeps subsequent log output visually separated.
    logger.warn('');
};
|
|
990
|
+
/**
 * Prints warn-level guidance for resolving "file:" dependency issues,
 * followed by the available bypass options.
 *
 * @param hasUnlinkCapability Whether the current context supports unlinking
 */ const logFileDependencySuggestions = (hasUnlinkCapability = true)=>{
    const logger = getLogger();
    logger.warn('FILE_DEPS_RESOLUTION: Steps to resolve file dependency issues:');
    const steps = hasUnlinkCapability
        ? [
            ' STEP_1: Restore registry versions | Command: kodrdriv unlink',
            ' STEP_2: Complete commit operation | Command: git commit',
            ' STEP_3: Restore local development links | Command: kodrdriv link'
        ]
        : [
            ' STEP_1: Manually restore registry versions in package.json files',
            ' STEP_2: Complete commit operation | Command: git commit',
            ' STEP_3: Re-link local dependencies for development'
        ];
    for (const step of steps) {
        logger.warn(step);
    }
    logger.warn('');
    logger.warn('FILE_DEPS_BYPASS: Alternative bypass options:');
    logger.warn(' OPTION_1: Skip file check | Flag: --skip-file-check');
    logger.warn(' OPTION_2: Skip all hooks | Command: git commit --no-verify');
    logger.warn('');
};
|
|
1011
|
+
|
|
1012
|
+
/**
 * Runtime validation utilities for safe type handling
 */ /**
 * Runs `schema.parse(data)` and wraps the outcome in a result object rather
 * than letting the parse error propagate to the caller.
 *
 * @param data Data to validate
 * @param schema Validation schema (can be a Zod schema or custom validator)
 * @returns ValidationResult with success status and either data or error
 */ const validate = (data, schema)=>{
    let outcome;
    try {
        outcome = {
            success: true,
            data: schema.parse(data)
        };
    } catch (error) {
        outcome = {
            success: false,
            // Stringify so non-Error throwables are also captured.
            error: String(error)
        };
    }
    return outcome;
};
|
|
1033
|
+
/**
 * Validates that a value is a string
 * @throws {Error} When the value is not a string.
 */ const validateString = (value, fieldName = 'value')=>{
    if (typeof value === 'string') {
        return value;
    }
    throw new Error(`${fieldName} must be a string`);
};
|
|
1041
|
+
/**
 * Validates that a value is a number
 * NaN is rejected; Infinity is accepted (matching the historical contract).
 * @throws {Error} When the value is not a usable number.
 */ const validateNumber = (value, fieldName = 'value')=>{
    const isUsableNumber = typeof value === 'number' && !Number.isNaN(value);
    if (!isUsableNumber) {
        throw new Error(`${fieldName} must be a valid number`);
    }
    return value;
};
|
|
1049
|
+
/**
 * Validates that a value is a boolean
 * @throws {Error} When the value is not exactly true or false.
 */ const validateBoolean = (value, fieldName = 'value')=>{
    if (value === true || value === false) {
        return value;
    }
    throw new Error(`${fieldName} must be a boolean`);
};
|
|
1057
|
+
/**
 * Validates that a value is an array
 * @throws {Error} When the value is not an array.
 */ const validateArray = (value, fieldName = 'value')=>{
    if (Array.isArray(value)) {
        return value;
    }
    throw new Error(`${fieldName} must be an array`);
};
|
|
1065
|
+
/**
 * Validates that a value is an object (and not null or array)
 * @throws {Error} When the value is null, an array, or not an object.
 */ const validateObject = (value, fieldName = 'value')=>{
    const isPlainObject = value !== null && typeof value === 'object' && !Array.isArray(value);
    if (!isPlainObject) {
        throw new Error(`${fieldName} must be an object`);
    }
    return value;
};
|
|
1073
|
+
/**
 * Validates that a string is not empty
 * Whitespace-only strings are rejected; surrounding whitespace is preserved
 * in the returned value.
 * @throws {Error} When the value is not a string or is blank.
 */ const validateNonEmptyString = (value, fieldName = 'value')=>{
    const text = validateString(value, fieldName);
    if (text.trim().length > 0) {
        return text;
    }
    throw new Error(`${fieldName} must not be empty`);
};
|
|
1082
|
+
/**
 * Validates that a value is one of the allowed values
 * Membership is checked with strict (SameValueZero) equality.
 * @throws {Error} When the value is not in allowedValues.
 */ const validateEnum = (value, allowedValues, fieldName = 'value')=>{
    if (allowedValues.includes(value)) {
        return value;
    }
    throw new Error(`${fieldName} must be one of: ${allowedValues.join(', ')}. Got: ${value}`);
};
|
|
1090
|
+
|
|
1091
|
+
export { ArgumentError, CancellationError, CommandError, ConfigurationError, ExitError, ExternalDependencyError, FileOperationError, PullRequestCheckError, UserCancellationError, ValidationError, calculateTargetVersion, checkForFileDependencies, convertToReleaseVersion, create as createDateUtility, create$1 as createStorage, deepMerge, getLogger, groupBy, incrementMajorVersion, incrementMinorVersion, incrementPatchVersion, incrementPrereleaseVersion, logFileDependencySuggestions, logFileDependencyWarning, promptConfirmation, readStdin, retryWithBackoff, safeJsonParse, setLogger, sleep, stringifyJSON, truncateString, uniqueArray, validTimezones, validate, validateArray, validateBoolean, validateEnum, validateNonEmptyString, validateNumber, validateObject, validateString, validateVersionString };
|
|
1092
|
+
//# sourceMappingURL=index.js.map
|