@nicnocquee/dataqueue 1.31.0 → 1.33.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,709 @@
1
+ import {
2
+ chmodSync,
3
+ existsSync,
4
+ mkdirSync,
5
+ readFileSync,
6
+ writeFileSync,
7
+ } from 'fs';
8
+ import path from 'path';
9
+
10
// Shape of a parsed package.json (top level) — values are validated per section.
type JsonObject = Record<string, unknown>;
// Shape of a dependency/script section: package-or-script name -> version-or-command.
type JsonMap = Record<string, string>;

// Runtime dependencies added to the target project's package.json (pinned to 'latest').
const DEPENDENCIES_TO_ADD = [
  '@nicnocquee/dataqueue',
  '@nicnocquee/dataqueue-dashboard',
  '@nicnocquee/dataqueue-react',
] as const;

// Dev-only tooling added alongside — presumably required by the generated
// migrate-dataqueue script and migration workflow (TODO confirm against CLI docs).
const DEV_DEPENDENCIES_TO_ADD = [
  'dotenv-cli',
  'ts-node',
  'node-pg-migrate',
] as const;

// npm scripts wired into the target project; `cron` must match the scaffolded
// cron.sh filename written by createScaffoldFiles.
const SCRIPTS_TO_ADD = {
  cron: 'bash cron.sh',
  'migrate-dataqueue': 'dotenv -e .env.local -- dataqueue-cli migrate',
} as const;
29
+
30
+ /**
31
+ * App router endpoint template for queue management.
32
+ */
33
+ export const APP_ROUTER_ROUTE_TEMPLATE = `/**
34
+ * This end point is used to manage the job queue.
35
+ * It supports the following tasks:
36
+ * - reclaim: Reclaim stuck jobs
37
+ * - cleanup: Cleanup old jobs
38
+ * - process: Process jobs
39
+ *
40
+ * Example usage with default values (reclaim stuck jobs for 10 minutes, cleanup old jobs for 30 days, and process jobs with batch size 3, concurrency 2, and verbose true):
41
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET"
42
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET"
43
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET"
44
+ *
45
+ * Example usage with custom values:
46
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET" -d '{"maxProcessingTimeMinutes": 15}' -H "Content-Type: application/json"
47
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET" -d '{"daysToKeep": 15}' -H "Content-Type: application/json"
48
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET" -d '{"batchSize": 5, "concurrency": 3, "verbose": false, "workerId": "custom-worker-id"}' -H "Content-Type: application/json"
49
+ *
50
+ * During development, you can run the following script to run the cron jobs continuously in the background:
51
+ * pnpm cron
52
+ */
53
+ import { getJobQueue, jobHandlers } from '@/lib/dataqueue/queue';
54
+ import { NextResponse } from 'next/server';
55
+
56
+ export async function POST(
57
+ request: Request,
58
+ { params }: { params: Promise<{ task: string[] }> },
59
+ ) {
60
+ const { task } = await params;
61
+ const authHeader = request.headers.get('authorization');
62
+ if (authHeader !== \`Bearer \${process.env.CRON_SECRET}\`) {
63
+ return NextResponse.json({ message: 'Unauthorized' }, { status: 401 });
64
+ }
65
+
66
+ if (!task || task.length === 0) {
67
+ return NextResponse.json({ message: 'Task is required' }, { status: 400 });
68
+ }
69
+
70
+ const supportedTasks = ['reclaim', 'cleanup', 'process'];
71
+ const theTask = task[0];
72
+ if (!supportedTasks.includes(theTask)) {
73
+ return NextResponse.json(
74
+ { message: 'Task not supported' },
75
+ { status: 400 },
76
+ );
77
+ }
78
+
79
+ try {
80
+ const jobQueue = getJobQueue();
81
+
82
+ if (theTask === 'reclaim') {
83
+ let maxProcessingTimeMinutes = 10;
84
+ try {
85
+ const body = await request.json();
86
+ maxProcessingTimeMinutes = body.maxProcessingTimeMinutes || 10;
87
+ } catch {
88
+ // ignore parsing error and use default value
89
+ }
90
+ const reclaimed = await jobQueue.reclaimStuckJobs(
91
+ maxProcessingTimeMinutes,
92
+ );
93
+ console.log(\`Reclaimed \${reclaimed} stuck jobs\`);
94
+ return NextResponse.json({
95
+ message: \`Stuck jobs reclaimed: \${reclaimed} with maxProcessingTimeMinutes: \${maxProcessingTimeMinutes}\`,
96
+ reclaimed,
97
+ });
98
+ }
99
+
100
+ if (theTask === 'cleanup') {
101
+ let daysToKeep = 30;
102
+ try {
103
+ const body = await request.json();
104
+ daysToKeep = body.daysToKeep || 30;
105
+ } catch {
106
+ // ignore parsing error and use default value
107
+ }
108
+ const deleted = await jobQueue.cleanupOldJobs(daysToKeep);
109
+ console.log(\`Deleted \${deleted} old jobs\`);
110
+ return NextResponse.json({
111
+ message: \`Old jobs cleaned up: \${deleted} with daysToKeep: \${daysToKeep}\`,
112
+ deleted,
113
+ });
114
+ }
115
+
116
+ if (theTask === 'process') {
117
+ let batchSize = 3;
118
+ let concurrency = 2;
119
+ let verbose = true;
120
+ let workerId = \`manage-\${theTask}-\${Date.now()}\`;
121
+ try {
122
+ const body = await request.json();
123
+ batchSize = body.batchSize || 3;
124
+ concurrency = body.concurrency || 2;
125
+ verbose = body.verbose || true;
126
+ workerId = body.workerId || \`manage-\${theTask}-\${Date.now()}\`;
127
+ } catch {
128
+ // ignore parsing error and use default value
129
+ }
130
+ const processor = jobQueue.createProcessor(jobHandlers, {
131
+ workerId,
132
+ batchSize,
133
+ concurrency,
134
+ verbose,
135
+ });
136
+ const processed = await processor.start();
137
+
138
+ return NextResponse.json({
139
+ message: \`Jobs processed: \${processed} with workerId: \${workerId}, batchSize: \${batchSize}, concurrency: \${concurrency}, and verbose: \${verbose}\`,
140
+ processed,
141
+ });
142
+ }
143
+
144
+ return NextResponse.json(
145
+ { message: 'Task not supported' },
146
+ { status: 400 },
147
+ );
148
+ } catch (error) {
149
+ console.error('Error processing jobs:', error);
150
+ return NextResponse.json(
151
+ { message: 'Failed to process jobs' },
152
+ { status: 500 },
153
+ );
154
+ }
155
+ }
156
+ `;
157
+
158
+ /**
159
+ * Pages router endpoint template for queue management.
160
+ */
161
+ export const PAGES_ROUTER_ROUTE_TEMPLATE = `/**
162
+ * This end point is used to manage the job queue.
163
+ * It supports the following tasks:
164
+ * - reclaim: Reclaim stuck jobs
165
+ * - cleanup: Cleanup old jobs
166
+ * - process: Process jobs
167
+ *
168
+ * Example usage with default values (reclaim stuck jobs for 10 minutes, cleanup old jobs for 30 days, and process jobs with batch size 3, concurrency 2, and verbose true):
169
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET"
170
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET"
171
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET"
172
+ *
173
+ * Example usage with custom values:
174
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET" -d '{"maxProcessingTimeMinutes": 15}' -H "Content-Type: application/json"
175
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET" -d '{"daysToKeep": 15}' -H "Content-Type: application/json"
176
+ * curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET" -d '{"batchSize": 5, "concurrency": 3, "verbose": false, "workerId": "custom-worker-id"}' -H "Content-Type: application/json"
177
+ *
178
+ * During development, you can run the following script to run the cron jobs continuously in the background:
179
+ * pnpm cron
180
+ */
181
+ import type { NextApiRequest, NextApiResponse } from 'next';
182
+ import { getJobQueue, jobHandlers } from '@/lib/dataqueue/queue';
183
+
184
+ type ResponseBody = {
185
+ message: string;
186
+ reclaimed?: number;
187
+ deleted?: number;
188
+ processed?: number;
189
+ };
190
+
191
+ export default async function handler(
192
+ req: NextApiRequest,
193
+ res: NextApiResponse<ResponseBody>,
194
+ ) {
195
+ if (req.method !== 'POST') {
196
+ res.setHeader('Allow', 'POST');
197
+ return res.status(405).json({ message: 'Method not allowed' });
198
+ }
199
+
200
+ const authHeader = req.headers.authorization;
201
+ if (authHeader !== \`Bearer \${process.env.CRON_SECRET}\`) {
202
+ return res.status(401).json({ message: 'Unauthorized' });
203
+ }
204
+
205
+ const task = req.query.task;
206
+ const taskArray = Array.isArray(task) ? task : task ? [task] : [];
207
+ if (!taskArray.length) {
208
+ return res.status(400).json({ message: 'Task is required' });
209
+ }
210
+
211
+ const supportedTasks = ['reclaim', 'cleanup', 'process'];
212
+ const theTask = taskArray[0];
213
+ if (!supportedTasks.includes(theTask)) {
214
+ return res.status(400).json({ message: 'Task not supported' });
215
+ }
216
+
217
+ try {
218
+ const jobQueue = getJobQueue();
219
+ const body = typeof req.body === 'object' && req.body ? req.body : {};
220
+
221
+ if (theTask === 'reclaim') {
222
+ const maxProcessingTimeMinutes = body.maxProcessingTimeMinutes || 10;
223
+ const reclaimed = await jobQueue.reclaimStuckJobs(maxProcessingTimeMinutes);
224
+ console.log(\`Reclaimed \${reclaimed} stuck jobs\`);
225
+ return res.status(200).json({
226
+ message: \`Stuck jobs reclaimed: \${reclaimed} with maxProcessingTimeMinutes: \${maxProcessingTimeMinutes}\`,
227
+ reclaimed,
228
+ });
229
+ }
230
+
231
+ if (theTask === 'cleanup') {
232
+ const daysToKeep = body.daysToKeep || 30;
233
+ const deleted = await jobQueue.cleanupOldJobs(daysToKeep);
234
+ console.log(\`Deleted \${deleted} old jobs\`);
235
+ return res.status(200).json({
236
+ message: \`Old jobs cleaned up: \${deleted} with daysToKeep: \${daysToKeep}\`,
237
+ deleted,
238
+ });
239
+ }
240
+
241
+ const batchSize = body.batchSize || 3;
242
+ const concurrency = body.concurrency || 2;
243
+ const verbose = body.verbose || true;
244
+ const workerId = body.workerId || \`manage-\${theTask}-\${Date.now()}\`;
245
+ const processor = jobQueue.createProcessor(jobHandlers, {
246
+ workerId,
247
+ batchSize,
248
+ concurrency,
249
+ verbose,
250
+ });
251
+ const processed = await processor.start();
252
+
253
+ return res.status(200).json({
254
+ message: \`Jobs processed: \${processed} with workerId: \${workerId}, batchSize: \${batchSize}, concurrency: \${concurrency}, and verbose: \${verbose}\`,
255
+ processed,
256
+ });
257
+ } catch (error) {
258
+ console.error('Error processing jobs:', error);
259
+ return res.status(500).json({ message: 'Failed to process jobs' });
260
+ }
261
+ }
262
+ `;
263
+
264
/**
 * Cron script template for local queue processing.
 *
 * The generated cron.sh sources .env.local (via `set -a` so variables are
 * exported), requires CRON_SECRET to be set, then runs three background
 * loops hitting the manage endpoint: process every 10s, reclaim every 20s,
 * cleanup every 30s. `kill 0` in the trap signals the whole process group
 * so all loops stop on Ctrl-C / SIGTERM.
 */
export const CRON_SH_TEMPLATE = `#!/bin/bash

# This script is used to run the cron jobs for the demo app during development.
# Run it with \`pnpm cron\` from the apps/demo directory.

set -a
source "$(dirname "$0")/.env.local"
set +a

if [ -z "$CRON_SECRET" ]; then
  echo "Error: CRON_SECRET environment variable is not set in .env.local"
  exit 1
fi

cleanup() {
  kill 0
  wait
}
trap cleanup SIGINT SIGTERM

while true; do
  echo "Processing jobs..."
  curl http://localhost:3000/api/dataqueue/manage/process -X POST -H "Authorization: Bearer $CRON_SECRET"
  echo ""
  sleep 10 # Process jobs every 10 seconds
done &

while true; do
  echo "Reclaiming stuck jobs..."
  curl http://localhost:3000/api/dataqueue/manage/reclaim -X POST -H "Authorization: Bearer $CRON_SECRET"
  echo ""
  sleep 20 # Reclaim stuck jobs every 20 seconds
done &

while true; do
  echo "Cleaning up old jobs..."
  curl http://localhost:3000/api/dataqueue/manage/cleanup -X POST -H "Authorization: Bearer $CRON_SECRET"
  echo ""
  sleep 30 # Cleanup old jobs every 30 seconds
done &

wait
`;
310
+
311
/**
 * Queue placeholder template with a single `send_email` job.
 *
 * The generated module lazily initializes one job queue per process
 * (connection string from PG_DATAQUEUE_DATABASE, verbose in development)
 * and exports a `jobHandlers` map whose only handler just logs its payload —
 * users are expected to replace it with a real implementation.
 */
export const QUEUE_TEMPLATE = `import { initJobQueue, JobHandlers } from '@nicnocquee/dataqueue';

export type JobPayloadMap = {
  send_email: {
    to: string;
    subject: string;
    body: string;
  };
};

let jobQueue: ReturnType<typeof initJobQueue<JobPayloadMap>> | null = null;

export const getJobQueue = () => {
  if (!jobQueue) {
    jobQueue = initJobQueue<JobPayloadMap>({
      databaseConfig: {
        connectionString: process.env.PG_DATAQUEUE_DATABASE,
      },
      verbose: process.env.NODE_ENV === 'development',
    });
  }
  return jobQueue;
};

export const jobHandlers: JobHandlers<JobPayloadMap> = {
  send_email: async (payload) => {
    const { to, subject, body } = payload;
    console.log('send_email placeholder:', { to, subject, body });
  },
};
`;
345
+
346
/**
 * Injectable dependencies for the `init` command. Every member is optional;
 * `runInit` falls back to the real console/process/fs implementations, so
 * this interface exists mainly to let tests stub out side effects.
 */
export interface InitDeps {
  /** Sink for normal output (defaults to console.log). */
  log?: (...args: any[]) => void;
  /** Sink for error output (defaults to console.error). */
  error?: (...args: any[]) => void;
  /** Process terminator (defaults to process.exit). */
  exit?: (code: number) => void;
  /** Project directory to operate in (defaults to process.cwd()). */
  cwd?: string;
  /** fs.readFileSync replacement, used to read package.json. */
  readFileSyncImpl?: typeof readFileSync;
  /** fs.writeFileSync replacement, used for scaffold files and package.json. */
  writeFileSyncImpl?: typeof writeFileSync;
  /** fs.existsSync replacement, used for detection and no-overwrite checks. */
  existsSyncImpl?: typeof existsSync;
  /** fs.mkdirSync replacement, used to create scaffold parent directories. */
  mkdirSyncImpl?: typeof mkdirSync;
  /** fs.chmodSync replacement, used to make cron.sh executable. */
  chmodSyncImpl?: typeof chmodSync;
}
357
+
358
/** Which Next.js routing flavor the target project uses. */
type RouterKind = 'app' | 'pages';

/** Facts about the target project gathered during detection. */
interface ProjectDetails {
  /** Project directory the CLI operates in. */
  cwd: string;
  /** Path to the project's package.json (cwd/package.json). */
  packageJsonPath: string;
  /** Parsed package.json contents; mutated in place when adding deps/scripts. */
  packageJson: JsonObject;
  /** 'src' when the project has a src/ directory, otherwise '.'. */
  srcRoot: string;
  /** Detected router; 'app' wins when both app/ and pages/ exist. */
  router: RouterKind;
}
367
+
368
+ /**
369
+ * Runs the `dataqueue-cli init` command.
370
+ */
371
+ export function runInit({
372
+ log = console.log,
373
+ error = console.error,
374
+ exit = (code: number) => process.exit(code),
375
+ cwd = process.cwd(),
376
+ readFileSyncImpl = readFileSync,
377
+ writeFileSyncImpl = writeFileSync,
378
+ existsSyncImpl = existsSync,
379
+ mkdirSyncImpl = mkdirSync,
380
+ chmodSyncImpl = chmodSync,
381
+ }: InitDeps = {}): void {
382
+ try {
383
+ log(`dataqueue: Initializing in ${cwd}...`);
384
+ log('');
385
+
386
+ const details = detectNextJsAndRouter({
387
+ cwd,
388
+ existsSyncImpl,
389
+ readFileSyncImpl,
390
+ });
391
+
392
+ createScaffoldFiles({
393
+ details,
394
+ log,
395
+ existsSyncImpl,
396
+ mkdirSyncImpl,
397
+ writeFileSyncImpl,
398
+ chmodSyncImpl,
399
+ });
400
+
401
+ updatePackageJson({
402
+ details,
403
+ log,
404
+ writeFileSyncImpl,
405
+ });
406
+
407
+ log('');
408
+ log(
409
+ "Done! Run your package manager's install command to install new dependencies.",
410
+ );
411
+ exit(0);
412
+ } catch (cause) {
413
+ const message = cause instanceof Error ? cause.message : String(cause);
414
+ error(`dataqueue: ${message}`);
415
+ exit(1);
416
+ }
417
+ }
418
+
419
+ /**
420
+ * Detects that the current directory is a Next.js app and chooses the router.
421
+ */
422
+ export function detectNextJsAndRouter({
423
+ cwd,
424
+ existsSyncImpl,
425
+ readFileSyncImpl,
426
+ }: {
427
+ cwd: string;
428
+ existsSyncImpl: typeof existsSync;
429
+ readFileSyncImpl: typeof readFileSync;
430
+ }): ProjectDetails {
431
+ const packageJsonPath = path.join(cwd, 'package.json');
432
+ if (!existsSyncImpl(packageJsonPath)) {
433
+ throw new Error('package.json not found in current directory.');
434
+ }
435
+
436
+ const packageJson = parsePackageJson(
437
+ readFileSyncImpl(packageJsonPath, 'utf8'),
438
+ packageJsonPath,
439
+ );
440
+ if (!isNextJsProject(packageJson)) {
441
+ throw new Error(
442
+ "Not a Next.js project. Could not find 'next' in package.json dependencies.",
443
+ );
444
+ }
445
+
446
+ const srcDir = path.join(cwd, 'src');
447
+ const srcRoot = existsSyncImpl(srcDir) ? 'src' : '.';
448
+ const appDir = path.join(cwd, srcRoot, 'app');
449
+ const pagesDir = path.join(cwd, srcRoot, 'pages');
450
+ const hasAppDir = existsSyncImpl(appDir);
451
+ const hasPagesDir = existsSyncImpl(pagesDir);
452
+
453
+ if (!hasAppDir && !hasPagesDir) {
454
+ throw new Error(
455
+ 'Could not detect Next.js router. Expected either app/ or pages/ directory.',
456
+ );
457
+ }
458
+
459
+ const router: RouterKind = hasAppDir ? 'app' : 'pages';
460
+ return { cwd, packageJsonPath, packageJson, srcRoot, router };
461
+ }
462
+
463
+ /**
464
+ * Updates package.json with required dependencies and scripts.
465
+ */
466
+ function updatePackageJson({
467
+ details,
468
+ log,
469
+ writeFileSyncImpl,
470
+ }: {
471
+ details: ProjectDetails;
472
+ log: (...args: any[]) => void;
473
+ writeFileSyncImpl: typeof writeFileSync;
474
+ }): void {
475
+ const packageJson = details.packageJson;
476
+ const dependencies = ensureStringMapSection(packageJson, 'dependencies');
477
+ const devDependencies = ensureStringMapSection(
478
+ packageJson,
479
+ 'devDependencies',
480
+ );
481
+ const scripts = ensureStringMapSection(packageJson, 'scripts');
482
+
483
+ for (const dependency of DEPENDENCIES_TO_ADD) {
484
+ if (dependencies[dependency]) {
485
+ log(` [skipped] dependency ${dependency} (already exists)`);
486
+ continue;
487
+ }
488
+ dependencies[dependency] = 'latest';
489
+ log(` [added] dependency ${dependency}`);
490
+ }
491
+
492
+ for (const devDependency of DEV_DEPENDENCIES_TO_ADD) {
493
+ if (devDependencies[devDependency]) {
494
+ log(` [skipped] devDependency ${devDependency} (already exists)`);
495
+ continue;
496
+ }
497
+ devDependencies[devDependency] = 'latest';
498
+ log(` [added] devDependency ${devDependency}`);
499
+ }
500
+
501
+ for (const [scriptName, scriptValue] of Object.entries(SCRIPTS_TO_ADD)) {
502
+ if (scripts[scriptName]) {
503
+ log(` [skipped] script "${scriptName}" (already exists)`);
504
+ continue;
505
+ }
506
+ scripts[scriptName] = scriptValue;
507
+ log(` [added] script "${scriptName}"`);
508
+ }
509
+
510
+ writeFileSyncImpl(
511
+ details.packageJsonPath,
512
+ `${JSON.stringify(packageJson, null, 2)}\n`,
513
+ );
514
+ }
515
+
516
+ /**
517
+ * Creates all scaffold files for the detected router without overwriting.
518
+ */
519
+ function createScaffoldFiles({
520
+ details,
521
+ log,
522
+ existsSyncImpl,
523
+ mkdirSyncImpl,
524
+ writeFileSyncImpl,
525
+ chmodSyncImpl,
526
+ }: {
527
+ details: ProjectDetails;
528
+ log: (...args: any[]) => void;
529
+ existsSyncImpl: typeof existsSync;
530
+ mkdirSyncImpl: typeof mkdirSync;
531
+ writeFileSyncImpl: typeof writeFileSync;
532
+ chmodSyncImpl: typeof chmodSync;
533
+ }): void {
534
+ const appRoutePath = path.join(
535
+ details.cwd,
536
+ details.srcRoot,
537
+ 'app',
538
+ 'api',
539
+ 'dataqueue',
540
+ 'manage',
541
+ '[[...task]]',
542
+ 'route.ts',
543
+ );
544
+ const pagesRoutePath = path.join(
545
+ details.cwd,
546
+ details.srcRoot,
547
+ 'pages',
548
+ 'api',
549
+ 'dataqueue',
550
+ 'manage',
551
+ '[[...task]].ts',
552
+ );
553
+ const queuePath = path.join(
554
+ details.cwd,
555
+ details.srcRoot,
556
+ 'lib',
557
+ 'dataqueue',
558
+ 'queue.ts',
559
+ );
560
+ const cronPath = path.join(details.cwd, 'cron.sh');
561
+
562
+ if (details.router === 'app') {
563
+ createFileIfMissing({
564
+ absolutePath: appRoutePath,
565
+ content: APP_ROUTER_ROUTE_TEMPLATE,
566
+ existsSyncImpl,
567
+ mkdirSyncImpl,
568
+ writeFileSyncImpl,
569
+ log,
570
+ logPath: toRelativePath(details.cwd, appRoutePath),
571
+ });
572
+ log(
573
+ ' [skipped] pages/api/dataqueue/manage/[[...task]].ts (router not selected)',
574
+ );
575
+ } else {
576
+ log(
577
+ ' [skipped] app/api/dataqueue/manage/[[...task]]/route.ts (router not selected)',
578
+ );
579
+ createFileIfMissing({
580
+ absolutePath: pagesRoutePath,
581
+ content: PAGES_ROUTER_ROUTE_TEMPLATE,
582
+ existsSyncImpl,
583
+ mkdirSyncImpl,
584
+ writeFileSyncImpl,
585
+ log,
586
+ logPath: toRelativePath(details.cwd, pagesRoutePath),
587
+ });
588
+ }
589
+
590
+ createFileIfMissing({
591
+ absolutePath: cronPath,
592
+ content: CRON_SH_TEMPLATE,
593
+ existsSyncImpl,
594
+ mkdirSyncImpl,
595
+ writeFileSyncImpl,
596
+ log,
597
+ logPath: 'cron.sh',
598
+ });
599
+ if (existsSyncImpl(cronPath)) {
600
+ chmodSyncImpl(cronPath, 0o755);
601
+ }
602
+
603
+ createFileIfMissing({
604
+ absolutePath: queuePath,
605
+ content: QUEUE_TEMPLATE,
606
+ existsSyncImpl,
607
+ mkdirSyncImpl,
608
+ writeFileSyncImpl,
609
+ log,
610
+ logPath: toRelativePath(details.cwd, queuePath),
611
+ });
612
+ }
613
+
614
+ /**
615
+ * Creates a file only if it does not already exist.
616
+ */
617
+ function createFileIfMissing({
618
+ absolutePath,
619
+ content,
620
+ existsSyncImpl,
621
+ mkdirSyncImpl,
622
+ writeFileSyncImpl,
623
+ log,
624
+ logPath,
625
+ }: {
626
+ absolutePath: string;
627
+ content: string;
628
+ existsSyncImpl: typeof existsSync;
629
+ mkdirSyncImpl: typeof mkdirSync;
630
+ writeFileSyncImpl: typeof writeFileSync;
631
+ log: (...args: any[]) => void;
632
+ logPath: string;
633
+ }): void {
634
+ if (existsSyncImpl(absolutePath)) {
635
+ log(` [skipped] ${logPath} (already exists)`);
636
+ return;
637
+ }
638
+
639
+ mkdirSyncImpl(path.dirname(absolutePath), { recursive: true });
640
+ writeFileSyncImpl(absolutePath, content);
641
+ log(` [created] ${logPath}`);
642
+ }
643
+
644
+ /**
645
+ * Parses package.json content with clear source context.
646
+ */
647
+ function parsePackageJson(content: string, filePath: string): JsonObject {
648
+ try {
649
+ const parsed = JSON.parse(content);
650
+ if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
651
+ throw new Error('package.json must contain an object.');
652
+ }
653
+ return parsed as JsonObject;
654
+ } catch (cause) {
655
+ throw new Error(
656
+ `Failed to parse package.json at ${filePath}: ${
657
+ cause instanceof Error ? cause.message : String(cause)
658
+ }`,
659
+ );
660
+ }
661
+ }
662
+
663
+ /**
664
+ * Returns true when package.json declares Next.js in deps or devDeps.
665
+ */
666
+ function isNextJsProject(packageJson: JsonObject): boolean {
667
+ const dependencies = packageJson.dependencies;
668
+ const devDependencies = packageJson.devDependencies;
669
+
670
+ return (
671
+ hasPackage(dependencies, 'next') || hasPackage(devDependencies, 'next')
672
+ );
673
+ }
674
+
675
+ /**
676
+ * Returns true when a package name exists in a dependency section object.
677
+ */
678
+ function hasPackage(section: unknown, packageName: string): boolean {
679
+ if (!section || typeof section !== 'object' || Array.isArray(section)) {
680
+ return false;
681
+ }
682
+ return Boolean((section as JsonMap)[packageName]);
683
+ }
684
+
685
+ /**
686
+ * Ensures package.json has a string map section and returns it.
687
+ */
688
+ function ensureStringMapSection(
689
+ packageJson: JsonObject,
690
+ sectionName: 'dependencies' | 'devDependencies' | 'scripts',
691
+ ): JsonMap {
692
+ const currentValue = packageJson[sectionName];
693
+ if (
694
+ !currentValue ||
695
+ typeof currentValue !== 'object' ||
696
+ Array.isArray(currentValue)
697
+ ) {
698
+ packageJson[sectionName] = {};
699
+ }
700
+ return packageJson[sectionName] as JsonMap;
701
+ }
702
+
703
+ /**
704
+ * Converts an absolute path to a stable relative path for log output.
705
+ */
706
+ function toRelativePath(cwd: string, absolutePath: string): string {
707
+ const relative = path.relative(cwd, absolutePath);
708
+ return relative || '.';
709
+ }