@nicnocquee/dataqueue 1.32.0 → 1.34.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ai/build-docs-content.ts +96 -0
- package/ai/build-llms-full.ts +42 -0
- package/ai/docs-content.json +278 -0
- package/ai/rules/advanced.md +94 -0
- package/ai/rules/basic.md +90 -0
- package/ai/rules/react-dashboard.md +83 -0
- package/ai/skills/dataqueue-advanced/SKILL.md +211 -0
- package/ai/skills/dataqueue-core/SKILL.md +131 -0
- package/ai/skills/dataqueue-react/SKILL.md +189 -0
- package/dist/cli.cjs +1149 -14
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.d.cts +66 -1
- package/dist/cli.d.ts +66 -1
- package/dist/cli.js +1146 -13
- package/dist/cli.js.map +1 -1
- package/dist/mcp-server.cjs +186 -0
- package/dist/mcp-server.cjs.map +1 -0
- package/dist/mcp-server.d.cts +32 -0
- package/dist/mcp-server.d.ts +32 -0
- package/dist/mcp-server.js +175 -0
- package/dist/mcp-server.js.map +1 -0
- package/package.json +10 -4
- package/src/cli.test.ts +82 -6
- package/src/cli.ts +73 -10
- package/src/init-command.test.ts +449 -0
- package/src/init-command.ts +709 -0
- package/src/install-mcp-command.test.ts +216 -0
- package/src/install-mcp-command.ts +185 -0
- package/src/install-rules-command.test.ts +218 -0
- package/src/install-rules-command.ts +233 -0
- package/src/install-skills-command.test.ts +176 -0
- package/src/install-skills-command.ts +124 -0
- package/src/mcp-server.test.ts +162 -0
- package/src/mcp-server.ts +231 -0
package/dist/cli.js
CHANGED
|
@@ -1,23 +1,1126 @@
|
|
|
1
1
|
import { spawnSync } from 'child_process';
|
|
2
|
-
import
|
|
2
|
+
import path3 from 'path';
|
|
3
3
|
import { fileURLToPath } from 'url';
|
|
4
|
+
import fs, { readFileSync, existsSync, chmodSync, writeFileSync, mkdirSync } from 'fs';
|
|
5
|
+
import readline from 'readline';
|
|
6
|
+
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
|
7
|
+
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
|
8
|
+
import { z } from 'zod';
|
|
4
9
|
|
|
5
10
|
// src/cli.ts
|
|
11
|
+
var DEPENDENCIES_TO_ADD = [
|
|
12
|
+
"@nicnocquee/dataqueue",
|
|
13
|
+
"@nicnocquee/dataqueue-dashboard",
|
|
14
|
+
"@nicnocquee/dataqueue-react"
|
|
15
|
+
];
|
|
16
|
+
var DEV_DEPENDENCIES_TO_ADD = [
|
|
17
|
+
"dotenv-cli",
|
|
18
|
+
"ts-node",
|
|
19
|
+
"node-pg-migrate"
|
|
20
|
+
];
|
|
21
|
+
var SCRIPTS_TO_ADD = {
|
|
22
|
+
cron: "bash cron.sh",
|
|
23
|
+
"migrate-dataqueue": "dotenv -e .env.local -- dataqueue-cli migrate"
|
|
24
|
+
};
|
|
25
|
+
var APP_ROUTER_ROUTE_TEMPLATE = `/**
|
|
26
|
+
* This end point is used to manage the job queue.
|
|
27
|
+
* It supports the following tasks:
|
|
28
|
+
* - reclaim: Reclaim stuck jobs
|
|
29
|
+
* - cleanup: Cleanup old jobs
|
|
30
|
+
* - process: Process jobs
|
|
31
|
+
*
|
|
32
|
+
* Example usage with default values (reclaim stuck jobs for 10 minutes, cleanup old jobs for 30 days, and process jobs with batch size 3, concurrency 2, and verbose true):
|
|
33
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET"
|
|
34
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET"
|
|
35
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET"
|
|
36
|
+
*
|
|
37
|
+
* Example usage with custom values:
|
|
38
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET" -d '{"maxProcessingTimeMinutes": 15}' -H "Content-Type: application/json"
|
|
39
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET" -d '{"daysToKeep": 15}' -H "Content-Type: application/json"
|
|
40
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET" -d '{"batchSize": 5, "concurrency": 3, "verbose": false, "workerId": "custom-worker-id"}' -H "Content-Type: application/json"
|
|
41
|
+
*
|
|
42
|
+
* During development, you can run the following script to run the cron jobs continuously in the background:
|
|
43
|
+
* pnpm cron
|
|
44
|
+
*/
|
|
45
|
+
import { getJobQueue, jobHandlers } from '@/lib/dataqueue/queue';
|
|
46
|
+
import { NextResponse } from 'next/server';
|
|
47
|
+
|
|
48
|
+
export async function POST(
|
|
49
|
+
request: Request,
|
|
50
|
+
{ params }: { params: Promise<{ task: string[] }> },
|
|
51
|
+
) {
|
|
52
|
+
const { task } = await params;
|
|
53
|
+
const authHeader = request.headers.get('authorization');
|
|
54
|
+
if (authHeader !== \`Bearer \${process.env.CRON_SECRET}\`) {
|
|
55
|
+
return NextResponse.json({ message: 'Unauthorized' }, { status: 401 });
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
if (!task || task.length === 0) {
|
|
59
|
+
return NextResponse.json({ message: 'Task is required' }, { status: 400 });
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
const supportedTasks = ['reclaim', 'cleanup', 'process'];
|
|
63
|
+
const theTask = task[0];
|
|
64
|
+
if (!supportedTasks.includes(theTask)) {
|
|
65
|
+
return NextResponse.json(
|
|
66
|
+
{ message: 'Task not supported' },
|
|
67
|
+
{ status: 400 },
|
|
68
|
+
);
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
try {
|
|
72
|
+
const jobQueue = getJobQueue();
|
|
73
|
+
|
|
74
|
+
if (theTask === 'reclaim') {
|
|
75
|
+
let maxProcessingTimeMinutes = 10;
|
|
76
|
+
try {
|
|
77
|
+
const body = await request.json();
|
|
78
|
+
maxProcessingTimeMinutes = body.maxProcessingTimeMinutes || 10;
|
|
79
|
+
} catch {
|
|
80
|
+
// ignore parsing error and use default value
|
|
81
|
+
}
|
|
82
|
+
const reclaimed = await jobQueue.reclaimStuckJobs(
|
|
83
|
+
maxProcessingTimeMinutes,
|
|
84
|
+
);
|
|
85
|
+
console.log(\`Reclaimed \${reclaimed} stuck jobs\`);
|
|
86
|
+
return NextResponse.json({
|
|
87
|
+
message: \`Stuck jobs reclaimed: \${reclaimed} with maxProcessingTimeMinutes: \${maxProcessingTimeMinutes}\`,
|
|
88
|
+
reclaimed,
|
|
89
|
+
});
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
if (theTask === 'cleanup') {
|
|
93
|
+
let daysToKeep = 30;
|
|
94
|
+
try {
|
|
95
|
+
const body = await request.json();
|
|
96
|
+
daysToKeep = body.daysToKeep || 30;
|
|
97
|
+
} catch {
|
|
98
|
+
// ignore parsing error and use default value
|
|
99
|
+
}
|
|
100
|
+
const deleted = await jobQueue.cleanupOldJobs(daysToKeep);
|
|
101
|
+
console.log(\`Deleted \${deleted} old jobs\`);
|
|
102
|
+
return NextResponse.json({
|
|
103
|
+
message: \`Old jobs cleaned up: \${deleted} with daysToKeep: \${daysToKeep}\`,
|
|
104
|
+
deleted,
|
|
105
|
+
});
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
if (theTask === 'process') {
|
|
109
|
+
let batchSize = 3;
|
|
110
|
+
let concurrency = 2;
|
|
111
|
+
let verbose = true;
|
|
112
|
+
let workerId = \`manage-\${theTask}-\${Date.now()}\`;
|
|
113
|
+
try {
|
|
114
|
+
const body = await request.json();
|
|
115
|
+
batchSize = body.batchSize || 3;
|
|
116
|
+
concurrency = body.concurrency || 2;
|
|
117
|
+
verbose = body.verbose || true;
|
|
118
|
+
workerId = body.workerId || \`manage-\${theTask}-\${Date.now()}\`;
|
|
119
|
+
} catch {
|
|
120
|
+
// ignore parsing error and use default value
|
|
121
|
+
}
|
|
122
|
+
const processor = jobQueue.createProcessor(jobHandlers, {
|
|
123
|
+
workerId,
|
|
124
|
+
batchSize,
|
|
125
|
+
concurrency,
|
|
126
|
+
verbose,
|
|
127
|
+
});
|
|
128
|
+
const processed = await processor.start();
|
|
129
|
+
|
|
130
|
+
return NextResponse.json({
|
|
131
|
+
message: \`Jobs processed: \${processed} with workerId: \${workerId}, batchSize: \${batchSize}, concurrency: \${concurrency}, and verbose: \${verbose}\`,
|
|
132
|
+
processed,
|
|
133
|
+
});
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
return NextResponse.json(
|
|
137
|
+
{ message: 'Task not supported' },
|
|
138
|
+
{ status: 400 },
|
|
139
|
+
);
|
|
140
|
+
} catch (error) {
|
|
141
|
+
console.error('Error processing jobs:', error);
|
|
142
|
+
return NextResponse.json(
|
|
143
|
+
{ message: 'Failed to process jobs' },
|
|
144
|
+
{ status: 500 },
|
|
145
|
+
);
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
`;
|
|
149
|
+
var PAGES_ROUTER_ROUTE_TEMPLATE = `/**
|
|
150
|
+
* This end point is used to manage the job queue.
|
|
151
|
+
* It supports the following tasks:
|
|
152
|
+
* - reclaim: Reclaim stuck jobs
|
|
153
|
+
* - cleanup: Cleanup old jobs
|
|
154
|
+
* - process: Process jobs
|
|
155
|
+
*
|
|
156
|
+
* Example usage with default values (reclaim stuck jobs for 10 minutes, cleanup old jobs for 30 days, and process jobs with batch size 3, concurrency 2, and verbose true):
|
|
157
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET"
|
|
158
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET"
|
|
159
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET"
|
|
160
|
+
*
|
|
161
|
+
* Example usage with custom values:
|
|
162
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/reclaim -H "Authorization: Bearer $CRON_SECRET" -d '{"maxProcessingTimeMinutes": 15}' -H "Content-Type: application/json"
|
|
163
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/cleanup -H "Authorization: Bearer $CRON_SECRET" -d '{"daysToKeep": 15}' -H "Content-Type: application/json"
|
|
164
|
+
* curl -X POST http://localhost:3000/api/dataqueue/manage/process -H "Authorization: Bearer $CRON_SECRET" -d '{"batchSize": 5, "concurrency": 3, "verbose": false, "workerId": "custom-worker-id"}' -H "Content-Type: application/json"
|
|
165
|
+
*
|
|
166
|
+
* During development, you can run the following script to run the cron jobs continuously in the background:
|
|
167
|
+
* pnpm cron
|
|
168
|
+
*/
|
|
169
|
+
import type { NextApiRequest, NextApiResponse } from 'next';
|
|
170
|
+
import { getJobQueue, jobHandlers } from '@/lib/dataqueue/queue';
|
|
171
|
+
|
|
172
|
+
type ResponseBody = {
|
|
173
|
+
message: string;
|
|
174
|
+
reclaimed?: number;
|
|
175
|
+
deleted?: number;
|
|
176
|
+
processed?: number;
|
|
177
|
+
};
|
|
178
|
+
|
|
179
|
+
export default async function handler(
|
|
180
|
+
req: NextApiRequest,
|
|
181
|
+
res: NextApiResponse<ResponseBody>,
|
|
182
|
+
) {
|
|
183
|
+
if (req.method !== 'POST') {
|
|
184
|
+
res.setHeader('Allow', 'POST');
|
|
185
|
+
return res.status(405).json({ message: 'Method not allowed' });
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
const authHeader = req.headers.authorization;
|
|
189
|
+
if (authHeader !== \`Bearer \${process.env.CRON_SECRET}\`) {
|
|
190
|
+
return res.status(401).json({ message: 'Unauthorized' });
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
const task = req.query.task;
|
|
194
|
+
const taskArray = Array.isArray(task) ? task : task ? [task] : [];
|
|
195
|
+
if (!taskArray.length) {
|
|
196
|
+
return res.status(400).json({ message: 'Task is required' });
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
const supportedTasks = ['reclaim', 'cleanup', 'process'];
|
|
200
|
+
const theTask = taskArray[0];
|
|
201
|
+
if (!supportedTasks.includes(theTask)) {
|
|
202
|
+
return res.status(400).json({ message: 'Task not supported' });
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
try {
|
|
206
|
+
const jobQueue = getJobQueue();
|
|
207
|
+
const body = typeof req.body === 'object' && req.body ? req.body : {};
|
|
208
|
+
|
|
209
|
+
if (theTask === 'reclaim') {
|
|
210
|
+
const maxProcessingTimeMinutes = body.maxProcessingTimeMinutes || 10;
|
|
211
|
+
const reclaimed = await jobQueue.reclaimStuckJobs(maxProcessingTimeMinutes);
|
|
212
|
+
console.log(\`Reclaimed \${reclaimed} stuck jobs\`);
|
|
213
|
+
return res.status(200).json({
|
|
214
|
+
message: \`Stuck jobs reclaimed: \${reclaimed} with maxProcessingTimeMinutes: \${maxProcessingTimeMinutes}\`,
|
|
215
|
+
reclaimed,
|
|
216
|
+
});
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
if (theTask === 'cleanup') {
|
|
220
|
+
const daysToKeep = body.daysToKeep || 30;
|
|
221
|
+
const deleted = await jobQueue.cleanupOldJobs(daysToKeep);
|
|
222
|
+
console.log(\`Deleted \${deleted} old jobs\`);
|
|
223
|
+
return res.status(200).json({
|
|
224
|
+
message: \`Old jobs cleaned up: \${deleted} with daysToKeep: \${daysToKeep}\`,
|
|
225
|
+
deleted,
|
|
226
|
+
});
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
const batchSize = body.batchSize || 3;
|
|
230
|
+
const concurrency = body.concurrency || 2;
|
|
231
|
+
const verbose = body.verbose || true;
|
|
232
|
+
const workerId = body.workerId || \`manage-\${theTask}-\${Date.now()}\`;
|
|
233
|
+
const processor = jobQueue.createProcessor(jobHandlers, {
|
|
234
|
+
workerId,
|
|
235
|
+
batchSize,
|
|
236
|
+
concurrency,
|
|
237
|
+
verbose,
|
|
238
|
+
});
|
|
239
|
+
const processed = await processor.start();
|
|
240
|
+
|
|
241
|
+
return res.status(200).json({
|
|
242
|
+
message: \`Jobs processed: \${processed} with workerId: \${workerId}, batchSize: \${batchSize}, concurrency: \${concurrency}, and verbose: \${verbose}\`,
|
|
243
|
+
processed,
|
|
244
|
+
});
|
|
245
|
+
} catch (error) {
|
|
246
|
+
console.error('Error processing jobs:', error);
|
|
247
|
+
return res.status(500).json({ message: 'Failed to process jobs' });
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
`;
|
|
251
|
+
var CRON_SH_TEMPLATE = `#!/bin/bash
|
|
252
|
+
|
|
253
|
+
# This script is used to run the cron jobs for the demo app during development.
|
|
254
|
+
# Run it with \`pnpm cron\` from the apps/demo directory.
|
|
255
|
+
|
|
256
|
+
set -a
|
|
257
|
+
source "$(dirname "$0")/.env.local"
|
|
258
|
+
set +a
|
|
259
|
+
|
|
260
|
+
if [ -z "$CRON_SECRET" ]; then
|
|
261
|
+
echo "Error: CRON_SECRET environment variable is not set in .env.local"
|
|
262
|
+
exit 1
|
|
263
|
+
fi
|
|
264
|
+
|
|
265
|
+
cleanup() {
|
|
266
|
+
kill 0
|
|
267
|
+
wait
|
|
268
|
+
}
|
|
269
|
+
trap cleanup SIGINT SIGTERM
|
|
270
|
+
|
|
271
|
+
while true; do
|
|
272
|
+
echo "Processing jobs..."
|
|
273
|
+
curl http://localhost:3000/api/dataqueue/manage/process -X POST -H "Authorization: Bearer $CRON_SECRET"
|
|
274
|
+
echo ""
|
|
275
|
+
sleep 10 # Process jobs every 10 seconds
|
|
276
|
+
done &
|
|
277
|
+
|
|
278
|
+
while true; do
|
|
279
|
+
echo "Reclaiming stuck jobs..."
|
|
280
|
+
curl http://localhost:3000/api/dataqueue/manage/reclaim -X POST -H "Authorization: Bearer $CRON_SECRET"
|
|
281
|
+
echo ""
|
|
282
|
+
sleep 20 # Reclaim stuck jobs every 20 seconds
|
|
283
|
+
done &
|
|
284
|
+
|
|
285
|
+
while true; do
|
|
286
|
+
echo "Cleaning up old jobs..."
|
|
287
|
+
curl http://localhost:3000/api/dataqueue/manage/cleanup -X POST -H "Authorization: Bearer $CRON_SECRET"
|
|
288
|
+
echo ""
|
|
289
|
+
sleep 30 # Cleanup old jobs every 30 seconds
|
|
290
|
+
done &
|
|
291
|
+
|
|
292
|
+
wait
|
|
293
|
+
`;
|
|
294
|
+
var QUEUE_TEMPLATE = `import { initJobQueue, JobHandlers } from '@nicnocquee/dataqueue';
|
|
295
|
+
|
|
296
|
+
export type JobPayloadMap = {
|
|
297
|
+
send_email: {
|
|
298
|
+
to: string;
|
|
299
|
+
subject: string;
|
|
300
|
+
body: string;
|
|
301
|
+
};
|
|
302
|
+
};
|
|
303
|
+
|
|
304
|
+
let jobQueue: ReturnType<typeof initJobQueue<JobPayloadMap>> | null = null;
|
|
305
|
+
|
|
306
|
+
export const getJobQueue = () => {
|
|
307
|
+
if (!jobQueue) {
|
|
308
|
+
jobQueue = initJobQueue<JobPayloadMap>({
|
|
309
|
+
databaseConfig: {
|
|
310
|
+
connectionString: process.env.PG_DATAQUEUE_DATABASE,
|
|
311
|
+
},
|
|
312
|
+
verbose: process.env.NODE_ENV === 'development',
|
|
313
|
+
});
|
|
314
|
+
}
|
|
315
|
+
return jobQueue;
|
|
316
|
+
};
|
|
317
|
+
|
|
318
|
+
export const jobHandlers: JobHandlers<JobPayloadMap> = {
|
|
319
|
+
send_email: async (payload) => {
|
|
320
|
+
const { to, subject, body } = payload;
|
|
321
|
+
console.log('send_email placeholder:', { to, subject, body });
|
|
322
|
+
},
|
|
323
|
+
};
|
|
324
|
+
`;
|
|
325
|
+
function runInit({
|
|
326
|
+
log = console.log,
|
|
327
|
+
error = console.error,
|
|
328
|
+
exit = (code) => process.exit(code),
|
|
329
|
+
cwd = process.cwd(),
|
|
330
|
+
readFileSyncImpl = readFileSync,
|
|
331
|
+
writeFileSyncImpl = writeFileSync,
|
|
332
|
+
existsSyncImpl = existsSync,
|
|
333
|
+
mkdirSyncImpl = mkdirSync,
|
|
334
|
+
chmodSyncImpl = chmodSync
|
|
335
|
+
} = {}) {
|
|
336
|
+
try {
|
|
337
|
+
log(`dataqueue: Initializing in ${cwd}...`);
|
|
338
|
+
log("");
|
|
339
|
+
const details = detectNextJsAndRouter({
|
|
340
|
+
cwd,
|
|
341
|
+
existsSyncImpl,
|
|
342
|
+
readFileSyncImpl
|
|
343
|
+
});
|
|
344
|
+
createScaffoldFiles({
|
|
345
|
+
details,
|
|
346
|
+
log,
|
|
347
|
+
existsSyncImpl,
|
|
348
|
+
mkdirSyncImpl,
|
|
349
|
+
writeFileSyncImpl,
|
|
350
|
+
chmodSyncImpl
|
|
351
|
+
});
|
|
352
|
+
updatePackageJson({
|
|
353
|
+
details,
|
|
354
|
+
log,
|
|
355
|
+
writeFileSyncImpl
|
|
356
|
+
});
|
|
357
|
+
log("");
|
|
358
|
+
log(
|
|
359
|
+
"Done! Run your package manager's install command to install new dependencies."
|
|
360
|
+
);
|
|
361
|
+
exit(0);
|
|
362
|
+
} catch (cause) {
|
|
363
|
+
const message = cause instanceof Error ? cause.message : String(cause);
|
|
364
|
+
error(`dataqueue: ${message}`);
|
|
365
|
+
exit(1);
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
function detectNextJsAndRouter({
|
|
369
|
+
cwd,
|
|
370
|
+
existsSyncImpl,
|
|
371
|
+
readFileSyncImpl
|
|
372
|
+
}) {
|
|
373
|
+
const packageJsonPath = path3.join(cwd, "package.json");
|
|
374
|
+
if (!existsSyncImpl(packageJsonPath)) {
|
|
375
|
+
throw new Error("package.json not found in current directory.");
|
|
376
|
+
}
|
|
377
|
+
const packageJson = parsePackageJson(
|
|
378
|
+
readFileSyncImpl(packageJsonPath, "utf8"),
|
|
379
|
+
packageJsonPath
|
|
380
|
+
);
|
|
381
|
+
if (!isNextJsProject(packageJson)) {
|
|
382
|
+
throw new Error(
|
|
383
|
+
"Not a Next.js project. Could not find 'next' in package.json dependencies."
|
|
384
|
+
);
|
|
385
|
+
}
|
|
386
|
+
const srcDir = path3.join(cwd, "src");
|
|
387
|
+
const srcRoot = existsSyncImpl(srcDir) ? "src" : ".";
|
|
388
|
+
const appDir = path3.join(cwd, srcRoot, "app");
|
|
389
|
+
const pagesDir = path3.join(cwd, srcRoot, "pages");
|
|
390
|
+
const hasAppDir = existsSyncImpl(appDir);
|
|
391
|
+
const hasPagesDir = existsSyncImpl(pagesDir);
|
|
392
|
+
if (!hasAppDir && !hasPagesDir) {
|
|
393
|
+
throw new Error(
|
|
394
|
+
"Could not detect Next.js router. Expected either app/ or pages/ directory."
|
|
395
|
+
);
|
|
396
|
+
}
|
|
397
|
+
const router = hasAppDir ? "app" : "pages";
|
|
398
|
+
return { cwd, packageJsonPath, packageJson, srcRoot, router };
|
|
399
|
+
}
|
|
400
|
+
function updatePackageJson({
|
|
401
|
+
details,
|
|
402
|
+
log,
|
|
403
|
+
writeFileSyncImpl
|
|
404
|
+
}) {
|
|
405
|
+
const packageJson = details.packageJson;
|
|
406
|
+
const dependencies = ensureStringMapSection(packageJson, "dependencies");
|
|
407
|
+
const devDependencies = ensureStringMapSection(
|
|
408
|
+
packageJson,
|
|
409
|
+
"devDependencies"
|
|
410
|
+
);
|
|
411
|
+
const scripts = ensureStringMapSection(packageJson, "scripts");
|
|
412
|
+
for (const dependency of DEPENDENCIES_TO_ADD) {
|
|
413
|
+
if (dependencies[dependency]) {
|
|
414
|
+
log(` [skipped] dependency ${dependency} (already exists)`);
|
|
415
|
+
continue;
|
|
416
|
+
}
|
|
417
|
+
dependencies[dependency] = "latest";
|
|
418
|
+
log(` [added] dependency ${dependency}`);
|
|
419
|
+
}
|
|
420
|
+
for (const devDependency of DEV_DEPENDENCIES_TO_ADD) {
|
|
421
|
+
if (devDependencies[devDependency]) {
|
|
422
|
+
log(` [skipped] devDependency ${devDependency} (already exists)`);
|
|
423
|
+
continue;
|
|
424
|
+
}
|
|
425
|
+
devDependencies[devDependency] = "latest";
|
|
426
|
+
log(` [added] devDependency ${devDependency}`);
|
|
427
|
+
}
|
|
428
|
+
for (const [scriptName, scriptValue] of Object.entries(SCRIPTS_TO_ADD)) {
|
|
429
|
+
if (scripts[scriptName]) {
|
|
430
|
+
log(` [skipped] script "${scriptName}" (already exists)`);
|
|
431
|
+
continue;
|
|
432
|
+
}
|
|
433
|
+
scripts[scriptName] = scriptValue;
|
|
434
|
+
log(` [added] script "${scriptName}"`);
|
|
435
|
+
}
|
|
436
|
+
writeFileSyncImpl(
|
|
437
|
+
details.packageJsonPath,
|
|
438
|
+
`${JSON.stringify(packageJson, null, 2)}
|
|
439
|
+
`
|
|
440
|
+
);
|
|
441
|
+
}
|
|
442
|
+
function createScaffoldFiles({
|
|
443
|
+
details,
|
|
444
|
+
log,
|
|
445
|
+
existsSyncImpl,
|
|
446
|
+
mkdirSyncImpl,
|
|
447
|
+
writeFileSyncImpl,
|
|
448
|
+
chmodSyncImpl
|
|
449
|
+
}) {
|
|
450
|
+
const appRoutePath = path3.join(
|
|
451
|
+
details.cwd,
|
|
452
|
+
details.srcRoot,
|
|
453
|
+
"app",
|
|
454
|
+
"api",
|
|
455
|
+
"dataqueue",
|
|
456
|
+
"manage",
|
|
457
|
+
"[[...task]]",
|
|
458
|
+
"route.ts"
|
|
459
|
+
);
|
|
460
|
+
const pagesRoutePath = path3.join(
|
|
461
|
+
details.cwd,
|
|
462
|
+
details.srcRoot,
|
|
463
|
+
"pages",
|
|
464
|
+
"api",
|
|
465
|
+
"dataqueue",
|
|
466
|
+
"manage",
|
|
467
|
+
"[[...task]].ts"
|
|
468
|
+
);
|
|
469
|
+
const queuePath = path3.join(
|
|
470
|
+
details.cwd,
|
|
471
|
+
details.srcRoot,
|
|
472
|
+
"lib",
|
|
473
|
+
"dataqueue",
|
|
474
|
+
"queue.ts"
|
|
475
|
+
);
|
|
476
|
+
const cronPath = path3.join(details.cwd, "cron.sh");
|
|
477
|
+
if (details.router === "app") {
|
|
478
|
+
createFileIfMissing({
|
|
479
|
+
absolutePath: appRoutePath,
|
|
480
|
+
content: APP_ROUTER_ROUTE_TEMPLATE,
|
|
481
|
+
existsSyncImpl,
|
|
482
|
+
mkdirSyncImpl,
|
|
483
|
+
writeFileSyncImpl,
|
|
484
|
+
log,
|
|
485
|
+
logPath: toRelativePath(details.cwd, appRoutePath)
|
|
486
|
+
});
|
|
487
|
+
log(
|
|
488
|
+
" [skipped] pages/api/dataqueue/manage/[[...task]].ts (router not selected)"
|
|
489
|
+
);
|
|
490
|
+
} else {
|
|
491
|
+
log(
|
|
492
|
+
" [skipped] app/api/dataqueue/manage/[[...task]]/route.ts (router not selected)"
|
|
493
|
+
);
|
|
494
|
+
createFileIfMissing({
|
|
495
|
+
absolutePath: pagesRoutePath,
|
|
496
|
+
content: PAGES_ROUTER_ROUTE_TEMPLATE,
|
|
497
|
+
existsSyncImpl,
|
|
498
|
+
mkdirSyncImpl,
|
|
499
|
+
writeFileSyncImpl,
|
|
500
|
+
log,
|
|
501
|
+
logPath: toRelativePath(details.cwd, pagesRoutePath)
|
|
502
|
+
});
|
|
503
|
+
}
|
|
504
|
+
createFileIfMissing({
|
|
505
|
+
absolutePath: cronPath,
|
|
506
|
+
content: CRON_SH_TEMPLATE,
|
|
507
|
+
existsSyncImpl,
|
|
508
|
+
mkdirSyncImpl,
|
|
509
|
+
writeFileSyncImpl,
|
|
510
|
+
log,
|
|
511
|
+
logPath: "cron.sh"
|
|
512
|
+
});
|
|
513
|
+
if (existsSyncImpl(cronPath)) {
|
|
514
|
+
chmodSyncImpl(cronPath, 493);
|
|
515
|
+
}
|
|
516
|
+
createFileIfMissing({
|
|
517
|
+
absolutePath: queuePath,
|
|
518
|
+
content: QUEUE_TEMPLATE,
|
|
519
|
+
existsSyncImpl,
|
|
520
|
+
mkdirSyncImpl,
|
|
521
|
+
writeFileSyncImpl,
|
|
522
|
+
log,
|
|
523
|
+
logPath: toRelativePath(details.cwd, queuePath)
|
|
524
|
+
});
|
|
525
|
+
}
|
|
526
|
+
function createFileIfMissing({
|
|
527
|
+
absolutePath,
|
|
528
|
+
content,
|
|
529
|
+
existsSyncImpl,
|
|
530
|
+
mkdirSyncImpl,
|
|
531
|
+
writeFileSyncImpl,
|
|
532
|
+
log,
|
|
533
|
+
logPath
|
|
534
|
+
}) {
|
|
535
|
+
if (existsSyncImpl(absolutePath)) {
|
|
536
|
+
log(` [skipped] ${logPath} (already exists)`);
|
|
537
|
+
return;
|
|
538
|
+
}
|
|
539
|
+
mkdirSyncImpl(path3.dirname(absolutePath), { recursive: true });
|
|
540
|
+
writeFileSyncImpl(absolutePath, content);
|
|
541
|
+
log(` [created] ${logPath}`);
|
|
542
|
+
}
|
|
543
|
+
function parsePackageJson(content, filePath) {
|
|
544
|
+
try {
|
|
545
|
+
const parsed = JSON.parse(content);
|
|
546
|
+
if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
|
|
547
|
+
throw new Error("package.json must contain an object.");
|
|
548
|
+
}
|
|
549
|
+
return parsed;
|
|
550
|
+
} catch (cause) {
|
|
551
|
+
throw new Error(
|
|
552
|
+
`Failed to parse package.json at ${filePath}: ${cause instanceof Error ? cause.message : String(cause)}`
|
|
553
|
+
);
|
|
554
|
+
}
|
|
555
|
+
}
|
|
556
|
+
function isNextJsProject(packageJson) {
|
|
557
|
+
const dependencies = packageJson.dependencies;
|
|
558
|
+
const devDependencies = packageJson.devDependencies;
|
|
559
|
+
return hasPackage(dependencies, "next") || hasPackage(devDependencies, "next");
|
|
560
|
+
}
|
|
561
|
+
function hasPackage(section, packageName) {
|
|
562
|
+
if (!section || typeof section !== "object" || Array.isArray(section)) {
|
|
563
|
+
return false;
|
|
564
|
+
}
|
|
565
|
+
return Boolean(section[packageName]);
|
|
566
|
+
}
|
|
567
|
+
function ensureStringMapSection(packageJson, sectionName) {
|
|
568
|
+
const currentValue = packageJson[sectionName];
|
|
569
|
+
if (!currentValue || typeof currentValue !== "object" || Array.isArray(currentValue)) {
|
|
570
|
+
packageJson[sectionName] = {};
|
|
571
|
+
}
|
|
572
|
+
return packageJson[sectionName];
|
|
573
|
+
}
|
|
574
|
+
function toRelativePath(cwd, absolutePath) {
|
|
575
|
+
const relative = path3.relative(cwd, absolutePath);
|
|
576
|
+
return relative || ".";
|
|
577
|
+
}
|
|
6
578
|
var __filename = fileURLToPath(import.meta.url);
|
|
7
|
-
var __dirname =
|
|
579
|
+
var __dirname = path3.dirname(__filename);
|
|
580
|
+
var SKILL_DIRS = ["dataqueue-core", "dataqueue-advanced", "dataqueue-react"];
|
|
581
|
+
function detectAiTools(cwd, existsSync2 = fs.existsSync) {
|
|
582
|
+
const tools = [];
|
|
583
|
+
const checks = [
|
|
584
|
+
{
|
|
585
|
+
name: "Cursor",
|
|
586
|
+
indicator: ".cursor",
|
|
587
|
+
targetDir: ".cursor/skills"
|
|
588
|
+
},
|
|
589
|
+
{
|
|
590
|
+
name: "Claude Code",
|
|
591
|
+
indicator: ".claude",
|
|
592
|
+
targetDir: ".claude/skills"
|
|
593
|
+
},
|
|
594
|
+
{
|
|
595
|
+
name: "GitHub Copilot",
|
|
596
|
+
indicator: ".github",
|
|
597
|
+
targetDir: ".github/skills"
|
|
598
|
+
}
|
|
599
|
+
];
|
|
600
|
+
for (const check of checks) {
|
|
601
|
+
if (existsSync2(path3.join(cwd, check.indicator))) {
|
|
602
|
+
tools.push({ name: check.name, targetDir: check.targetDir });
|
|
603
|
+
}
|
|
604
|
+
}
|
|
605
|
+
return tools;
|
|
606
|
+
}
|
|
607
|
+
function runInstallSkills({
|
|
608
|
+
log = console.log,
|
|
609
|
+
error = console.error,
|
|
610
|
+
exit = (code) => process.exit(code),
|
|
611
|
+
cwd = process.cwd(),
|
|
612
|
+
existsSync: existsSync2 = fs.existsSync,
|
|
613
|
+
mkdirSync: mkdirSync2 = fs.mkdirSync,
|
|
614
|
+
copyFileSync = fs.copyFileSync,
|
|
615
|
+
readdirSync = fs.readdirSync,
|
|
616
|
+
skillsSourceDir = path3.join(__dirname, "../ai/skills")
|
|
617
|
+
} = {}) {
|
|
618
|
+
const tools = detectAiTools(cwd, existsSync2);
|
|
619
|
+
if (tools.length === 0) {
|
|
620
|
+
log("No AI tool directories detected (.cursor/, .claude/, .github/).");
|
|
621
|
+
log("Creating .cursor/skills/ as the default target.");
|
|
622
|
+
tools.push({ name: "Cursor", targetDir: ".cursor/skills" });
|
|
623
|
+
}
|
|
624
|
+
let installed = 0;
|
|
625
|
+
for (const tool of tools) {
|
|
626
|
+
log(`
|
|
627
|
+
Installing skills for ${tool.name}...`);
|
|
628
|
+
for (const skillDir of SKILL_DIRS) {
|
|
629
|
+
const srcDir = path3.join(skillsSourceDir, skillDir);
|
|
630
|
+
const destDir = path3.join(cwd, tool.targetDir, skillDir);
|
|
631
|
+
try {
|
|
632
|
+
mkdirSync2(destDir, { recursive: true });
|
|
633
|
+
const files = readdirSync(srcDir);
|
|
634
|
+
for (const file of files) {
|
|
635
|
+
copyFileSync(path3.join(srcDir, file), path3.join(destDir, file));
|
|
636
|
+
}
|
|
637
|
+
log(` \u2713 ${skillDir}`);
|
|
638
|
+
installed++;
|
|
639
|
+
} catch (err) {
|
|
640
|
+
error(` \u2717 Failed to install ${skillDir}:`, err);
|
|
641
|
+
}
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
if (installed > 0) {
|
|
645
|
+
log(
|
|
646
|
+
`
|
|
647
|
+
Done! Installed ${installed} skill(s) for ${tools.map((t) => t.name).join(", ")}.`
|
|
648
|
+
);
|
|
649
|
+
} else {
|
|
650
|
+
error("No skills were installed.");
|
|
651
|
+
exit(1);
|
|
652
|
+
}
|
|
653
|
+
}
|
|
654
|
+
var __filename2 = fileURLToPath(import.meta.url);
|
|
655
|
+
var __dirname2 = path3.dirname(__filename2);
|
|
656
|
+
var RULE_FILES = ["basic.md", "advanced.md", "react-dashboard.md"];
|
|
657
|
+
var MARKER_START = "<!-- DATAQUEUE RULES START -->";
|
|
658
|
+
var MARKER_END = "<!-- DATAQUEUE RULES END -->";
|
|
659
|
+
function upsertMarkedSection(filePath, content, deps) {
|
|
660
|
+
const block = `${MARKER_START}
|
|
661
|
+
${content}
|
|
662
|
+
${MARKER_END}`;
|
|
663
|
+
if (!deps.existsSync(filePath)) {
|
|
664
|
+
deps.writeFileSync(filePath, block + "\n");
|
|
665
|
+
return;
|
|
666
|
+
}
|
|
667
|
+
const existing = deps.readFileSync(filePath, "utf-8");
|
|
668
|
+
const startIdx = existing.indexOf(MARKER_START);
|
|
669
|
+
const endIdx = existing.indexOf(MARKER_END);
|
|
670
|
+
if (startIdx !== -1 && endIdx !== -1) {
|
|
671
|
+
const before = existing.slice(0, startIdx);
|
|
672
|
+
const after = existing.slice(endIdx + MARKER_END.length);
|
|
673
|
+
deps.writeFileSync(filePath, before + block + after);
|
|
674
|
+
} else {
|
|
675
|
+
deps.writeFileSync(filePath, existing.trimEnd() + "\n\n" + block + "\n");
|
|
676
|
+
}
|
|
677
|
+
}
|
|
678
|
+
function getAllRulesContent(rulesSourceDir, readFileSync2) {
|
|
679
|
+
return RULE_FILES.map(
|
|
680
|
+
(f) => readFileSync2(path3.join(rulesSourceDir, f), "utf-8")
|
|
681
|
+
).join("\n\n");
|
|
682
|
+
}
|
|
683
|
+
var CLIENTS = {
|
|
684
|
+
"1": {
|
|
685
|
+
label: "Cursor",
|
|
686
|
+
install: (deps) => {
|
|
687
|
+
const rulesDir = path3.join(deps.cwd, ".cursor", "rules");
|
|
688
|
+
deps.mkdirSync(rulesDir, { recursive: true });
|
|
689
|
+
for (const file of RULE_FILES) {
|
|
690
|
+
const src = deps.readFileSync(
|
|
691
|
+
path3.join(deps.rulesSourceDir, file),
|
|
692
|
+
"utf-8"
|
|
693
|
+
);
|
|
694
|
+
const destName = `dataqueue-${file.replace(/\.md$/, ".mdc")}`;
|
|
695
|
+
deps.writeFileSync(path3.join(rulesDir, destName), src);
|
|
696
|
+
deps.log(` \u2713 .cursor/rules/${destName}`);
|
|
697
|
+
}
|
|
698
|
+
}
|
|
699
|
+
},
|
|
700
|
+
"2": {
|
|
701
|
+
label: "Claude Code",
|
|
702
|
+
install: (deps) => {
|
|
703
|
+
const content = getAllRulesContent(
|
|
704
|
+
deps.rulesSourceDir,
|
|
705
|
+
deps.readFileSync
|
|
706
|
+
);
|
|
707
|
+
const filePath = path3.join(deps.cwd, "CLAUDE.md");
|
|
708
|
+
upsertMarkedSection(filePath, content, deps);
|
|
709
|
+
deps.log(` \u2713 CLAUDE.md`);
|
|
710
|
+
}
|
|
711
|
+
},
|
|
712
|
+
"3": {
|
|
713
|
+
label: "AGENTS.md (Codex, Jules, OpenCode)",
|
|
714
|
+
install: (deps) => {
|
|
715
|
+
const content = getAllRulesContent(
|
|
716
|
+
deps.rulesSourceDir,
|
|
717
|
+
deps.readFileSync
|
|
718
|
+
);
|
|
719
|
+
const filePath = path3.join(deps.cwd, "AGENTS.md");
|
|
720
|
+
upsertMarkedSection(filePath, content, deps);
|
|
721
|
+
deps.log(` \u2713 AGENTS.md`);
|
|
722
|
+
}
|
|
723
|
+
},
|
|
724
|
+
"4": {
|
|
725
|
+
label: "GitHub Copilot",
|
|
726
|
+
install: (deps) => {
|
|
727
|
+
const content = getAllRulesContent(
|
|
728
|
+
deps.rulesSourceDir,
|
|
729
|
+
deps.readFileSync
|
|
730
|
+
);
|
|
731
|
+
deps.mkdirSync(path3.join(deps.cwd, ".github"), { recursive: true });
|
|
732
|
+
const filePath = path3.join(
|
|
733
|
+
deps.cwd,
|
|
734
|
+
".github",
|
|
735
|
+
"copilot-instructions.md"
|
|
736
|
+
);
|
|
737
|
+
upsertMarkedSection(filePath, content, deps);
|
|
738
|
+
deps.log(` \u2713 .github/copilot-instructions.md`);
|
|
739
|
+
}
|
|
740
|
+
},
|
|
741
|
+
"5": {
|
|
742
|
+
label: "Windsurf",
|
|
743
|
+
install: (deps) => {
|
|
744
|
+
const content = getAllRulesContent(
|
|
745
|
+
deps.rulesSourceDir,
|
|
746
|
+
deps.readFileSync
|
|
747
|
+
);
|
|
748
|
+
const filePath = path3.join(deps.cwd, "CONVENTIONS.md");
|
|
749
|
+
upsertMarkedSection(filePath, content, deps);
|
|
750
|
+
deps.log(` \u2713 CONVENTIONS.md`);
|
|
751
|
+
}
|
|
752
|
+
}
|
|
753
|
+
};
|
|
754
|
+
/**
 * Interactive `install-rules` command: prints a numbered menu of supported
 * AI clients, reads a choice from stdin (or uses `selectedClient` when
 * provided, for non-interactive use/tests), then delegates to the matching
 * CLIENTS[...] installer.
 *
 * All side effects (fs access, logging, process exit) are injectable so
 * tests can stub them. Exits with code 1 on an invalid choice or when the
 * selected installer throws.
 */
async function runInstallRules({
  log = console.log,
  error = console.error,
  exit = (code) => process.exit(code),
  cwd = process.cwd(),
  readFileSync: readFileSync2 = fs.readFileSync,
  writeFileSync: writeFileSync2 = fs.writeFileSync,
  appendFileSync = fs.appendFileSync,
  mkdirSync: mkdirSync2 = fs.mkdirSync,
  existsSync: existsSync2 = fs.existsSync,
  // Bundled rules directory shipped with the package; __dirname2 is this
  // module's ESM __dirname shim (defined earlier in the bundle).
  rulesSourceDir = path3.join(__dirname2, "../ai/rules"),
  selectedClient
} = {}) {
  log("DataQueue Agent Rules Installer\n");
  log("Select your AI client:\n");
  // Menu entries come straight from the CLIENTS registry keys ("1".."5").
  for (const [key, client2] of Object.entries(CLIENTS)) {
    log(` ${key}) ${client2.label}`);
  }
  log("");
  let choice = selectedClient;
  if (!choice) {
    // No preselected client: prompt once on stdin/stdout.
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout
    });
    choice = await new Promise((resolve) => {
      rl.question("Enter choice (1-5): ", (answer) => {
        rl.close();
        resolve(answer.trim());
      });
    });
  }
  const client = CLIENTS[choice];
  if (!client) {
    error(`Invalid choice: "${choice}". Expected 1-5.`);
    exit(1);
    // `exit` may be a stub that does not throw; bail out explicitly.
    return;
  }
  log(`
Installing rules for ${client.label}...`);
  try {
    // Hand the injected side-effect functions down to the installer so the
    // whole flow stays testable end-to-end.
    client.install({
      cwd,
      readFileSync: readFileSync2,
      writeFileSync: writeFileSync2,
      appendFileSync,
      mkdirSync: mkdirSync2,
      existsSync: existsSync2,
      log,
      rulesSourceDir
    });
    log("\nDone!");
  } catch (err) {
    error("Failed to install rules:", err);
    exit(1);
  }
}
|
|
811
|
+
function upsertMcpConfig(filePath, serverKey, serverConfig, deps) {
|
|
812
|
+
let config = {};
|
|
813
|
+
if (deps.existsSync(filePath)) {
|
|
814
|
+
try {
|
|
815
|
+
config = JSON.parse(deps.readFileSync(filePath, "utf-8"));
|
|
816
|
+
} catch {
|
|
817
|
+
config = {};
|
|
818
|
+
}
|
|
819
|
+
}
|
|
820
|
+
if (!config.mcpServers || typeof config.mcpServers !== "object") {
|
|
821
|
+
config.mcpServers = {};
|
|
822
|
+
}
|
|
823
|
+
config.mcpServers[serverKey] = serverConfig;
|
|
824
|
+
deps.writeFileSync(filePath, JSON.stringify(config, null, 2) + "\n");
|
|
825
|
+
}
|
|
826
|
+
// Launch spec written into every client's MCP config: each client starts
// the same stdio docs server via `npx dataqueue-cli mcp`.
var MCP_SERVER_CONFIG = {
  command: "npx",
  args: ["dataqueue-cli", "mcp"]
};
|
|
830
|
+
// Per-client MCP installers, keyed by the menu number shown by
// runInstallMcp. Each installer merges the "dataqueue" entry into that
// client's config file location via upsertMcpConfig.
var MCP_CLIENTS = {
  "1": {
    label: "Cursor",
    install: (deps) => {
      // Project-local config: <cwd>/.cursor/mcp.json
      const configDir = path3.join(deps.cwd, ".cursor");
      deps.mkdirSync(configDir, { recursive: true });
      const configFile = path3.join(configDir, "mcp.json");
      upsertMcpConfig(configFile, "dataqueue", MCP_SERVER_CONFIG, deps);
      deps.log(` \u2713 .cursor/mcp.json`);
    }
  },
  "2": {
    label: "Claude Code",
    install: (deps) => {
      // Project-local config: <cwd>/.mcp.json
      const configFile = path3.join(deps.cwd, ".mcp.json");
      upsertMcpConfig(configFile, "dataqueue", MCP_SERVER_CONFIG, deps);
      deps.log(` \u2713 .mcp.json`);
    }
  },
  "3": {
    label: "VS Code (Copilot)",
    install: (deps) => {
      // Project-local config: <cwd>/.vscode/mcp.json
      const configDir = path3.join(deps.cwd, ".vscode");
      deps.mkdirSync(configDir, { recursive: true });
      const configFile = path3.join(configDir, "mcp.json");
      upsertMcpConfig(configFile, "dataqueue", MCP_SERVER_CONFIG, deps);
      deps.log(` \u2713 .vscode/mcp.json`);
    }
  },
  "4": {
    label: "Windsurf",
    install: (deps) => {
      // User-level config under the home directory; HOME covers POSIX,
      // USERPROFILE covers Windows. Falls back to "" (relative path) if
      // neither is set.
      const homeDir = process.env.HOME || process.env.USERPROFILE || "";
      const configFile = path3.join(
        homeDir,
        ".codeium",
        "windsurf",
        "mcp_config.json"
      );
      deps.mkdirSync(path3.dirname(configFile), { recursive: true });
      upsertMcpConfig(configFile, "dataqueue", MCP_SERVER_CONFIG, deps);
      deps.log(` \u2713 ~/.codeium/windsurf/mcp_config.json`);
    }
  }
};
|
|
875
|
+
/**
 * Interactive `install-mcp` command: shows a numbered menu of supported AI
 * clients, reads a choice from stdin (or uses `selectedClient` when given,
 * for non-interactive use/tests), then runs the matching MCP_CLIENTS[...]
 * installer. Side effects (fs, logging, exit) are injectable for tests.
 * Exits with code 1 on an invalid choice or an installer failure.
 */
async function runInstallMcp({
  log = console.log,
  error = console.error,
  exit = (code) => process.exit(code),
  cwd = process.cwd(),
  readFileSync: readFileSync2 = fs.readFileSync,
  writeFileSync: writeFileSync2 = fs.writeFileSync,
  mkdirSync: mkdirSync2 = fs.mkdirSync,
  existsSync: existsSync2 = fs.existsSync,
  selectedClient
} = {}) {
  log("DataQueue MCP Server Installer\n");
  log("Select your AI client:\n");
  // Menu entries come straight from the MCP_CLIENTS registry keys ("1".."4").
  Object.entries(MCP_CLIENTS).forEach(([id, entry]) => {
    log(` ${id}) ${entry.label}`);
  });
  log("");
  // One-shot stdin prompt, used only when no client was preselected.
  const promptForChoice = () => new Promise((resolve) => {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout
    });
    rl.question("Enter choice (1-4): ", (answer) => {
      rl.close();
      resolve(answer.trim());
    });
  });
  const choice = selectedClient || await promptForChoice();
  const client = MCP_CLIENTS[choice];
  if (!client) {
    error(`Invalid choice: "${choice}". Expected 1-4.`);
    exit(1);
    // `exit` may be a stub that does not throw; bail out explicitly.
    return;
  }
  log(`\nInstalling MCP config for ${client.label}...`);
  try {
    // Pass the injected side-effect functions through to the installer.
    client.install({
      cwd,
      readFileSync: readFileSync2,
      writeFileSync: writeFileSync2,
      mkdirSync: mkdirSync2,
      existsSync: existsSync2,
      log
    });
    log("\nDone! The MCP server will run via: npx dataqueue-cli mcp");
  } catch (err) {
    error("Failed to install MCP config:", err);
    exit(1);
  }
}
|
|
928
|
+
// ESM has no __filename/__dirname; derive them from import.meta.url so the
// bundled asset paths below (../ai/...) resolve relative to this file.
var __filename3 = fileURLToPath(import.meta.url);
var __dirname3 = path3.dirname(__filename3);
|
|
930
|
+
/**
 * Load the packaged docs index (ai/docs-content.json) and return its parsed
 * pages. `docsPath` defaults to the copy shipped next to this bundle; tests
 * may point it elsewhere. Throws if the file is missing or not valid JSON.
 */
function loadDocsContent(docsPath = path3.join(__dirname3, "../ai/docs-content.json")) {
  return JSON.parse(fs.readFileSync(docsPath, "utf-8"));
}
|
|
934
|
+
/**
 * Relevance score for one doc page against pre-lowercased query terms.
 * Per term: +10 for a title match, +5 for a description match, +1 per
 * occurrence in the body capped at 10 so long pages cannot dominate.
 */
function scorePageForQuery(page, queryTerms) {
  const title = page.title.toLowerCase();
  const description = page.description.toLowerCase();
  const body = page.content.toLowerCase();
  return queryTerms.reduce((total, term) => {
    let termScore = 0;
    if (title.includes(term)) termScore += 10;
    if (description.includes(term)) termScore += 5;
    // Occurrence count via split: n delimiters produce n + 1 pieces.
    const occurrences = body.split(term).length - 1;
    return total + termScore + Math.min(occurrences, 10);
  }, 0);
}
|
|
947
|
+
/**
 * Pull a short excerpt of `content` centered near the earliest match of any
 * query term (case-insensitive). The window starts up to 100 chars before
 * the match and spans at most `maxLength` chars; "..." marks truncation on
 * either side. Falls back to the head of the content when nothing matches.
 */
function extractExcerpt(content, queryTerms, maxLength = 500) {
  const haystack = content.toLowerCase();
  const hitPositions = queryTerms
    .map((term) => haystack.indexOf(term))
    .filter((pos) => pos !== -1);
  if (hitPositions.length === 0) {
    return content.slice(0, maxLength);
  }
  const firstHit = Math.min(...hitPositions);
  const start = Math.max(0, firstHit - 100);
  const end = Math.min(content.length, start + maxLength);
  const prefix = start > 0 ? "..." : "";
  const suffix = end < content.length ? "..." : "";
  return prefix + content.slice(start, end) + suffix;
}
|
|
966
|
+
/**
 * Build and start the DataQueue docs MCP server, connected over the given
 * transport (stdio by default). Registers one resource (llms.txt) and three
 * tools (list-doc-pages, get-doc-page, search-docs), all backed by the
 * bundled docs-content.json loaded via loadDocsContent.
 *
 * deps.docsPath and deps.transport are injectable for tests.
 * Returns the connected McpServer instance.
 */
async function startMcpServer(deps = {}) {
  const pages = loadDocsContent(deps.docsPath);
  const server = new McpServer({
    name: "dataqueue-docs",
    version: "1.0.0"
  });
  // Resource: a compact overview document for LLM clients. Prefers the
  // bundled core skill file; falls back to a generated page listing when
  // the file is missing from the package.
  server.resource("llms-txt", "dataqueue://llms.txt", async () => {
    const llmsPath = path3.join(
      __dirname3,
      "../ai/skills/dataqueue-core/SKILL.md"
    );
    let content;
    try {
      content = fs.readFileSync(llmsPath, "utf-8");
    } catch {
      content = pages.map((p) => `## ${p.title}

Slug: ${p.slug}

${p.description}`).join("\n\n");
    }
    return { contents: [{ uri: "dataqueue://llms.txt", text: content }] };
  });
  // Tool: enumerate every doc page (slug/title/description) as JSON.
  server.tool(
    "list-doc-pages",
    "List all available DataQueue documentation pages with titles and descriptions.",
    {},
    async () => {
      const listing = pages.map((p) => ({
        slug: p.slug,
        title: p.title,
        description: p.description
      }));
      return {
        content: [
          { type: "text", text: JSON.stringify(listing, null, 2) }
        ]
      };
    }
  );
  // Tool: fetch one page by exact slug; unknown slugs return isError with a
  // hint to use list-doc-pages.
  server.tool(
    "get-doc-page",
    "Fetch a specific DataQueue doc page by slug. Returns full page content as markdown.",
    {
      slug: z.string().describe('The doc page slug, e.g. "usage/add-job" or "api/job-queue"')
    },
    async ({ slug }) => {
      const page = pages.find((p) => p.slug === slug);
      if (!page) {
        return {
          content: [
            {
              type: "text",
              text: `Page not found: "${slug}". Use list-doc-pages to see available slugs.`
            }
          ],
          isError: true
        };
      }
      // Markdown header; the description becomes a blockquote when present.
      const header = page.description ? `# ${page.title}

> ${page.description}

` : `# ${page.title}

`;
      return {
        content: [{ type: "text", text: header + page.content }]
      };
    }
  );
  // Tool: keyword search. Terms are lowercased, whitespace-split, and
  // single-character terms are dropped; top 5 pages by scorePageForQuery
  // are returned with excerpts from extractExcerpt.
  server.tool(
    "search-docs",
    "Full-text search across all DataQueue documentation pages. Returns matching sections with page titles and content excerpts.",
    {
      query: z.string().describe('Search query, e.g. "cron scheduling" or "waitForToken"')
    },
    async ({ query }) => {
      const queryTerms = query.toLowerCase().split(/\s+/).filter((t) => t.length > 1);
      if (queryTerms.length === 0) {
        return {
          content: [
            { type: "text", text: "Please provide a search query." }
          ],
          isError: true
        };
      }
      const scored = pages.map((page) => ({
        page,
        score: scorePageForQuery(page, queryTerms)
      })).filter((r) => r.score > 0).sort((a, b) => b.score - a.score).slice(0, 5);
      if (scored.length === 0) {
        return {
          content: [
            {
              type: "text",
              text: `No results for "${query}". Try different keywords or use list-doc-pages to browse.`
            }
          ]
        };
      }
      const results = scored.map((r) => {
        const excerpt = extractExcerpt(r.page.content, queryTerms);
        return `## ${r.page.title} (${r.page.slug})

${r.page.description}

${excerpt}`;
      });
      return {
        content: [{ type: "text", text: results.join("\n\n---\n\n") }]
      };
    }
  );
  // Default transport is stdio, matching `npx dataqueue-cli mcp`.
  const transport = deps.transport ?? new StdioServerTransport();
  await server.connect(transport);
  return server;
}
|
1084
|
+
// Auto-start only when this bundle is executed directly as the mcp-server
// entry (node .../mcp-server.js or .cjs). When the code is imported instead
// (e.g. by the CLI's `mcp` subcommand), the caller starts the server itself.
var isDirectRun = process.argv[1] && (process.argv[1].endsWith("/mcp-server.js") || process.argv[1].endsWith("/mcp-server.cjs"));
if (isDirectRun) {
  startMcpServer().catch((err) => {
    console.error("Failed to start MCP server:", err);
    process.exit(1);
  });
}
|
|
1091
|
+
|
|
1092
|
+
// src/cli.ts
// ESM __dirname shim for the CLI entry; used below as the base for the
// bundled migrations directory default (../migrations) in runCli.
var __filename4 = fileURLToPath(import.meta.url);
var __dirname4 = path3.dirname(__filename4);
|
|
8
1095
|
function runCli(argv, {
|
|
9
1096
|
log = console.log,
|
|
1097
|
+
error = console.error,
|
|
10
1098
|
exit = (code) => process.exit(code),
|
|
11
1099
|
spawnSyncImpl = spawnSync,
|
|
12
|
-
migrationsDir =
|
|
1100
|
+
migrationsDir = path3.join(__dirname4, "../migrations"),
|
|
1101
|
+
initDeps,
|
|
1102
|
+
runInitImpl = runInit,
|
|
1103
|
+
installSkillsDeps,
|
|
1104
|
+
runInstallSkillsImpl = runInstallSkills,
|
|
1105
|
+
installRulesDeps,
|
|
1106
|
+
runInstallRulesImpl = runInstallRules,
|
|
1107
|
+
installMcpDeps,
|
|
1108
|
+
runInstallMcpImpl = runInstallMcp,
|
|
1109
|
+
startMcpServerImpl = startMcpServer
|
|
13
1110
|
} = {}) {
|
|
14
1111
|
const [, , command, ...restArgs] = argv;
|
|
15
1112
|
function printUsage() {
|
|
1113
|
+
log("Usage:");
|
|
16
1114
|
log(
|
|
17
|
-
"
|
|
1115
|
+
" dataqueue-cli migrate [--envPath <path>] [-s <schema> | --schema <schema>]"
|
|
18
1116
|
);
|
|
1117
|
+
log(" dataqueue-cli init");
|
|
1118
|
+
log(" dataqueue-cli install-skills");
|
|
1119
|
+
log(" dataqueue-cli install-rules");
|
|
1120
|
+
log(" dataqueue-cli install-mcp");
|
|
1121
|
+
log(" dataqueue-cli mcp");
|
|
19
1122
|
log("");
|
|
20
|
-
log("Options:");
|
|
1123
|
+
log("Options for migrate:");
|
|
21
1124
|
log(
|
|
22
1125
|
" --envPath <path> Path to a .env file to load environment variables (passed to node-pg-migrate)"
|
|
23
1126
|
);
|
|
@@ -25,16 +1128,13 @@ function runCli(argv, {
|
|
|
25
1128
|
" -s, --schema <schema> Set the schema to use (passed to node-pg-migrate)"
|
|
26
1129
|
);
|
|
27
1130
|
log("");
|
|
28
|
-
log("
|
|
29
|
-
log(
|
|
30
|
-
|
|
31
|
-
);
|
|
32
|
-
log(
|
|
33
|
-
" - For managed Postgres (e.g., DigitalOcean) with SSL, set PGSSLMODE=require and PGSSLROOTCERT to your CA .crt file."
|
|
34
|
-
);
|
|
1131
|
+
log("AI tooling commands:");
|
|
1132
|
+
log(" install-skills Install DataQueue skill files for AI assistants");
|
|
1133
|
+
log(" install-rules Install DataQueue agent rules for AI clients");
|
|
35
1134
|
log(
|
|
36
|
-
"
|
|
1135
|
+
" install-mcp Configure the DataQueue MCP server for AI clients"
|
|
37
1136
|
);
|
|
1137
|
+
log(" mcp Start the DataQueue MCP server (stdio)");
|
|
38
1138
|
exit(1);
|
|
39
1139
|
}
|
|
40
1140
|
if (command === "migrate") {
|
|
@@ -71,6 +1171,39 @@ function runCli(argv, {
|
|
|
71
1171
|
{ stdio: "inherit" }
|
|
72
1172
|
);
|
|
73
1173
|
exit(result.status ?? 1);
|
|
1174
|
+
} else if (command === "init") {
|
|
1175
|
+
runInitImpl({
|
|
1176
|
+
log,
|
|
1177
|
+
error,
|
|
1178
|
+
exit,
|
|
1179
|
+
...initDeps
|
|
1180
|
+
});
|
|
1181
|
+
} else if (command === "install-skills") {
|
|
1182
|
+
runInstallSkillsImpl({
|
|
1183
|
+
log,
|
|
1184
|
+
error,
|
|
1185
|
+
exit,
|
|
1186
|
+
...installSkillsDeps
|
|
1187
|
+
});
|
|
1188
|
+
} else if (command === "install-rules") {
|
|
1189
|
+
runInstallRulesImpl({
|
|
1190
|
+
log,
|
|
1191
|
+
error,
|
|
1192
|
+
exit,
|
|
1193
|
+
...installRulesDeps
|
|
1194
|
+
});
|
|
1195
|
+
} else if (command === "install-mcp") {
|
|
1196
|
+
runInstallMcpImpl({
|
|
1197
|
+
log,
|
|
1198
|
+
error,
|
|
1199
|
+
exit,
|
|
1200
|
+
...installMcpDeps
|
|
1201
|
+
});
|
|
1202
|
+
} else if (command === "mcp") {
|
|
1203
|
+
startMcpServerImpl().catch((err) => {
|
|
1204
|
+
error("Failed to start MCP server:", err);
|
|
1205
|
+
exit(1);
|
|
1206
|
+
});
|
|
74
1207
|
} else {
|
|
75
1208
|
printUsage();
|
|
76
1209
|
}
|