@nicnocquee/dataqueue 1.25.0 → 1.26.0-beta.20260223202259

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/ai/build-docs-content.ts +96 -0
  2. package/ai/build-llms-full.ts +42 -0
  3. package/ai/docs-content.json +284 -0
  4. package/ai/rules/advanced.md +150 -0
  5. package/ai/rules/basic.md +159 -0
  6. package/ai/rules/react-dashboard.md +83 -0
  7. package/ai/skills/dataqueue-advanced/SKILL.md +370 -0
  8. package/ai/skills/dataqueue-core/SKILL.md +234 -0
  9. package/ai/skills/dataqueue-react/SKILL.md +189 -0
  10. package/dist/cli.cjs +1149 -14
  11. package/dist/cli.cjs.map +1 -1
  12. package/dist/cli.d.cts +66 -1
  13. package/dist/cli.d.ts +66 -1
  14. package/dist/cli.js +1146 -13
  15. package/dist/cli.js.map +1 -1
  16. package/dist/index.cjs +3236 -1237
  17. package/dist/index.cjs.map +1 -1
  18. package/dist/index.d.cts +697 -23
  19. package/dist/index.d.ts +697 -23
  20. package/dist/index.js +3235 -1238
  21. package/dist/index.js.map +1 -1
  22. package/dist/mcp-server.cjs +186 -0
  23. package/dist/mcp-server.cjs.map +1 -0
  24. package/dist/mcp-server.d.cts +32 -0
  25. package/dist/mcp-server.d.ts +32 -0
  26. package/dist/mcp-server.js +175 -0
  27. package/dist/mcp-server.js.map +1 -0
  28. package/migrations/1781200000004_create_cron_schedules_table.sql +33 -0
  29. package/migrations/1781200000005_add_retry_config_to_job_queue.sql +17 -0
  30. package/package.json +24 -21
  31. package/src/backend.ts +170 -5
  32. package/src/backends/postgres.ts +992 -63
  33. package/src/backends/redis-scripts.ts +358 -26
  34. package/src/backends/redis.test.ts +1532 -0
  35. package/src/backends/redis.ts +993 -35
  36. package/src/cli.test.ts +82 -6
  37. package/src/cli.ts +73 -10
  38. package/src/cron.test.ts +126 -0
  39. package/src/cron.ts +40 -0
  40. package/src/db-util.ts +1 -1
  41. package/src/index.test.ts +1034 -11
  42. package/src/index.ts +267 -39
  43. package/src/init-command.test.ts +449 -0
  44. package/src/init-command.ts +709 -0
  45. package/src/install-mcp-command.test.ts +216 -0
  46. package/src/install-mcp-command.ts +185 -0
  47. package/src/install-rules-command.test.ts +218 -0
  48. package/src/install-rules-command.ts +233 -0
  49. package/src/install-skills-command.test.ts +176 -0
  50. package/src/install-skills-command.ts +124 -0
  51. package/src/mcp-server.test.ts +162 -0
  52. package/src/mcp-server.ts +231 -0
  53. package/src/processor.ts +104 -113
  54. package/src/queue.test.ts +465 -0
  55. package/src/queue.ts +34 -252
  56. package/src/supervisor.test.ts +340 -0
  57. package/src/supervisor.ts +177 -0
  58. package/src/types.ts +476 -12
  59. package/LICENSE +0 -21
@@ -0,0 +1,186 @@
1
+ #!/usr/bin/env node
2
+ 'use strict';
3
+
4
+ var mcp_js = require('@modelcontextprotocol/sdk/server/mcp.js');
5
+ var stdio_js = require('@modelcontextprotocol/sdk/server/stdio.js');
6
+ var zod = require('zod');
7
+ var fs = require('fs');
8
+ var path = require('path');
9
+ var url = require('url');
10
+
11
// Bundler (Rollup) interop scaffolding: emulates ESM default-import behavior
// for the CommonJS build and recovers __filename/__dirname equivalents that
// are used below to locate the ai/ assets shipped alongside dist/.
var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }

var fs__default = /*#__PURE__*/_interopDefault(fs);
var path__default = /*#__PURE__*/_interopDefault(path);

// NOTE(review): generated dual Node/browser expression. In Node (`document`
// undefined) it round-trips __filename through a file:// URL; the 'u'+'rl'
// split is a bundler trick to keep this require out of static analysis.
var __filename$1 = url.fileURLToPath((typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === 'SCRIPT' && _documentCurrentScript.src || new URL('mcp-server.cjs', document.baseURI).href)));
var __dirname$1 = path__default.default.dirname(__filename$1);
19
/**
 * Reads and parses the docs manifest (an array of DocPage records).
 * @param {string} [docsPath] - Manifest location; defaults to the
 *   ai/docs-content.json bundled next to dist/ in the package.
 * @returns {Array<{slug: string, title: string, description: string, content: string}>}
 * @throws If the file is missing or is not valid JSON.
 */
function loadDocsContent(docsPath = path__default.default.join(__dirname$1, "../ai/docs-content.json")) {
  const serializedManifest = fs__default.default.readFileSync(docsPath, "utf-8");
  return JSON.parse(serializedManifest);
}
23
/**
 * Scores one documentation page against pre-lowercased query terms.
 * Per term: a title hit weighs 10, a description hit 5, and each body
 * occurrence adds 1, capped at 10 occurrences per term.
 * @param {{title: string, description: string, content: string}} page
 * @param {string[]} queryTerms - Lowercased search terms.
 * @returns {number} Accumulated relevance score (0 when nothing matches).
 */
function scorePageForQuery(page, queryTerms) {
  const title = page.title.toLowerCase();
  const description = page.description.toLowerCase();
  const body = page.content.toLowerCase();
  return queryTerms.reduce((total, term) => {
    // Occurrence count via split: N separators produce N+1 pieces.
    let termScore = Math.min(body.split(term).length - 1, 10);
    if (title.includes(term)) termScore += 10;
    if (description.includes(term)) termScore += 5;
    return total + termScore;
  }, 0);
}
36
/**
 * Extracts an excerpt of `content` centered near the earliest match of any
 * query term (case-insensitive). Starts 100 chars before the match, runs for
 * up to `maxLength` chars, and adds "..." where text was trimmed. Falls back
 * to the first `maxLength` chars when no term matches.
 * @param {string} content - Full page text.
 * @param {string[]} queryTerms - Lowercased search terms.
 * @param {number} [maxLength=500] - Maximum excerpt length before ellipses.
 * @returns {string}
 */
function extractExcerpt(content, queryTerms, maxLength = 500) {
  const haystack = content.toLowerCase();
  let firstHit = -1;
  for (const term of queryTerms) {
    const pos = haystack.indexOf(term);
    if (pos === -1) continue;
    if (firstHit === -1 || pos < firstHit) firstHit = pos;
  }
  if (firstHit === -1) {
    return content.slice(0, maxLength);
  }
  const begin = Math.max(0, firstHit - 100);
  const finish = Math.min(content.length, begin + maxLength);
  const prefix = begin > 0 ? "..." : "";
  const suffix = finish < content.length ? "..." : "";
  return prefix + content.slice(begin, finish) + suffix;
}
55
/**
 * Creates and starts the DataQueue MCP documentation server.
 *
 * Registers one resource (llms.txt) and three tools (list-doc-pages,
 * get-doc-page, search-docs) backed by the docs manifest, then connects
 * over the supplied transport (stdio by default).
 *
 * @param {{docsPath?: string, transport?: object}} [deps] - Injectable
 *   dependencies for testing: manifest path and a pre-built transport.
 * @returns {Promise<object>} The connected McpServer instance.
 * @throws If the docs manifest cannot be read or parsed.
 */
async function startMcpServer(deps = {}) {
  const pages = loadDocsContent(deps.docsPath);
  const server = new mcp_js.McpServer({
    name: "dataqueue-docs",
    version: "1.0.0"
  });
  // Resource: llms.txt — prefers the bundled core SKILL.md; on any read
  // failure falls back to an index generated from page titles/slugs.
  server.resource("llms-txt", "dataqueue://llms.txt", async () => {
    const llmsPath = path__default.default.join(
      __dirname$1,
      "../ai/skills/dataqueue-core/SKILL.md"
    );
    let content;
    try {
      content = fs__default.default.readFileSync(llmsPath, "utf-8");
    } catch {
      content = pages.map((p) => `## ${p.title}

Slug: ${p.slug}

${p.description}`).join("\n\n");
    }
    return { contents: [{ uri: "dataqueue://llms.txt", text: content }] };
  });
  // Tool: list-doc-pages — slug/title/description listing as pretty JSON.
  server.tool(
    "list-doc-pages",
    "List all available DataQueue documentation pages with titles and descriptions.",
    {},
    async () => {
      const listing = pages.map((p) => ({
        slug: p.slug,
        title: p.title,
        description: p.description
      }));
      return {
        content: [
          { type: "text", text: JSON.stringify(listing, null, 2) }
        ]
      };
    }
  );
  // Tool: get-doc-page — full markdown of one page, addressed by exact slug;
  // unknown slugs produce an isError result rather than a throw.
  server.tool(
    "get-doc-page",
    "Fetch a specific DataQueue doc page by slug. Returns full page content as markdown.",
    {
      slug: zod.z.string().describe('The doc page slug, e.g. "usage/add-job" or "api/job-queue"')
    },
    async ({ slug }) => {
      const page = pages.find((p) => p.slug === slug);
      if (!page) {
        return {
          content: [
            {
              type: "text",
              text: `Page not found: "${slug}". Use list-doc-pages to see available slugs.`
            }
          ],
          isError: true
        };
      }
      // Markdown header; the blockquote line is emitted only for pages with
      // a non-empty description.
      const header = page.description ? `# ${page.title}

> ${page.description}

` : `# ${page.title}

`;
      return {
        content: [{ type: "text", text: header + page.content }]
      };
    }
  );
  // Tool: search-docs — naive term-frequency search over all pages; returns
  // the top 5 scoring pages, each with a contextual excerpt.
  server.tool(
    "search-docs",
    "Full-text search across all DataQueue documentation pages. Returns matching sections with page titles and content excerpts.",
    {
      query: zod.z.string().describe('Search query, e.g. "cron scheduling" or "waitForToken"')
    },
    async ({ query }) => {
      // Single-character terms are dropped as noise.
      const queryTerms = query.toLowerCase().split(/\s+/).filter((t) => t.length > 1);
      if (queryTerms.length === 0) {
        return {
          content: [
            { type: "text", text: "Please provide a search query." }
          ],
          isError: true
        };
      }
      const scored = pages.map((page) => ({
        page,
        score: scorePageForQuery(page, queryTerms)
      })).filter((r) => r.score > 0).sort((a, b) => b.score - a.score).slice(0, 5);
      if (scored.length === 0) {
        return {
          content: [
            {
              type: "text",
              text: `No results for "${query}". Try different keywords or use list-doc-pages to browse.`
            }
          ]
        };
      }
      const results = scored.map((r) => {
        const excerpt = extractExcerpt(r.page.content, queryTerms);
        return `## ${r.page.title} (${r.page.slug})

${r.page.description}

${excerpt}`;
      });
      return {
        content: [{ type: "text", text: results.join("\n\n---\n\n") }]
      };
    }
  );
  const transport = deps.transport ?? new stdio_js.StdioServerTransport();
  await server.connect(transport);
  return server;
}
173
// True when this file was invoked directly (e.g. `node dist/mcp-server.cjs`),
// as opposed to being required as a library. The match is path-separator
// aware: the previous endsWith("/mcp-server.cjs") check never matched on
// Windows, where argv[1] uses backslashes.
function isMcpServerEntry(scriptPath) {
  return /[\\/]mcp-server\.(js|cjs)$/.test(scriptPath || "");
}
var isDirectRun = isMcpServerEntry(process.argv[1]);
if (isDirectRun) {
  // Report startup failures on stderr and exit non-zero for shell callers.
  startMcpServer().catch((err) => {
    console.error("Failed to start MCP server:", err);
    process.exit(1);
  });
}
180
+
181
// Public API of the CommonJS bundle.
exports.extractExcerpt = extractExcerpt;
exports.loadDocsContent = loadDocsContent;
exports.scorePageForQuery = scorePageForQuery;
exports.startMcpServer = startMcpServer;
// Exactly one sourceMappingURL directive: the published file emitted it
// twice (lines 185-186); only the last is honored and the duplicate is
// noise that some tooling warns about.
//# sourceMappingURL=mcp-server.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/mcp-server.ts"],"names":["__filename","fileURLToPath","__dirname","path","fs","McpServer","z","StdioServerTransport"],"mappings":";;;;;;;;;;;;;;;;AAcA,IAAMA,YAAA,GAAaC,iBAAA,CAAc,gQAAe,CAAA;AAChD,IAAMC,WAAA,GAAYC,qBAAA,CAAK,OAAA,CAAQH,YAAU,CAAA;AAUlC,SAAS,gBACd,QAAA,GAAmBG,qBAAA,CAAK,IAAA,CAAKD,WAAA,EAAW,yBAAyB,CAAA,EACtD;AACX,EAAA,MAAM,GAAA,GAAME,mBAAA,CAAG,YAAA,CAAa,QAAA,EAAU,OAAO,CAAA;AAC7C,EAAA,OAAO,IAAA,CAAK,MAAM,GAAG,CAAA;AACvB;AAGO,SAAS,iBAAA,CAAkB,MAAe,UAAA,EAA8B;AAC7E,EAAA,MAAM,UAAA,GAAa,IAAA,CAAK,KAAA,CAAM,WAAA,EAAY;AAC1C,EAAA,MAAM,SAAA,GAAY,IAAA,CAAK,WAAA,CAAY,WAAA,EAAY;AAC/C,EAAA,MAAM,YAAA,GAAe,IAAA,CAAK,OAAA,CAAQ,WAAA,EAAY;AAE9C,EAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,EAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,IAAA,IAAI,UAAA,CAAW,QAAA,CAAS,IAAI,CAAA,EAAG,KAAA,IAAS,EAAA;AACxC,IAAA,IAAI,SAAA,CAAU,QAAA,CAAS,IAAI,CAAA,EAAG,KAAA,IAAS,CAAA;AAEvC,IAAA,MAAM,cAAA,GAAiB,YAAA,CAAa,KAAA,CAAM,IAAI,EAAE,MAAA,GAAS,CAAA;AACzD,IAAA,KAAA,IAAS,IAAA,CAAK,GAAA,CAAI,cAAA,EAAgB,EAAE,CAAA;AAAA;AAEtC,EAAA,OAAO,KAAA;AACT;AAGO,SAAS,cAAA,CACd,OAAA,EACA,UAAA,EACA,SAAA,GAAY,GAAA,EACJ;AACR,EAAA,MAAM,KAAA,GAAQ,QAAQ,WAAA,EAAY;AAClC,EAAA,IAAI,aAAA,GAAgB,EAAA;AAEpB,EAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,IAAA,MAAM,GAAA,GAAM,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA;AAC9B,IAAA,IAAI,GAAA,KAAQ,EAAA,KAAO,aAAA,KAAkB,EAAA,IAAM,MAAM,aAAA,CAAA,EAAgB;AAC/D,MAAA,aAAA,GAAgB,GAAA;AAAA;AAClB;AAGF,EAAA,IAAI,kBAAkB,EAAA,EAAI;AACxB,IAAA,OAAO,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,SAAS,CAAA;AAAA;AAGnC,EAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,gBAAgB,GAAG,CAAA;AAC7C,EAAA,MAAM,MAAM,IAAA,CAAK,GAAA,CAAI,OAAA,CAAQ,MAAA,EAAQ,QAAQ,SAAS,CAAA;AACtD,EAAA,IAAI,OAAA,GAAU,OAAA,CAAQ,KAAA,CAAM,KAAA,EAAO,GAAG,CAAA;AAEtC,EAAA,IAAI,KAAA,GAAQ,CAAA,EAAG,OAAA,GAAU,KAAA,GAAQ,OAAA;AACjC,EAAA,IAAI,GAAA,GAAM,OAAA,CAAQ,MAAA,EAAQ,OAAA,GAAU,OAAA,GAAU,KAAA;AAE9C,EAAA,OAAO,OAAA;AACT;AAOA,eAAsB,cAAA,CACpB,IAAA,GAGI,EAAC,EACe;AACpB,EAAA,MAAM,KAAA,GAAQ,eAAA,CAAgB,IAAA,CAAK,QAAQ,CAAA;AAE3C,EAAA,MAAM,MAAA,GAAS,IAAIC,gBAAA,CAAU;AAAA,IAC3B,
IAAA,EAAM,gBAAA;AAAA,IACN,OAAA,EAAS;AAAA,GACV,CAAA;AAED,EAAA,MAAA,CAAO,QAAA,CAAS,UAAA,EAAY,sBAAA,EAAwB,YAAY;AAC9D,IAAA,MAAM,WAAWF,qBAAA,CAAK,IAAA;AAAA,MACpBD,WAAA;AAAA,MACA;AAAA,KACF;AACA,IAAA,IAAI,OAAA;AACJ,IAAA,IAAI;AACF,MAAA,OAAA,GAAUE,mBAAA,CAAG,YAAA,CAAa,QAAA,EAAU,OAAO,CAAA;AAAA,KAC7C,CAAA,MAAQ;AACN,MAAA,OAAA,GAAU,MACP,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,GAAA,EAAM,EAAE,KAAK;;AAAA,MAAA,EAAa,EAAE,IAAI;;AAAA,EAAO,CAAA,CAAE,WAAW,CAAA,CAAE,CAAA,CACjE,KAAK,MAAM,CAAA;AAAA;AAEhB,IAAA,OAAO,EAAE,UAAU,CAAC,EAAE,KAAK,sBAAA,EAAwB,IAAA,EAAM,OAAA,EAAS,CAAA,EAAE;AAAA,GACrE,CAAA;AAED,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,gBAAA;AAAA,IACA,gFAAA;AAAA,IACA,EAAC;AAAA,IACD,YAAY;AACV,MAAA,MAAM,OAAA,GAAU,KAAA,CAAM,GAAA,CAAI,CAAC,CAAA,MAAO;AAAA,QAChC,MAAM,CAAA,CAAE,IAAA;AAAA,QACR,OAAO,CAAA,CAAE,KAAA;AAAA,QACT,aAAa,CAAA,CAAE;AAAA,OACjB,CAAE,CAAA;AACF,MAAA,OAAO;AAAA,QACL,OAAA,EAAS;AAAA,UACP,EAAE,MAAM,MAAA,EAAiB,IAAA,EAAM,KAAK,SAAA,CAAU,OAAA,EAAS,IAAA,EAAM,CAAC,CAAA;AAAE;AAClE,OACF;AAAA;AACF,GACF;AAEA,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,cAAA;AAAA,IACA,qFAAA;AAAA,IACA;AAAA,MACE,IAAA,EAAME,KAAA,CACH,MAAA,EAAO,CACP,SAAS,4DAA4D;AAAA,KAC1E;AAAA,IACA,OAAO,EAAE,IAAA,EAAK,KAAM;AAClB,MAAA,MAAM,OAAO,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,SAAS,IAAI,CAAA;AAC9C,MAAA,IAAI,CAAC,IAAA,EAAM;AACT,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP;AAAA,cACE,IAAA,EAAM,MAAA;AAAA,cACN,IAAA,EAAM,oBAAoB,IAAI,CAAA,6CAAA;AAAA;AAChC,WACF;AAAA,UACA,OAAA,EAAS;AAAA,SACX;AAAA;AAEF,MAAA,MAAM,MAAA,GAAS,IAAA,CAAK,WAAA,GAChB,CAAA,EAAA,EAAK,KAAK,KAAK;;AAAA,EAAA,EAAS,KAAK,WAAW;;AAAA,CAAA,GACxC,CAAA,EAAA,EAAK,KAAK,KAAK;;AAAA,CAAA;AACnB,MAAA,OAAO;AAAA,QACL,OAAA,EAAS,CAAC,EAAE,IAAA,EAAM,QAAiB,IAAA,EAAM,MAAA,GAAS,IAAA,CAAK,OAAA,EAAS;AAAA,OAClE;AAAA;AACF,GACF;AAEA,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,aAAA;AAAA,IACA,6HAAA;AAAA,IACA;AAAA,MACE,KAAA,EAAOA,KAAA,CACJ,MAAA,EAAO,CACP,SAAS,wDAAwD;AAAA,KACtE;AAAA,IACA,OAAO,EAAE,KAAA,EAAM,KAAM;AACnB,MAAA,MAAM,UAAA,GAAa,KAAA,CAChB,WAAA,EAAY,CACZ,KAAA,CAAM,KAAK,CAAA,CACX,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA;
AAE7B,MAAA,IAAI,UAAA,CAAW,WAAW,CAAA,EAAG;AAC3B,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP,EAAE,IAAA,EAAM,MAAA,EAAiB,IAAA,EAAM,gCAAA;AAAiC,WAClE;AAAA,UACA,OAAA,EAAS;AAAA,SACX;AAAA;AAGF,MAAA,MAAM,MAAA,GAAS,KAAA,CACZ,GAAA,CAAI,CAAC,IAAA,MAAU;AAAA,QACd,IAAA;AAAA,QACA,KAAA,EAAO,iBAAA,CAAkB,IAAA,EAAM,UAAU;AAAA,OAC3C,CAAE,EACD,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,KAAA,GAAQ,CAAC,CAAA,CACzB,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,EAAE,KAAA,GAAQ,CAAA,CAAE,KAAK,CAAA,CAChC,KAAA,CAAM,GAAG,CAAC,CAAA;AAEb,MAAA,IAAI,MAAA,CAAO,WAAW,CAAA,EAAG;AACvB,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP;AAAA,cACE,IAAA,EAAM,MAAA;AAAA,cACN,IAAA,EAAM,mBAAmB,KAAK,CAAA,0DAAA;AAAA;AAChC;AACF,SACF;AAAA;AAGF,MAAA,MAAM,OAAA,GAAU,MAAA,CAAO,GAAA,CAAI,CAAC,CAAA,KAAM;AAChC,QAAA,MAAM,OAAA,GAAU,cAAA,CAAe,CAAA,CAAE,IAAA,CAAK,SAAS,UAAU,CAAA;AACzD,QAAA,OAAO,MAAM,CAAA,CAAE,IAAA,CAAK,KAAK,CAAA,EAAA,EAAK,CAAA,CAAE,KAAK,IAAI,CAAA;;AAAA,EAAQ,CAAA,CAAE,KAAK,WAAW;;AAAA,EAAO,OAAO,CAAA,CAAA;AAAA,OAClF,CAAA;AAED,MAAA,OAAO;AAAA,QACL,OAAA,EAAS,CAAC,EAAE,IAAA,EAAM,MAAA,EAAiB,MAAM,OAAA,CAAQ,IAAA,CAAK,aAAa,CAAA,EAAG;AAAA,OACxE;AAAA;AACF,GACF;AAEA,EAAA,MAAM,SAAA,GAAY,IAAA,CAAK,SAAA,IAAa,IAAIC,6BAAA,EAAqB;AAC7D,EAAA,MAAM,MAAA,CAAO,QAAQ,SAAS,CAAA;AAC9B,EAAA,OAAO,MAAA;AACT;AAEA,IAAM,cACJ,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,KACb,QAAQ,IAAA,CAAK,CAAC,CAAA,CAAE,QAAA,CAAS,gBAAgB,CAAA,IACxC,OAAA,CAAQ,KAAK,CAAC,CAAA,CAAE,SAAS,iBAAiB,CAAA,CAAA;AAE9C,IAAI,WAAA,EAAa;AACf,EAAA,cAAA,EAAe,CAAE,KAAA,CAAM,CAAC,GAAA,KAAQ;AAC9B,IAAA,OAAA,CAAQ,KAAA,CAAM,+BAA+B,GAAG,CAAA;AAChD,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,GACf,CAAA;AACH","file":"mcp-server.cjs","sourcesContent":["#!/usr/bin/env node\n\n/**\n * DataQueue MCP Server — exposes documentation search over stdio.\n * Run via: dataqueue-cli mcp\n */\n\nimport { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';\nimport { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';\nimport { z } from 'zod';\nimport fs from 'fs';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\n\nconst 
__filename = fileURLToPath(import.meta.url);\nconst __dirname = path.dirname(__filename);\n\ninterface DocPage {\n slug: string;\n title: string;\n description: string;\n content: string;\n}\n\n/** @internal Loads docs-content.json from the ai/ directory bundled with the package. */\nexport function loadDocsContent(\n docsPath: string = path.join(__dirname, '../ai/docs-content.json'),\n): DocPage[] {\n const raw = fs.readFileSync(docsPath, 'utf-8');\n return JSON.parse(raw) as DocPage[];\n}\n\n/** @internal Scores a doc page against a search query using simple term matching. */\nexport function scorePageForQuery(page: DocPage, queryTerms: string[]): number {\n const titleLower = page.title.toLowerCase();\n const descLower = page.description.toLowerCase();\n const contentLower = page.content.toLowerCase();\n\n let score = 0;\n for (const term of queryTerms) {\n if (titleLower.includes(term)) score += 10;\n if (descLower.includes(term)) score += 5;\n\n const contentMatches = contentLower.split(term).length - 1;\n score += Math.min(contentMatches, 10);\n }\n return score;\n}\n\n/** @internal Extracts a relevant excerpt around the first match of any query term. */\nexport function extractExcerpt(\n content: string,\n queryTerms: string[],\n maxLength = 500,\n): string {\n const lower = content.toLowerCase();\n let earliestIndex = -1;\n\n for (const term of queryTerms) {\n const idx = lower.indexOf(term);\n if (idx !== -1 && (earliestIndex === -1 || idx < earliestIndex)) {\n earliestIndex = idx;\n }\n }\n\n if (earliestIndex === -1) {\n return content.slice(0, maxLength);\n }\n\n const start = Math.max(0, earliestIndex - 100);\n const end = Math.min(content.length, start + maxLength);\n let excerpt = content.slice(start, end);\n\n if (start > 0) excerpt = '...' 
+ excerpt;\n if (end < content.length) excerpt = excerpt + '...';\n\n return excerpt;\n}\n\n/**\n * Creates and starts the DataQueue MCP server over stdio.\n *\n * @param deps - Injectable dependencies for testing.\n */\nexport async function startMcpServer(\n deps: {\n docsPath?: string;\n transport?: InstanceType<typeof StdioServerTransport>;\n } = {},\n): Promise<McpServer> {\n const pages = loadDocsContent(deps.docsPath);\n\n const server = new McpServer({\n name: 'dataqueue-docs',\n version: '1.0.0',\n });\n\n server.resource('llms-txt', 'dataqueue://llms.txt', async () => {\n const llmsPath = path.join(\n __dirname,\n '../ai/skills/dataqueue-core/SKILL.md',\n );\n let content: string;\n try {\n content = fs.readFileSync(llmsPath, 'utf-8');\n } catch {\n content = pages\n .map((p) => `## ${p.title}\\n\\nSlug: ${p.slug}\\n\\n${p.description}`)\n .join('\\n\\n');\n }\n return { contents: [{ uri: 'dataqueue://llms.txt', text: content }] };\n });\n\n server.tool(\n 'list-doc-pages',\n 'List all available DataQueue documentation pages with titles and descriptions.',\n {},\n async () => {\n const listing = pages.map((p) => ({\n slug: p.slug,\n title: p.title,\n description: p.description,\n }));\n return {\n content: [\n { type: 'text' as const, text: JSON.stringify(listing, null, 2) },\n ],\n };\n },\n );\n\n server.tool(\n 'get-doc-page',\n 'Fetch a specific DataQueue doc page by slug. Returns full page content as markdown.',\n {\n slug: z\n .string()\n .describe('The doc page slug, e.g. \"usage/add-job\" or \"api/job-queue\"'),\n },\n async ({ slug }) => {\n const page = pages.find((p) => p.slug === slug);\n if (!page) {\n return {\n content: [\n {\n type: 'text' as const,\n text: `Page not found: \"${slug}\". Use list-doc-pages to see available slugs.`,\n },\n ],\n isError: true,\n };\n }\n const header = page.description\n ? 
`# ${page.title}\\n\\n> ${page.description}\\n\\n`\n : `# ${page.title}\\n\\n`;\n return {\n content: [{ type: 'text' as const, text: header + page.content }],\n };\n },\n );\n\n server.tool(\n 'search-docs',\n 'Full-text search across all DataQueue documentation pages. Returns matching sections with page titles and content excerpts.',\n {\n query: z\n .string()\n .describe('Search query, e.g. \"cron scheduling\" or \"waitForToken\"'),\n },\n async ({ query }) => {\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 1);\n\n if (queryTerms.length === 0) {\n return {\n content: [\n { type: 'text' as const, text: 'Please provide a search query.' },\n ],\n isError: true,\n };\n }\n\n const scored = pages\n .map((page) => ({\n page,\n score: scorePageForQuery(page, queryTerms),\n }))\n .filter((r) => r.score > 0)\n .sort((a, b) => b.score - a.score)\n .slice(0, 5);\n\n if (scored.length === 0) {\n return {\n content: [\n {\n type: 'text' as const,\n text: `No results for \"${query}\". Try different keywords or use list-doc-pages to browse.`,\n },\n ],\n };\n }\n\n const results = scored.map((r) => {\n const excerpt = extractExcerpt(r.page.content, queryTerms);\n return `## ${r.page.title} (${r.page.slug})\\n\\n${r.page.description}\\n\\n${excerpt}`;\n });\n\n return {\n content: [{ type: 'text' as const, text: results.join('\\n\\n---\\n\\n') }],\n };\n },\n );\n\n const transport = deps.transport ?? new StdioServerTransport();\n await server.connect(transport);\n return server;\n}\n\nconst isDirectRun =\n process.argv[1] &&\n (process.argv[1].endsWith('/mcp-server.js') ||\n process.argv[1].endsWith('/mcp-server.cjs'));\n\nif (isDirectRun) {\n startMcpServer().catch((err) => {\n console.error('Failed to start MCP server:', err);\n process.exit(1);\n });\n}\n"]}
@@ -0,0 +1,32 @@
1
+ #!/usr/bin/env node
2
+ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
3
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
4
+
5
+ /**
6
+ * DataQueue MCP Server — exposes documentation search over stdio.
7
+ * Run via: dataqueue-cli mcp
8
+ */
9
+
10
+ interface DocPage {
11
+ slug: string;
12
+ title: string;
13
+ description: string;
14
+ content: string;
15
+ }
16
+ /** @internal Loads docs-content.json from the ai/ directory bundled with the package. */
17
+ declare function loadDocsContent(docsPath?: string): DocPage[];
18
+ /** @internal Scores a doc page against a search query using simple term matching. */
19
+ declare function scorePageForQuery(page: DocPage, queryTerms: string[]): number;
20
+ /** @internal Extracts a relevant excerpt around the first match of any query term. */
21
+ declare function extractExcerpt(content: string, queryTerms: string[], maxLength?: number): string;
22
+ /**
23
+ * Creates and starts the DataQueue MCP server over stdio.
24
+ *
25
+ * @param deps - Injectable dependencies for testing.
26
+ */
27
+ declare function startMcpServer(deps?: {
28
+ docsPath?: string;
29
+ transport?: InstanceType<typeof StdioServerTransport>;
30
+ }): Promise<McpServer>;
31
+
32
+ export { extractExcerpt, loadDocsContent, scorePageForQuery, startMcpServer };
@@ -0,0 +1,32 @@
1
+ #!/usr/bin/env node
2
+ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
3
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
4
+
5
+ /**
6
+ * DataQueue MCP Server — exposes documentation search over stdio.
7
+ * Run via: dataqueue-cli mcp
8
+ */
9
+
10
+ interface DocPage {
11
+ slug: string;
12
+ title: string;
13
+ description: string;
14
+ content: string;
15
+ }
16
+ /** @internal Loads docs-content.json from the ai/ directory bundled with the package. */
17
+ declare function loadDocsContent(docsPath?: string): DocPage[];
18
+ /** @internal Scores a doc page against a search query using simple term matching. */
19
+ declare function scorePageForQuery(page: DocPage, queryTerms: string[]): number;
20
+ /** @internal Extracts a relevant excerpt around the first match of any query term. */
21
+ declare function extractExcerpt(content: string, queryTerms: string[], maxLength?: number): string;
22
+ /**
23
+ * Creates and starts the DataQueue MCP server over stdio.
24
+ *
25
+ * @param deps - Injectable dependencies for testing.
26
+ */
27
+ declare function startMcpServer(deps?: {
28
+ docsPath?: string;
29
+ transport?: InstanceType<typeof StdioServerTransport>;
30
+ }): Promise<McpServer>;
31
+
32
+ export { extractExcerpt, loadDocsContent, scorePageForQuery, startMcpServer };
@@ -0,0 +1,175 @@
1
+ #!/usr/bin/env node
2
+ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
3
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
4
+ import { z } from 'zod';
5
+ import fs from 'fs';
6
+ import path from 'path';
7
+ import { fileURLToPath } from 'url';
8
+
9
// Derive CommonJS-style __filename/__dirname for this ES module; used below
// to locate the ai/ docs assets shipped alongside dist/ in the package.
var __filename = fileURLToPath(import.meta.url);
var __dirname = path.dirname(__filename);
11
/**
 * Reads and parses the docs manifest (an array of DocPage records).
 * @param {string} [docsPath] - Manifest location; defaults to the
 *   ai/docs-content.json bundled next to dist/ in the package.
 * @returns {Array<{slug: string, title: string, description: string, content: string}>}
 * @throws If the file is missing or is not valid JSON.
 */
function loadDocsContent(docsPath = path.join(__dirname, "../ai/docs-content.json")) {
  return JSON.parse(fs.readFileSync(docsPath, "utf-8"));
}
15
/**
 * Scores one documentation page against pre-lowercased query terms.
 * Per term: a title hit weighs 10, a description hit 5, and each body
 * occurrence adds 1, capped at 10 occurrences per term.
 * @param {{title: string, description: string, content: string}} page
 * @param {string[]} queryTerms - Lowercased search terms.
 * @returns {number} Accumulated relevance score (0 when nothing matches).
 */
function scorePageForQuery(page, queryTerms) {
  const inTitle = page.title.toLowerCase();
  const inDescription = page.description.toLowerCase();
  const inBody = page.content.toLowerCase();
  let total = 0;
  for (const term of queryTerms) {
    total += inTitle.includes(term) ? 10 : 0;
    total += inDescription.includes(term) ? 5 : 0;
    // Occurrence count via split: N separators produce N+1 pieces.
    total += Math.min(inBody.split(term).length - 1, 10);
  }
  return total;
}
28
/**
 * Extracts an excerpt of `content` centered near the earliest match of any
 * query term (case-insensitive). Starts 100 chars before the match, runs for
 * up to `maxLength` chars, and adds "..." where text was trimmed. Falls back
 * to the first `maxLength` chars when no term matches.
 * @param {string} content - Full page text.
 * @param {string[]} queryTerms - Lowercased search terms.
 * @param {number} [maxLength=500] - Maximum excerpt length before ellipses.
 * @returns {string}
 */
function extractExcerpt(content, queryTerms, maxLength = 500) {
  const lowered = content.toLowerCase();
  const matchPositions = queryTerms
    .map((term) => lowered.indexOf(term))
    .filter((pos) => pos !== -1);
  if (matchPositions.length === 0) {
    return content.slice(0, maxLength);
  }
  const anchor = Math.min(...matchPositions);
  const from = Math.max(0, anchor - 100);
  const to = Math.min(content.length, from + maxLength);
  let snippet = content.slice(from, to);
  if (from > 0) snippet = "..." + snippet;
  if (to < content.length) snippet += "...";
  return snippet;
}
47
/**
 * Creates and starts the DataQueue MCP documentation server.
 *
 * Registers one resource (llms.txt) and three tools (list-doc-pages,
 * get-doc-page, search-docs) backed by the docs manifest, then connects
 * over the supplied transport (stdio by default).
 *
 * @param {{docsPath?: string, transport?: object}} [deps] - Injectable
 *   dependencies for testing: manifest path and a pre-built transport.
 * @returns {Promise<object>} The connected McpServer instance.
 * @throws If the docs manifest cannot be read or parsed.
 */
async function startMcpServer(deps = {}) {
  const pages = loadDocsContent(deps.docsPath);
  const server = new McpServer({
    name: "dataqueue-docs",
    version: "1.0.0"
  });
  // Resource: llms.txt — prefers the bundled core SKILL.md; on any read
  // failure falls back to an index generated from page titles/slugs.
  server.resource("llms-txt", "dataqueue://llms.txt", async () => {
    const llmsPath = path.join(
      __dirname,
      "../ai/skills/dataqueue-core/SKILL.md"
    );
    let content;
    try {
      content = fs.readFileSync(llmsPath, "utf-8");
    } catch {
      content = pages.map((p) => `## ${p.title}

Slug: ${p.slug}

${p.description}`).join("\n\n");
    }
    return { contents: [{ uri: "dataqueue://llms.txt", text: content }] };
  });
  // Tool: list-doc-pages — slug/title/description listing as pretty JSON.
  server.tool(
    "list-doc-pages",
    "List all available DataQueue documentation pages with titles and descriptions.",
    {},
    async () => {
      const listing = pages.map((p) => ({
        slug: p.slug,
        title: p.title,
        description: p.description
      }));
      return {
        content: [
          { type: "text", text: JSON.stringify(listing, null, 2) }
        ]
      };
    }
  );
  // Tool: get-doc-page — full markdown of one page, addressed by exact slug;
  // unknown slugs produce an isError result rather than a throw.
  server.tool(
    "get-doc-page",
    "Fetch a specific DataQueue doc page by slug. Returns full page content as markdown.",
    {
      slug: z.string().describe('The doc page slug, e.g. "usage/add-job" or "api/job-queue"')
    },
    async ({ slug }) => {
      const page = pages.find((p) => p.slug === slug);
      if (!page) {
        return {
          content: [
            {
              type: "text",
              text: `Page not found: "${slug}". Use list-doc-pages to see available slugs.`
            }
          ],
          isError: true
        };
      }
      // Markdown header; the blockquote line is emitted only for pages with
      // a non-empty description.
      const header = page.description ? `# ${page.title}

> ${page.description}

` : `# ${page.title}

`;
      return {
        content: [{ type: "text", text: header + page.content }]
      };
    }
  );
  // Tool: search-docs — naive term-frequency search over all pages; returns
  // the top 5 scoring pages, each with a contextual excerpt.
  server.tool(
    "search-docs",
    "Full-text search across all DataQueue documentation pages. Returns matching sections with page titles and content excerpts.",
    {
      query: z.string().describe('Search query, e.g. "cron scheduling" or "waitForToken"')
    },
    async ({ query }) => {
      // Single-character terms are dropped as noise.
      const queryTerms = query.toLowerCase().split(/\s+/).filter((t) => t.length > 1);
      if (queryTerms.length === 0) {
        return {
          content: [
            { type: "text", text: "Please provide a search query." }
          ],
          isError: true
        };
      }
      const scored = pages.map((page) => ({
        page,
        score: scorePageForQuery(page, queryTerms)
      })).filter((r) => r.score > 0).sort((a, b) => b.score - a.score).slice(0, 5);
      if (scored.length === 0) {
        return {
          content: [
            {
              type: "text",
              text: `No results for "${query}". Try different keywords or use list-doc-pages to browse.`
            }
          ]
        };
      }
      const results = scored.map((r) => {
        const excerpt = extractExcerpt(r.page.content, queryTerms);
        return `## ${r.page.title} (${r.page.slug})

${r.page.description}

${excerpt}`;
      });
      return {
        content: [{ type: "text", text: results.join("\n\n---\n\n") }]
      };
    }
  );
  const transport = deps.transport ?? new StdioServerTransport();
  await server.connect(transport);
  return server;
}
165
// True when this file was invoked directly (e.g. `node dist/mcp-server.js`),
// as opposed to being imported as a library. The match is path-separator
// aware: the previous endsWith("/mcp-server.js") check never matched on
// Windows, where argv[1] uses backslashes.
function isMcpServerEntry(scriptPath) {
  return /[\\/]mcp-server\.(js|cjs)$/.test(scriptPath || "");
}
var isDirectRun = isMcpServerEntry(process.argv[1]);
if (isDirectRun) {
  // Report startup failures on stderr and exit non-zero for shell callers.
  startMcpServer().catch((err) => {
    console.error("Failed to start MCP server:", err);
    process.exit(1);
  });
}
172
+
173
+ export { extractExcerpt, loadDocsContent, scorePageForQuery, startMcpServer };
174
+ //# sourceMappingURL=mcp-server.js.map
175
+ //# sourceMappingURL=mcp-server.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/mcp-server.ts"],"names":[],"mappings":";;;;;;;;AAcA,IAAM,UAAA,GAAa,aAAA,CAAc,MAAA,CAAA,IAAA,CAAY,GAAG,CAAA;AAChD,IAAM,SAAA,GAAY,IAAA,CAAK,OAAA,CAAQ,UAAU,CAAA;AAUlC,SAAS,gBACd,QAAA,GAAmB,IAAA,CAAK,IAAA,CAAK,SAAA,EAAW,yBAAyB,CAAA,EACtD;AACX,EAAA,MAAM,GAAA,GAAM,EAAA,CAAG,YAAA,CAAa,QAAA,EAAU,OAAO,CAAA;AAC7C,EAAA,OAAO,IAAA,CAAK,MAAM,GAAG,CAAA;AACvB;AAGO,SAAS,iBAAA,CAAkB,MAAe,UAAA,EAA8B;AAC7E,EAAA,MAAM,UAAA,GAAa,IAAA,CAAK,KAAA,CAAM,WAAA,EAAY;AAC1C,EAAA,MAAM,SAAA,GAAY,IAAA,CAAK,WAAA,CAAY,WAAA,EAAY;AAC/C,EAAA,MAAM,YAAA,GAAe,IAAA,CAAK,OAAA,CAAQ,WAAA,EAAY;AAE9C,EAAA,IAAI,KAAA,GAAQ,CAAA;AACZ,EAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,IAAA,IAAI,UAAA,CAAW,QAAA,CAAS,IAAI,CAAA,EAAG,KAAA,IAAS,EAAA;AACxC,IAAA,IAAI,SAAA,CAAU,QAAA,CAAS,IAAI,CAAA,EAAG,KAAA,IAAS,CAAA;AAEvC,IAAA,MAAM,cAAA,GAAiB,YAAA,CAAa,KAAA,CAAM,IAAI,EAAE,MAAA,GAAS,CAAA;AACzD,IAAA,KAAA,IAAS,IAAA,CAAK,GAAA,CAAI,cAAA,EAAgB,EAAE,CAAA;AAAA;AAEtC,EAAA,OAAO,KAAA;AACT;AAGO,SAAS,cAAA,CACd,OAAA,EACA,UAAA,EACA,SAAA,GAAY,GAAA,EACJ;AACR,EAAA,MAAM,KAAA,GAAQ,QAAQ,WAAA,EAAY;AAClC,EAAA,IAAI,aAAA,GAAgB,EAAA;AAEpB,EAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,IAAA,MAAM,GAAA,GAAM,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA;AAC9B,IAAA,IAAI,GAAA,KAAQ,EAAA,KAAO,aAAA,KAAkB,EAAA,IAAM,MAAM,aAAA,CAAA,EAAgB;AAC/D,MAAA,aAAA,GAAgB,GAAA;AAAA;AAClB;AAGF,EAAA,IAAI,kBAAkB,EAAA,EAAI;AACxB,IAAA,OAAO,OAAA,CAAQ,KAAA,CAAM,CAAA,EAAG,SAAS,CAAA;AAAA;AAGnC,EAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,gBAAgB,GAAG,CAAA;AAC7C,EAAA,MAAM,MAAM,IAAA,CAAK,GAAA,CAAI,OAAA,CAAQ,MAAA,EAAQ,QAAQ,SAAS,CAAA;AACtD,EAAA,IAAI,OAAA,GAAU,OAAA,CAAQ,KAAA,CAAM,KAAA,EAAO,GAAG,CAAA;AAEtC,EAAA,IAAI,KAAA,GAAQ,CAAA,EAAG,OAAA,GAAU,KAAA,GAAQ,OAAA;AACjC,EAAA,IAAI,GAAA,GAAM,OAAA,CAAQ,MAAA,EAAQ,OAAA,GAAU,OAAA,GAAU,KAAA;AAE9C,EAAA,OAAO,OAAA;AACT;AAOA,eAAsB,cAAA,CACpB,IAAA,GAGI,EAAC,EACe;AACpB,EAAA,MAAM,KAAA,GAAQ,eAAA,CAAgB,IAAA,CAAK,QAAQ,CAAA;AAE3C,EAAA,MAAM,MAAA,GAAS,IAAI,SAAA,CAAU;AAAA,IAC3B,IAAA,EAAM,gBAAA;AAAA,IACN,OAAA,EAAS;AAAA,GACV,CAAA;AAED,EAAA,MAAA,CAAO,QAAA,CAAS,UAAA,EAAY,sBA
AA,EAAwB,YAAY;AAC9D,IAAA,MAAM,WAAW,IAAA,CAAK,IAAA;AAAA,MACpB,SAAA;AAAA,MACA;AAAA,KACF;AACA,IAAA,IAAI,OAAA;AACJ,IAAA,IAAI;AACF,MAAA,OAAA,GAAU,EAAA,CAAG,YAAA,CAAa,QAAA,EAAU,OAAO,CAAA;AAAA,KAC7C,CAAA,MAAQ;AACN,MAAA,OAAA,GAAU,MACP,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,GAAA,EAAM,EAAE,KAAK;;AAAA,MAAA,EAAa,EAAE,IAAI;;AAAA,EAAO,CAAA,CAAE,WAAW,CAAA,CAAE,CAAA,CACjE,KAAK,MAAM,CAAA;AAAA;AAEhB,IAAA,OAAO,EAAE,UAAU,CAAC,EAAE,KAAK,sBAAA,EAAwB,IAAA,EAAM,OAAA,EAAS,CAAA,EAAE;AAAA,GACrE,CAAA;AAED,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,gBAAA;AAAA,IACA,gFAAA;AAAA,IACA,EAAC;AAAA,IACD,YAAY;AACV,MAAA,MAAM,OAAA,GAAU,KAAA,CAAM,GAAA,CAAI,CAAC,CAAA,MAAO;AAAA,QAChC,MAAM,CAAA,CAAE,IAAA;AAAA,QACR,OAAO,CAAA,CAAE,KAAA;AAAA,QACT,aAAa,CAAA,CAAE;AAAA,OACjB,CAAE,CAAA;AACF,MAAA,OAAO;AAAA,QACL,OAAA,EAAS;AAAA,UACP,EAAE,MAAM,MAAA,EAAiB,IAAA,EAAM,KAAK,SAAA,CAAU,OAAA,EAAS,IAAA,EAAM,CAAC,CAAA;AAAE;AAClE,OACF;AAAA;AACF,GACF;AAEA,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,cAAA;AAAA,IACA,qFAAA;AAAA,IACA;AAAA,MACE,IAAA,EAAM,CAAA,CACH,MAAA,EAAO,CACP,SAAS,4DAA4D;AAAA,KAC1E;AAAA,IACA,OAAO,EAAE,IAAA,EAAK,KAAM;AAClB,MAAA,MAAM,OAAO,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,SAAS,IAAI,CAAA;AAC9C,MAAA,IAAI,CAAC,IAAA,EAAM;AACT,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP;AAAA,cACE,IAAA,EAAM,MAAA;AAAA,cACN,IAAA,EAAM,oBAAoB,IAAI,CAAA,6CAAA;AAAA;AAChC,WACF;AAAA,UACA,OAAA,EAAS;AAAA,SACX;AAAA;AAEF,MAAA,MAAM,MAAA,GAAS,IAAA,CAAK,WAAA,GAChB,CAAA,EAAA,EAAK,KAAK,KAAK;;AAAA,EAAA,EAAS,KAAK,WAAW;;AAAA,CAAA,GACxC,CAAA,EAAA,EAAK,KAAK,KAAK;;AAAA,CAAA;AACnB,MAAA,OAAO;AAAA,QACL,OAAA,EAAS,CAAC,EAAE,IAAA,EAAM,QAAiB,IAAA,EAAM,MAAA,GAAS,IAAA,CAAK,OAAA,EAAS;AAAA,OAClE;AAAA;AACF,GACF;AAEA,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,aAAA;AAAA,IACA,6HAAA;AAAA,IACA;AAAA,MACE,KAAA,EAAO,CAAA,CACJ,MAAA,EAAO,CACP,SAAS,wDAAwD;AAAA,KACtE;AAAA,IACA,OAAO,EAAE,KAAA,EAAM,KAAM;AACnB,MAAA,MAAM,UAAA,GAAa,KAAA,CAChB,WAAA,EAAY,CACZ,KAAA,CAAM,KAAK,CAAA,CACX,MAAA,CAAO,CAAC,CAAA,KAAM,CAAA,CAAE,MAAA,GAAS,CAAC,CAAA;AAE7B,MAAA,IAAI,UAAA,CAAW,WAAW,CAAA,EAAG;AAC3B,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP,EAAE,IAAA,EAAM
,MAAA,EAAiB,IAAA,EAAM,gCAAA;AAAiC,WAClE;AAAA,UACA,OAAA,EAAS;AAAA,SACX;AAAA;AAGF,MAAA,MAAM,MAAA,GAAS,KAAA,CACZ,GAAA,CAAI,CAAC,IAAA,MAAU;AAAA,QACd,IAAA;AAAA,QACA,KAAA,EAAO,iBAAA,CAAkB,IAAA,EAAM,UAAU;AAAA,OAC3C,CAAE,EACD,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,KAAA,GAAQ,CAAC,CAAA,CACzB,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM,EAAE,KAAA,GAAQ,CAAA,CAAE,KAAK,CAAA,CAChC,KAAA,CAAM,GAAG,CAAC,CAAA;AAEb,MAAA,IAAI,MAAA,CAAO,WAAW,CAAA,EAAG;AACvB,QAAA,OAAO;AAAA,UACL,OAAA,EAAS;AAAA,YACP;AAAA,cACE,IAAA,EAAM,MAAA;AAAA,cACN,IAAA,EAAM,mBAAmB,KAAK,CAAA,0DAAA;AAAA;AAChC;AACF,SACF;AAAA;AAGF,MAAA,MAAM,OAAA,GAAU,MAAA,CAAO,GAAA,CAAI,CAAC,CAAA,KAAM;AAChC,QAAA,MAAM,OAAA,GAAU,cAAA,CAAe,CAAA,CAAE,IAAA,CAAK,SAAS,UAAU,CAAA;AACzD,QAAA,OAAO,MAAM,CAAA,CAAE,IAAA,CAAK,KAAK,CAAA,EAAA,EAAK,CAAA,CAAE,KAAK,IAAI,CAAA;;AAAA,EAAQ,CAAA,CAAE,KAAK,WAAW;;AAAA,EAAO,OAAO,CAAA,CAAA;AAAA,OAClF,CAAA;AAED,MAAA,OAAO;AAAA,QACL,OAAA,EAAS,CAAC,EAAE,IAAA,EAAM,MAAA,EAAiB,MAAM,OAAA,CAAQ,IAAA,CAAK,aAAa,CAAA,EAAG;AAAA,OACxE;AAAA;AACF,GACF;AAEA,EAAA,MAAM,SAAA,GAAY,IAAA,CAAK,SAAA,IAAa,IAAI,oBAAA,EAAqB;AAC7D,EAAA,MAAM,MAAA,CAAO,QAAQ,SAAS,CAAA;AAC9B,EAAA,OAAO,MAAA;AACT;AAEA,IAAM,cACJ,OAAA,CAAQ,IAAA,CAAK,CAAC,CAAA,KACb,QAAQ,IAAA,CAAK,CAAC,CAAA,CAAE,QAAA,CAAS,gBAAgB,CAAA,IACxC,OAAA,CAAQ,KAAK,CAAC,CAAA,CAAE,SAAS,iBAAiB,CAAA,CAAA;AAE9C,IAAI,WAAA,EAAa;AACf,EAAA,cAAA,EAAe,CAAE,KAAA,CAAM,CAAC,GAAA,KAAQ;AAC9B,IAAA,OAAA,CAAQ,KAAA,CAAM,+BAA+B,GAAG,CAAA;AAChD,IAAA,OAAA,CAAQ,KAAK,CAAC,CAAA;AAAA,GACf,CAAA;AACH","file":"mcp-server.js","sourcesContent":["#!/usr/bin/env node\n\n/**\n * DataQueue MCP Server — exposes documentation search over stdio.\n * Run via: dataqueue-cli mcp\n */\n\nimport { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';\nimport { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';\nimport { z } from 'zod';\nimport fs from 'fs';\nimport path from 'path';\nimport { fileURLToPath } from 'url';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = path.dirname(__filename);\n\ninterface 
DocPage {\n slug: string;\n title: string;\n description: string;\n content: string;\n}\n\n/** @internal Loads docs-content.json from the ai/ directory bundled with the package. */\nexport function loadDocsContent(\n docsPath: string = path.join(__dirname, '../ai/docs-content.json'),\n): DocPage[] {\n const raw = fs.readFileSync(docsPath, 'utf-8');\n return JSON.parse(raw) as DocPage[];\n}\n\n/** @internal Scores a doc page against a search query using simple term matching. */\nexport function scorePageForQuery(page: DocPage, queryTerms: string[]): number {\n const titleLower = page.title.toLowerCase();\n const descLower = page.description.toLowerCase();\n const contentLower = page.content.toLowerCase();\n\n let score = 0;\n for (const term of queryTerms) {\n if (titleLower.includes(term)) score += 10;\n if (descLower.includes(term)) score += 5;\n\n const contentMatches = contentLower.split(term).length - 1;\n score += Math.min(contentMatches, 10);\n }\n return score;\n}\n\n/** @internal Extracts a relevant excerpt around the first match of any query term. */\nexport function extractExcerpt(\n content: string,\n queryTerms: string[],\n maxLength = 500,\n): string {\n const lower = content.toLowerCase();\n let earliestIndex = -1;\n\n for (const term of queryTerms) {\n const idx = lower.indexOf(term);\n if (idx !== -1 && (earliestIndex === -1 || idx < earliestIndex)) {\n earliestIndex = idx;\n }\n }\n\n if (earliestIndex === -1) {\n return content.slice(0, maxLength);\n }\n\n const start = Math.max(0, earliestIndex - 100);\n const end = Math.min(content.length, start + maxLength);\n let excerpt = content.slice(start, end);\n\n if (start > 0) excerpt = '...' 
+ excerpt;\n if (end < content.length) excerpt = excerpt + '...';\n\n return excerpt;\n}\n\n/**\n * Creates and starts the DataQueue MCP server over stdio.\n *\n * @param deps - Injectable dependencies for testing.\n */\nexport async function startMcpServer(\n deps: {\n docsPath?: string;\n transport?: InstanceType<typeof StdioServerTransport>;\n } = {},\n): Promise<McpServer> {\n const pages = loadDocsContent(deps.docsPath);\n\n const server = new McpServer({\n name: 'dataqueue-docs',\n version: '1.0.0',\n });\n\n server.resource('llms-txt', 'dataqueue://llms.txt', async () => {\n const llmsPath = path.join(\n __dirname,\n '../ai/skills/dataqueue-core/SKILL.md',\n );\n let content: string;\n try {\n content = fs.readFileSync(llmsPath, 'utf-8');\n } catch {\n content = pages\n .map((p) => `## ${p.title}\\n\\nSlug: ${p.slug}\\n\\n${p.description}`)\n .join('\\n\\n');\n }\n return { contents: [{ uri: 'dataqueue://llms.txt', text: content }] };\n });\n\n server.tool(\n 'list-doc-pages',\n 'List all available DataQueue documentation pages with titles and descriptions.',\n {},\n async () => {\n const listing = pages.map((p) => ({\n slug: p.slug,\n title: p.title,\n description: p.description,\n }));\n return {\n content: [\n { type: 'text' as const, text: JSON.stringify(listing, null, 2) },\n ],\n };\n },\n );\n\n server.tool(\n 'get-doc-page',\n 'Fetch a specific DataQueue doc page by slug. Returns full page content as markdown.',\n {\n slug: z\n .string()\n .describe('The doc page slug, e.g. \"usage/add-job\" or \"api/job-queue\"'),\n },\n async ({ slug }) => {\n const page = pages.find((p) => p.slug === slug);\n if (!page) {\n return {\n content: [\n {\n type: 'text' as const,\n text: `Page not found: \"${slug}\". Use list-doc-pages to see available slugs.`,\n },\n ],\n isError: true,\n };\n }\n const header = page.description\n ? 
`# ${page.title}\\n\\n> ${page.description}\\n\\n`\n : `# ${page.title}\\n\\n`;\n return {\n content: [{ type: 'text' as const, text: header + page.content }],\n };\n },\n );\n\n server.tool(\n 'search-docs',\n 'Full-text search across all DataQueue documentation pages. Returns matching sections with page titles and content excerpts.',\n {\n query: z\n .string()\n .describe('Search query, e.g. \"cron scheduling\" or \"waitForToken\"'),\n },\n async ({ query }) => {\n const queryTerms = query\n .toLowerCase()\n .split(/\\s+/)\n .filter((t) => t.length > 1);\n\n if (queryTerms.length === 0) {\n return {\n content: [\n { type: 'text' as const, text: 'Please provide a search query.' },\n ],\n isError: true,\n };\n }\n\n const scored = pages\n .map((page) => ({\n page,\n score: scorePageForQuery(page, queryTerms),\n }))\n .filter((r) => r.score > 0)\n .sort((a, b) => b.score - a.score)\n .slice(0, 5);\n\n if (scored.length === 0) {\n return {\n content: [\n {\n type: 'text' as const,\n text: `No results for \"${query}\". Try different keywords or use list-doc-pages to browse.`,\n },\n ],\n };\n }\n\n const results = scored.map((r) => {\n const excerpt = extractExcerpt(r.page.content, queryTerms);\n return `## ${r.page.title} (${r.page.slug})\\n\\n${r.page.description}\\n\\n${excerpt}`;\n });\n\n return {\n content: [{ type: 'text' as const, text: results.join('\\n\\n---\\n\\n') }],\n };\n },\n );\n\n const transport = deps.transport ?? new StdioServerTransport();\n await server.connect(transport);\n return server;\n}\n\nconst isDirectRun =\n process.argv[1] &&\n (process.argv[1].endsWith('/mcp-server.js') ||\n process.argv[1].endsWith('/mcp-server.cjs'));\n\nif (isDirectRun) {\n startMcpServer().catch((err) => {\n console.error('Failed to start MCP server:', err);\n process.exit(1);\n });\n}\n"]}
@@ -0,0 +1,33 @@
1
+ -- Create cron_schedules table for recurring job definitions
2
+
3
+ -- Up Migration
4
+ CREATE TABLE IF NOT EXISTS cron_schedules (
5
+ id SERIAL PRIMARY KEY,
6
+ schedule_name VARCHAR(255) NOT NULL UNIQUE,
7
+ cron_expression VARCHAR(255) NOT NULL,
8
+ job_type VARCHAR(255) NOT NULL,
9
+ payload JSONB NOT NULL DEFAULT '{}',
10
+ max_attempts INT DEFAULT 3,
11
+ priority INT DEFAULT 0,
12
+ timeout_ms INT,
13
+ force_kill_on_timeout BOOLEAN DEFAULT FALSE,
14
+ tags TEXT[],
15
+ timezone VARCHAR(100) DEFAULT 'UTC',
16
+ allow_overlap BOOLEAN DEFAULT FALSE,
17
+ status VARCHAR(50) DEFAULT 'active',
18
+ last_enqueued_at TIMESTAMPTZ,
19
+ last_job_id INT,
20
+ next_run_at TIMESTAMPTZ,
21
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
22
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
23
+ );
24
+
25
+ CREATE INDEX IF NOT EXISTS idx_cron_schedules_status ON cron_schedules(status);
26
+ CREATE INDEX IF NOT EXISTS idx_cron_schedules_next_run_at ON cron_schedules(next_run_at);
27
+ CREATE INDEX IF NOT EXISTS idx_cron_schedules_name ON cron_schedules(schedule_name);
28
+
29
+ -- Down Migration
30
+ DROP INDEX IF EXISTS idx_cron_schedules_name;
31
+ DROP INDEX IF EXISTS idx_cron_schedules_next_run_at;
32
+ DROP INDEX IF EXISTS idx_cron_schedules_status;
33
+ DROP TABLE IF EXISTS cron_schedules;
@@ -0,0 +1,17 @@
1
+ -- Up Migration
2
+ ALTER TABLE job_queue ADD COLUMN IF NOT EXISTS retry_delay INT;
3
+ ALTER TABLE job_queue ADD COLUMN IF NOT EXISTS retry_backoff BOOLEAN;
4
+ ALTER TABLE job_queue ADD COLUMN IF NOT EXISTS retry_delay_max INT;
5
+
6
+ ALTER TABLE cron_schedules ADD COLUMN IF NOT EXISTS retry_delay INT;
7
+ ALTER TABLE cron_schedules ADD COLUMN IF NOT EXISTS retry_backoff BOOLEAN;
8
+ ALTER TABLE cron_schedules ADD COLUMN IF NOT EXISTS retry_delay_max INT;
9
+
10
+ -- Down Migration
11
+ ALTER TABLE job_queue DROP COLUMN IF EXISTS retry_delay;
12
+ ALTER TABLE job_queue DROP COLUMN IF EXISTS retry_backoff;
13
+ ALTER TABLE job_queue DROP COLUMN IF EXISTS retry_delay_max;
14
+
15
+ ALTER TABLE cron_schedules DROP COLUMN IF EXISTS retry_delay;
16
+ ALTER TABLE cron_schedules DROP COLUMN IF EXISTS retry_backoff;
17
+ ALTER TABLE cron_schedules DROP COLUMN IF EXISTS retry_delay_max;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nicnocquee/dataqueue",
3
- "version": "1.25.0",
3
+ "version": "1.26.0-beta.20260223202259",
4
4
  "description": "PostgreSQL or Redis-backed job queue for Node.js applications with support for serverless environments",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -14,8 +14,22 @@
14
14
  "files": [
15
15
  "dist/",
16
16
  "src/",
17
- "migrations/"
17
+ "migrations/",
18
+ "ai/"
18
19
  ],
20
+ "scripts": {
21
+ "prebuild": "npx tsx ai/build-docs-content.ts && npx tsx ai/build-llms-full.ts",
22
+ "build": "tsup",
23
+ "ci": "npm run build && npm run check-format && npm run check-exports && npm run lint && npm run test",
24
+ "lint": "tsc",
25
+ "test": "vitest run --reporter=verbose",
26
+ "format": "prettier --write .",
27
+ "check-format": "prettier --check .",
28
+ "check-exports": "attw --pack .",
29
+ "build:docs-content": "npx tsx ai/build-docs-content.ts && npx tsx ai/build-llms-full.ts",
30
+ "dev": "tsup --watch",
31
+ "migrate": "node-pg-migrate -d $PG_DATAQUEUE_DATABASE -m ./migrations"
32
+ },
19
33
  "keywords": [
20
34
  "nextjs",
21
35
  "postgresql",
@@ -32,19 +46,21 @@
32
46
  "directory": "packages/dataqueue"
33
47
  },
34
48
  "dependencies": {
49
+ "@modelcontextprotocol/sdk": "^1.26.0",
50
+ "croner": "^10.0.1",
35
51
  "pg": "^8.0.0",
36
- "pg-connection-string": "^2.9.1"
52
+ "pg-connection-string": "^2.9.1",
53
+ "zod": "^3.25.67"
37
54
  },
38
55
  "devDependencies": {
39
56
  "@arethetypeswrong/cli": "^0.18.2",
40
- "@changesets/cli": "^2.29.5",
41
57
  "@types/node": "^24.0.4",
42
58
  "@types/pg": "^8.15.4",
43
59
  "ioredis": "^5.9.3",
44
60
  "node-pg-migrate": "^8.0.3",
45
61
  "pnpm": "^9.0.0",
46
- "ts-node": "^10.9.2",
47
62
  "prettier": "^3.6.2",
63
+ "ts-node": "^10.9.2",
48
64
  "tsup": "^8.5.0",
49
65
  "turbo": "^1.13.0",
50
66
  "typescript": "^5.8.3",
@@ -65,20 +81,7 @@
65
81
  }
66
82
  },
67
83
  "bin": {
68
- "dataqueue-cli": "./cli.cjs"
69
- },
70
- "scripts": {
71
- "build": "tsup",
72
- "ci": "npm run build && npm run check-format && npm run check-exports && npm run lint && npm run test",
73
- "lint": "tsc",
74
- "test": "vitest run --reporter=verbose",
75
- "format": "prettier --write .",
76
- "check-format": "prettier --check .",
77
- "check-exports": "attw --pack .",
78
- "local-release": "changeset version && changeset publish",
79
- "dev": "tsup --watch",
80
- "migrate": "node-pg-migrate -d $PG_DATAQUEUE_DATABASE -m ./migrations",
81
- "changeset:add": "changeset",
82
- "changeset:version": "changeset version && find .changeset -type f -name '*.md' ! -name 'README.md' -delete"
84
+ "dataqueue-cli": "./cli.cjs",
85
+ "dataqueue-mcp": "./dist/mcp-server.js"
83
86
  }
84
- }
87
+ }