task-o-matic 0.0.20 → 0.0.22
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/commands/benchmark.js +203 -173
- package/dist/commands/install.d.ts +3 -0
- package/dist/commands/install.d.ts.map +1 -0
- package/dist/commands/install.js +150 -0
- package/dist/commands/prd.d.ts +5 -0
- package/dist/commands/prd.d.ts.map +1 -1
- package/dist/commands/prd.js +297 -189
- package/dist/commands/tasks/split.d.ts.map +1 -1
- package/dist/commands/tasks/split.js +129 -27
- package/dist/commands/utils/ai-parallel.d.ts +20 -0
- package/dist/commands/utils/ai-parallel.d.ts.map +1 -0
- package/dist/commands/utils/ai-parallel.js +115 -0
- package/dist/commands/workflow.d.ts.map +1 -1
- package/dist/commands/workflow.js +59 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -1
- package/dist/lib/ai-service/gemini-proxy.d.ts +15 -0
- package/dist/lib/ai-service/gemini-proxy.d.ts.map +1 -0
- package/dist/lib/ai-service/gemini-proxy.js +90 -0
- package/dist/lib/ai-service/json-parser.d.ts +16 -4
- package/dist/lib/ai-service/json-parser.d.ts.map +1 -1
- package/dist/lib/ai-service/json-parser.js +93 -19
- package/dist/lib/ai-service/model-provider.d.ts.map +1 -1
- package/dist/lib/ai-service/model-provider.js +31 -2
- package/dist/lib/ai-service/prd-operations.d.ts.map +1 -1
- package/dist/lib/ai-service/prd-operations.js +21 -5
- package/dist/lib/ai-service/task-operations.d.ts.map +1 -1
- package/dist/lib/ai-service/task-operations.js +10 -2
- package/dist/lib/better-t-stack-cli.d.ts.map +1 -1
- package/dist/lib/better-t-stack-cli.js +6 -5
- package/dist/lib/config-validation.d.ts +9 -9
- package/dist/lib/config-validation.d.ts.map +1 -1
- package/dist/lib/config-validation.js +11 -3
- package/dist/lib/config.d.ts.map +1 -1
- package/dist/lib/config.js +11 -2
- package/dist/lib/git-utils.d.ts +35 -0
- package/dist/lib/git-utils.d.ts.map +1 -1
- package/dist/lib/git-utils.js +69 -0
- package/dist/lib/provider-defaults.json +11 -1
- package/dist/lib/task-loop-execution.d.ts.map +1 -1
- package/dist/lib/task-loop-execution.js +5 -1
- package/dist/services/benchmark.d.ts +14 -0
- package/dist/services/benchmark.d.ts.map +1 -1
- package/dist/services/benchmark.js +325 -0
- package/dist/services/tasks.d.ts.map +1 -1
- package/dist/services/tasks.js +25 -15
- package/dist/services/workflow.d.ts +12 -0
- package/dist/services/workflow.d.ts.map +1 -1
- package/dist/services/workflow.js +20 -0
- package/dist/test/commands.test.js +10 -10
- package/dist/test/model-parsing.test.d.ts +2 -0
- package/dist/test/model-parsing.test.d.ts.map +1 -0
- package/dist/test/model-parsing.test.js +73 -0
- package/dist/types/cli-options.d.ts +2 -0
- package/dist/types/cli-options.d.ts.map +1 -1
- package/dist/types/index.d.ts +13 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/index.js +10 -0
- package/dist/types/workflow-options.d.ts +25 -0
- package/dist/types/workflow-options.d.ts.map +1 -1
- package/dist/utils/ai-operation-utility.d.ts.map +1 -1
- package/dist/utils/ai-operation-utility.js +26 -2
- package/dist/utils/metadata-utils.d.ts +1 -1
- package/dist/utils/streaming-utils.d.ts.map +1 -1
- package/dist/utils/streaming-utils.js +8 -0
- package/docs/agents/cli.md +19 -12
- package/package.json +1 -1
package/dist/commands/prd.js
CHANGED
```diff
@@ -1,43 +1,11 @@
 #!/usr/bin/env node
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-        desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.prdCommand = void 0;
+exports.parseModelString = parseModelString;
 const commander_1 = require("commander");
 const chalk_1 = __importDefault(require("chalk"));
 const inquirer_1 = __importDefault(require("inquirer"));
```
```diff
@@ -45,45 +13,103 @@ const fs_1 = require("fs");
 const prd_1 = require("../services/prd");
 const streaming_options_1 = require("../utils/streaming-options");
 const progress_1 = require("../cli/display/progress");
+const path_1 = require("path");
+const ai_parallel_1 = require("./utils/ai-parallel");
 exports.prdCommand = new commander_1.Command("prd").description("Manage PRDs and generate tasks");
-const …
-…
+const validation_1 = require("../lib/validation");
+const config_1 = require("../lib/config");
+const path_2 = __importDefault(require("path"));
+// Helper to parse model string ([provider:]model[;reasoning[=budget]])
 function parseModelString(modelStr) {
-    …
+    let processingStr = modelStr;
+    let reasoning;
+    // 1. Extract reasoning
+    // Format: ;reasoning or ;reasoning=1000
+    const reasoningMatch = processingStr.match(/;reasoning(?:=(\d+))?$/);
+    if (reasoningMatch) {
+        // If specific budget provided (group 1), use it.
+        // Otherwise default to "2000" as requested.
+        reasoning = reasoningMatch[1] ? reasoningMatch[1] : "2000";
+        // Remove the reasoning suffix from the string
+        processingStr = processingStr.substring(0, reasoningMatch.index);
+    }
+    // 2. Extract provider and model
+    // We look for the first colon.
+    const firstColonIndex = processingStr.indexOf(":");
+    if (firstColonIndex === -1) {
+        // No colon -> It's just a model name (provider inferred from env/defaults later)
+        return {
+            provider: undefined,
+            model: processingStr,
+            reasoning,
+        };
     }
-    …
+    // Has colon. Check if the part before is a valid provider.
+    const potentialProvider = processingStr.substring(0, firstColonIndex);
+    const potentialModel = processingStr.substring(firstColonIndex + 1);
+    if ((0, validation_1.isValidAIProvider)(potentialProvider)) {
+        // It is a known provider
+        return {
+            provider: potentialProvider,
+            model: potentialModel,
+            reasoning,
+        };
+    }
+    // Not a known provider. Treat the whole thing as the model name.
+    // This handles cases like "google/gemini...:free" where "google/gemini..." isn't a provider key.
+    // Or just "model:with:colons".
+    return {
+        provider: undefined,
+        model: processingStr,
+        reasoning,
+    };
 }
 // Create PRD command
 exports.prdCommand
     .command("create")
     .description("Generate PRD(s) from a product description")
     .argument("<description>", "Product description")
-    .option("--ai <…
+    .option("--ai <models...>", "AI model(s) to use. Format: [provider:]model[;reasoning[=budget]]. Example: openrouter:openai/gpt-5;reasoning=2000")
     .option("--combine-ai <provider:model>", "AI model to combine multiple PRDs into master PRD")
     .option("--output-dir <path>", "Directory to save PRDs", ".task-o-matic/prd")
+    .option("--ai-reasoning <tokens>", "Enable reasoning for OpenRouter models (max reasoning tokens)")
     .option("--stream", "Enable streaming output (only for single AI)")
     .action(async (description, options) => {
     try {
-        …
+        // Load configuration to get defaults
+        await config_1.configManager.load();
+        const aiConfig = config_1.configManager.getAIConfig();
+        let defaultModelStr = `${aiConfig.provider}:${aiConfig.model}`;
+        // Handle default reasoning configuration
+        if (aiConfig.reasoning) {
+            defaultModelStr += ";reasoning";
+            if (aiConfig.reasoning.maxTokens) {
+                defaultModelStr += `=${aiConfig.reasoning.maxTokens}`;
+            }
+        }
+        const cliModels = Array.isArray(options.ai)
+            ? options.ai
+            : options.ai
+                ? [options.ai]
+                : [];
+        // If CLI models provided, append them to default. If not, just use default.
+        // Logic: Default is ALWAYS included unless explicitly disabled (feature for later?)
+        // Current requirement: "I WANT BOTH"
+        const aiModels = [...new Set([defaultModelStr, ...cliModels])];
         const isSingleModel = aiModels.length === 1;
         // For single model, support streaming
-        if (isSingleModel) {
+        if (isSingleModel && !options.combineAi) {
             const modelConfig = parseModelString(aiModels[0]);
             const streamingOptions = (0, streaming_options_1.createStreamingOptions)(options.stream, "Generating PRD");
             const result = await prd_1.prdService.generatePRD({
                 description,
                 outputDir: options.outputDir,
-                filename: `prd-${modelConfig.provider}…
+                filename: `prd-${modelConfig.provider ? `${modelConfig.provider}-` : ""}${modelConfig.model.replace(/[:/]/g, "-")}.md`,
                 aiOptions: {
                     aiProvider: modelConfig.provider,
                     aiModel: modelConfig.model,
+                    // CLI flag overrides model string config
+                    aiReasoning: options.aiReasoning || modelConfig.reasoning,
                 },
                 streamingOptions,
                 callbacks: {
```
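The newly exported `parseModelString` helper is the pivot for the multi-model flags. A minimal sketch of what it returns for typical inputs, derived directly from the parsing logic above (the require path is hypothetical, and treating `openrouter` as a prefix that passes `isValidAIProvider` is an assumption):

```js
// Hypothetical calls against the helper exported above.
const { parseModelString } = require("task-o-matic/dist/commands/prd");

// Known provider prefix plus an explicit reasoning budget.
parseModelString("openrouter:openai/gpt-5;reasoning=2000");
// -> { provider: "openrouter", model: "openai/gpt-5", reasoning: "2000" }

// Bare ";reasoning" falls back to the default budget of "2000".
parseModelString("openai/gpt-5;reasoning");
// -> { provider: undefined, model: "openai/gpt-5", reasoning: "2000" }

// Colon present, but the prefix is not a known provider:
// the whole string is kept as the model name.
parseModelString("google/gemini-2.5-pro:free");
// -> { provider: undefined, model: "google/gemini-2.5-pro:free", reasoning: undefined }
```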
```diff
@@ -102,107 +128,56 @@ exports.prdCommand
             }
         }
         else {
-            // Multiple models - use …
-            …
-            const results = [];
-            // Print initial lines
-            aiModels.forEach((m, i) => {
-                modelMap.set(m, i);
-                modelStatus.set(m, "Waiting...");
-                console.log(chalk_1.default.dim(`- ${m}: Waiting...`));
-            });
-            const totalModels = aiModels.length;
-            // Generate PRDs concurrently
-            const promises = aiModels.map(async (modelStr) => {
-                const modelConfig = parseModelString(modelStr);
-                const index = modelMap.get(modelStr);
-                // Update status: Starting
-                const up = totalModels - index;
-                process.stdout.write(`\x1B[${up}A`);
-                process.stdout.write(`\x1B[2K`);
-                process.stdout.write(`- ${chalk_1.default.bold(modelStr)}: ${chalk_1.default.yellow("Starting...")}\r`);
-                process.stdout.write(`\x1B[${up}B`);
-                try {
-                    const result = await prd_1.prdService.generatePRD({
-                        description,
-                        outputDir: options.outputDir,
-                        filename: `prd-${modelConfig.provider}-${modelConfig.model.replace(/\//g, "-")}.md`,
-                        aiOptions: {
-                            aiProvider: modelConfig.provider,
-                            aiModel: modelConfig.model,
-                        },
-                        callbacks: {
-                            onProgress: (event) => {
-                                if (event.type === "progress") {
-                                    const up = totalModels - index;
-                                    process.stdout.write(`\x1B[${up}A`);
-                                    process.stdout.write(`\x1B[2K`);
-                                    process.stdout.write(`- ${chalk_1.default.bold(modelStr)}: ${chalk_1.default.blue(event.message)}\r`);
-                                    process.stdout.write(`\x1B[${up}B`);
-                                }
-                            },
-                        },
-                    });
-                    // Update status: Completed
-                    const up2 = totalModels - index;
-                    process.stdout.write(`\x1B[${up2}A`);
-                    process.stdout.write(`\x1B[2K`);
-                    process.stdout.write(`- ${chalk_1.default.bold(modelStr)}: ${chalk_1.default.green(`Completed (${result.stats.duration}ms)`)}\r`);
-                    process.stdout.write(`\x1B[${up2}B`);
-                    results.push({
-                        modelId: modelStr,
-                        path: result.path,
-                        stats: result.stats,
-                    });
-                    return result;
-                }
-                catch (error) {
-                    const up2 = totalModels - index;
-                    process.stdout.write(`\x1B[${up2}A`);
-                    process.stdout.write(`\x1B[2K`);
-                    process.stdout.write(`- ${chalk_1.default.bold(modelStr)}: ${chalk_1.default.red(`Failed: ${error instanceof Error ? error.message : String(error)}`)}\r`);
-                    process.stdout.write(`\x1B[${up2}B`);
-                    throw error;
-                }
-            });
-            await Promise.all(promises);
-            // Display summary
-            console.log(chalk_1.default.green(`\n✓ Generated ${results.length} PRDs\n`));
-            console.log(chalk_1.default.bold(`${"Model".padEnd(40)} | ${"Duration".padEnd(10)} | ${"TTFT".padEnd(10)} | ${"Tokens".padEnd(10)}`));
-            console.log("-".repeat(80));
-            results.forEach((r) => {
-                const duration = `${r.stats.duration}ms`;
-                const ttft = r.stats.timeToFirstToken
-                    ? `${r.stats.timeToFirstToken}ms`
-                    : "N/A";
-                const tokens = r.stats.tokenUsage
-                    ? r.stats.tokenUsage.total.toString()
-                    : "N/A";
-                console.log(`${r.modelId.padEnd(40)} | ${duration.padEnd(10)} | ${ttft.padEnd(10)} | ${tokens.padEnd(10)}`);
-            });
-            // Combine if requested
-            if (options.combineAi) {
-                console.log(chalk_1.default.blue("\nCombining PRDs into master PRD..."));
-                const combineModelConfig = parseModelString(options.combineAi);
-                const prdContents = results.map((r) => (0, fs_1.readFileSync)(r.path, "utf-8"));
-                const masterResult = await prd_1.prdService.combinePRDs({
-                    prds: prdContents,
-                    originalDescription: description,
+            // Multiple models or combine request - use parallel utility
+            const results = await (0, ai_parallel_1.runAIParallel)(aiModels, async (modelConfig, streamingOptions) => {
+                const result = await prd_1.prdService.generatePRD({
+                    description,
                     outputDir: options.outputDir,
-                    filename: "…
+                    filename: `prd-${modelConfig.provider ? `${modelConfig.provider}-` : ""}${modelConfig.model.replace(/[:/]/g, "-")}.md`,
                     aiOptions: {
-                        aiProvider: …
-                        aiModel: …
+                        aiProvider: modelConfig.provider,
+                        aiModel: modelConfig.model,
+                        aiReasoning: options.aiReasoning || modelConfig.reasoning,
                     },
+                    streamingOptions,
                     callbacks: {
-                        onProgress: …
-                        …
+                        onProgress: (event) => {
+                            /* handled by parallel util mostly, but could hook here */
+                        },
                     },
                 });
-                …
+                // Wrap in expected structure
+                return {
+                    data: result,
+                    stats: result.stats,
+                };
+            }, {
+                description: "Generating PRDs",
+                showSummary: true,
+            });
+            // Combine if requested
+            if (options.combineAi) {
+                const prdContents = results.map((r) => r.data.content);
+                // Helper to get reasoning from combine string if present
+                const combineModelConfig = parseModelString(options.combineAi);
+                const reasoning = options.aiReasoning || combineModelConfig.reasoning;
+                const masterPath = await (0, ai_parallel_1.combinePRDs)(prdContents, description, options.combineAi, options.stream, reasoning);
+                // Need to manually save since combinePRDs utility returns content string or path?
+                // Wait, the utility I wrote returns string (content).
+                // Actually, let's correct the utility usage or the utility itself.
+                // Looking at the utility: it calls `aiOperations.combinePRDs` which returns string (content).
+                // But `prdService.combinePRDs` (old code) saved the file.
+                // I should probably use `prdService.combinePRDs` INSIDE the utility or here?
+                // The utility uses `aiOperations`. So I get content back. I need to save it.
+                const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
+                const filename = "prd-master.md"; // Force master name or unique? Default to prd-master.md as per old code.
+                const savePath = path_2.default.join(options.outputDir, filename);
+                // Ensure dir exists
+                if (!(0, fs_1.existsSync)(options.outputDir)) {
+                    (0, fs_1.mkdirSync)(options.outputDir, { recursive: true });
+                }
+                (0, fs_1.writeFileSync)(savePath, masterPath); // masterPath is actually content here based on my utility return type
+                console.log(chalk_1.default.green(`\n✓ Master PRD created: ${savePath}`));
             }
         }
     }
```
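The hand-rolled ANSI cursor juggling on the left is replaced by the new `ai-parallel` utility, and both call sites in this diff imply its contract: a list of model strings, a worker that receives the parsed `modelConfig` plus per-model `streamingOptions` and returns `{ data, stats }`, and a display-options object. A sketch of a caller under those assumptions (the worker body and `doGenerate` are illustrative, not part of the package):

```js
const { runAIParallel } = require("task-o-matic/dist/commands/utils/ai-parallel");

async function generateAll(aiModels, description) {
    // Workers run concurrently; each returned `stats` object feeds the
    // shared per-model summary table printed by the utility.
    return runAIParallel(aiModels, async (modelConfig, streamingOptions) => {
        const result = await doGenerate(modelConfig, description); // hypothetical
        return { data: result, stats: result.stats };
    }, {
        description: "Generating PRDs",
        showSummary: true,
    });
}
```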
```diff
@@ -219,6 +194,7 @@ exports.prdCommand
     .requiredOption("--description <text>", "Original product description")
     .requiredOption("--ai <provider:model>", "AI model to use for combining")
     .option("--output <path>", "Output file path", "prd-master.md")
+    .option("--ai-reasoning <tokens>", "Enable reasoning for OpenRouter models (max reasoning tokens)")
     .option("--stream", "Enable streaming output")
     .action(async (options) => {
     try {
```
```diff
@@ -233,6 +209,7 @@ exports.prdCommand
         aiOptions: {
             aiProvider: modelConfig.provider,
             aiModel: modelConfig.model,
+            aiReasoning: options.aiReasoning || modelConfig.reasoning,
         },
         streamingOptions,
         callbacks: {
```
```diff
@@ -257,6 +234,8 @@ exports.prdCommand
     .command("parse")
     .description("Parse a PRD file into structured tasks")
     .requiredOption("--file <path>", "Path to PRD file")
+    .option("--ai <models...>", "AI model(s) to use. Format: [provider:]model[;reasoning[=budget]]")
+    .option("--combine-ai <provider:model>", "AI model to combine multiple parsed results")
     .option("--prompt <prompt>", "Override prompt")
     .option("--message <message>", "User message")
     .option("--ai-provider <provider>", "AI provider override")
```
```diff
@@ -271,51 +250,182 @@ exports.prdCommand
         // Determine working directory from current process location
         // Service layer should receive this explicitly, not use process.cwd()
         const workingDirectory = process.cwd();
-        …
-        const …
-        …
+        // Support multi-model if array of models provided in options.ai (added to command option below)
+        const cliModels = Array.isArray(options.ai)
+            ? options.ai
+            : options.ai
+                ? [options.ai]
+                : [];
+        // If just single model or no model specified (uses service default), use old flow OR simple parallel entry
+        // But wait, the parse command options define --ai-provider, --ai-model separately in old code.
+        // I need to update the command definition to support --ai like create/split.
+        // Let's assume I updated the options in the definition (will do in next chunk).
+        // If we have "ai" models list, we use parallel.
+        if (cliModels.length > 0 || options.combineAi) {
+            // Parallel execution
+            const modelsToUse = cliModels.length > 0
+                ? cliModels
+                : [`${options.aiProvider}:${options.aiModel}`];
+            const results = await (0, ai_parallel_1.runAIParallel)(modelsToUse, async (modelConfig, streamingOptions) => {
+                const result = await prd_1.prdService.parsePRD({
+                    file: options.file,
+                    workingDirectory,
+                    enableFilesystemTools: options.tools,
+                    aiOptions: {
+                        aiProvider: modelConfig.provider,
+                        aiModel: modelConfig.model,
+                        aiReasoning: options.aiReasoning || modelConfig.reasoning,
+                    },
+                    promptOverride: options.prompt,
+                    messageOverride: options.message,
+                    streamingOptions,
+                    callbacks: {
+                        onProgress: (event) => { },
+                    },
+                });
+                // Save intermediate result
+                const safeModel = (modelConfig.model || "")
+                    .replace(/[^a-z0-9]/gi, "-")
+                    .toLowerCase();
+                const filename = `tasks-${modelConfig.provider}-${safeModel}.json`;
+                const tasksDir = path_2.default.join(process.cwd(), ".task-o-matic", "tasks");
+                if (!(0, fs_1.existsSync)(tasksDir)) {
+                    (0, fs_1.mkdirSync)(tasksDir, { recursive: true });
+                }
+                const outputPath = path_2.default.join(tasksDir, filename);
+                try {
+                    (0, fs_1.writeFileSync)(outputPath, JSON.stringify(result.tasks, null, 2));
+                    // console.log(chalk.dim(`  Saved: ${filename}`)); // Too verbose for parallel output?
+                }
+                catch (e) {
+                    // ignore write error
+                }
+                return {
+                    data: result,
+                    stats: result.stats,
+                };
+            }, {
+                description: "Parsing PRD",
+                showSummary: true,
+            });
+            // Phase 2: Cleanup intermediate tasks.json to prevent duplicates
+            const tasksJsonPath = (0, path_1.join)(workingDirectory, ".task-o-matic", "tasks.json");
+            if ((0, fs_1.existsSync)(tasksJsonPath)) {
+                try {
+                    (0, fs_1.unlinkSync)(tasksJsonPath);
+                    // console.log(chalk.dim("Cleaned up intermediate tasks.json"));
+                }
+                catch (e) {
+                    console.warn(chalk_1.default.yellow("Warning: Failed to cleanup tasks.json"));
+                }
             }
-            …
+            if (options.combineAi && results.length > 0) {
+                const taskLists = results.map((r) => r.data.tasks);
+                const combineModelConfig = parseModelString(options.combineAi);
+                console.log(chalk_1.default.blue(`\nCombining ${taskLists.length} task lists with ${combineModelConfig.model}...`));
+                // Construct the message with drafts
+                let draftsMessage = "Here are draft task lists generated by multiple models. Please combine them into the best possible single list ensuring strict schema compliance:\n\n";
+                results.forEach((r, idx) => {
+                    // Try to identify model from earlier scope or just use index
+                    // We can reconstruct model string or just label "Model N"
+                    draftsMessage += `--- Model ${idx + 1} Draft ---\n${JSON.stringify(r.data.tasks, null, 2)}\n\n`;
+                });
+                // Phase 3: Service-Based Combination
+                // Calls prdService.parsePRD which will validate AND save the final tasks to the clean tasks.json
+                const result = await prd_1.prdService.parsePRD({
+                    file: options.file,
+                    workingDirectory,
+                    enableFilesystemTools: options.tools,
+                    aiOptions: {
+                        aiProvider: combineModelConfig.provider,
+                        aiModel: combineModelConfig.model,
+                        aiReasoning: options.aiReasoning || combineModelConfig.reasoning,
+                    },
+                    promptOverride: options.prompt, // Pass original prompt if any
+                    messageOverride: draftsMessage, // Inject drafts
+                    streamingOptions: (0, streaming_options_1.createStreamingOptions)(options.stream, "Combining"),
+                    callbacks: {
+                        onProgress: progress_1.displayProgress,
+                        onError: progress_1.displayError,
+                    },
+                });
+                console.log(chalk_1.default.green(`\n✓ Combined and saved ${result.tasks.length} tasks.`));
             }
-            if (…
-            …
+            else if (results.length > 0) {
+                // No combine requested, but we deleted tasks.json.
+                // We use the service to save the "best" (first) result to ensure IDs and dependencies are preserved.
+                // We do this by feeding the tasks back to the service as a "draft" to strictly follow.
+                const bestResult = results[0].data;
+                console.log(chalk_1.default.yellow("\n⚠️ Multiple models used without --combine-ai. Saving result from first model."));
+                const draftsMessage = `Here is the pre-generated task list. Please validate and save it exactly as is, preserving all IDs and dependencies:\n\n${JSON.stringify(bestResult.tasks, null, 2)}`;
+                await prd_1.prdService.parsePRD({
+                    file: options.file,
+                    workingDirectory,
+                    enableFilesystemTools: options.tools,
+                    aiOptions: {
+                        aiProvider: options.aiProvider, // Use default or overridden local options
+                        aiModel: options.aiModel,
+                        aiReasoning: options.aiReasoning,
+                    },
+                    promptOverride: options.prompt,
+                    messageOverride: draftsMessage,
+                    streamingOptions: (0, streaming_options_1.createStreamingOptions)(options.stream, "Saving"),
+                    callbacks: {
+                        onProgress: progress_1.displayProgress,
+                        onError: progress_1.displayError,
+                    },
+                });
+                console.log(chalk_1.default.green(`✓ Saved ${bestResult.tasks.length} tasks.`));
             }
+        }
+        else {
+            // Fallback to original single flow
+            const streamingOptions = (0, streaming_options_1.createStreamingOptions)(options.stream, "Parsing");
+            const result = await prd_1.prdService.parsePRD({
+                file: options.file,
+                workingDirectory, // Pass working directory explicitly to service
+                enableFilesystemTools: options.tools,
+                aiOptions: {
+                    aiProvider: options.aiProvider,
+                    aiModel: options.aiModel,
+                    aiKey: options.aiKey,
+                    aiProviderUrl: options.aiProviderUrl,
+                    aiReasoning: options.aiReasoning,
+                },
+                promptOverride: options.prompt,
+                messageOverride: options.message,
+                streamingOptions,
+                callbacks: {
+                    onProgress: progress_1.displayProgress,
+                    onError: progress_1.displayError,
+                },
+            });
             console.log("");
-            …
+            console.log(chalk_1.default.blue(`📊 PRD Parsing Summary:`));
+            console.log(chalk_1.default.cyan(`  Tasks created: ${result.stats.tasksCreated}`));
+            console.log(chalk_1.default.cyan(`  Duration: ${result.stats.duration}ms`));
+            console.log(chalk_1.default.cyan(`  AI Model: ${result.stats.aiModel}`));
+            console.log(chalk_1.default.blue("\n📋 Processing Steps:"));
+            result.steps.forEach((step) => {
+                const icon = step.status === "completed" ? "✓" : "✗";
+                console.log(`  ${icon} ${step.step} (${step.duration}ms)`);
+                if (step.details) {
+                    console.log(chalk_1.default.gray(`    ${JSON.stringify(step.details)}`));
+                }
+            });
+            // Show created tasks
+            console.log(chalk_1.default.blue("\n✨ Created Tasks:"));
+            result.tasks.forEach((task, index) => {
+                console.log(`${index + 1}. ${chalk_1.default.bold(task.title)} (${task.id})`);
+                if (task.description) {
+                    console.log(chalk_1.default.gray(`  ${task.description.substring(0, 100)}${task.description.length > 100 ? "..." : ""}`));
+                }
+                if (task.estimatedEffort) {
+                    console.log(chalk_1.default.cyan(`  Effort: ${task.estimatedEffort}`));
+                }
+                console.log("");
+            });
+        }
     }
     catch (error) {
         (0, progress_1.displayError)(error);
```
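Worth noting: the combine step has no dedicated merge API. Each model's draft task list is serialized into the user message and `prdService.parsePRD` runs once more with the `--combine-ai` model, so schema validation and persistence stay on the normal parse path. Condensed, the pattern from the hunk above looks like this (prompt text and the `drafts`/`prdFile`/`combine` names are illustrative stand-ins):

```js
// Draft-injection merge: route N candidate task lists back through the
// regular parse pipeline so one code path validates and saves tasks.
let draftsMessage =
    "Here are draft task lists generated by multiple models. " +
    "Please combine them into the best possible single list:\n\n";
drafts.forEach((tasks, idx) => {
    draftsMessage += `--- Model ${idx + 1} Draft ---\n${JSON.stringify(tasks, null, 2)}\n\n`;
});
const combined = await prdService.parsePRD({
    file: prdFile,                  // original PRD still supplies context
    messageOverride: draftsMessage, // drafts injected as the user message
    aiOptions: { aiProvider: combine.provider, aiModel: combine.model },
});
```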
```diff
@@ -413,8 +523,7 @@ exports.prdCommand
             },
         });
         const outputPath = options.output || "prd-questions.json";
-        …
-        fs.writeFileSync(outputPath, JSON.stringify({ questions }, null, 2));
+        (0, fs_1.writeFileSync)(outputPath, JSON.stringify({ questions }, null, 2));
         console.log("");
         console.log(chalk_1.default.green(`✓ Generated ${questions.length} questions`));
        console.log(chalk_1.default.cyan(`Saved to: ${outputPath}`));
```
```diff
@@ -447,12 +556,11 @@ exports.prdCommand
     .action(async (options) => {
     try {
         const workingDirectory = process.cwd();
-        const fs = await Promise.resolve().then(() => __importStar(require("fs")));
         let questions = [];
         // If questions file provided, load it
-        if (options.questions && …
+        if (options.questions && (0, fs_1.existsSync)(options.questions)) {
             console.log(chalk_1.default.blue(`Loading questions from ${options.questions}...`));
-            const content = …
+            const content = (0, fs_1.readFileSync)(options.questions, "utf-8");
             const data = JSON.parse(content);
             questions = data.questions || [];
         }
```
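The dropped `Promise.resolve().then(...)` line is part of a cleanup that runs through this whole diff: lazy `import("fs")` calls, which tsc compiles to promise-wrapped requires plus the now-deleted `__importStar` helper, give way to the file's top-level `fs_1` binding. Side by side (a sketch; `outputPath` and `data` stand in for the real arguments):

```js
// Before: per-call dynamic import of the fs module.
const fs = await Promise.resolve().then(() => __importStar(require("fs")));
fs.writeFileSync(outputPath, data);

// After: the named binding required once at the top of the file.
const fs_1 = require("fs");
(0, fs_1.writeFileSync)(outputPath, data);
```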
package/dist/commands/tasks/split.d.ts.map
CHANGED

```diff
@@ -1 +1 @@
-{"version":3,"file":"split.d.ts","sourceRoot":"","sources":["../../../src/commands/tasks/split.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;…
+{"version":3,"file":"split.d.ts","sourceRoot":"","sources":["../../../src/commands/tasks/split.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AA0BpC,eAAO,MAAM,YAAY,SA+RtB,CAAC"}
```