ado-sync 0.1.54 → 0.1.55
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/ai/generate-spec.d.ts +47 -0
- package/dist/ai/generate-spec.js +397 -0
- package/dist/ai/generate-spec.js.map +1 -0
- package/dist/ai/summarizer.d.ts +17 -8
- package/dist/ai/summarizer.js +170 -3
- package/dist/ai/summarizer.js.map +1 -1
- package/dist/azure/work-items.d.ts +7 -1
- package/dist/azure/work-items.js +42 -2
- package/dist/azure/work-items.js.map +1 -1
- package/dist/cli.js +58 -20
- package/dist/cli.js.map +1 -1
- package/dist/config.js +12 -0
- package/dist/config.js.map +1 -1
- package/dist/mcp-server.js +25 -2
- package/dist/mcp-server.js.map +1 -1
- package/dist/sync/generate.d.ts +12 -0
- package/dist/sync/generate.js +83 -3
- package/dist/sync/generate.js.map +1 -1
- package/dist/types.d.ts +12 -6
- package/docs/cli.md +44 -1
- package/docs/mcp-server.md +7 -2
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -41,7 +41,7 @@ Minimum config:
|
|
|
41
41
|
| `pull` | Azure Test Cases → local files |
|
|
42
42
|
| `status` | Show pending changes without modifying anything |
|
|
43
43
|
| `diff` | Field-level drift between local and Azure |
|
|
44
|
-
| `generate` | Scaffold spec files from ADO User Stories |
|
|
44
|
+
| `generate` | Scaffold spec files from ADO User Stories (AI-powered or template) |
|
|
45
45
|
| `publish-test-results` | Publish TRX / JUnit / Playwright / Cucumber results to a Test Run |
|
|
46
46
|
| `story-context` | Show AC, suggested tags, and linked TCs for a User Story |
|
|
47
47
|
| `coverage` | Spec link rate and story coverage report |
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
/**
 * AI-powered spec file generation from ADO User Stories.
 *
 * Providers:
 *   local       — node-llama-cpp GGUF model running in-process
 *   ollama      — local Ollama REST API (http://localhost:11434)
 *   openai      — OpenAI Chat Completions (or compatible endpoint)
 *   anthropic   — Anthropic Messages API
 *   huggingface — Hugging Face Inference API (OpenAI-compatible /v1 endpoint)
 *   bedrock     — AWS Bedrock (requires @aws-sdk/client-bedrock-runtime)
 *   azureai     — Azure OpenAI Service (OpenAI-compatible, api-key header)
 *
 * Output formats:
 *   markdown — Playwright-planner style spec (Feature/Role/Options/Scenario blocks)
 *   gherkin  — BDD .feature file with Scenario and Scenario Outline
 */
import { AdoStory } from '../azure/work-items';
export type AiGenerateProvider = 'local' | 'ollama' | 'openai' | 'anthropic' | 'huggingface' | 'bedrock' | 'azureai';
export interface AiGenerateOpts {
    provider: AiGenerateProvider;
    /**
     * Model identifier:
     *   local:       path to .gguf file
     *   ollama:      model tag (e.g. qwen2.5-coder:7b)
     *   openai:      model name (e.g. gpt-4o)
     *   anthropic:   model name (e.g. claude-sonnet-4-6)
     *   huggingface: model id (e.g. mistralai/Mistral-7B-Instruct-v0.3)
     *   bedrock:     model id (e.g. anthropic.claude-3-haiku-20240307-v1:0)
     *   azureai:     deployment name (e.g. gpt-4o)
     */
    model?: string;
    /** Base URL override (ollama, openai-compatible, azureai full endpoint). */
    baseUrl?: string;
    /** API key — or $ENV_VAR reference. */
    apiKey?: string;
    /** AWS region for bedrock (default: AWS_REGION env or us-east-1). */
    region?: string;
    /** Fall back to template output if the AI call fails. Default: true. */
    heuristicFallback?: boolean;
}
/** Target output flavor: `markdown` planner spec or Gherkin `.feature` file. */
export type GenerateSpecFormat = 'markdown' | 'gherkin';
/**
 * Generate a spec file (markdown or gherkin) for an ADO story using an AI provider.
 * Returns the raw file content as a string.
 * Falls back to null if generation fails and heuristicFallback is false (caller handles).
 */
export declare function generateSpecFromStory(story: AdoStory, format: GenerateSpecFormat, opts: AiGenerateOpts): Promise<string>;
|
|
@@ -0,0 +1,397 @@
|
|
|
1
|
+
"use strict";
/**
 * AI-powered spec file generation from ADO User Stories.
 *
 * Providers:
 *   local       — node-llama-cpp GGUF model running in-process
 *   ollama      — local Ollama REST API (http://localhost:11434)
 *   openai      — OpenAI Chat Completions (or compatible endpoint)
 *   anthropic   — Anthropic Messages API
 *   huggingface — Hugging Face Inference API (OpenAI-compatible /v1 endpoint)
 *   bedrock     — AWS Bedrock (requires @aws-sdk/client-bedrock-runtime)
 *   azureai     — Azure OpenAI Service (OpenAI-compatible, api-key header)
 *
 * Output formats:
 *   markdown — Playwright-planner style spec (Feature/Role/Options/Scenario blocks)
 *   gherkin  — BDD .feature file with Scenario and Scenario Outline
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateSpecFromStory = generateSpecFromStory;
// ─── Prompt templates ─────────────────────────────────────────────────────────
// The {TITLE}/{TYPE}/{STATE}/{DESCRIPTION}/{AC} placeholders are substituted
// with the story's fields by buildPrompt() before the prompt is sent to a
// provider. The template text is runtime behavior — edit with care.
const MARKDOWN_PROMPT = `You are a QA engineer writing markdown spec files for E2E testing.

Given the ADO user story below, generate a complete markdown spec in this exact format:

# {Short feature area title}

Feature: {Feature name}
Role: {user role, e.g. "user" or "admin"}
Options: {feature permission flags needed, or "none"}
Timeout: 180000
Tags: {comma-separated tags, always include "smoke" for the main scenario}

## Scenario: {Main happy-path scenario title}

Preconditions:
- {Required permission or system state}
- User is authenticated

- Given {initial page/context}
- When {primary user action}
- And {additional action if needed}
- Then {expected result}
- And {additional assertion if applicable}

## Scenario: {Edge case or secondary scenario title}

Preconditions:
- {Precondition}

- Given {initial context}
- When {action}
- Then {expected result}

Rules:
- Generate 2-4 scenarios covering the main happy path and key edge cases from the acceptance criteria
- Be specific: use real button names, field labels, and UI messages from the description
- Identify the minimum user permissions needed for the feature
- Output ONLY the markdown — no preamble, explanation, or code fences

Story details:
Title: {TITLE}
Work Item Type: {TYPE}
State: {STATE}

Description:
{DESCRIPTION}

Acceptance Criteria:
{AC}`;
// Gherkin variant; same placeholder contract as MARKDOWN_PROMPT above.
const GHERKIN_PROMPT = `You are a QA engineer writing BDD feature files in Gherkin syntax.

Given the ADO user story below, generate a complete Gherkin .feature file.

Requirements:
- Feature block with a short description
- 2-3 Scenario blocks for main happy-path and key negative cases
- 1 Scenario Outline with an Examples table if the AC mentions multiple values/inputs to test
- Place an @tc:0000 tag above each Scenario/Scenario Outline (placeholder for Azure TC ID)
- Steps must be specific: use real button names, field names, and expected messages
- Scenario Outline Examples table must have at least 2-3 rows
- Output ONLY the .feature content — no preamble, explanation, or code fences

Story details:
Title: {TITLE}
Work Item Type: {TYPE}
State: {STATE}

Description:
{DESCRIPTION}

Acceptance Criteria:
{AC}`;
|
|
93
|
+
/**
 * Fill a prompt template with the story's fields.
 *
 * @param story  ADO story with title / workItemType / state / description / acceptanceCriteria.
 * @param format 'gherkin' selects GHERKIN_PROMPT; anything else selects MARKDOWN_PROMPT.
 * @returns the fully substituted prompt string.
 */
function buildPrompt(story, format) {
    const template = format === 'gherkin' ? GHERKIN_PROMPT : MARKDOWN_PROMPT;
    // Fix: pass replacer *functions* so `$`-sequences inside story text
    // ($&, $', $$, $1 …) are inserted literally instead of being interpreted
    // as special replacement patterns by String.prototype.replace.
    return template
        .replace('{TITLE}', () => story.title)
        .replace('{TYPE}', () => story.workItemType ?? 'User Story')
        .replace('{STATE}', () => story.state ?? 'Active')
        .replace('{DESCRIPTION}', () => story.description?.trim() || '(not provided)')
        .replace('{AC}', () => story.acceptanceCriteria?.trim() || '(not provided)');
}
|
|
102
|
+
// ─── Shared fetch helper ──────────────────────────────────────────────────────
|
|
103
|
+
async function fetchWithRetry(url, init, provider, maxRetries = 3, baseDelayMs = 5_000) {
|
|
104
|
+
let attempt = 0;
|
|
105
|
+
while (true) {
|
|
106
|
+
const res = await fetch(url, init);
|
|
107
|
+
if (res.status === 503 || res.status === 429) {
|
|
108
|
+
if (attempt >= maxRetries)
|
|
109
|
+
return res;
|
|
110
|
+
const retryAfter = res.headers.get('retry-after');
|
|
111
|
+
const delayMs = retryAfter
|
|
112
|
+
? parseInt(retryAfter, 10) * 1_000
|
|
113
|
+
: baseDelayMs * Math.pow(2, attempt);
|
|
114
|
+
const reason = res.status === 503 ? 'model loading' : 'rate limited';
|
|
115
|
+
process.stderr.write(` [ai-generate] ${provider} ${reason} — retrying in ${Math.round(delayMs / 1000)}s (${attempt + 1}/${maxRetries})\n`);
|
|
116
|
+
await new Promise((r) => setTimeout(r, delayMs));
|
|
117
|
+
attempt++;
|
|
118
|
+
continue;
|
|
119
|
+
}
|
|
120
|
+
return res;
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
/**
 * Resolve "$NAME" strings to the NAME environment variable's value.
 * Plain strings — and references to unset variables — are returned unchanged.
 */
function resolveEnvVar(value) {
    if (!value.startsWith('$'))
        return value;
    const resolved = process.env[value.slice(1)];
    return resolved ?? value;
}
|
|
128
|
+
// Memoizes the loaded-model promise per GGUF path so concurrent and repeated
// calls share a single node-llama-cpp load.
const llamaCache = new Map();
/**
 * Load (once) a local GGUF model via node-llama-cpp and return the
 * LlamaChatSession constructor together with the loaded model.
 */
async function getLlamaSession(modelPath) {
    const cached = llamaCache.get(modelPath);
    if (cached)
        return cached;
    const loading = (async () => {
        // Indirect dynamic import so the CommonJS transpiler does not rewrite
        // import() into require() (node-llama-cpp ships as an ES module).
        // eslint-disable-next-line @typescript-eslint/no-implied-eval, @typescript-eslint/no-explicit-any
        const importEsm = new Function('specifier', 'return import(specifier)');
        const { getLlama, LlamaChatSession } = await importEsm('node-llama-cpp');
        const llama = await getLlama();
        const model = await llama.loadModel({ modelPath });
        return { LlamaChatSession, model };
    })();
    llamaCache.set(modelPath, loading);
    return loading;
}
|
|
144
|
+
/**
 * Run the prompt against an in-process GGUF model. A fresh context is created
 * per call and disposed afterwards (when the library exposes dispose()).
 */
async function localProvider(prompt, modelPath) {
    const { LlamaChatSession, model } = await getLlamaSession(modelPath);
    const ctx = await model.createContext();
    try {
        const chat = new LlamaChatSession({ contextSequence: ctx.getSequence() });
        return await chat.prompt(prompt);
    }
    finally {
        // Older node-llama-cpp builds may not have dispose() — guard first.
        if (typeof ctx.dispose === 'function') {
            await ctx.dispose();
        }
    }
}
|
|
156
|
+
/**
 * Call Ollama's non-streaming /api/generate endpoint and return the full
 * response text ('' when the response carries no `response` field).
 */
async function ollamaProvider(prompt, model, baseUrl) {
    const payload = { model, prompt, stream: false };
    const res = await fetchWithRetry(`${baseUrl}/api/generate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(payload),
        signal: AbortSignal.timeout(120_000),
    }, 'ollama');
    if (!res.ok)
        throw new Error(`Ollama ${res.status}: ${await res.text()}`);
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const data = await res.json();
    return data.response ?? '';
}
|
|
168
|
+
/**
 * Shared driver for OpenAI-compatible chat/completions endpoints.
 * When the caller supplies an 'api-key' header via extraHeaders, the Bearer
 * Authorization header is omitted; otherwise `Authorization: Bearer <apiKey>`
 * is sent.
 */
async function openaiProvider(prompt, model, apiKey, baseUrl, extraHeaders) {
    const requestHeaders = {
        'Content-Type': 'application/json',
        ...extraHeaders,
    };
    const hasApiKeyHeader = Boolean(extraHeaders?.['api-key']);
    if (!hasApiKeyHeader) {
        requestHeaders['Authorization'] = `Bearer ${apiKey}`;
    }
    const endpoint = `${baseUrl.replace(/\/$/, '')}/chat/completions`;
    const payload = {
        model,
        messages: [{ role: 'user', content: prompt }],
        temperature: 0.3,
        max_tokens: 4096,
    };
    const res = await fetchWithRetry(endpoint, {
        method: 'POST',
        headers: requestHeaders,
        body: JSON.stringify(payload),
        signal: AbortSignal.timeout(120_000),
    }, 'openai-compatible');
    if (!res.ok)
        throw new Error(`openai-compatible ${res.status}: ${await res.text()}`);
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const data = await res.json();
    return data.choices?.[0]?.message?.content ?? '';
}
|
|
192
|
+
/**
 * Call the Anthropic Messages API. Auth is via the x-api-key header plus a
 * pinned anthropic-version; returns the first content block's text.
 */
async function anthropicProvider(prompt, model, apiKey, baseUrl) {
    const endpoint = `${baseUrl.replace(/\/$/, '')}/messages`;
    const payload = {
        model,
        max_tokens: 4096,
        messages: [{ role: 'user', content: prompt }],
    };
    const res = await fetchWithRetry(endpoint, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-api-key': apiKey,
            'anthropic-version': '2023-06-01',
        },
        body: JSON.stringify(payload),
        signal: AbortSignal.timeout(120_000),
    }, 'anthropic');
    if (!res.ok)
        throw new Error(`anthropic ${res.status}: ${await res.text()}`);
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const data = await res.json();
    return data.content?.[0]?.text ?? '';
}
|
|
212
|
+
/**
 * Hugging Face Inference API — delegates to the shared OpenAI-compatible
 * driver via Hugging Face's /v1 endpoint.
 */
async function huggingfaceProvider(prompt, model, apiKey) {
    return openaiProvider(prompt, model, apiKey, 'https://api-inference.huggingface.co/v1');
}
|
|
217
|
+
// ─── Bedrock credential pre-flight ───────────────────────────────────────────
|
|
218
|
+
/**
 * Best-effort pre-flight: write a warning to stderr when none of the usual
 * AWS credential sources appear to be configured (env key pair, profile env
 * vars, or ~/.aws/credentials). Never throws.
 */
function warnIfNoBedrockCredentials() {
    const env = process.env;
    if (env['AWS_ACCESS_KEY_ID'] && env['AWS_SECRET_ACCESS_KEY'])
        return;
    if (env['AWS_PROFILE'] || env['AWS_DEFAULT_PROFILE'])
        return;
    try {
        const credentialsPath = require('path').join(require('os').homedir(), '.aws', 'credentials');
        require('fs').accessSync(credentialsPath);
        return; // shared credentials file exists
    }
    catch {
        // no credentials file — fall through to the warning
    }
    process.stderr.write(' [ai] Warning: No AWS credentials detected for bedrock provider.\n' +
        ' Set AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY, configure AWS_PROFILE,\n' +
        ' or ensure ~/.aws/credentials exists. The call will fail without credentials.\n');
}
|
|
233
|
+
/**
 * Wrap client.send() with exponential backoff on throttling-style failures
 * (ThrottlingException, ServiceUnavailableException, or errors marked
 * $retryable by the SDK). Non-retryable errors and the final failed attempt
 * are rethrown unchanged.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
async function bedrockInvokeWithRetry(client, command, maxRetries = 3, baseDelayMs = 5_000) {
    for (let attempt = 0;; attempt++) {
        try {
            return await client.send(command);
        }
        catch (err) {
            const errorName = err?.name ?? '';
            const retryable = errorName === 'ThrottlingException'
                || errorName === 'ServiceUnavailableException'
                || err?.$retryable;
            if (!retryable || attempt >= maxRetries)
                throw err;
            const delayMs = baseDelayMs * Math.pow(2, attempt);
            process.stderr.write(` [ai-generate] bedrock throttled — retrying in ${Math.round(delayMs / 1000)}s (${attempt + 1}/${maxRetries})\n`);
            await new Promise((resolve) => setTimeout(resolve, delayMs));
        }
    }
}
|
|
254
|
+
/**
 * Invoke an AWS Bedrock model with the prompt and return the generated text.
 *
 * The request body format is chosen from the model id (Claude / Titan /
 * Nova / Llama each have their own schema; anything else gets an
 * OpenAI-style messages payload), and the response is probed for each
 * family's text field in turn.
 *
 * Requires @aws-sdk/client-bedrock-runtime to be installed by the consumer;
 * it is loaded lazily via dynamic import so it stays an optional dependency.
 */
async function bedrockProvider(prompt, model, region) {
    warnIfNoBedrockCredentials();
    // Dynamic import — requires @aws-sdk/client-bedrock-runtime to be installed
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    let bedrockModule;
    try {
        // Indirect import keeps the CJS transpiler from rewriting import() to require().
        // eslint-disable-next-line @typescript-eslint/no-implied-eval, @typescript-eslint/no-explicit-any
        const esmImport = new Function('m', 'return import(m)');
        bedrockModule = await esmImport('@aws-sdk/client-bedrock-runtime');
    }
    catch {
        throw new Error('AWS Bedrock requires @aws-sdk/client-bedrock-runtime. Install it with: npm install @aws-sdk/client-bedrock-runtime');
    }
    const { BedrockRuntimeClient, InvokeModelCommand } = bedrockModule;
    const client = new BedrockRuntimeClient({ region });
    // Support Claude models and Amazon Titan/Nova; default to Anthropic Claude format
    const isClaudeModel = /anthropic\.claude/.test(model);
    const isTitanModel = /amazon\.titan/.test(model);
    const isNovaModel = /amazon\.nova/.test(model);
    const isLlamaModel = /meta\.llama/.test(model);
    let body;
    if (isClaudeModel) {
        // Anthropic-on-Bedrock messages schema (pinned bedrock API version).
        body = JSON.stringify({
            anthropic_version: 'bedrock-2023-05-31',
            max_tokens: 4096,
            messages: [{ role: 'user', content: prompt }],
        });
    }
    else if (isTitanModel) {
        // Titan uses a flat inputText + textGenerationConfig schema.
        body = JSON.stringify({
            inputText: prompt,
            textGenerationConfig: { maxTokenCount: 4096, temperature: 0.3 },
        });
    }
    else if (isNovaModel || isLlamaModel) {
        // Nova/Llama: content is an array of typed parts, config under inferenceConfig.
        body = JSON.stringify({
            messages: [{ role: 'user', content: [{ text: prompt }] }],
            inferenceConfig: { max_new_tokens: 4096, temperature: 0.3 },
        });
    }
    else {
        // Generic: try OpenAI-compatible messages format
        body = JSON.stringify({
            messages: [{ role: 'user', content: prompt }],
            max_tokens: 4096,
        });
    }
    const response = await bedrockInvokeWithRetry(client, new InvokeModelCommand({
        modelId: model,
        body: new TextEncoder().encode(body),
        contentType: 'application/json',
        accept: 'application/json',
    }));
    const responseText = new TextDecoder().decode(response.body);
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const data = JSON.parse(responseText);
    // Handle different response shapes
    return (data.content?.[0]?.text ?? // Anthropic Claude
        data.results?.[0]?.outputText ?? // Amazon Titan
        data.output?.message?.content?.[0]?.text ?? // Amazon Nova
        data.generation ?? // Meta Llama
        '');
}
|
|
317
|
+
/**
 * Call Azure OpenAI Service. Azure authenticates with an 'api-key' header
 * instead of a Bearer token. baseUrl may be either:
 *   - the full chat/completions URL (api-version included), or
 *   - just the resource root, in which case the deployment path and a
 *     pinned api-version are appended here.
 */
async function azureaiProvider(prompt, model, apiKey, baseUrl) {
    let endpoint = baseUrl.replace(/\/$/, '');
    if (!endpoint.includes('/chat/completions')) {
        endpoint += `/openai/deployments/${model}/chat/completions?api-version=2024-12-01-preview`;
    }
    const payload = {
        messages: [{ role: 'user', content: prompt }],
        temperature: 0.3,
        max_tokens: 4096,
    };
    const res = await fetchWithRetry(endpoint, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'api-key': apiKey,
        },
        body: JSON.stringify(payload),
        signal: AbortSignal.timeout(120_000),
    }, 'azureai');
    if (!res.ok)
        throw new Error(`Azure AI ${res.status}: ${await res.text()}`);
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const data = await res.json();
    return data.choices?.[0]?.message?.content ?? '';
}
|
|
344
|
+
// ─── Main entry point ─────────────────────────────────────────────────────────
/**
 * Generate a spec file (markdown or gherkin) for an ADO story using an AI provider.
 * Returns the raw file content as a string.
 *
 * On provider failure: when opts.heuristicFallback is true (the default) an
 * empty string is returned — the caller then falls back to template output;
 * when false, the provider error is rethrown.
 */
async function generateSpecFromStory(story, format, opts) {
    const prompt = buildPrompt(story, format);
    const heuristicFallback = opts.heuristicFallback ?? true;
    const run = async () => {
        switch (opts.provider) {
            case 'local': {
                if (!opts.model)
                    throw new Error('local provider requires --ai-model <path/to/model.gguf>');
                return localProvider(prompt, opts.model);
            }
            case 'ollama': {
                return ollamaProvider(prompt, opts.model ?? 'qwen2.5-coder:7b', opts.baseUrl ?? 'http://localhost:11434');
            }
            case 'openai': {
                return openaiProvider(prompt, opts.model ?? 'gpt-4o', resolveEnvVar(opts.apiKey ?? ''), opts.baseUrl ?? 'https://api.openai.com/v1');
            }
            case 'anthropic': {
                return anthropicProvider(prompt, opts.model ?? 'claude-sonnet-4-6', resolveEnvVar(opts.apiKey ?? ''), opts.baseUrl ?? 'https://api.anthropic.com/v1');
            }
            case 'huggingface': {
                if (!opts.model)
                    throw new Error('huggingface provider requires --ai-model <model-id>');
                return huggingfaceProvider(prompt, opts.model, resolveEnvVar(opts.apiKey ?? ''));
            }
            case 'bedrock': {
                return bedrockProvider(prompt, opts.model ?? 'anthropic.claude-3-haiku-20240307-v1:0', opts.region ?? process.env['AWS_REGION'] ?? 'us-east-1');
            }
            case 'azureai': {
                if (!opts.baseUrl)
                    throw new Error('azureai provider requires --ai-url <azure-endpoint>');
                return azureaiProvider(prompt, opts.model ?? 'gpt-4o', resolveEnvVar(opts.apiKey ?? ''), opts.baseUrl);
            }
            default: {
                // Fix: without this default an unrecognized provider made run()
                // resolve to undefined, so result.trim() below threw an opaque
                // TypeError instead of a descriptive error message.
                throw new Error(`Unknown AI provider: ${opts.provider}`);
            }
        }
    };
    try {
        const result = await run();
        return result.trim();
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        if (heuristicFallback) {
            process.stderr.write(` [ai-generate] ${opts.provider} failed (${msg}), using template output\n`);
            return ''; // caller falls back to template
        }
        throw err;
    }
}
|
|
397
|
+
//# sourceMappingURL=generate-spec.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"generate-spec.js","sourceRoot":"","sources":["../../src/ai/generate-spec.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;AA8bH,sDA2EC;AAjeD,iFAAiF;AAEjF,MAAM,eAAe,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;KAgDnB,CAAC;AAEN,MAAM,cAAc,GAAG;;;;;;;;;;;;;;;;;;;;;;KAsBlB,CAAC;AAEN,SAAS,WAAW,CAAC,KAAe,EAAE,MAA0B;IAC9D,MAAM,QAAQ,GAAG,MAAM,KAAK,SAAS,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,eAAe,CAAC;IACzE,OAAO,QAAQ;SACZ,OAAO,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC;SAC/B,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,YAAY,IAAI,YAAY,CAAC;SACrD,OAAO,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,IAAI,QAAQ,CAAC;SAC3C,OAAO,CAAC,eAAe,EAAE,KAAK,CAAC,WAAW,EAAE,IAAI,EAAE,IAAI,gBAAgB,CAAC;SACvE,OAAO,CAAC,MAAM,EAAE,KAAK,CAAC,kBAAkB,EAAE,IAAI,EAAE,IAAI,gBAAgB,CAAC,CAAC;AAC3E,CAAC;AAED,iFAAiF;AAEjF,KAAK,UAAU,cAAc,CAC3B,GAAW,EACX,IAAiB,EACjB,QAAgB,EAChB,UAAU,GAAG,CAAC,EACd,WAAW,GAAG,KAAK;IAEnB,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,OAAO,IAAI,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACnC,IAAI,GAAG,CAAC,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;YAC7C,IAAI,OAAO,IAAI,UAAU;gBAAE,OAAO,GAAG,CAAC;YACtC,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;YAClD,MAAM,OAAO,GAAG,UAAU;gBACxB,CAAC,CAAC,QAAQ,CAAC,UAAU,EAAE,EAAE,CAAC,GAAG,KAAK;gBAClC,CAAC,CAAC,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;YACvC,MAAM,MAAM,GAAG,GAAG,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,cAAc,CAAC;YACrE,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,mBAAmB,QAAQ,IAAI,MAAM,kBAAkB,IAAI,CAAC,KAAK,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,OAAO,GAAG,CAAC,IAAI,UAAU,KAAK,CACtH,CAAC;YACF,MAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC;YACjD,OAAO,EAAE,CAAC;YACV,SAAS;QACX,CAAC;QACD,OAAO,GAAG,CAAC;IACb,CAAC;AACH,CAAC;AAED,SAAS,aAAa,CAAC,KAAa;IAClC,IAAI,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC;QAAE,OAAO,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;IACvE,OAAO,KAAK,CAAC;AACf,CAAC;AAWD,MAAM,UAAU,GAAG,IAAI,GAAG,EAAiC,CAAC;AAE5D,KAAK,UAAU,eAAe,CAAC,SAAiB;IAC9C,
IAAI,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC;QAAE,OAAO,UAAU,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;IACjE,MAAM,OAAO,GAAG,CAAC,KAAK,IAA2B,EAAE;QACjD,kGAAkG;QAClG,MAAM,SAAS,GAAG,IAAI,QAAQ,CAAC,GAAG,EAAE,kBAAkB,CAAgC,CAAC;QACvF,MAAM,WAAW,GAAG,MAAM,SAAS,CAAC,gBAAgB,CAAC,CAAC;QACtD,MAAM,EAAE,QAAQ,EAAE,gBAAgB,EAAE,GAAG,WAAW,CAAC;QACnD,MAAM,KAAK,GAAG,MAAM,QAAQ,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAG,MAAM,KAAK,CAAC,SAAS,CAAC,EAAE,SAAS,EAAE,CAAC,CAAC;QACnD,OAAO,EAAE,gBAAgB,EAAE,KAAK,EAAE,CAAC;IACrC,CAAC,CAAC,EAAE,CAAC;IACL,UAAU,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IACnC,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,KAAK,UAAU,aAAa,CAAC,MAAc,EAAE,SAAiB;IAC5D,MAAM,EAAE,gBAAgB,EAAE,KAAK,EAAE,GAAG,MAAM,eAAe,CAAC,SAAS,CAAC,CAAC;IACrE,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,aAAa,EAAE,CAAC;IAC5C,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC,EAAE,eAAe,EAAE,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC;QACjF,OAAO,MAAM,OAAO,CAAC,MAAM,CAAC,MAAM,CAAW,CAAC;IAChD,CAAC;YAAS,CAAC;QACT,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,UAAU;YAAE,MAAM,OAAO,CAAC,OAAO,EAAE,CAAC;IACrE,CAAC;AACH,CAAC;AAED,KAAK,UAAU,cAAc,CAAC,MAAc,EAAE,KAAa,EAAE,OAAe;IAC1E,MAAM,GAAG,GAAG,MAAM,cAAc,CAC9B,GAAG,OAAO,eAAe,EACzB;QACE,MAAM,EAAE,MAAM;QACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;QAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC;QACtD,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC;KACrC,EACD,QAAQ,CACT,CAAC;IACF,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,UAAU,GAAG,CAAC,MAAM,KAAK,MAAM,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;IAC1E,8DAA8D;IAC9D,OAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,CAAS,CAAC,QAAQ,IAAI,EAAE,CAAC;AACpD,CAAC;AAED,KAAK,UAAU,cAAc,CAC3B,MAAc,EACd,KAAa,EACb,MAAc,EACd,OAAe,EACf,YAAqC;IAErC,MAAM,OAAO,GAA2B;QACtC,cAAc,EAAE,kBAAkB;QAClC,GAAG,YAAY;KAChB,CAAC;IACF,IAAI,CAAC,YAAY,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC;QAC/B,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,MAAM,EAAE,CAAC;IAChD,CAAC;IACD,MAAM,GAAG,GAAG,MAAM,cAAc,CAC9B,GAAG,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,mBAAmB,EAChD;QACE,MAAM,EAAE,MAAM;QACd,OAAO;QACP,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;YACnB,KAAK;YACL,QAAQ,EAAE,CAAC,EAAE,IA
AI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;YAC7C,WAAW,EAAE,GAAG;YAChB,UAAU,EAAE,IAAI;SACjB,CAAC;QACF,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC;KACrC,EACD,mBAAmB,CACpB,CAAC;IACF,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,qBAAqB,GAAG,CAAC,MAAM,KAAK,MAAM,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;IACrF,8DAA8D;IAC9D,OAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,CAAS,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;AAC1E,CAAC;AAED,KAAK,UAAU,iBAAiB,CAC9B,MAAc,EACd,KAAa,EACb,MAAc,EACd,OAAe;IAEf,MAAM,GAAG,GAAG,MAAM,cAAc,CAC9B,GAAG,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,WAAW,EACxC;QACE,MAAM,EAAE,MAAM;QACd,OAAO,EAAE;YACP,cAAc,EAAE,kBAAkB;YAClC,WAAW,EAAE,MAAM;YACnB,mBAAmB,EAAE,YAAY;SAClC;QACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;YACnB,KAAK;YACL,UAAU,EAAE,IAAI;YAChB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;SAC9C,CAAC;QACF,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC;KACrC,EACD,WAAW,CACZ,CAAC;IACF,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,aAAa,GAAG,CAAC,MAAM,KAAK,MAAM,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;IAC7E,8DAA8D;IAC9D,OAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,CAAS,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,IAAI,EAAE,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,mBAAmB,CAChC,MAAc,EACd,KAAa,EACb,MAAc;IAEd,qCAAqC;IACrC,MAAM,OAAO,GAAG,yCAAyC,CAAC;IAC1D,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;AACxD,CAAC;AAED,gFAAgF;AAEhF,SAAS,0BAA0B;IACjC,MAAM,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC,CAAC;IAC5F,MAAM,UAAU,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC,CAAC;IACxF,IAAI,MAAM,IAAI,UAAU;QAAE,OAAO;IACjC,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,EAAE,MAAM,EAAE,aAAa,CAAC,CAAC;QACtF,OAAO,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;QACnC,OAAO;IACT,CAAC;IAAC,MAAM,CAAC,CAAC,eAAe,CAAC,CAAC;IAC3B,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,qEAAqE;QACrE,2EAA2E;QAC3E,kFAAkF,CACnF,CAAC;AACJ,CAAC;AAED,8DAA8D;AAC9D,KAAK,UAAU,sBAAsB,CAAC
,MAAW,EAAE,OAAY,EAAE,UAAU,GAAG,CAAC,EAAE,WAAW,GAAG,KAAK;IAClG,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,OAAO,IAAI,EAAE,CAAC;QACZ,IAAI,CAAC;YACH,OAAO,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACpC,CAAC;QAAC,OAAO,GAAY,EAAE,CAAC;YACtB,MAAM,IAAI,GAAI,GAAW,EAAE,IAAI,IAAI,EAAE,CAAC;YACtC,MAAM,UAAU,GAAG,IAAI,KAAK,qBAAqB,IAAI,IAAI,KAAK,6BAA6B,IAAK,GAAW,EAAE,UAAU,CAAC;YACxH,IAAI,UAAU,IAAI,OAAO,GAAG,UAAU,EAAE,CAAC;gBACvC,MAAM,KAAK,GAAG,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;gBACjD,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,mDAAmD,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,MAAM,OAAO,GAAG,CAAC,IAAI,UAAU,KAAK,CAChH,CAAC;gBACF,MAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC;gBAC/C,OAAO,EAAE,CAAC;gBACV,SAAS;YACX,CAAC;YACD,MAAM,GAAG,CAAC;QACZ,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,UAAU,eAAe,CAC5B,MAAc,EACd,KAAa,EACb,MAAc;IAEd,0BAA0B,EAAE,CAAC;IAE7B,4EAA4E;IAC5E,8DAA8D;IAC9D,IAAI,aAAkB,CAAC;IACvB,IAAI,CAAC;QACH,kGAAkG;QAClG,MAAM,SAAS,GAAG,IAAI,QAAQ,CAAC,GAAG,EAAE,kBAAkB,CAAgC,CAAC;QACvF,aAAa,GAAG,MAAM,SAAS,CAAC,iCAAiC,CAAC,CAAC;IACrE,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CACb,oHAAoH,CACrH,CAAC;IACJ,CAAC;IAED,MAAM,EAAE,oBAAoB,EAAE,kBAAkB,EAAE,GAAG,aAAa,CAAC;IACnE,MAAM,MAAM,GAAG,IAAI,oBAAoB,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IAEpD,kFAAkF;IAClF,MAAM,aAAa,GAAG,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACjD,MAAM,WAAW,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/C,MAAM,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAE/C,IAAI,IAAY,CAAC;IACjB,IAAI,aAAa,EAAE,CAAC;QAClB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,iBAAiB,EAAE,oBAAoB;YACvC,UAAU,EAAE,IAAI;YAChB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;SAC9C,CAAC,CAAC;IACL,CAAC;SAAM,IAAI,YAAY,EAAE,CAAC;QACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,SAAS,EAAE,MAAM;YACjB,oBAAoB,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,WAAW,EAAE,GAAG,EAAE;SAChE,CAAC,CAAC;IACL,CAAC;SAAM,IAAI,WAAW,IAAI,YAAY,EAAE,CAAC;QACvC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OA
AO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC;YACzD,eAAe,EAAE,EAAE,cAAc,EAAE,IAAI,EAAE,WAAW,EAAE,GAAG,EAAE;SAC5D,CAAC,CAAC;IACL,CAAC;SAAM,CAAC;QACN,iDAAiD;QACjD,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;YAC7C,UAAU,EAAE,IAAI;SACjB,CAAC,CAAC;IACL,CAAC;IAED,MAAM,QAAQ,GAAG,MAAM,sBAAsB,CAAC,MAAM,EAAE,IAAI,kBAAkB,CAAC;QAC3E,OAAO,EAAE,KAAK;QACd,IAAI,EAAE,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;QACpC,WAAW,EAAE,kBAAkB;QAC/B,MAAM,EAAE,kBAAkB;KAC3B,CAAC,CAAC,CAAC;IACJ,MAAM,YAAY,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC7D,8DAA8D;IAC9D,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAQ,CAAC;IAE7C,mCAAmC;IACnC,OAAO,CACL,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,IAAc,mBAAmB;QACxD,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,UAAU,IAAQ,eAAe;QACpD,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,IAAI,cAAc;QAC1D,IAAI,CAAC,UAAU,IAAsB,aAAa;QAClD,EAAE,CACH,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,eAAe,CAC5B,MAAc,EACd,KAAa,EACb,MAAc,EACd,OAAe;IAEf,oEAAoE;IACpE,uCAAuC;IACvC,sHAAsH;IACtH,gFAAgF;IAChF,IAAI,GAAG,GAAG,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;IACrC,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE,CAAC;QACvC,GAAG,GAAG,GAAG,GAAG,uBAAuB,KAAK,kDAAkD,CAAC;IAC7F,CAAC;IAED,MAAM,GAAG,GAAG,MAAM,cAAc,CAC9B,GAAG,EACH;QACE,MAAM,EAAE,MAAM;QACd,OAAO,EAAE;YACP,cAAc,EAAE,kBAAkB;YAClC,SAAS,EAAE,MAAM;SAClB;QACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;YACnB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;YAC7C,WAAW,EAAE,GAAG;YAChB,UAAU,EAAE,IAAI;SACjB,CAAC;QACF,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC;KACrC,EACD,SAAS,CACV,CAAC;IACF,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,YAAY,GAAG,CAAC,MAAM,KAAK,MAAM,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;IAC5E,8DAA8D;IAC9D,OAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,CAAS,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;AAC1E,CAAC;AAED,iFAAiF;AAEjF;;;;GAIG;AACI,KAAK,UAAU,qBAAqB,CACzC,KAAe,EACf,MAA0B,EAC1B,IAAoB;IAEpB,MAAM,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,MAAM,CA
AC,CAAC;IAC1C,MAAM,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC;IAEzD,MAAM,GAAG,GAAG,KAAK,IAAqB,EAAE;QACtC,QAAQ,IAAI,CAAC,QAAQ,EAAE,CAAC;YACtB,KAAK,OAAO,CAAC,CAAC,CAAC;gBACb,IAAI,CAAC,IAAI,CAAC,KAAK;oBAAE,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;gBAC5F,OAAO,aAAa,CAAC,MAAM,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;YAC3C,CAAC;YACD,KAAK,QAAQ,CAAC,CAAC,CAAC;gBACd,OAAO,cAAc,CACnB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,kBAAkB,EAChC,IAAI,CAAC,OAAO,IAAI,wBAAwB,CACzC,CAAC;YACJ,CAAC;YACD,KAAK,QAAQ,CAAC,CAAC,CAAC;gBACd,OAAO,cAAc,CACnB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,QAAQ,EACtB,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,EAChC,IAAI,CAAC,OAAO,IAAI,2BAA2B,CAC5C,CAAC;YACJ,CAAC;YACD,KAAK,WAAW,CAAC,CAAC,CAAC;gBACjB,OAAO,iBAAiB,CACtB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,mBAAmB,EACjC,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,EAChC,IAAI,CAAC,OAAO,IAAI,8BAA8B,CAC/C,CAAC;YACJ,CAAC;YACD,KAAK,aAAa,CAAC,CAAC,CAAC;gBACnB,IAAI,CAAC,IAAI,CAAC,KAAK;oBAAE,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;gBACxF,OAAO,mBAAmB,CACxB,MAAM,EACN,IAAI,CAAC,KAAK,EACV,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,CACjC,CAAC;YACJ,CAAC;YACD,KAAK,SAAS,CAAC,CAAC,CAAC;gBACf,OAAO,eAAe,CACpB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,wCAAwC,EACtD,IAAI,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,WAAW,CACxD,CAAC;YACJ,CAAC;YACD,KAAK,SAAS,CAAC,CAAC,CAAC;gBACf,IAAI,CAAC,IAAI,CAAC,OAAO;oBAAE,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;gBAC1F,OAAO,eAAe,CACpB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,QAAQ,EACtB,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,EAChC,IAAI,CAAC,OAAO,CACb,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC,CAAC;IAEF,IAAI,CAAC;QACH,MAAM,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC;QAC3B,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;IACvB,CAAC;IAAC,OAAO,GAAY,EAAE,CAAC;QACtB,MAAM,GAAG,GAAG,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAC7D,IAAI,iBAAiB,EAAE,CAAC;YACtB,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,mBAAmB,IAAI,CAAC,QAAQ,YAAY,GAAG,4BAA4B,CAAC,CAAC;YAClG,OAAO,EAAE,CAAC,CAAC,gCAAgC;QAC7C,CAAC;QACD,MAAM,GAAG,CAAC;IACZ,CAAC;AACH,CAAC"}
|
package/dist/ai/summarizer.d.ts
CHANGED
|
@@ -29,21 +29,25 @@
|
|
|
29
29
|
* test.steps = steps;
|
|
30
30
|
*/
|
|
31
31
|
import { ParsedStep, ParsedTest } from '../types';
|
|
32
|
-
export type AiProvider = 'heuristic' | 'local' | 'ollama' | 'openai' | 'anthropic';
|
|
32
|
+
export type AiProvider = 'heuristic' | 'local' | 'ollama' | 'openai' | 'anthropic' | 'huggingface' | 'bedrock' | 'azureai';
|
|
33
33
|
export interface AiSummaryOpts {
|
|
34
34
|
provider: AiProvider;
|
|
35
35
|
/**
|
|
36
|
-
* For `local`:
|
|
37
|
-
*
|
|
38
|
-
* For `
|
|
39
|
-
* For `
|
|
40
|
-
* For `
|
|
36
|
+
* For `local`: absolute path to a GGUF model file
|
|
37
|
+
* For `ollama`: model tag, e.g. qwen2.5-coder:7b
|
|
38
|
+
* For `openai`: model name, e.g. gpt-4o-mini
|
|
39
|
+
* For `anthropic`: model name, e.g. claude-haiku-4-5-20251001
|
|
40
|
+
* For `huggingface`: model id, e.g. mistralai/Mistral-7B-Instruct-v0.3
|
|
41
|
+
* For `bedrock`: model id, e.g. anthropic.claude-3-haiku-20240307-v1:0
|
|
42
|
+
* For `azureai`: deployment name, e.g. gpt-4o
|
|
41
43
|
*/
|
|
42
44
|
model?: string;
|
|
43
|
-
/** Base URL for Ollama (default: http://localhost:11434)
|
|
45
|
+
/** Base URL for Ollama (default: http://localhost:11434), OpenAI-compatible endpoint, or Azure OpenAI full endpoint. */
|
|
44
46
|
baseUrl?: string;
|
|
45
|
-
/** API key for openai / anthropic — or $ENV_VAR reference. */
|
|
47
|
+
/** API key for openai / anthropic / huggingface / azureai — or $ENV_VAR reference. */
|
|
46
48
|
apiKey?: string;
|
|
49
|
+
/** AWS region for bedrock (default: AWS_REGION env or us-east-1). */
|
|
50
|
+
region?: string;
|
|
47
51
|
/** Fall back to heuristic if the LLM call fails. Default: true. */
|
|
48
52
|
heuristicFallback?: boolean;
|
|
49
53
|
/**
|
|
@@ -66,6 +70,11 @@ export declare function heuristicSummary(body: string, fallbackTitle: string): {
|
|
|
66
70
|
description: string;
|
|
67
71
|
steps: ParsedStep[];
|
|
68
72
|
};
|
|
73
|
+
/**
|
|
74
|
+
* Warn once if no AWS credentials appear to be configured.
|
|
75
|
+
* Checks env vars and ~/.aws/credentials existence — does not validate them.
|
|
76
|
+
*/
|
|
77
|
+
export declare function warnIfNoBedrockCredentials(): void;
|
|
69
78
|
/**
|
|
70
79
|
* Use AI to analyze a test failure and return a structured root-cause summary.
|
|
71
80
|
* The result can be added as a comment on the Azure test result.
|