ado-sync 0.1.54 → 0.1.56

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -41,7 +41,7 @@ Minimum config:
41
41
  | `pull` | Azure Test Cases → local files |
42
42
  | `status` | Show pending changes without modifying anything |
43
43
  | `diff` | Field-level drift between local and Azure |
44
- | `generate` | Scaffold spec files from ADO User Stories |
44
+ | `generate` | Scaffold spec files from ADO User Stories (AI-powered or template) |
45
45
  | `publish-test-results` | Publish TRX / JUnit / Playwright / Cucumber results to a Test Run |
46
46
  | `story-context` | Show AC, suggested tags, and linked TCs for a User Story |
47
47
  | `coverage` | Spec link rate and story coverage report |
@@ -85,4 +85,43 @@ Minimum config:
85
85
  | VS Code Extension | [docs/vscode-extension.md](docs/vscode-extension.md) |
86
86
  | Troubleshooting | [docs/troubleshooting.md](docs/troubleshooting.md) |
87
87
 
88
+ ---
89
+
90
+ ## AI providers
91
+
92
+ ado-sync supports multiple AI providers for test-step summarisation (`push`/`pull`/`status`), spec generation (`generate`), and failure analysis (`publish-test-results`). All provider SDKs are **optional** — install only what you need.
93
+
94
+ | Provider | Commands | SDK (install separately) | Auth |
95
+ |---|---|---|---|
96
+ | `heuristic` | push / pull / status | none — built-in | none |
97
+ | `local` | push / pull / status | `node-llama-cpp` *(included)* | GGUF model file path |
98
+ | `ollama` | all AI commands | `npm i ollama` | local Ollama server |
99
+ | `openai` | all AI commands | `npm i openai` | `$OPENAI_API_KEY` |
100
+ | `anthropic` | all AI commands | `npm i @anthropic-ai/sdk` | `$ANTHROPIC_API_KEY` |
101
+ | `huggingface` | all AI commands | `npm i openai` | `$HF_TOKEN` |
102
+ | `bedrock` | all AI commands | `npm i @aws-sdk/client-bedrock-runtime` | AWS credential chain |
103
+ | `azureai` | all AI commands | `npm i openai` | `$AZURE_OPENAI_KEY` + `--ai-url` |
104
+ | `github` | all AI commands | `npm i openai` | `$GITHUB_TOKEN` (auto-detected) |
105
+ | `azureinference` | all AI commands | `npm i @azure-rest/ai-inference @azure/core-auth` | `$AZURE_AI_KEY` + `--ai-url` |
106
+
107
+ ```bash
108
+ # GitHub Models — free tier available, no billing setup needed
109
+ ado-sync push --ai-provider github --ai-model gpt-4o
110
+
111
+ # Anthropic
112
+ ado-sync push --ai-provider anthropic --ai-key $ANTHROPIC_API_KEY
113
+
114
+ # AWS Bedrock
115
+ ado-sync push --ai-provider bedrock --ai-model anthropic.claude-3-haiku-20240307-v1:0 --ai-region us-east-1
116
+
117
+ # Azure AI Inference (AI Foundry)
118
+ ado-sync generate --story-ids 1234 --ai-provider azureinference \
119
+ --ai-url https://myendpoint.inference.azure.com --ai-model gpt-4o --ai-key $AZURE_AI_KEY
120
+ ```
121
+
122
+ Set once in `ado-sync.json` to apply to all commands:
123
+ ```json
124
+ { "sync": { "ai": { "provider": "github", "model": "gpt-4o" } } }
125
+ ```
126
+
88
127
  > **LLM / AI crawlers:** [`llms.txt`](llms.txt) contains a single-file summary of the entire project — config schema, CLI flags, ID writeback formats, and the full doc index.
@@ -0,0 +1,49 @@
1
+ /**
2
+ * AI-powered spec file generation from ADO User Stories.
3
+ *
4
+ * Providers:
5
+ * local — node-llama-cpp GGUF model running in-process
6
+ * ollama — local Ollama REST API (http://localhost:11434)
7
+ * openai — OpenAI Chat Completions (or compatible endpoint)
8
+ * anthropic — Anthropic Messages API
9
+ * huggingface — Hugging Face Inference API (OpenAI-compatible /v1 endpoint)
10
+ * bedrock — AWS Bedrock (requires @aws-sdk/client-bedrock-runtime)
11
+ * azureai — Azure OpenAI Service (OpenAI-compatible, api-key header)
12
+ *
13
+ * Output formats:
14
+ * markdown — Playwright-planner style spec (Feature/Role/Options/Scenario blocks)
15
+ * gherkin — BDD .feature file with Scenario and Scenario Outline
16
+ */
17
+ import { AdoStory } from '../azure/work-items';
18
+ export type AiGenerateProvider = 'local' | 'ollama' | 'openai' | 'anthropic' | 'huggingface' | 'bedrock' | 'azureai' | 'github' | 'azureinference';
19
+ export interface AiGenerateOpts {
20
+ provider: AiGenerateProvider;
21
+ /**
22
+ * Model identifier:
23
+ * local: path to .gguf file
24
+ * ollama: model tag (e.g. qwen2.5-coder:7b)
25
+ * openai: model name (e.g. gpt-4o)
26
+ * anthropic: model name (e.g. claude-sonnet-4-6)
27
+ * huggingface: model id (e.g. mistralai/Mistral-7B-Instruct-v0.3)
28
+ * bedrock: model id (e.g. anthropic.claude-3-haiku-20240307-v1:0)
29
+ * azureai: deployment name (e.g. gpt-4o)
30
+ * github: GitHub Models model name (e.g. gpt-4o, Meta-Llama-3.1-70B-Instruct)
31
+ * azureinference: model name deployed on Azure AI Inference endpoint
32
+ */
33
+ model?: string;
34
+ /** Base URL override (ollama, openai-compatible, azureai/azureinference full endpoint). */
35
+ baseUrl?: string;
36
+ /** API key — or $ENV_VAR reference. */
37
+ apiKey?: string;
38
+ /** AWS region for bedrock (default: AWS_REGION env or us-east-1). */
39
+ region?: string;
40
+ /** Fall back to template output if the AI call fails. Default: true. */
41
+ heuristicFallback?: boolean;
42
+ }
43
+ export type GenerateSpecFormat = 'markdown' | 'gherkin';
44
+ /**
45
+ * Generate a spec file (markdown or gherkin) for an ADO story using an AI provider.
46
+ * Returns the raw file content as a string.
47
+ * If generation fails: returns an empty string when heuristicFallback is true (caller falls back to template output); otherwise rethrows the error.
48
+ */
49
+ export declare function generateSpecFromStory(story: AdoStory, format: GenerateSpecFormat, opts: AiGenerateOpts): Promise<string>;
@@ -0,0 +1,414 @@
1
+ "use strict";
2
+ /**
3
+ * AI-powered spec file generation from ADO User Stories.
4
+ *
5
+ * Providers:
6
+ * local — node-llama-cpp GGUF model running in-process
7
+ * ollama — local Ollama REST API (http://localhost:11434)
8
+ * openai — OpenAI Chat Completions (or compatible endpoint)
9
+ * anthropic — Anthropic Messages API
10
+ * huggingface — Hugging Face Inference API (OpenAI-compatible /v1 endpoint)
11
+ * bedrock — AWS Bedrock (requires @aws-sdk/client-bedrock-runtime)
12
+ * azureai — Azure OpenAI Service (OpenAI-compatible, api-key header)
13
+ *
14
+ * Output formats:
15
+ * markdown — Playwright-planner style spec (Feature/Role/Options/Scenario blocks)
16
+ * gherkin — BDD .feature file with Scenario and Scenario Outline
17
+ */
18
+ Object.defineProperty(exports, "__esModule", { value: true });
19
+ exports.generateSpecFromStory = generateSpecFromStory;
20
+ // ─── Prompt templates ─────────────────────────────────────────────────────────
21
+ const MARKDOWN_PROMPT = `You are a QA engineer writing markdown spec files for E2E testing.
22
+
23
+ Given the ADO user story below, generate a complete markdown spec in this exact format:
24
+
25
+ # {Short feature area title}
26
+
27
+ Feature: {Feature name}
28
+ Role: {user role, e.g. "user" or "admin"}
29
+ Options: {feature permission flags needed, or "none"}
30
+ Timeout: 180000
31
+ Tags: {comma-separated tags, always include "smoke" for the main scenario}
32
+
33
+ ## Scenario: {Main happy-path scenario title}
34
+
35
+ Preconditions:
36
+ - {Required permission or system state}
37
+ - User is authenticated
38
+
39
+ - Given {initial page/context}
40
+ - When {primary user action}
41
+ - And {additional action if needed}
42
+ - Then {expected result}
43
+ - And {additional assertion if applicable}
44
+
45
+ ## Scenario: {Edge case or secondary scenario title}
46
+
47
+ Preconditions:
48
+ - {Precondition}
49
+
50
+ - Given {initial context}
51
+ - When {action}
52
+ - Then {expected result}
53
+
54
+ Rules:
55
+ - Generate 2-4 scenarios covering the main happy path and key edge cases from the acceptance criteria
56
+ - Be specific: use real button names, field labels, and UI messages from the description
57
+ - Identify the minimum user permissions needed for the feature
58
+ - Output ONLY the markdown — no preamble, explanation, or code fences
59
+
60
+ Story details:
61
+ Title: {TITLE}
62
+ Work Item Type: {TYPE}
63
+ State: {STATE}
64
+
65
+ Description:
66
+ {DESCRIPTION}
67
+
68
+ Acceptance Criteria:
69
+ {AC}`;
70
+ const GHERKIN_PROMPT = `You are a QA engineer writing BDD feature files in Gherkin syntax.
71
+
72
+ Given the ADO user story below, generate a complete Gherkin .feature file.
73
+
74
+ Requirements:
75
+ - Feature block with a short description
76
+ - 2-3 Scenario blocks for main happy-path and key negative cases
77
+ - 1 Scenario Outline with an Examples table if the AC mentions multiple values/inputs to test
78
+ - Place an @tc:0000 tag above each Scenario/Scenario Outline (placeholder for Azure TC ID)
79
+ - Steps must be specific: use real button names, field names, and expected messages
80
+ - Scenario Outline Examples table must have at least 2-3 rows
81
+ - Output ONLY the .feature content — no preamble, explanation, or code fences
82
+
83
+ Story details:
84
+ Title: {TITLE}
85
+ Work Item Type: {TYPE}
86
+ State: {STATE}
87
+
88
+ Description:
89
+ {DESCRIPTION}
90
+
91
+ Acceptance Criteria:
92
+ {AC}`;
93
+ function buildPrompt(story, format) {
94
+ const template = format === 'gherkin' ? GHERKIN_PROMPT : MARKDOWN_PROMPT;
95
+ return template
96
+ .replace('{TITLE}', story.title)
97
+ .replace('{TYPE}', story.workItemType ?? 'User Story')
98
+ .replace('{STATE}', story.state ?? 'Active')
99
+ .replace('{DESCRIPTION}', story.description?.trim() || '(not provided)')
100
+ .replace('{AC}', story.acceptanceCriteria?.trim() || '(not provided)');
101
+ }
102
+ // ─── Shared helpers ───────────────────────────────────────────────────────────
103
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
104
+ const esmImport = new Function('m', 'return import(m)');
105
+ function resolveEnvVar(value) {
106
+ if (value.startsWith('$'))
107
+ return process.env[value.slice(1)] ?? value;
108
+ return value;
109
+ }
110
+ const llamaCache = new Map();
111
+ async function getLlamaSession(modelPath) {
112
+ if (llamaCache.has(modelPath))
113
+ return llamaCache.get(modelPath);
114
+ const promise = (async () => {
115
+ // eslint-disable-next-line @typescript-eslint/no-implied-eval, @typescript-eslint/no-explicit-any
116
+ const esmImport = new Function('m', 'return import(m)');
117
+ const llamaModule = await esmImport('node-llama-cpp');
118
+ const { getLlama, LlamaChatSession } = llamaModule;
119
+ const llama = await getLlama();
120
+ const model = await llama.loadModel({ modelPath });
121
+ return { LlamaChatSession, model };
122
+ })();
123
+ llamaCache.set(modelPath, promise);
124
+ return promise;
125
+ }
126
+ async function localProvider(prompt, modelPath) {
127
+ const { LlamaChatSession, model } = await getLlamaSession(modelPath);
128
+ const context = await model.createContext();
129
+ try {
130
+ const session = new LlamaChatSession({ contextSequence: context.getSequence() });
131
+ return await session.prompt(prompt);
132
+ }
133
+ finally {
134
+ if (typeof context.dispose === 'function')
135
+ await context.dispose();
136
+ }
137
+ }
138
+ async function ollamaProvider(prompt, model, baseUrl) {
139
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
140
+ let Ollama;
141
+ try {
142
+ ({ Ollama } = await esmImport('ollama'));
143
+ }
144
+ catch {
145
+ throw new Error("'ollama' provider requires the ollama package. Install it with: npm install ollama");
146
+ }
147
+ const client = new Ollama({ host: baseUrl });
148
+ const response = await client.chat({
149
+ model,
150
+ messages: [{ role: 'user', content: prompt }],
151
+ });
152
+ return response.message?.content ?? '';
153
+ }
154
+ async function openaiProvider(prompt, model, apiKey, baseUrl) {
155
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
156
+ let OpenAI;
157
+ try {
158
+ ({ OpenAI } = await esmImport('openai'));
159
+ }
160
+ catch {
161
+ throw new Error("'openai' provider requires the openai package. Install it with: npm install openai");
162
+ }
163
+ const client = new OpenAI({
164
+ apiKey,
165
+ ...(baseUrl ? { baseURL: baseUrl.replace(/\/$/, '') } : {}),
166
+ });
167
+ const msg = await client.chat.completions.create({
168
+ model,
169
+ messages: [{ role: 'user', content: prompt }],
170
+ temperature: 0.3,
171
+ max_tokens: 4096,
172
+ });
173
+ return msg.choices[0]?.message?.content ?? '';
174
+ }
175
+ async function anthropicProvider(prompt, model, apiKey, baseUrl) {
176
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
177
+ let Anthropic;
178
+ try {
179
+ ({ default: Anthropic } = await esmImport('@anthropic-ai/sdk'));
180
+ }
181
+ catch {
182
+ throw new Error("'anthropic' provider requires @anthropic-ai/sdk. Install it with: npm install @anthropic-ai/sdk");
183
+ }
184
+ const client = new Anthropic({
185
+ apiKey,
186
+ ...(baseUrl ? { baseURL: baseUrl.replace(/\/$/, '') } : {}),
187
+ });
188
+ const msg = await client.messages.create({
189
+ model,
190
+ max_tokens: 4096,
191
+ messages: [{ role: 'user', content: prompt }],
192
+ });
193
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
194
+ return msg.content[0]?.text ?? '';
195
+ }
196
+ async function huggingfaceProvider(prompt, model, apiKey) {
197
+ // Hugging Face exposes an OpenAI-compatible /v1 endpoint — use the openai SDK with a custom baseURL
198
+ return openaiProvider(prompt, model, apiKey, 'https://api-inference.huggingface.co/v1');
199
+ }
200
+ async function githubProvider(prompt, model, apiKey) {
201
+ // GitHub Models is OpenAI-compatible — use the openai SDK with the GitHub Models endpoint
202
+ return openaiProvider(prompt, model, apiKey, 'https://models.inference.ai.azure.com');
203
+ }
204
+ async function azureinferenceProvider(prompt, model, apiKey, endpoint) {
205
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
206
+ let ModelClient, AzureKeyCredential;
207
+ try {
208
+ ({ default: ModelClient } = await esmImport('@azure-rest/ai-inference'));
209
+ ({ AzureKeyCredential } = await esmImport('@azure/core-auth'));
210
+ }
211
+ catch {
212
+ throw new Error("'azureinference' provider requires @azure-rest/ai-inference and @azure/core-auth. " +
213
+ 'Install with: npm install @azure-rest/ai-inference @azure/core-auth');
214
+ }
215
+ const client = ModelClient(endpoint.replace(/\/$/, ''), new AzureKeyCredential(apiKey));
216
+ const response = await client.path('/chat/completions').post({
217
+ body: {
218
+ model,
219
+ messages: [{ role: 'user', content: prompt }],
220
+ temperature: 0.3,
221
+ max_tokens: 4096,
222
+ },
223
+ });
224
+ if (response.status !== '200') {
225
+ throw new Error(`Azure AI Inference ${response.status}: ${JSON.stringify(response.body)}`);
226
+ }
227
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
228
+ return response.body.choices?.[0]?.message?.content ?? '';
229
+ }
230
+ // ─── Bedrock credential pre-flight ───────────────────────────────────────────
231
+ function warnIfNoBedrockCredentials() {
232
+ const hasEnv = !!(process.env['AWS_ACCESS_KEY_ID'] && process.env['AWS_SECRET_ACCESS_KEY']);
233
+ const hasProfile = !!(process.env['AWS_PROFILE'] || process.env['AWS_DEFAULT_PROFILE']);
234
+ if (hasEnv || hasProfile)
235
+ return;
236
+ try {
237
+ const credFile = require('path').join(require('os').homedir(), '.aws', 'credentials');
238
+ require('fs').accessSync(credFile);
239
+ return;
240
+ }
241
+ catch { /* not found */ }
242
+ process.stderr.write(' [ai] Warning: No AWS credentials detected for bedrock provider.\n' +
243
+ ' Set AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY, configure AWS_PROFILE,\n' +
244
+ ' or ensure ~/.aws/credentials exists. The call will fail without credentials.\n');
245
+ }
246
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
247
+ async function bedrockInvokeWithRetry(client, command, maxRetries = 3, baseDelayMs = 5_000) {
248
+ let attempt = 0;
249
+ while (true) {
250
+ try {
251
+ return await client.send(command);
252
+ }
253
+ catch (err) {
254
+ const name = err?.name ?? '';
255
+ const isThrottle = name === 'ThrottlingException' || name === 'ServiceUnavailableException' || err?.$retryable;
256
+ if (isThrottle && attempt < maxRetries) {
257
+ const delay = baseDelayMs * Math.pow(2, attempt);
258
+ process.stderr.write(` [ai-generate] bedrock throttled — retrying in ${Math.round(delay / 1000)}s (${attempt + 1}/${maxRetries})\n`);
259
+ await new Promise((r) => setTimeout(r, delay));
260
+ attempt++;
261
+ continue;
262
+ }
263
+ throw err;
264
+ }
265
+ }
266
+ }
267
+ async function bedrockProvider(prompt, model, region) {
268
+ warnIfNoBedrockCredentials();
269
+ // Dynamic import — requires @aws-sdk/client-bedrock-runtime to be installed
270
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
271
+ let bedrockModule;
272
+ try {
273
+ // eslint-disable-next-line @typescript-eslint/no-implied-eval, @typescript-eslint/no-explicit-any
274
+ const esmImport = new Function('m', 'return import(m)');
275
+ bedrockModule = await esmImport('@aws-sdk/client-bedrock-runtime');
276
+ }
277
+ catch {
278
+ throw new Error('AWS Bedrock requires @aws-sdk/client-bedrock-runtime. Install it with: npm install @aws-sdk/client-bedrock-runtime');
279
+ }
280
+ const { BedrockRuntimeClient, InvokeModelCommand } = bedrockModule;
281
+ const client = new BedrockRuntimeClient({ region });
282
+ // Support Claude models and Amazon Titan/Nova; default to Anthropic Claude format
283
+ const isClaudeModel = /anthropic\.claude/.test(model);
284
+ const isTitanModel = /amazon\.titan/.test(model);
285
+ const isNovaModel = /amazon\.nova/.test(model);
286
+ const isLlamaModel = /meta\.llama/.test(model);
287
+ let body;
288
+ if (isClaudeModel) {
289
+ body = JSON.stringify({
290
+ anthropic_version: 'bedrock-2023-05-31',
291
+ max_tokens: 4096,
292
+ messages: [{ role: 'user', content: prompt }],
293
+ });
294
+ }
295
+ else if (isTitanModel) {
296
+ body = JSON.stringify({
297
+ inputText: prompt,
298
+ textGenerationConfig: { maxTokenCount: 4096, temperature: 0.3 },
299
+ });
300
+ }
301
+ else if (isNovaModel || isLlamaModel) {
302
+ body = JSON.stringify({
303
+ messages: [{ role: 'user', content: [{ text: prompt }] }],
304
+ inferenceConfig: { max_new_tokens: 4096, temperature: 0.3 },
305
+ });
306
+ }
307
+ else {
308
+ // Generic: try OpenAI-compatible messages format
309
+ body = JSON.stringify({
310
+ messages: [{ role: 'user', content: prompt }],
311
+ max_tokens: 4096,
312
+ });
313
+ }
314
+ const response = await bedrockInvokeWithRetry(client, new InvokeModelCommand({
315
+ modelId: model,
316
+ body: new TextEncoder().encode(body),
317
+ contentType: 'application/json',
318
+ accept: 'application/json',
319
+ }));
320
+ const responseText = new TextDecoder().decode(response.body);
321
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
322
+ const data = JSON.parse(responseText);
323
+ // Handle different response shapes
324
+ return (data.content?.[0]?.text ?? // Anthropic Claude
325
+ data.results?.[0]?.outputText ?? // Amazon Titan
326
+ data.output?.message?.content?.[0]?.text ?? // Amazon Nova
327
+ data.generation ?? // Meta Llama
328
+ '');
329
+ }
330
+ async function azureaiProvider(prompt, model, apiKey, baseUrl) {
331
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
332
+ let AzureOpenAI;
333
+ try {
334
+ ({ AzureOpenAI } = await esmImport('openai'));
335
+ }
336
+ catch {
337
+ throw new Error("'azureai' provider requires the openai package. Install it with: npm install openai");
338
+ }
339
+ const client = new AzureOpenAI({
340
+ apiKey,
341
+ endpoint: baseUrl.replace(/\/$/, ''),
342
+ apiVersion: '2024-12-01-preview',
343
+ deployment: model,
344
+ });
345
+ const msg = await client.chat.completions.create({
346
+ model,
347
+ messages: [{ role: 'user', content: prompt }],
348
+ temperature: 0.3,
349
+ max_tokens: 4096,
350
+ });
351
+ return msg.choices[0]?.message?.content ?? '';
352
+ }
353
+ // ─── Main entry point ─────────────────────────────────────────────────────────
354
+ /**
355
+ * Generate a spec file (markdown or gherkin) for an ADO story using an AI provider.
356
+ * Returns the raw file content as a string.
357
+ * Falls back to null if generation fails and heuristicFallback is false (caller handles).
358
+ */
359
+ async function generateSpecFromStory(story, format, opts) {
360
+ const prompt = buildPrompt(story, format);
361
+ const heuristicFallback = opts.heuristicFallback ?? true;
362
+ const run = async () => {
363
+ switch (opts.provider) {
364
+ case 'local': {
365
+ if (!opts.model)
366
+ throw new Error('local provider requires --ai-model <path/to/model.gguf>');
367
+ return localProvider(prompt, opts.model);
368
+ }
369
+ case 'ollama': {
370
+ return ollamaProvider(prompt, opts.model ?? 'qwen2.5-coder:7b', opts.baseUrl ?? 'http://localhost:11434');
371
+ }
372
+ case 'openai': {
373
+ return openaiProvider(prompt, opts.model ?? 'gpt-4o', resolveEnvVar(opts.apiKey ?? ''), opts.baseUrl ?? 'https://api.openai.com/v1');
374
+ }
375
+ case 'anthropic': {
376
+ return anthropicProvider(prompt, opts.model ?? 'claude-sonnet-4-6', resolveEnvVar(opts.apiKey ?? ''), opts.baseUrl ?? 'https://api.anthropic.com/v1');
377
+ }
378
+ case 'huggingface': {
379
+ if (!opts.model)
380
+ throw new Error('huggingface provider requires --ai-model <model-id>');
381
+ return huggingfaceProvider(prompt, opts.model, resolveEnvVar(opts.apiKey ?? ''));
382
+ }
383
+ case 'bedrock': {
384
+ return bedrockProvider(prompt, opts.model ?? 'anthropic.claude-3-haiku-20240307-v1:0', opts.region ?? process.env['AWS_REGION'] ?? 'us-east-1');
385
+ }
386
+ case 'azureai': {
387
+ if (!opts.baseUrl)
388
+ throw new Error('azureai provider requires --ai-url <azure-endpoint>');
389
+ return azureaiProvider(prompt, opts.model ?? 'gpt-4o', resolveEnvVar(opts.apiKey ?? ''), opts.baseUrl);
390
+ }
391
+ case 'github': {
392
+ return githubProvider(prompt, opts.model ?? 'gpt-4o', resolveEnvVar(opts.apiKey ?? process.env['GITHUB_TOKEN'] ?? ''));
393
+ }
394
+ case 'azureinference': {
395
+ if (!opts.baseUrl)
396
+ throw new Error('azureinference provider requires --ai-url <endpoint>');
397
+ return azureinferenceProvider(prompt, opts.model ?? 'gpt-4o', resolveEnvVar(opts.apiKey ?? ''), opts.baseUrl);
398
+ }
399
+ }
400
+ };
401
+ try {
402
+ const result = await run();
403
+ return result.trim();
404
+ }
405
+ catch (err) {
406
+ const msg = err instanceof Error ? err.message : String(err);
407
+ if (heuristicFallback) {
408
+ process.stderr.write(` [ai-generate] ${opts.provider} failed (${msg}), using template output\n`);
409
+ return ''; // caller falls back to template
410
+ }
411
+ throw err;
412
+ }
413
+ }
414
+ //# sourceMappingURL=generate-spec.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"generate-spec.js","sourceRoot":"","sources":["../../src/ai/generate-spec.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;GAeG;;AA8bH,sDA2FC;AA7eD,iFAAiF;AAEjF,MAAM,eAAe,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;KAgDnB,CAAC;AAEN,MAAM,cAAc,GAAG;;;;;;;;;;;;;;;;;;;;;;KAsBlB,CAAC;AAEN,SAAS,WAAW,CAAC,KAAe,EAAE,MAA0B;IAC9D,MAAM,QAAQ,GAAG,MAAM,KAAK,SAAS,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,eAAe,CAAC;IACzE,OAAO,QAAQ;SACZ,OAAO,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,CAAC;SAC/B,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,YAAY,IAAI,YAAY,CAAC;SACrD,OAAO,CAAC,SAAS,EAAE,KAAK,CAAC,KAAK,IAAI,QAAQ,CAAC;SAC3C,OAAO,CAAC,eAAe,EAAE,KAAK,CAAC,WAAW,EAAE,IAAI,EAAE,IAAI,gBAAgB,CAAC;SACvE,OAAO,CAAC,MAAM,EAAE,KAAK,CAAC,kBAAkB,EAAE,IAAI,EAAE,IAAI,gBAAgB,CAAC,CAAC;AAC3E,CAAC;AAED,iFAAiF;AAEjF,8DAA8D;AAC9D,MAAM,SAAS,GAAG,IAAI,QAAQ,CAAC,GAAG,EAAE,kBAAkB,CAAgC,CAAC;AAEvF,SAAS,aAAa,CAAC,KAAa;IAClC,IAAI,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC;QAAE,OAAO,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;IACvE,OAAO,KAAK,CAAC;AACf,CAAC;AAWD,MAAM,UAAU,GAAG,IAAI,GAAG,EAAiC,CAAC;AAE5D,KAAK,UAAU,eAAe,CAAC,SAAiB;IAC9C,IAAI,UAAU,CAAC,GAAG,CAAC,SAAS,CAAC;QAAE,OAAO,UAAU,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;IACjE,MAAM,OAAO,GAAG,CAAC,KAAK,IAA2B,EAAE;QACjD,kGAAkG;QAClG,MAAM,SAAS,GAAG,IAAI,QAAQ,CAAC,GAAG,EAAE,kBAAkB,CAAgC,CAAC;QACvF,MAAM,WAAW,GAAG,MAAM,SAAS,CAAC,gBAAgB,CAAC,CAAC;QACtD,MAAM,EAAE,QAAQ,EAAE,gBAAgB,EAAE,GAAG,WAAW,CAAC;QACnD,MAAM,KAAK,GAAG,MAAM,QAAQ,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAG,MAAM,KAAK,CAAC,SAAS,CAAC,EAAE,SAAS,EAAE,CAAC,CAAC;QACnD,OAAO,EAAE,gBAAgB,EAAE,KAAK,EAAE,CAAC;IACrC,CAAC,CAAC,EAAE,CAAC;IACL,UAAU,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IACnC,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,KAAK,UAAU,aAAa,CAAC,MAAc,EAAE,SAAiB;IAC5D,MAAM,EAAE,gBAAgB,EAAE,KAAK,EAAE,GAAG,MAAM,eAAe,CAAC,SAAS,CAAC,CAAC;IACrE,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,aAAa,EAAE,CAAC;IAC5C,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC,EAAE,eAAe,EAAE,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC;QACjF,OAAO,MAAM,OAAO,CAAC,MAAM,CAAC,MAAM,CAAW,CAAC;IACh
D,CAAC;YAAS,CAAC;QACT,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,UAAU;YAAE,MAAM,OAAO,CAAC,OAAO,EAAE,CAAC;IACrE,CAAC;AACH,CAAC;AAED,KAAK,UAAU,cAAc,CAAC,MAAc,EAAE,KAAa,EAAE,OAAe;IAC1E,8DAA8D;IAC9D,IAAI,MAAW,CAAC;IAChB,IAAI,CAAC;QACH,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC3C,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CAAC,oFAAoF,CAAC,CAAC;IACxG,CAAC;IACD,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;IAC7C,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;QACjC,KAAK;QACL,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;KAC9C,CAAC,CAAC;IACH,OAAO,QAAQ,CAAC,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;AACzC,CAAC;AAED,KAAK,UAAU,cAAc,CAC3B,MAAc,EACd,KAAa,EACb,MAAc,EACd,OAAgB;IAEhB,8DAA8D;IAC9D,IAAI,MAAW,CAAC;IAChB,IAAI,CAAC;QACH,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC3C,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CAAC,oFAAoF,CAAC,CAAC;IACxG,CAAC;IACD,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC;QACxB,MAAM;QACN,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;KAC5D,CAAC,CAAC;IACH,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;QAC/C,KAAK;QACL,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;QAC7C,WAAW,EAAE,GAAG;QAChB,UAAU,EAAE,IAAI;KACjB,CAAC,CAAC;IACH,OAAO,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;AAChD,CAAC;AAED,KAAK,UAAU,iBAAiB,CAC9B,MAAc,EACd,KAAa,EACb,MAAc,EACd,OAAgB;IAEhB,8DAA8D;IAC9D,IAAI,SAAc,CAAC;IACnB,IAAI,CAAC;QACH,CAAC,EAAE,OAAO,EAAE,SAAS,EAAE,GAAG,MAAM,SAAS,CAAC,mBAAmB,CAAC,CAAC,CAAC;IAClE,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CAAC,iGAAiG,CAAC,CAAC;IACrH,CAAC;IACD,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;QAC3B,MAAM;QACN,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;KAC5D,CAAC,CAAC;IACH,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC;QACvC,KAAK;QACL,UAAU,EAAE,IAAI;QAChB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;KAC9C,CAAC,CAAC;IACH,8DAA8D;IAC9
D,OAAQ,GAAG,CAAC,OAAO,CAAC,CAAC,CAAS,EAAE,IAAI,IAAI,EAAE,CAAC;AAC7C,CAAC;AAED,KAAK,UAAU,mBAAmB,CAChC,MAAc,EACd,KAAa,EACb,MAAc;IAEd,oGAAoG;IACpG,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,yCAAyC,CAAC,CAAC;AAC1F,CAAC;AAED,KAAK,UAAU,cAAc,CAC3B,MAAc,EACd,KAAa,EACb,MAAc;IAEd,0FAA0F;IAC1F,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,uCAAuC,CAAC,CAAC;AACxF,CAAC;AAED,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,KAAa,EACb,MAAc,EACd,QAAgB;IAEhB,8DAA8D;IAC9D,IAAI,WAAgB,EAAE,kBAAuB,CAAC;IAC9C,IAAI,CAAC;QACH,CAAC,EAAE,OAAO,EAAE,WAAW,EAAE,GAAG,MAAM,SAAS,CAAC,0BAA0B,CAAC,CAAC,CAAC;QACzE,CAAC,EAAE,kBAAkB,EAAE,GAAG,MAAM,SAAS,CAAC,kBAAkB,CAAC,CAAC,CAAC;IACjE,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CACb,oFAAoF;YACpF,qEAAqE,CACtE,CAAC;IACJ,CAAC;IACD,MAAM,MAAM,GAAG,WAAW,CAAC,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,EAAE,IAAI,kBAAkB,CAAC,MAAM,CAAC,CAAC,CAAC;IACxF,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,IAAI,CAAC;QAC3D,IAAI,EAAE;YACJ,KAAK;YACL,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;YAC7C,WAAW,EAAE,GAAG;YAChB,UAAU,EAAE,IAAI;SACjB;KACF,CAAC,CAAC;IACH,IAAI,QAAQ,CAAC,MAAM,KAAK,KAAK,EAAE,CAAC;QAC9B,MAAM,IAAI,KAAK,CAAC,sBAAsB,QAAQ,CAAC,MAAM,KAAK,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC7F,CAAC;IACD,8DAA8D;IAC9D,OAAQ,QAAQ,CAAC,IAAY,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;AACrE,CAAC;AAED,gFAAgF;AAEhF,SAAS,0BAA0B;IACjC,MAAM,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC,CAAC;IAC5F,MAAM,UAAU,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC,CAAC;IACxF,IAAI,MAAM,IAAI,UAAU;QAAE,OAAO;IACjC,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,EAAE,MAAM,EAAE,aAAa,CAAC,CAAC;QACtF,OAAO,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;QACnC,OAAO;IACT,CAAC;IAAC,MAAM,CAAC,CAAC,eAAe,CAAC,CAAC;IAC3B,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,qEAAqE;QACrE,2EAA2E;QAC3E,kFAAkF,CACnF,CAAC;AACJ,CAAC;AAED,8DAA8D;AAC9D,KA
AK,UAAU,sBAAsB,CAAC,MAAW,EAAE,OAAY,EAAE,UAAU,GAAG,CAAC,EAAE,WAAW,GAAG,KAAK;IAClG,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,OAAO,IAAI,EAAE,CAAC;QACZ,IAAI,CAAC;YACH,OAAO,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACpC,CAAC;QAAC,OAAO,GAAY,EAAE,CAAC;YACtB,MAAM,IAAI,GAAI,GAAW,EAAE,IAAI,IAAI,EAAE,CAAC;YACtC,MAAM,UAAU,GAAG,IAAI,KAAK,qBAAqB,IAAI,IAAI,KAAK,6BAA6B,IAAK,GAAW,EAAE,UAAU,CAAC;YACxH,IAAI,UAAU,IAAI,OAAO,GAAG,UAAU,EAAE,CAAC;gBACvC,MAAM,KAAK,GAAG,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;gBACjD,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,mDAAmD,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,MAAM,OAAO,GAAG,CAAC,IAAI,UAAU,KAAK,CAChH,CAAC;gBACF,MAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC;gBAC/C,OAAO,EAAE,CAAC;gBACV,SAAS;YACX,CAAC;YACD,MAAM,GAAG,CAAC;QACZ,CAAC;IACH,CAAC;AACH,CAAC;AAED,KAAK,UAAU,eAAe,CAC5B,MAAc,EACd,KAAa,EACb,MAAc;IAEd,0BAA0B,EAAE,CAAC;IAE7B,4EAA4E;IAC5E,8DAA8D;IAC9D,IAAI,aAAkB,CAAC;IACvB,IAAI,CAAC;QACH,kGAAkG;QAClG,MAAM,SAAS,GAAG,IAAI,QAAQ,CAAC,GAAG,EAAE,kBAAkB,CAAgC,CAAC;QACvF,aAAa,GAAG,MAAM,SAAS,CAAC,iCAAiC,CAAC,CAAC;IACrE,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CACb,oHAAoH,CACrH,CAAC;IACJ,CAAC;IAED,MAAM,EAAE,oBAAoB,EAAE,kBAAkB,EAAE,GAAG,aAAa,CAAC;IACnE,MAAM,MAAM,GAAG,IAAI,oBAAoB,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IAEpD,kFAAkF;IAClF,MAAM,aAAa,GAAG,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACjD,MAAM,WAAW,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/C,MAAM,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAE/C,IAAI,IAAY,CAAC;IACjB,IAAI,aAAa,EAAE,CAAC;QAClB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,iBAAiB,EAAE,oBAAoB;YACvC,UAAU,EAAE,IAAI;YAChB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;SAC9C,CAAC,CAAC;IACL,CAAC;SAAM,IAAI,YAAY,EAAE,CAAC;QACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,SAAS,EAAE,MAAM;YACjB,oBAAoB,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,WAAW,EAAE,GAAG,EAAE;SAChE,CAAC,CAAC;IACL,CAAC;SAAM,IAAI,WAAW,IAAI,YAAY,EAAE,CAAC;QACvC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,QAAQ,EAAE,CAAC,EAAE,IAA
I,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC;YACzD,eAAe,EAAE,EAAE,cAAc,EAAE,IAAI,EAAE,WAAW,EAAE,GAAG,EAAE;SAC5D,CAAC,CAAC;IACL,CAAC;SAAM,CAAC;QACN,iDAAiD;QACjD,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;YACpB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;YAC7C,UAAU,EAAE,IAAI;SACjB,CAAC,CAAC;IACL,CAAC;IAED,MAAM,QAAQ,GAAG,MAAM,sBAAsB,CAAC,MAAM,EAAE,IAAI,kBAAkB,CAAC;QAC3E,OAAO,EAAE,KAAK;QACd,IAAI,EAAE,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC;QACpC,WAAW,EAAE,kBAAkB;QAC/B,MAAM,EAAE,kBAAkB;KAC3B,CAAC,CAAC,CAAC;IACJ,MAAM,YAAY,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;IAC7D,8DAA8D;IAC9D,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAQ,CAAC;IAE7C,mCAAmC;IACnC,OAAO,CACL,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,IAAc,mBAAmB;QACxD,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,UAAU,IAAQ,eAAe;QACpD,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,IAAI,cAAc;QAC1D,IAAI,CAAC,UAAU,IAAsB,aAAa;QAClD,EAAE,CACH,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,eAAe,CAC5B,MAAc,EACd,KAAa,EACb,MAAc,EACd,OAAe;IAEf,8DAA8D;IAC9D,IAAI,WAAgB,CAAC;IACrB,IAAI,CAAC;QACH,CAAC,EAAE,WAAW,EAAE,GAAG,MAAM,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC;IAChD,CAAC;IAAC,MAAM,CAAC;QACP,MAAM,IAAI,KAAK,CAAC,qFAAqF,CAAC,CAAC;IACzG,CAAC;IACD,MAAM,MAAM,GAAG,IAAI,WAAW,CAAC;QAC7B,MAAM;QACN,QAAQ,EAAE,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC;QACpC,UAAU,EAAE,oBAAoB;QAChC,UAAU,EAAE,KAAK;KAClB,CAAC,CAAC;IACH,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;QAC/C,KAAK;QACL,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC;QAC7C,WAAW,EAAE,GAAG;QAChB,UAAU,EAAE,IAAI;KACjB,CAAC,CAAC;IACH,OAAO,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;AAChD,CAAC;AAED,iFAAiF;AAEjF;;;;GAIG;AACI,KAAK,UAAU,qBAAqB,CACzC,KAAe,EACf,MAA0B,EAC1B,IAAoB;IAEpB,MAAM,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;IAC1C,MAAM,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC;IAEzD,MAAM,GAAG,GAAG,KAAK,IAAqB,EAAE;QACtC,QAAQ,IAAI,CAAC,QAAQ,EAAE,CAAC;YACtB,KAAK,OAAO,CAAC,CAAC,CAAC;gBACb,IAAI,CAAC,IAAI
,CAAC,KAAK;oBAAE,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;gBAC5F,OAAO,aAAa,CAAC,MAAM,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;YAC3C,CAAC;YACD,KAAK,QAAQ,CAAC,CAAC,CAAC;gBACd,OAAO,cAAc,CACnB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,kBAAkB,EAChC,IAAI,CAAC,OAAO,IAAI,wBAAwB,CACzC,CAAC;YACJ,CAAC;YACD,KAAK,QAAQ,CAAC,CAAC,CAAC;gBACd,OAAO,cAAc,CACnB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,QAAQ,EACtB,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,EAChC,IAAI,CAAC,OAAO,IAAI,2BAA2B,CAC5C,CAAC;YACJ,CAAC;YACD,KAAK,WAAW,CAAC,CAAC,CAAC;gBACjB,OAAO,iBAAiB,CACtB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,mBAAmB,EACjC,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,EAChC,IAAI,CAAC,OAAO,IAAI,8BAA8B,CAC/C,CAAC;YACJ,CAAC;YACD,KAAK,aAAa,CAAC,CAAC,CAAC;gBACnB,IAAI,CAAC,IAAI,CAAC,KAAK;oBAAE,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;gBACxF,OAAO,mBAAmB,CACxB,MAAM,EACN,IAAI,CAAC,KAAK,EACV,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,CACjC,CAAC;YACJ,CAAC;YACD,KAAK,SAAS,CAAC,CAAC,CAAC;gBACf,OAAO,eAAe,CACpB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,wCAAwC,EACtD,IAAI,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,WAAW,CACxD,CAAC;YACJ,CAAC;YACD,KAAK,SAAS,CAAC,CAAC,CAAC;gBACf,IAAI,CAAC,IAAI,CAAC,OAAO;oBAAE,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;gBAC1F,OAAO,eAAe,CACpB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,QAAQ,EACtB,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,EAChC,IAAI,CAAC,OAAO,CACb,CAAC;YACJ,CAAC;YACD,KAAK,QAAQ,CAAC,CAAC,CAAC;gBACd,OAAO,cAAc,CACnB,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,QAAQ,EACtB,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAChE,CAAC;YACJ,CAAC;YACD,KAAK,gBAAgB,CAAC,CAAC,CAAC;gBACtB,IAAI,CAAC,IAAI,CAAC,OAAO;oBAAE,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;gBAC3F,OAAO,sBAAsB,CAC3B,MAAM,EACN,IAAI,CAAC,KAAK,IAAI,QAAQ,EACtB,aAAa,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,EAChC,IAAI,CAAC,OAAO,CACb,CAAC;YACJ,CAAC;QACH,CAAC;IACH,CAAC,CAAC;IAEF,IAAI,CAAC;QACH,MAAM,MAAM,GAAG,MAAM,GAAG,EAAE,CAAC;QAC3B,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;IACvB,CAAC;IAAC,OAAO,GAAY,EAAE,CAAC;QACtB,MAAM,GAAG,GAAG,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAC7D,IAA
I,iBAAiB,EAAE,CAAC;YACtB,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,mBAAmB,IAAI,CAAC,QAAQ,YAAY,GAAG,4BAA4B,CAAC,CAAC;YAClG,OAAO,EAAE,CAAC,CAAC,gCAAgC;QAC7C,CAAC;QACD,MAAM,GAAG,CAAC;IACZ,CAAC;AACH,CAAC"}
@@ -29,21 +29,25 @@
29
29
  * test.steps = steps;
30
30
  */
31
31
  import { ParsedStep, ParsedTest } from '../types';
32
- export type AiProvider = 'heuristic' | 'local' | 'ollama' | 'openai' | 'anthropic';
32
+ export type AiProvider = 'heuristic' | 'local' | 'ollama' | 'openai' | 'anthropic' | 'huggingface' | 'bedrock' | 'azureai' | 'github' | 'azureinference';
33
33
  export interface AiSummaryOpts {
34
34
  provider: AiProvider;
35
35
  /**
36
- * For `local`: absolute path to a GGUF model file, e.g.
37
- * ~/.cache/ado-sync/models/qwen2.5-coder-1.5b-instruct-q4_k_m.gguf
38
- * For `ollama`: model tag, e.g. qwen2.5-coder:7b
39
- * For `openai`: model name, e.g. gpt-4o-mini
40
- * For `anthropic`: model name, e.g. claude-haiku-4-5-20251001
36
+ * For `local`: absolute path to a GGUF model file
37
+ * For `ollama`: model tag, e.g. qwen2.5-coder:7b
38
+ * For `openai`: model name, e.g. gpt-4o-mini
39
+ * For `anthropic`: model name, e.g. claude-haiku-4-5-20251001
40
+ * For `huggingface`: model id, e.g. mistralai/Mistral-7B-Instruct-v0.3
41
+ * For `bedrock`: model id, e.g. anthropic.claude-3-haiku-20240307-v1:0
42
+ * For `azureai`: deployment name, e.g. gpt-4o
41
43
  */
42
44
  model?: string;
43
- /** Base URL for Ollama (default: http://localhost:11434) or OpenAI-compatible endpoint. */
45
+ /** Base URL for Ollama (default: http://localhost:11434), OpenAI-compatible endpoint, or Azure OpenAI full endpoint. */
44
46
  baseUrl?: string;
45
- /** API key for openai / anthropic — or $ENV_VAR reference. */
47
+ /** API key for openai / anthropic / huggingface / azureai — or $ENV_VAR reference. */
46
48
  apiKey?: string;
49
+ /** AWS region for bedrock (default: AWS_REGION env or us-east-1). */
50
+ region?: string;
47
51
  /** Fall back to heuristic if the LLM call fails. Default: true. */
48
52
  heuristicFallback?: boolean;
49
53
  /**
@@ -66,6 +70,11 @@ export declare function heuristicSummary(body: string, fallbackTitle: string): {
66
70
  description: string;
67
71
  steps: ParsedStep[];
68
72
  };
73
+ /**
74
+ * Warn once if no AWS credentials appear to be configured.
75
+ * Checks env vars and ~/.aws/credentials existence — does not validate them.
76
+ */
77
+ export declare function warnIfNoBedrockCredentials(): void;
69
78
  /**
70
79
  * Use AI to analyze a test failure and return a structured root-cause summary.
71
80
  * The result can be added as a comment on the Azure test result.