@vfarcic/dot-ai 0.150.0 → 0.151.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core/ai-provider-factory.d.ts +0 -9
- package/dist/core/ai-provider-factory.d.ts.map +1 -1
- package/dist/core/ai-provider-factory.js +2 -34
- package/dist/core/ai-provider.interface.d.ts +2 -14
- package/dist/core/ai-provider.interface.d.ts.map +1 -1
- package/dist/core/embedding-service.js +1 -1
- package/dist/core/providers/noop-provider.d.ts +0 -4
- package/dist/core/providers/noop-provider.d.ts.map +1 -1
- package/dist/core/providers/noop-provider.js +0 -6
- package/dist/core/providers/vercel-provider.d.ts +0 -1
- package/dist/core/providers/vercel-provider.d.ts.map +1 -1
- package/dist/core/providers/vercel-provider.js +1 -4
- package/dist/tools/answer-question.d.ts +1 -1
- package/package.json +11 -12
- package/shared-prompts/prd-update-progress.md +18 -8
- package/dist/core/providers/anthropic-provider.d.ts +0 -51
- package/dist/core/providers/anthropic-provider.d.ts.map +0 -1
- package/dist/core/providers/anthropic-provider.js +0 -468
|
@@ -41,18 +41,9 @@ export declare class AIProviderFactory {
|
|
|
41
41
|
* If no API keys are configured, returns a NoOpAIProvider that allows
|
|
42
42
|
* the MCP server to start but returns helpful errors when AI is needed.
|
|
43
43
|
*
|
|
44
|
-
* Supports AI_PROVIDER_SDK env var to override SDK choice:
|
|
45
|
-
* - 'native' (default): Use native provider SDK
|
|
46
|
-
* - 'vercel': Use Vercel AI SDK for the provider
|
|
47
|
-
*
|
|
48
44
|
* @returns Configured AI provider instance or NoOpProvider if no keys available
|
|
49
45
|
*/
|
|
50
46
|
static createFromEnv(): AIProvider;
|
|
51
|
-
/**
|
|
52
|
-
* Create Anthropic provider instance
|
|
53
|
-
* @private
|
|
54
|
-
*/
|
|
55
|
-
private static createAnthropicProvider;
|
|
56
47
|
/**
|
|
57
48
|
* Check if a provider is available (has API key configured)
|
|
58
49
|
*
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"ai-provider-factory.d.ts","sourceRoot":"","sources":["../../src/core/ai-provider-factory.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,OAAO,EACL,UAAU,EACV,gBAAgB,EACjB,MAAM,yBAAyB,CAAC;
|
|
1
|
+
{"version":3,"file":"ai-provider-factory.d.ts","sourceRoot":"","sources":["../../src/core/ai-provider-factory.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAEH,OAAO,EACL,UAAU,EACV,gBAAgB,EACjB,MAAM,yBAAyB,CAAC;AA2BjC;;;;;;;;;;;;;;GAcG;AACH,qBAAa,iBAAiB;IAC5B;;;;;;OAMG;IACH,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,gBAAgB,GAAG,UAAU;IAuBnD;;;;;;;;;;OAUG;IACH,MAAM,CAAC,aAAa,IAAI,UAAU;IA8ElC;;;;;OAKG;IACH,MAAM,CAAC,mBAAmB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO;IAMrD;;;;OAIG;IACH,MAAM,CAAC,qBAAqB,IAAI,MAAM,EAAE;IAMxC;;;;;OAKG;IACH,MAAM,CAAC,qBAAqB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO;CAGxD;AAED;;;GAGG;AACH,wBAAgB,gBAAgB,IAAI,UAAU,CAE7C"}
|
|
@@ -11,7 +11,6 @@
|
|
|
11
11
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
12
|
exports.AIProviderFactory = void 0;
|
|
13
13
|
exports.createAIProvider = createAIProvider;
|
|
14
|
-
const anthropic_provider_1 = require("./providers/anthropic-provider");
|
|
15
14
|
const vercel_provider_1 = require("./providers/vercel-provider");
|
|
16
15
|
const noop_provider_1 = require("./providers/noop-provider");
|
|
17
16
|
const model_config_1 = require("./model-config");
|
|
@@ -67,17 +66,8 @@ class AIProviderFactory {
|
|
|
67
66
|
`Phase 1 providers: ${IMPLEMENTED_PROVIDERS.join(', ')}. ` +
|
|
68
67
|
`Future phases will add support for additional Vercel AI SDK providers.`);
|
|
69
68
|
}
|
|
70
|
-
//
|
|
71
|
-
|
|
72
|
-
case 'anthropic':
|
|
73
|
-
case 'anthropic_opus':
|
|
74
|
-
case 'anthropic_haiku':
|
|
75
|
-
return this.createAnthropicProvider(config);
|
|
76
|
-
default:
|
|
77
|
-
// All non-Anthropic providers use VercelProvider
|
|
78
|
-
// This matches the integration test behavior with AI_PROVIDER_SDK=vercel
|
|
79
|
-
return new vercel_provider_1.VercelProvider(config);
|
|
80
|
-
}
|
|
69
|
+
// All providers use VercelProvider (PRD #238: consolidated on Vercel AI SDK)
|
|
70
|
+
return new vercel_provider_1.VercelProvider(config);
|
|
81
71
|
}
|
|
82
72
|
/**
|
|
83
73
|
* Create provider from environment variables
|
|
@@ -88,15 +78,10 @@ class AIProviderFactory {
|
|
|
88
78
|
* If no API keys are configured, returns a NoOpAIProvider that allows
|
|
89
79
|
* the MCP server to start but returns helpful errors when AI is needed.
|
|
90
80
|
*
|
|
91
|
-
* Supports AI_PROVIDER_SDK env var to override SDK choice:
|
|
92
|
-
* - 'native' (default): Use native provider SDK
|
|
93
|
-
* - 'vercel': Use Vercel AI SDK for the provider
|
|
94
|
-
*
|
|
95
81
|
* @returns Configured AI provider instance or NoOpProvider if no keys available
|
|
96
82
|
*/
|
|
97
83
|
static createFromEnv() {
|
|
98
84
|
const providerType = process.env.AI_PROVIDER || 'anthropic';
|
|
99
|
-
const sdkPreference = process.env.AI_PROVIDER_SDK || 'native';
|
|
100
85
|
// Validate provider is implemented
|
|
101
86
|
if (!IMPLEMENTED_PROVIDERS.includes(providerType)) {
|
|
102
87
|
// Write to stderr for logging
|
|
@@ -150,16 +135,6 @@ class AIProviderFactory {
|
|
|
150
135
|
// Generic custom endpoint (Ollama, vLLM, LiteLLM, etc.)
|
|
151
136
|
effectiveProviderType = 'custom';
|
|
152
137
|
}
|
|
153
|
-
// If SDK override to 'vercel', use VercelProvider for all providers
|
|
154
|
-
if (sdkPreference === 'vercel') {
|
|
155
|
-
return new vercel_provider_1.VercelProvider({
|
|
156
|
-
provider: effectiveProviderType,
|
|
157
|
-
apiKey,
|
|
158
|
-
model,
|
|
159
|
-
debugMode,
|
|
160
|
-
baseURL
|
|
161
|
-
});
|
|
162
|
-
}
|
|
163
138
|
return this.create({
|
|
164
139
|
provider: effectiveProviderType,
|
|
165
140
|
apiKey,
|
|
@@ -168,13 +143,6 @@ class AIProviderFactory {
|
|
|
168
143
|
baseURL
|
|
169
144
|
});
|
|
170
145
|
}
|
|
171
|
-
/**
|
|
172
|
-
* Create Anthropic provider instance
|
|
173
|
-
* @private
|
|
174
|
-
*/
|
|
175
|
-
static createAnthropicProvider(config) {
|
|
176
|
-
return new anthropic_provider_1.AnthropicProvider(config);
|
|
177
|
-
}
|
|
178
146
|
/**
|
|
179
147
|
* Check if a provider is available (has API key configured)
|
|
180
148
|
*
|
|
@@ -220,23 +220,11 @@ export interface AIProvider {
|
|
|
220
220
|
*/
|
|
221
221
|
getModelName(): string;
|
|
222
222
|
/**
|
|
223
|
-
*
|
|
224
|
-
*
|
|
225
|
-
* @returns SDK provider name (e.g., 'xai', 'anthropic', 'openai') or the provider type for native SDKs
|
|
226
|
-
*/
|
|
227
|
-
getSDKProvider(): string;
|
|
228
|
-
/**
|
|
229
|
-
* Execute agentic loop with tool calling (NEW - PRD #136)
|
|
223
|
+
* Execute agentic loop with tool calling (PRD #136)
|
|
230
224
|
*
|
|
231
225
|
* AI autonomously decides which tools to call and when to stop.
|
|
232
226
|
* Supports multi-turn conversations with tool execution.
|
|
233
|
-
*
|
|
234
|
-
* NOTE: Currently NOT USED in codebase. PRD #136 analysis showed JSON-based loops
|
|
235
|
-
* achieve same goals without SDK overhead. Kept for potential future use.
|
|
236
|
-
*
|
|
237
|
-
* IMPLEMENTATION STATUS:
|
|
238
|
-
* - AnthropicProvider: ✅ Implemented
|
|
239
|
-
* - VercelAIProvider: ❌ Not implemented (not needed for current workflows)
|
|
227
|
+
* Used by remediate and operate tools for AI-driven investigations.
|
|
240
228
|
*
|
|
241
229
|
* @param config Tool loop configuration with system prompt, tools, and executor
|
|
242
230
|
* @returns Agentic result with final message, iterations, tool calls, and token usage
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"ai-provider.interface.d.ts","sourceRoot":"","sources":["../../src/core/ai-provider.interface.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE;QACL,YAAY,EAAE,MAAM,CAAC;QACrB,aAAa,EAAE,MAAM,CAAC;QACtB,2BAA2B,CAAC,EAAE,MAAM,CAAC;QACrC,uBAAuB,CAAC,EAAE,MAAM,CAAC;KAClC,CAAC;CACH;AAED;;GAEG;AACH,MAAM,MAAM,WAAW,GAAG,MAAM,GAAG,QAAQ,GAAG,KAAK,CAAC;AAEpD;;GAEG;AACH,MAAM,MAAM,qBAAqB,GAC7B,0BAA0B,GAC1B,uBAAuB,GACvB,0BAA0B,GAC1B,qBAAqB,GACrB,0BAA0B,CAAC;AAE/B;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,QAAQ,EAAE,qBAAqB,CAAC;IAChC,cAAc,EAAE,MAAM,CAAC;IACvB,WAAW,EAAE,WAAW,CAAC;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC9B,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,0BAA0B,EAAE,wBAAwB,EAAE,CAAC;IACvD,iBAAiB,EAAE;QACjB,oBAAoB,EAAE,WAAW,CAAC;QAClC,WAAW,EAAE,MAAM,EAAE,CAAC;QACtB,gBAAgB,EAAE,MAAM,CAAC;KAC1B,CAAC;IACF,aAAa,EAAE;QACb,kBAAkB,EAAE,MAAM,CAAC;QAC3B,aAAa,EAAE,MAAM,EAAE,CAAC;QACxB,gBAAgB,EAAE,MAAM,EAAE,CAAC;KAC5B,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,+BAA+B;IAC/B,MAAM,EAAE,MAAM,CAAC;IAEf,8EAA8E;IAC9E,QAAQ,EAAE,MAAM,CAAC;IAEjB,sEAAsE;IACtE,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf,oDAAoD;IACpD,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB;;;;;;;;;;;;;;OAcG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,MAAM;IACrB,oEAAoE;IACpE,IAAI,EAAE,MAAM,CAAC;IAEb,0EAA0E;IAC1E,WAAW,EAAE,MAAM,CAAC;IAEpB,4CAA4C;IAC5C,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ,CAAC;QACf,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;KACrB,CAAC;CACH;AAED;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,KAAK,OAAO,CAAC,GAAG,CAAC,CAAC;AAE1E;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,wDAAwD;IACxD,YAAY,EAAE,MAAM,CAAC;IAErB,uCAAuC;IACvC,WAAW,EAAE,MAAM,CAAC;IAEpB,mEAAmE;IACnE,KAAK,EAAE,MAAM,EAAE,CAAC;IAEhB,qCAAqC;IACrC,YAAY,EAAE,YAAY,CAAC;IAE3B,oDAAoD;IACpD,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,qDAAqD;IACrD,WAAW,CAAC,EAAE,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,GAAG
,EAAE,KAAK,IAAI,CAAC;IAE5D,8DAA8D;IAC9D,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,0DAA0D;IAC1D,iBAAiB,CAAC,EAAE;QAClB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;IAEF,8DAA8D;IAC9D,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,6DAA6D;IAC7D,YAAY,EAAE,MAAM,CAAC;IAErB,oCAAoC;IACpC,UAAU,EAAE,MAAM,CAAC;IAEnB,8CAA8C;IAC9C,iBAAiB,EAAE,KAAK,CAAC;QACvB,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,EAAE,GAAG,CAAC;QACX,MAAM,EAAE,GAAG,CAAC;KACb,CAAC,CAAC;IAEH,qDAAqD;IACrD,WAAW,EAAE;QACX,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;IAEF,6CAA6C;IAC7C,MAAM,CAAC,EAAE,SAAS,GAAG,QAAQ,GAAG,SAAS,GAAG,aAAa,CAAC;IAE1D,uDAAuD;IACvD,gBAAgB,CAAC,EAAE,wBAAwB,GAAG,gBAAgB,GAAG,eAAe,GAAG,eAAe,GAAG,OAAO,CAAC;IAE7G,wDAAwD;IACxD,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,+DAA+D;IAC/D,UAAU,CAAC,EAAE;QACX,WAAW,EAAE,MAAM,CAAC;QACpB,aAAa,EAAE,MAAM,CAAC;KACvB,CAAC;CACH;AAED;;;;;GAKG;AACH,MAAM,WAAW,UAAU;IACzB;;;;;;;;;;;OAWG;IACH,WAAW,CACT,OAAO,EAAE,MAAM,EACf,SAAS,CAAC,EAAE,MAAM,EAClB,iBAAiB,CAAC,EAAE;QAClB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC,UAAU,CAAC,CAAC;IAEvB;;;;;;OAMG;IACH,aAAa,IAAI,OAAO,CAAC;IAEzB;;;;;;;OAOG;IACH,eAAe,IAAI,MAAM,CAAC;IAE1B;;;;;;OAMG;IACH,eAAe,IAAI,MAAM,CAAC;IAE1B;;;;OAIG;IACH,YAAY,IAAI,MAAM,CAAC;IAEvB
|
|
1
|
+
{"version":3,"file":"ai-provider.interface.d.ts","sourceRoot":"","sources":["../../src/core/ai-provider.interface.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE;QACL,YAAY,EAAE,MAAM,CAAC;QACrB,aAAa,EAAE,MAAM,CAAC;QACtB,2BAA2B,CAAC,EAAE,MAAM,CAAC;QACrC,uBAAuB,CAAC,EAAE,MAAM,CAAC;KAClC,CAAC;CACH;AAED;;GAEG;AACH,MAAM,MAAM,WAAW,GAAG,MAAM,GAAG,QAAQ,GAAG,KAAK,CAAC;AAEpD;;GAEG;AACH,MAAM,MAAM,qBAAqB,GAC7B,0BAA0B,GAC1B,uBAAuB,GACvB,0BAA0B,GAC1B,qBAAqB,GACrB,0BAA0B,CAAC;AAE/B;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,QAAQ,EAAE,qBAAqB,CAAC;IAChC,cAAc,EAAE,MAAM,CAAC;IACvB,WAAW,EAAE,WAAW,CAAC;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC9B,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,0BAA0B,EAAE,wBAAwB,EAAE,CAAC;IACvD,iBAAiB,EAAE;QACjB,oBAAoB,EAAE,WAAW,CAAC;QAClC,WAAW,EAAE,MAAM,EAAE,CAAC;QACtB,gBAAgB,EAAE,MAAM,CAAC;KAC1B,CAAC;IACF,aAAa,EAAE;QACb,kBAAkB,EAAE,MAAM,CAAC;QAC3B,aAAa,EAAE,MAAM,EAAE,CAAC;QACxB,gBAAgB,EAAE,MAAM,EAAE,CAAC;KAC5B,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,+BAA+B;IAC/B,MAAM,EAAE,MAAM,CAAC;IAEf,8EAA8E;IAC9E,QAAQ,EAAE,MAAM,CAAC;IAEjB,sEAAsE;IACtE,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf,oDAAoD;IACpD,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB;;;;;;;;;;;;;;OAcG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,MAAM;IACrB,oEAAoE;IACpE,IAAI,EAAE,MAAM,CAAC;IAEb,0EAA0E;IAC1E,WAAW,EAAE,MAAM,CAAC;IAEpB,4CAA4C;IAC5C,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ,CAAC;QACf,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;KACrB,CAAC;CACH;AAED;;;GAGG;AACH,MAAM,MAAM,YAAY,GAAG,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,KAAK,OAAO,CAAC,GAAG,CAAC,CAAC;AAE1E;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,wDAAwD;IACxD,YAAY,EAAE,MAAM,CAAC;IAErB,uCAAuC;IACvC,WAAW,EAAE,MAAM,CAAC;IAEpB,mEAAmE;IACnE,KAAK,EAAE,MAAM,EAAE,CAAC;IAEhB,qCAAqC;IACrC,YAAY,EAAE,YAAY,CAAC;IAE3B,oDAAoD;IACpD,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,qDAAqD;IACrD,WAAW,CAAC,EAAE,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,GAAG
,EAAE,KAAK,IAAI,CAAC;IAE5D,8DAA8D;IAC9D,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,0DAA0D;IAC1D,iBAAiB,CAAC,EAAE;QAClB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;IAEF,8DAA8D;IAC9D,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,6DAA6D;IAC7D,YAAY,EAAE,MAAM,CAAC;IAErB,oCAAoC;IACpC,UAAU,EAAE,MAAM,CAAC;IAEnB,8CAA8C;IAC9C,iBAAiB,EAAE,KAAK,CAAC;QACvB,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,EAAE,GAAG,CAAC;QACX,MAAM,EAAE,GAAG,CAAC;KACb,CAAC,CAAC;IAEH,qDAAqD;IACrD,WAAW,EAAE;QACX,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;IAEF,6CAA6C;IAC7C,MAAM,CAAC,EAAE,SAAS,GAAG,QAAQ,GAAG,SAAS,GAAG,aAAa,CAAC;IAE1D,uDAAuD;IACvD,gBAAgB,CAAC,EAAE,wBAAwB,GAAG,gBAAgB,GAAG,eAAe,GAAG,eAAe,GAAG,OAAO,CAAC;IAE7G,wDAAwD;IACxD,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,+DAA+D;IAC/D,UAAU,CAAC,EAAE;QACX,WAAW,EAAE,MAAM,CAAC;QACpB,aAAa,EAAE,MAAM,CAAC;KACvB,CAAC;CACH;AAED;;;;;GAKG;AACH,MAAM,WAAW,UAAU;IACzB;;;;;;;;;;;OAWG;IACH,WAAW,CACT,OAAO,EAAE,MAAM,EACf,SAAS,CAAC,EAAE,MAAM,EAClB,iBAAiB,CAAC,EAAE;QAClB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC,UAAU,CAAC,CAAC;IAEvB;;;;;;OAMG;IACH,aAAa,IAAI,OAAO,CAAC;IAEzB;;;;;;;OAOG;IACH,eAAe,IAAI,MAAM,CAAC;IAE1B;;;;;;OAMG;IACH,eAAe,IAAI,MAAM,CAAC;IAE1B;;;;OAIG;IACH,YAAY,IAAI,MAAM,CAAC;IAEvB;;;;;;;;;OASG;IACH,QAAQ,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;CAE1D"}
|
|
@@ -39,7 +39,7 @@ class VercelEmbeddingProvider {
|
|
|
39
39
|
break;
|
|
40
40
|
case 'google':
|
|
41
41
|
this.apiKey = config.apiKey || process.env.GOOGLE_API_KEY || '';
|
|
42
|
-
this.model = config.model || '
|
|
42
|
+
this.model = config.model || process.env.EMBEDDINGS_MODEL || 'gemini-embedding-001';
|
|
43
43
|
this.dimensions = config.dimensions || 768;
|
|
44
44
|
break;
|
|
45
45
|
case 'amazon_bedrock':
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"noop-provider.d.ts","sourceRoot":"","sources":["../../../src/core/providers/noop-provider.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,UAAU,EACV,UAAU,EACV,aAAa,EACb,cAAc,EACf,MAAM,0BAA0B,CAAC;AAElC,qBAAa,cAAe,YAAW,UAAU;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEgD;IAErF;;OAEG;;IAKH;;OAEG;IACH,aAAa,IAAI,OAAO;IAIxB;;OAEG;IACH,eAAe,IAAI,MAAM;IAIzB;;OAEG;IACH,eAAe,IAAI,MAAM;IAIzB;;OAEG;IACH,YAAY,IAAI,MAAM;IAItB;;OAEG;
|
|
1
|
+
{"version":3,"file":"noop-provider.d.ts","sourceRoot":"","sources":["../../../src/core/providers/noop-provider.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,UAAU,EACV,UAAU,EACV,aAAa,EACb,cAAc,EACf,MAAM,0BAA0B,CAAC;AAElC,qBAAa,cAAe,YAAW,UAAU;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,aAAa,CAEgD;IAErF;;OAEG;;IAKH;;OAEG;IACH,aAAa,IAAI,OAAO;IAIxB;;OAEG;IACH,eAAe,IAAI,MAAM;IAIzB;;OAEG;IACH,eAAe,IAAI,MAAM;IAIzB;;OAEG;IACH,YAAY,IAAI,MAAM;IAItB;;OAEG;IACG,WAAW,CACf,QAAQ,EAAE,MAAM,EAChB,UAAU,CAAC,EAAE,MAAM,EACnB,kBAAkB,CAAC,EAAE;QACnB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC,UAAU,CAAC;IAItB;;OAEG;IACG,QAAQ,CAAC,OAAO,EAAE,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC;CAGhE"}
|
|
@@ -18,7 +18,6 @@ export declare class VercelProvider implements AIProvider {
|
|
|
18
18
|
getProviderType(): string;
|
|
19
19
|
getDefaultModel(): string;
|
|
20
20
|
getModelName(): string;
|
|
21
|
-
getSDKProvider(): string;
|
|
22
21
|
isInitialized(): boolean;
|
|
23
22
|
private logDebugIfEnabled;
|
|
24
23
|
sendMessage(message: string, operation?: string, evaluationContext?: {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"vercel-provider.d.ts","sourceRoot":"","sources":["../../../src/core/providers/vercel-provider.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AASH,OAAO,EACL,UAAU,EACV,UAAU,EACV,gBAAgB,EAChB,cAAc,EACd,aAAa,EACd,MAAM,0BAA0B,CAAC;AAiBlC,qBAAa,cAAe,YAAW,UAAU;IAC/C,OAAO,CAAC,YAAY,CAAoB;IACxC,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,SAAS,CAAU;IAC3B,OAAO,CAAC,OAAO,CAAC,CAAS;IACzB,OAAO,CAAC,aAAa,CAAM;gBAEf,MAAM,EAAE,gBAAgB;IAWpC,OAAO,CAAC,qBAAqB;IAY7B,OAAO,CAAC,eAAe;IAqFvB,eAAe,IAAI,MAAM;IAIzB,eAAe,IAAI,MAAM;IAIzB,YAAY,IAAI,MAAM;IAItB,
|
|
1
|
+
{"version":3,"file":"vercel-provider.d.ts","sourceRoot":"","sources":["../../../src/core/providers/vercel-provider.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AASH,OAAO,EACL,UAAU,EACV,UAAU,EACV,gBAAgB,EAChB,cAAc,EACd,aAAa,EACd,MAAM,0BAA0B,CAAC;AAiBlC,qBAAa,cAAe,YAAW,UAAU;IAC/C,OAAO,CAAC,YAAY,CAAoB;IACxC,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,SAAS,CAAU;IAC3B,OAAO,CAAC,OAAO,CAAC,CAAS;IACzB,OAAO,CAAC,aAAa,CAAM;gBAEf,MAAM,EAAE,gBAAgB;IAWpC,OAAO,CAAC,qBAAqB;IAY7B,OAAO,CAAC,eAAe;IAqFvB,eAAe,IAAI,MAAM;IAIzB,eAAe,IAAI,MAAM;IAIzB,YAAY,IAAI,MAAM;IAItB,aAAa,IAAI,OAAO;IAIxB,OAAO,CAAC,iBAAiB;IAyBnB,WAAW,CACf,OAAO,EAAE,MAAM,EACf,SAAS,GAAE,MAAkB,EAC7B,iBAAiB,CAAC,EAAE;QAClB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC,UAAU,CAAC;IAoJtB;;;;;;;;;;;;OAYG;IACG,QAAQ,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC;CAmW/D"}
|
|
@@ -121,7 +121,7 @@ class VercelProvider {
|
|
|
121
121
|
}
|
|
122
122
|
}
|
|
123
123
|
getProviderType() {
|
|
124
|
-
return
|
|
124
|
+
return this.providerType;
|
|
125
125
|
}
|
|
126
126
|
getDefaultModel() {
|
|
127
127
|
return model_config_1.CURRENT_MODELS[this.providerType];
|
|
@@ -129,9 +129,6 @@ class VercelProvider {
|
|
|
129
129
|
getModelName() {
|
|
130
130
|
return this.model;
|
|
131
131
|
}
|
|
132
|
-
getSDKProvider() {
|
|
133
|
-
return this.providerType;
|
|
134
|
-
}
|
|
135
132
|
isInitialized() {
|
|
136
133
|
return this.modelInstance !== undefined;
|
|
137
134
|
}
|
|
@@ -9,8 +9,8 @@ export declare const ANSWERQUESTION_TOOL_DESCRIPTION = "Process user answers and
|
|
|
9
9
|
export declare const ANSWERQUESTION_TOOL_INPUT_SCHEMA: {
|
|
10
10
|
solutionId: z.ZodString;
|
|
11
11
|
stage: z.ZodEnum<{
|
|
12
|
-
basic: "basic";
|
|
13
12
|
required: "required";
|
|
13
|
+
basic: "basic";
|
|
14
14
|
advanced: "advanced";
|
|
15
15
|
open: "open";
|
|
16
16
|
}>;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@vfarcic/dot-ai",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.151.0",
|
|
4
4
|
"description": "AI-powered development productivity platform that enhances software development workflows through intelligent automation and AI-driven assistance",
|
|
5
5
|
"mcpName": "io.github.vfarcic/dot-ai",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -18,16 +18,16 @@
|
|
|
18
18
|
"test:integration:server": "KUBECONFIG=./kubeconfig-test.yaml PORT=3456 DOT_AI_SESSION_DIR=./tmp/sessions TRANSPORT_TYPE=http QDRANT_URL=http://localhost:6335 QDRANT_CAPABILITIES_COLLECTION=capabilities-policies ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY OPENAI_API_KEY=$OPENAI_API_KEY node dist/mcp/server.js",
|
|
19
19
|
"test:integration": "./tests/integration/infrastructure/run-integration-tests.sh",
|
|
20
20
|
"test:integration:watch": "vitest --config=vitest.integration.config.ts --test-timeout=1200000",
|
|
21
|
-
"test:integration:sonnet": "AI_PROVIDER=anthropic
|
|
22
|
-
"test:integration:opus": "AI_PROVIDER=anthropic_opus
|
|
23
|
-
"test:integration:haiku": "AI_PROVIDER=anthropic_haiku
|
|
24
|
-
"test:integration:gpt": "AI_PROVIDER=openai
|
|
25
|
-
"test:integration:gemini": "AI_PROVIDER=google
|
|
26
|
-
"test:integration:grok": "AI_PROVIDER=xai
|
|
27
|
-
"test:integration:kimi": "AI_PROVIDER=kimi
|
|
28
|
-
"test:integration:kimi-thinking": "AI_PROVIDER=kimi_thinking
|
|
29
|
-
"test:integration:bedrock": "AI_PROVIDER=amazon_bedrock AI_MODEL=global.anthropic.claude-sonnet-4-20250514-v1:0
|
|
30
|
-
"test:integration:custom-endpoint": "AI_PROVIDER=openai
|
|
21
|
+
"test:integration:sonnet": "AI_PROVIDER=anthropic DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
22
|
+
"test:integration:opus": "AI_PROVIDER=anthropic_opus DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
23
|
+
"test:integration:haiku": "AI_PROVIDER=anthropic_haiku DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
24
|
+
"test:integration:gpt": "AI_PROVIDER=openai DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
25
|
+
"test:integration:gemini": "AI_PROVIDER=google DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
26
|
+
"test:integration:grok": "AI_PROVIDER=xai DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
27
|
+
"test:integration:kimi": "AI_PROVIDER=kimi DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
28
|
+
"test:integration:kimi-thinking": "AI_PROVIDER=kimi_thinking DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
29
|
+
"test:integration:bedrock": "AI_PROVIDER=amazon_bedrock AI_MODEL=global.anthropic.claude-sonnet-4-20250514-v1:0 DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
30
|
+
"test:integration:custom-endpoint": "AI_PROVIDER=openai DEBUG_DOT_AI=true ./tests/integration/infrastructure/run-integration-tests.sh",
|
|
31
31
|
"eval:comparative": "DEBUG_DOT_AI=true npx tsx src/evaluation/eval-runner.ts",
|
|
32
32
|
"eval:platform-synthesis": "DEBUG_DOT_AI=true npx tsx src/evaluation/run-platform-synthesis.ts",
|
|
33
33
|
"clean": "rm -rf dist",
|
|
@@ -100,7 +100,6 @@
|
|
|
100
100
|
"@ai-sdk/google": "^2.0.17",
|
|
101
101
|
"@ai-sdk/openai": "^2.0.42",
|
|
102
102
|
"@ai-sdk/xai": "^2.0.26",
|
|
103
|
-
"@anthropic-ai/sdk": "^0.65.0",
|
|
104
103
|
"@kubernetes/client-node": "^1.3.0",
|
|
105
104
|
"@modelcontextprotocol/sdk": "^1.13.2",
|
|
106
105
|
"@openrouter/ai-sdk-provider": "^1.2.0",
|
|
@@ -312,22 +312,32 @@ Progress: X% complete - [next major milestone]"
|
|
|
312
312
|
|
|
313
313
|
**Note**: Do NOT push commits unless explicitly requested by the user. Commits preserve local progress checkpoints without affecting remote branches.
|
|
314
314
|
|
|
315
|
-
## Step 9:
|
|
315
|
+
## Step 9: Next Steps Based on PRD Status
|
|
316
316
|
|
|
317
|
-
After completing the PRD update and committing changes,
|
|
317
|
+
After completing the PRD update and committing changes, guide the user based on completion status:
|
|
318
|
+
|
|
319
|
+
### If PRD has remaining tasks
|
|
318
320
|
|
|
319
321
|
---
|
|
320
322
|
|
|
321
323
|
**PRD progress updated and committed.**
|
|
322
324
|
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
To continue working on the next task:
|
|
325
|
+
To continue working on this PRD:
|
|
326
326
|
1. Clear/reset the conversation context
|
|
327
|
-
2. Run the `prd-next` prompt to get the next task
|
|
327
|
+
2. Run the `prd-next` prompt to get the next task
|
|
328
|
+
|
|
329
|
+
---
|
|
330
|
+
|
|
331
|
+
### If PRD is 100% complete
|
|
328
332
|
|
|
329
|
-
|
|
333
|
+
---
|
|
334
|
+
|
|
335
|
+
**PRD #X is complete!**
|
|
336
|
+
|
|
337
|
+
To finalize:
|
|
338
|
+
1. Clear/reset the conversation context
|
|
339
|
+
2. Run the `prd-done` prompt to move the PRD to the done folder and close the GitHub issue
|
|
330
340
|
|
|
331
341
|
---
|
|
332
342
|
|
|
333
|
-
|
|
343
|
+
*Note: Different agents/clients may have different syntax for executing prompts (e.g., `/prd-next`, `/prd-done` in Claude Code, or other syntax in different MCP clients).*
|
|
@@ -1,51 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Anthropic AI Provider Implementation
|
|
3
|
-
*
|
|
4
|
-
* Implements AIProvider interface using Anthropic SDK directly.
|
|
5
|
-
* Supports streaming for long operations and debug logging.
|
|
6
|
-
*/
|
|
7
|
-
import { AIProvider, AIResponse, AIProviderConfig, ToolLoopConfig, AgenticResult } from '../ai-provider.interface';
|
|
8
|
-
export declare class AnthropicProvider implements AIProvider {
|
|
9
|
-
private client;
|
|
10
|
-
private apiKey;
|
|
11
|
-
private providerType;
|
|
12
|
-
private model;
|
|
13
|
-
private debugMode;
|
|
14
|
-
constructor(config: AIProviderConfig);
|
|
15
|
-
private validateApiKey;
|
|
16
|
-
getProviderType(): string;
|
|
17
|
-
getDefaultModel(): string;
|
|
18
|
-
getModelName(): string;
|
|
19
|
-
getSDKProvider(): string;
|
|
20
|
-
isInitialized(): boolean;
|
|
21
|
-
/**
|
|
22
|
-
* Helper method to log debug information if debug mode is enabled
|
|
23
|
-
*/
|
|
24
|
-
private logDebugIfEnabled;
|
|
25
|
-
sendMessage(message: string, operation?: string, evaluationContext?: {
|
|
26
|
-
user_intent?: string;
|
|
27
|
-
interaction_id?: string;
|
|
28
|
-
}): Promise<AIResponse>;
|
|
29
|
-
/**
|
|
30
|
-
* Agentic tool loop implementation
|
|
31
|
-
*
|
|
32
|
-
* NOTE: This method is currently NOT USED in the codebase (as of PRD #136 completion).
|
|
33
|
-
*
|
|
34
|
-
* Analysis showed that SDK-based tool loops and JSON-based agentic loops are functionally
|
|
35
|
-
* equivalent - both allow AI to decide which tools to call and when to stop. The JSON-based
|
|
36
|
-
* approach we already use provides the same capabilities without the token overhead of
|
|
37
|
-
* tool schemas in every request.
|
|
38
|
-
*
|
|
39
|
-
* This implementation is kept for potential future use cases where SDK-managed tool loops
|
|
40
|
-
* might provide advantages (e.g., better provider-specific optimizations, simpler code for
|
|
41
|
-
* highly exploratory workflows).
|
|
42
|
-
*
|
|
43
|
-
* ONLY IMPLEMENTED IN ANTHROPIC PROVIDER - VercelAIProvider does not implement this method
|
|
44
|
-
* as it's not needed for current workflows. If you need toolLoop for other providers, you'll
|
|
45
|
-
* need to implement it there as well.
|
|
46
|
-
*
|
|
47
|
-
* See PRD #136 for full architecture analysis and decision rationale.
|
|
48
|
-
*/
|
|
49
|
-
toolLoop(config: ToolLoopConfig): Promise<AgenticResult>;
|
|
50
|
-
}
|
|
51
|
-
//# sourceMappingURL=anthropic-provider.d.ts.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"anthropic-provider.d.ts","sourceRoot":"","sources":["../../../src/core/providers/anthropic-provider.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,OAAO,EACL,UAAU,EACV,UAAU,EACV,gBAAgB,EAChB,cAAc,EACd,aAAa,EACd,MAAM,0BAA0B,CAAC;AAYlC,qBAAa,iBAAkB,YAAW,UAAU;IAClD,OAAO,CAAC,MAAM,CAAY;IAC1B,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,YAAY,CAAS;IAC7B,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,SAAS,CAAU;gBAEf,MAAM,EAAE,gBAAgB;IAgBpC,OAAO,CAAC,cAAc;IAStB,eAAe,IAAI,MAAM;IAIzB,eAAe,IAAI,MAAM;IAIzB,YAAY,IAAI,MAAM;IAItB,cAAc,IAAI,MAAM;IAIxB,aAAa,IAAI,OAAO;IAIxB;;OAEG;IACH,OAAO,CAAC,iBAAiB;IAyBnB,WAAW,CACf,OAAO,EAAE,MAAM,EACf,SAAS,GAAE,MAAkB,EAC7B,iBAAiB,CAAC,EAAE;QAClB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GACA,OAAO,CAAC,UAAU,CAAC;IA4FtB;;;;;;;;;;;;;;;;;;;OAmBG;IACG,QAAQ,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC;CA4V/D"}
|
|
@@ -1,468 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
/**
|
|
3
|
-
* Anthropic AI Provider Implementation
|
|
4
|
-
*
|
|
5
|
-
* Implements AIProvider interface using Anthropic SDK directly.
|
|
6
|
-
* Supports streaming for long operations and debug logging.
|
|
7
|
-
*/
|
|
8
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
9
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
10
|
-
};
|
|
11
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
-
exports.AnthropicProvider = void 0;
|
|
13
|
-
const sdk_1 = __importDefault(require("@anthropic-ai/sdk"));
|
|
14
|
-
const provider_debug_utils_1 = require("./provider-debug-utils");
|
|
15
|
-
const model_config_1 = require("../model-config");
|
|
16
|
-
const ai_tracing_1 = require("../tracing/ai-tracing");
|
|
17
|
-
const api_1 = require("@opentelemetry/api");
|
|
18
|
-
class AnthropicProvider {
|
|
19
|
-
client;
|
|
20
|
-
apiKey;
|
|
21
|
-
providerType;
|
|
22
|
-
model;
|
|
23
|
-
debugMode;
|
|
24
|
-
constructor(config) {
|
|
25
|
-
this.apiKey = config.apiKey;
|
|
26
|
-
this.providerType = config.provider; // Store 'anthropic' or 'anthropic_haiku'
|
|
27
|
-
this.model = config.model || this.getDefaultModel();
|
|
28
|
-
this.debugMode = config.debugMode ?? process.env.DEBUG_DOT_AI === 'true';
|
|
29
|
-
this.validateApiKey();
|
|
30
|
-
this.client = new sdk_1.default({
|
|
31
|
-
apiKey: this.apiKey,
|
|
32
|
-
// Enable 1M token context window for Claude Sonnet 4 (5x increase from 200K)
|
|
33
|
-
defaultHeaders: {
|
|
34
|
-
'anthropic-beta': 'context-1m-2025-08-07',
|
|
35
|
-
},
|
|
36
|
-
});
|
|
37
|
-
}
|
|
38
|
-
validateApiKey() {
|
|
39
|
-
if (!this.apiKey) {
|
|
40
|
-
throw new Error('API key is required for Anthropic provider');
|
|
41
|
-
}
|
|
42
|
-
if (this.apiKey.length === 0) {
|
|
43
|
-
throw new Error('Invalid API key: API key cannot be empty');
|
|
44
|
-
}
|
|
45
|
-
}
|
|
46
|
-
getProviderType() {
|
|
47
|
-
return 'anthropic';
|
|
48
|
-
}
|
|
49
|
-
getDefaultModel() {
|
|
50
|
-
return (0, model_config_1.getCurrentModel)(this.providerType);
|
|
51
|
-
}
|
|
52
|
-
getModelName() {
|
|
53
|
-
return this.model;
|
|
54
|
-
}
|
|
55
|
-
getSDKProvider() {
|
|
56
|
-
return 'anthropic';
|
|
57
|
-
}
|
|
58
|
-
isInitialized() {
|
|
59
|
-
return this.client !== undefined;
|
|
60
|
-
}
|
|
61
|
-
/**
|
|
62
|
-
* Helper method to log debug information if debug mode is enabled
|
|
63
|
-
*/
|
|
64
|
-
logDebugIfEnabled(operation, prompt, response) {
|
|
65
|
-
if (!this.debugMode)
|
|
66
|
-
return null;
|
|
67
|
-
const debugId = (0, provider_debug_utils_1.generateDebugId)(operation);
|
|
68
|
-
(0, provider_debug_utils_1.debugLogInteraction)(debugId, prompt, response, operation, this.getProviderType(), this.model, this.debugMode);
|
|
69
|
-
// Return the actual debug file names created
|
|
70
|
-
return {
|
|
71
|
-
promptFile: `${debugId}_prompt.md`,
|
|
72
|
-
responseFile: `${debugId}_response.md`,
|
|
73
|
-
};
|
|
74
|
-
}
|
|
75
|
-
async sendMessage(message, operation = 'generic', evaluationContext) {
|
|
76
|
-
if (!this.client) {
|
|
77
|
-
throw new Error('Anthropic client not initialized');
|
|
78
|
-
}
|
|
79
|
-
return await (0, ai_tracing_1.withAITracing)({
|
|
80
|
-
provider: 'anthropic',
|
|
81
|
-
model: this.model,
|
|
82
|
-
operation: 'chat',
|
|
83
|
-
}, async () => {
|
|
84
|
-
const startTime = Date.now();
|
|
85
|
-
try {
|
|
86
|
-
// Make real API call to Anthropic with streaming
|
|
87
|
-
const stream = await this.client.messages.create({
|
|
88
|
-
model: this.model,
|
|
89
|
-
max_tokens: 64000,
|
|
90
|
-
messages: [{ role: 'user', content: message }],
|
|
91
|
-
stream: true, // Enable streaming by default to support long operations (>10 minutes)
|
|
92
|
-
});
|
|
93
|
-
let content = '';
|
|
94
|
-
let input_tokens = 0;
|
|
95
|
-
let output_tokens = 0;
|
|
96
|
-
for await (const chunk of stream) {
|
|
97
|
-
if (chunk.type === 'message_start') {
|
|
98
|
-
input_tokens = chunk.message.usage.input_tokens;
|
|
99
|
-
}
|
|
100
|
-
else if (chunk.type === 'content_block_delta') {
|
|
101
|
-
if (chunk.delta.type === 'text_delta') {
|
|
102
|
-
content += chunk.delta.text;
|
|
103
|
-
}
|
|
104
|
-
}
|
|
105
|
-
else if (chunk.type === 'message_delta') {
|
|
106
|
-
output_tokens = chunk.usage.output_tokens;
|
|
107
|
-
}
|
|
108
|
-
}
|
|
109
|
-
const response = {
|
|
110
|
-
content,
|
|
111
|
-
usage: {
|
|
112
|
-
input_tokens,
|
|
113
|
-
output_tokens,
|
|
114
|
-
},
|
|
115
|
-
};
|
|
116
|
-
// Debug log the interaction if enabled
|
|
117
|
-
this.logDebugIfEnabled(operation, message, response);
|
|
118
|
-
// PRD #154: Log evaluation dataset if evaluation context is provided
|
|
119
|
-
if (this.debugMode && evaluationContext?.interaction_id) {
|
|
120
|
-
const durationMs = Date.now() - startTime;
|
|
121
|
-
const evaluationMetrics = {
|
|
122
|
-
// Core execution data
|
|
123
|
-
operation,
|
|
124
|
-
sdk: this.getProviderType(),
|
|
125
|
-
inputTokens: input_tokens,
|
|
126
|
-
outputTokens: output_tokens,
|
|
127
|
-
durationMs,
|
|
128
|
-
// Required fields
|
|
129
|
-
iterationCount: 1,
|
|
130
|
-
toolCallCount: 0,
|
|
131
|
-
status: 'completed',
|
|
132
|
-
completionReason: 'stop',
|
|
133
|
-
modelVersion: this.model,
|
|
134
|
-
// Required evaluation context - NO DEFAULTS, must be provided
|
|
135
|
-
test_scenario: operation,
|
|
136
|
-
ai_response_summary: content,
|
|
137
|
-
user_intent: evaluationContext?.user_intent || '',
|
|
138
|
-
interaction_id: evaluationContext?.interaction_id || '',
|
|
139
|
-
};
|
|
140
|
-
(0, provider_debug_utils_1.logEvaluationDataset)(evaluationMetrics, this.debugMode);
|
|
141
|
-
}
|
|
142
|
-
return response;
|
|
143
|
-
}
|
|
144
|
-
catch (error) {
|
|
145
|
-
throw new Error(`Anthropic API error: ${error}`);
|
|
146
|
-
}
|
|
147
|
-
}, (response) => ({
|
|
148
|
-
inputTokens: response.usage.input_tokens,
|
|
149
|
-
outputTokens: response.usage.output_tokens,
|
|
150
|
-
cacheReadTokens: response.usage.cache_read_input_tokens,
|
|
151
|
-
cacheCreationTokens: response.usage.cache_creation_input_tokens,
|
|
152
|
-
}));
|
|
153
|
-
}
|
|
154
|
-
    /**
     * Agentic tool loop implementation
     *
     * NOTE: This method is currently NOT USED in the codebase (as of PRD #136 completion).
     *
     * Analysis showed that SDK-based tool loops and JSON-based agentic loops are functionally
     * equivalent - both allow AI to decide which tools to call and when to stop. The JSON-based
     * approach we already use provides the same capabilities without the token overhead of
     * tool schemas in every request.
     *
     * This implementation is kept for potential future use cases where SDK-managed tool loops
     * might provide advantages (e.g., better provider-specific optimizations, simpler code for
     * highly exploratory workflows).
     *
     * ONLY IMPLEMENTED IN ANTHROPIC PROVIDER - VercelAIProvider does not implement this method
     * as it's not needed for current workflows. If you need toolLoop for other providers, you'll
     * need to implement it there as well.
     *
     * See PRD #136 for full architecture analysis and decision rationale.
     *
     * @param config - loop configuration; fields read here: tools (AITool[]),
     *   systemPrompt, userMessage, toolExecutor(name, input), maxIterations
     *   (default 20), operation (default 'tool-loop'), onIteration callback,
     *   evaluationContext, interaction_id
     * @returns the object built by createAndLogAgenticResult (final message,
     *   iteration/tool-call counts, token totals, status)
     * @throws Error if the Anthropic client is not initialized
     */
    async toolLoop(config) {
        if (!this.client) {
            throw new Error('Anthropic client not initialized');
        }
        return await (0, ai_tracing_1.withAITracing)({
            provider: 'anthropic',
            model: this.model,
            operation: 'tool_loop',
        }, async () => {
            const startTime = Date.now();
            // Convert AITool[] to Anthropic Tool format with caching on last tool
            const tools = config.tools.map((t, index) => {
                const tool = {
                    name: t.name,
                    description: t.description,
                    input_schema: t.inputSchema,
                };
                // Add cache control to the last tool to cache the entire tools array
                // (Anthropic caches the prefix up to and including the marked block).
                if (index === config.tools.length - 1) {
                    tool.cache_control = { type: 'ephemeral' };
                }
                return tool;
            });
            // Separate system prompt with caching from user message
            const systemPrompt = [
                {
                    type: 'text',
                    text: config.systemPrompt,
                    cache_control: { type: 'ephemeral' },
                },
            ];
            // Initialize conversation history with just the user message.
            // Each loop iteration appends an assistant turn and a tool-results turn.
            const conversationHistory = [
                {
                    role: 'user',
                    content: config.userMessage,
                },
            ];
            let iterations = 0;
            const toolCallsExecuted = [];
            // Running totals across all iterations, reported via the tracing callback.
            const totalTokens = {
                input: 0,
                output: 0,
                cacheCreation: 0,
                cacheRead: 0,
            };
            const maxIterations = config.maxIterations || 20;
            const operation = config.operation || 'tool-loop';
            try {
                const tracer = api_1.trace.getTracer('dot-ai-mcp');
                while (iterations < maxIterations) {
                    iterations++;
                    // Wrap entire iteration in a span
                    // Returns result if investigation complete, null to continue
                    const iterationResult = await tracer.startActiveSpan('tool_loop_iteration', { kind: api_1.SpanKind.INTERNAL }, async (span) => {
                        try {
                            // Build current prompt for debug logging: a flattened,
                            // human-readable transcript of the conversation so far.
                            const currentPrompt = `System: ${config.systemPrompt}\n\n${conversationHistory
                                .map(msg => {
                                let content = '';
                                if (typeof msg.content === 'string') {
                                    content = msg.content;
                                }
                                else if (Array.isArray(msg.content)) {
                                    // Extract text from content blocks
                                    content = msg.content
                                        .map(block => {
                                        if (block.type === 'text') {
                                            return block.text;
                                        }
                                        else if (block.type === 'tool_use') {
                                            return `[TOOL_USE: ${block.name}]`;
                                        }
                                        else if (block.type === 'tool_result') {
                                            // tool_result content may be a plain string or
                                            // an array of typed blocks; render both forms.
                                            const content = block.content;
                                            if (typeof content === 'string') {
                                                return `[TOOL_RESULT: ${block.tool_use_id}]\n${content}`;
                                            }
                                            else if (Array.isArray(content)) {
                                                const textContent = content
                                                    .map(c => c.type === 'text' ? c.text : `[${c.type}]`)
                                                    .join(' ');
                                                return `[TOOL_RESULT: ${block.tool_use_id}]\n${textContent}`;
                                            }
                                            return `[TOOL_RESULT: ${block.tool_use_id}]`;
                                        }
                                        return `[${block.type}]`;
                                    })
                                        .join(' ');
                                }
                                else {
                                    content = '[complex_content]';
                                }
                                return `${msg.role}: ${content}`;
                            })
                                .join('\n\n')}`;
                            // Call Anthropic API with tools and cached system prompt
                            const response = await this.client.messages.create({
                                model: this.model,
                                max_tokens: 4096,
                                system: systemPrompt,
                                messages: conversationHistory,
                                tools: tools,
                            });
                            // Track token usage including cache metrics
                            totalTokens.input += response.usage.input_tokens;
                            totalTokens.output += response.usage.output_tokens;
                            // Track cache usage (available when prompt caching is used)
                            if ('cache_creation_input_tokens' in response.usage) {
                                totalTokens.cacheCreation +=
                                    response.usage.cache_creation_input_tokens || 0;
                            }
                            if ('cache_read_input_tokens' in response.usage) {
                                totalTokens.cacheRead +=
                                    response.usage.cache_read_input_tokens || 0;
                            }
                            // Check if AI wants to use tools
                            const toolUses = response.content.filter((c) => c.type === 'tool_use');
                            // Log debug for final iteration to capture complete prompts/responses for evaluation
                            let debugFiles = null;
                            if (toolUses.length === 0) {
                                const aiResponse = {
                                    content: response.content
                                        .filter((c) => c.type === 'text')
                                        .map(c => c.text)
                                        .join('\n\n'),
                                    usage: {
                                        input_tokens: response.usage.input_tokens,
                                        output_tokens: response.usage.output_tokens,
                                        cache_creation_input_tokens: response.usage
                                            .cache_creation_input_tokens,
                                        cache_read_input_tokens: response.usage
                                            .cache_read_input_tokens,
                                    },
                                };
                                // NOTE(review): uses raw config.operation (not the defaulted
                                // `operation` local) - if config.operation is unset this logs
                                // under "undefined-summary"; confirm intended.
                                debugFiles = this.logDebugIfEnabled(`${config.operation}-summary`, currentPrompt, aiResponse);
                            }
                            // If AI is done (no more tools to call), return the result
                            if (toolUses.length === 0) {
                                // AI is done - extract final text message
                                const textContent = response.content.find((c) => c.type === 'text');
                                // Return result from span callback
                                return (0, provider_debug_utils_1.createAndLogAgenticResult)({
                                    finalMessage: textContent?.text || '',
                                    iterations,
                                    toolCallsExecuted,
                                    totalTokens: {
                                        input: totalTokens.input,
                                        output: totalTokens.output,
                                        cacheCreation: totalTokens.cacheCreation,
                                        cacheRead: totalTokens.cacheRead,
                                    },
                                    status: 'success',
                                    completionReason: 'investigation_complete',
                                    modelVersion: this.model,
                                    operation: `${operation}-summary`,
                                    sdk: this.getProviderType(),
                                    startTime,
                                    debugMode: this.debugMode,
                                    debugFiles,
                                    evaluationContext: config.evaluationContext,
                                    interaction_id: config.interaction_id,
                                });
                            }
                            // Execute all requested tools in parallel
                            const toolResults = [];
                            // Create promises for parallel execution; per-tool failures are
                            // captured as error results rather than rejecting the batch.
                            const toolExecutionPromises = toolUses.map(async (toolUse) => {
                                try {
                                    const result = await config.toolExecutor(toolUse.name, toolUse.input);
                                    return {
                                        success: true,
                                        toolUse,
                                        result,
                                    };
                                }
                                catch (error) {
                                    return {
                                        success: false,
                                        toolUse,
                                        error: error instanceof Error
                                            ? error.message
                                            : String(error),
                                    };
                                }
                            });
                            // Execute all tools simultaneously
                            const executionResults = await Promise.all(toolExecutionPromises);
                            // Process results and build tool_result blocks
                            for (const executionResult of executionResults) {
                                if (executionResult.success) {
                                    toolCallsExecuted.push({
                                        tool: executionResult.toolUse.name,
                                        input: executionResult.toolUse.input,
                                        output: executionResult.result,
                                    });
                                    toolResults.push({
                                        type: 'tool_result',
                                        tool_use_id: executionResult.toolUse.id,
                                        content: JSON.stringify(executionResult.result),
                                    });
                                }
                                else {
                                    // Feed error back to AI as tool result so the model can
                                    // recover or try a different tool.
                                    toolResults.push({
                                        type: 'tool_result',
                                        tool_use_id: executionResult.toolUse.id,
                                        content: JSON.stringify({
                                            error: executionResult.error,
                                        }),
                                        is_error: true,
                                    });
                                }
                            }
                            // Add AI response and tool results to conversation history
                            conversationHistory.push({ role: 'assistant', content: response.content }, { role: 'user', content: toolResults });
                            // Invoke iteration callback if provided
                            if (config.onIteration) {
                                config.onIteration(iterations, toolCallsExecuted);
                            }
                            span.setStatus({ code: api_1.SpanStatusCode.OK });
                            // Return null to continue the loop
                            return null;
                        }
                        catch (error) {
                            span.recordException(error);
                            span.setStatus({
                                code: api_1.SpanStatusCode.ERROR,
                                message: error instanceof Error ? error.message : String(error),
                            });
                            throw error;
                        }
                        finally {
                            span.end();
                        }
                    });
                    // If iteration returned a result, investigation is complete
                    if (iterationResult) {
                        return iterationResult;
                    }
                }
                // Reached max iterations without completion
                return (0, provider_debug_utils_1.createAndLogAgenticResult)({
                    finalMessage: `Investigation incomplete - reached maximum ${maxIterations} iterations`,
                    iterations,
                    toolCallsExecuted,
                    totalTokens: {
                        input: totalTokens.input,
                        output: totalTokens.output,
                        cacheCreation: totalTokens.cacheCreation,
                        cacheRead: totalTokens.cacheRead,
                    },
                    status: 'failed',
                    completionReason: 'max_iterations',
                    modelVersion: this.model,
                    operation: `${operation}-max-iterations`,
                    sdk: this.getProviderType(),
                    startTime,
                    debugMode: this.debugMode,
                    evaluationContext: config.evaluationContext,
                    interaction_id: config.interaction_id,
                });
            }
            catch (error) {
                // Return error result with extended metrics
                return (0, provider_debug_utils_1.createAndLogAgenticResult)({
                    finalMessage: `Error during investigation: ${error instanceof Error ? error.message : String(error)}`,
                    iterations,
                    toolCallsExecuted,
                    totalTokens: {
                        input: totalTokens.input,
                        output: totalTokens.output,
                        cacheCreation: totalTokens.cacheCreation,
                        cacheRead: totalTokens.cacheRead,
                    },
                    status: 'failed',
                    completionReason: 'error',
                    modelVersion: this.model,
                    operation: `${operation}-error`,
                    sdk: this.getProviderType(),
                    startTime,
                    debugMode: this.debugMode,
                    evaluationContext: config.evaluationContext,
                    interaction_id: config.interaction_id,
                });
            }
        }, (result) => ({
            inputTokens: result.totalTokens.input,
            outputTokens: result.totalTokens.output,
            cacheReadTokens: result.totalTokens.cacheRead,
            cacheCreationTokens: result.totalTokens.cacheCreation,
        }));
    }
|
|
467
|
-
}
|
|
468
|
-
exports.AnthropicProvider = AnthropicProvider;
|