@sparkleideas/providers 3.5.2-patch.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,356 @@
1
+ #!/usr/bin/env npx tsx
2
+ /**
3
+ * Quick Provider Test Script
4
+ *
5
+ * Tests all available providers using .env credentials
6
+ *
7
+ * Usage:
8
+ * cd v3/@sparkleideas/providers
9
+ * npm run test:quick
10
+ *
11
+ * Or directly:
12
+ * npx tsx src/__tests__/quick-test.ts
13
+ */
14
+
15
+ import { config } from 'dotenv';
16
+ import { resolve, dirname } from 'path';
17
+ import { fileURLToPath } from 'url';
18
+
19
+ const __dirname = dirname(fileURLToPath(import.meta.url));
20
+
21
+ // Load .env from project root
22
+ config({ path: resolve(__dirname, '../../../../../.env') });
23
+
24
+ import {
25
+ AnthropicProvider,
26
+ OpenAIProvider,
27
+ GoogleProvider,
28
+ OllamaProvider,
29
+ RuVectorProvider,
30
+ createProviderManager,
31
+ LLMRequest,
32
+ } from '../index.js';
33
+ import { consoleLogger } from '../base-provider.js';
34
+
35
+ const TEST_PROMPT = 'Say "Hello from Claude Flow V3!" Be brief.';
36
+
37
+ const createTestRequest = (model?: string): LLMRequest => ({
38
+ messages: [{ role: 'user', content: TEST_PROMPT }],
39
+ model,
40
+ maxTokens: 50,
41
+ temperature: 0.1,
42
+ requestId: `test-${Date.now()}`,
43
+ });
44
+
45
+ async function testAnthropic() {
46
+ const apiKey = process.env.ANTHROPIC_API_KEY;
47
+ if (!apiKey) {
48
+ console.log('ā­ļø Skipping Anthropic - no API key');
49
+ return null;
50
+ }
51
+
52
+ console.log('\nšŸ”· Testing Anthropic Claude...');
53
+
54
+ const provider = new AnthropicProvider({
55
+ config: {
56
+ provider: 'anthropic',
57
+ apiKey,
58
+ model: 'claude-3-haiku-20240307', // Use cheaper, widely-available model
59
+ maxTokens: 100,
60
+ },
61
+ logger: consoleLogger,
62
+ });
63
+
64
+ try {
65
+ await provider.initialize();
66
+ const response = await provider.complete(createTestRequest());
67
+
68
+ console.log('āœ… Anthropic Response:', response.content);
69
+ console.log(' Tokens:', response.usage);
70
+ console.log(' Cost:', response.cost);
71
+
72
+ provider.destroy();
73
+ return response;
74
+ } catch (error) {
75
+ console.error('āŒ Anthropic Error:', error);
76
+ provider.destroy();
77
+ return null;
78
+ }
79
+ }
80
+
81
+ async function testGoogle() {
82
+ const apiKey = process.env.GOOGLE_GEMINI_API_KEY;
83
+ if (!apiKey) {
84
+ console.log('ā­ļø Skipping Google - no API key');
85
+ return null;
86
+ }
87
+
88
+ console.log('\nšŸ”· Testing Google Gemini...');
89
+
90
+ const provider = new GoogleProvider({
91
+ config: {
92
+ provider: 'google',
93
+ apiKey,
94
+ model: 'gemini-2.0-flash',
95
+ maxTokens: 100,
96
+ },
97
+ logger: consoleLogger,
98
+ });
99
+
100
+ try {
101
+ await provider.initialize();
102
+ const response = await provider.complete(createTestRequest());
103
+
104
+ console.log('āœ… Google Response:', response.content);
105
+ console.log(' Tokens:', response.usage);
106
+ console.log(' Cost:', response.cost);
107
+
108
+ provider.destroy();
109
+ return response;
110
+ } catch (error) {
111
+ console.error('āŒ Google Error:', error);
112
+ provider.destroy();
113
+ return null;
114
+ }
115
+ }
116
+
117
+ async function testOpenRouter() {
118
+ const apiKey = process.env.OPENROUTER_API_KEY;
119
+ if (!apiKey) {
120
+ console.log('ā­ļø Skipping OpenRouter - no API key');
121
+ return null;
122
+ }
123
+
124
+ console.log('\nšŸ”· Testing OpenRouter (GPT-4o-mini)...');
125
+
126
+ const provider = new OpenAIProvider({
127
+ config: {
128
+ provider: 'openai',
129
+ apiKey,
130
+ apiUrl: 'https://openrouter.ai/api/v1',
131
+ model: 'openai/gpt-4o-mini',
132
+ maxTokens: 100,
133
+ providerOptions: {
134
+ headers: {
135
+ 'HTTP-Referer': 'https://claude-flow.dev',
136
+ 'X-Title': 'Claude Flow V3 Test',
137
+ },
138
+ },
139
+ },
140
+ logger: consoleLogger,
141
+ });
142
+
143
+ try {
144
+ await provider.initialize();
145
+ const response = await provider.complete(createTestRequest('openai/gpt-4o-mini'));
146
+
147
+ console.log('āœ… OpenRouter Response:', response.content);
148
+ console.log(' Tokens:', response.usage);
149
+
150
+ provider.destroy();
151
+ return response;
152
+ } catch (error) {
153
+ console.error('āŒ OpenRouter Error:', error);
154
+ provider.destroy();
155
+ return null;
156
+ }
157
+ }
158
+
159
+ async function testOllama() {
160
+ console.log('\nšŸ”· Testing Ollama (local)...');
161
+
162
+ const provider = new OllamaProvider({
163
+ config: {
164
+ provider: 'ollama',
165
+ apiUrl: 'http://localhost:11434',
166
+ model: 'qwen2.5:0.5b',
167
+ maxTokens: 100,
168
+ },
169
+ logger: consoleLogger,
170
+ });
171
+
172
+ try {
173
+ await provider.initialize();
174
+ const response = await provider.complete(createTestRequest('qwen2.5:0.5b'));
175
+
176
+ console.log('āœ… Ollama Response:', response.content);
177
+ console.log(' Tokens:', response.usage);
178
+ console.log(' Cost: $0.00 (local)');
179
+
180
+ provider.destroy();
181
+ return response;
182
+ } catch (error: any) {
183
+ if (error.message?.includes('ECONNREFUSED') || error.message?.includes('fetch failed')) {
184
+ console.log('ā­ļø Skipping Ollama - not running locally');
185
+ console.log(' To test: ollama pull qwen2.5:0.5b && ollama serve');
186
+ } else {
187
+ console.error('āŒ Ollama Error:', error.message);
188
+ }
189
+ provider.destroy();
190
+ return null;
191
+ }
192
+ }
193
+
194
+ async function testRuVector() {
195
+ console.log('\nšŸ”· Testing RuVector (SONA + Local Qwen)...');
196
+
197
+ const provider = new RuVectorProvider({
198
+ config: {
199
+ provider: 'ruvector',
200
+ model: 'qwen2.5:0.5b',
201
+ maxTokens: 100,
202
+ providerOptions: {
203
+ sonaEnabled: true,
204
+ hnswEnabled: true,
205
+ fastgrnnEnabled: true,
206
+ localModel: 'qwen2.5:0.5b',
207
+ ollamaUrl: 'http://localhost:11434',
208
+ },
209
+ },
210
+ logger: consoleLogger,
211
+ });
212
+
213
+ try {
214
+ await provider.initialize();
215
+ const response = await provider.complete(createTestRequest('qwen2.5:0.5b'));
216
+
217
+ console.log('āœ… RuVector Response:', response.content);
218
+ console.log(' Tokens:', response.usage);
219
+
220
+ // Show SONA metrics
221
+ try {
222
+ const sonaMetrics = await provider.getSonaMetrics();
223
+ console.log(' SONA Metrics:', sonaMetrics);
224
+ } catch {
225
+ console.log(' SONA: Not available (optional)');
226
+ }
227
+
228
+ provider.destroy();
229
+ return response;
230
+ } catch (error: any) {
231
+ if (error.message?.includes('ECONNREFUSED') || error.message?.includes('fetch failed')) {
232
+ console.log('ā­ļø Skipping RuVector - Ollama not running locally');
233
+ console.log(' To test: ollama pull qwen2.5:0.5b && ollama serve');
234
+ } else {
235
+ console.error('āŒ RuVector Error:', error.message);
236
+ }
237
+ provider.destroy();
238
+ return null;
239
+ }
240
+ }
241
+
242
+ async function testProviderManager() {
243
+ console.log('\nšŸ”· Testing Provider Manager (multi-provider)...');
244
+
245
+ const providers = [];
246
+
247
+ if (process.env.ANTHROPIC_API_KEY) {
248
+ providers.push({
249
+ provider: 'anthropic' as const,
250
+ apiKey: process.env.ANTHROPIC_API_KEY,
251
+ model: 'claude-3-haiku-20240307',
252
+ maxTokens: 100,
253
+ });
254
+ }
255
+
256
+ // Add OpenRouter as second provider for load balancing/failover
257
+ if (process.env.OPENROUTER_API_KEY) {
258
+ providers.push({
259
+ provider: 'openai' as const, // OpenRouter uses OpenAI-compatible API
260
+ apiKey: process.env.OPENROUTER_API_KEY,
261
+ apiUrl: 'https://openrouter.ai/api/v1',
262
+ model: 'openai/gpt-4o-mini',
263
+ maxTokens: 100,
264
+ } as any);
265
+ }
266
+
267
+ if (providers.length === 0) {
268
+ console.log('ā­ļø Skipping Provider Manager - no cloud API keys');
269
+ return null;
270
+ }
271
+
272
+ try {
273
+ const manager = await createProviderManager({
274
+ providers,
275
+ loadBalancing: {
276
+ enabled: true,
277
+ strategy: 'round-robin',
278
+ },
279
+ fallback: {
280
+ enabled: true,
281
+ maxAttempts: 2,
282
+ },
283
+ cache: {
284
+ enabled: true,
285
+ ttl: 60000,
286
+ maxSize: 100,
287
+ },
288
+ }, consoleLogger);
289
+
290
+ console.log(' Active providers:', manager.listProviders());
291
+
292
+ // Make request
293
+ const response = await manager.complete(createTestRequest());
294
+ console.log('āœ… Manager Response:', response.content);
295
+ console.log(' Used provider:', response.provider);
296
+
297
+ // Test cache
298
+ console.log(' Testing cache...');
299
+ const start = Date.now();
300
+ const cached = await manager.complete(createTestRequest());
301
+ const cacheTime = Date.now() - start;
302
+ console.log(` Cache hit time: ${cacheTime}ms`);
303
+
304
+ manager.destroy();
305
+ return response;
306
+ } catch (error) {
307
+ console.error('āŒ Manager Error:', error);
308
+ return null;
309
+ }
310
+ }
311
+
312
+ async function main() {
313
+ console.log('╔════════════════════════════════════════════════╗');
314
+ console.log('ā•‘ Claude Flow V3 - Provider Test Suite ā•‘');
315
+ console.log('ā•šā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•');
316
+
317
+ console.log('\nšŸ“‹ Loaded .env from:', resolve(__dirname, '../../../../../.env'));
318
+ console.log('\nšŸ”‘ Available API Keys:');
319
+ console.log(' ANTHROPIC_API_KEY:', process.env.ANTHROPIC_API_KEY ? 'āœ“' : 'āœ—');
320
+ console.log(' GOOGLE_GEMINI_API_KEY:', process.env.GOOGLE_GEMINI_API_KEY ? 'āœ“' : 'āœ—');
321
+ console.log(' OPENROUTER_API_KEY:', process.env.OPENROUTER_API_KEY ? 'āœ“' : 'āœ—');
322
+
323
+ const results = {
324
+ anthropic: await testAnthropic(),
325
+ google: await testGoogle(),
326
+ openrouter: await testOpenRouter(),
327
+ ollama: await testOllama(),
328
+ ruvector: await testRuVector(),
329
+ manager: await testProviderManager(),
330
+ };
331
+
332
+ // Summary
333
+ console.log('\n╔════════════════════════════════════════════════╗');
334
+ console.log('ā•‘ Test Summary ā•‘');
335
+ console.log('ā•šā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•');
336
+
337
+ const passed = Object.entries(results).filter(([_, r]) => r !== null).length;
338
+ const total = Object.keys(results).length;
339
+
340
+ Object.entries(results).forEach(([name, result]) => {
341
+ const status = result !== null ? 'āœ…' : 'ā­ļø';
342
+ console.log(` ${status} ${name}`);
343
+ });
344
+
345
+ console.log(`\nšŸ“Š Passed: ${passed}/${total}`);
346
+
347
+ if (results.ollama === null && results.ruvector === null) {
348
+ console.log('\nšŸ’” To test local models:');
349
+ console.log(' 1. Install Ollama: https://ollama.ai');
350
+ console.log(' 2. Pull Qwen: ollama pull qwen2.5:0.5b');
351
+ console.log(' 3. Start server: ollama serve');
352
+ console.log(' 4. Re-run this test');
353
+ }
354
+ }
355
+
356
// Kick off the suite; surface any unhandled failure on stderr.
main().catch((error) => console.error(error));