opc-agent 1.1.2 → 1.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,5 +5,5 @@ export interface LLMProvider {
5
5
  chatStream(messages: Message[], systemPrompt?: string): AsyncIterable<string>;
6
6
  }
7
7
  export declare function createProvider(name?: string, model?: string, baseUrl?: string, apiKey?: string): LLMProvider;
8
- export declare const SUPPORTED_PROVIDERS: readonly ["openai", "deepseek", "qwen"];
8
+ export declare const SUPPORTED_PROVIDERS: readonly ["openai", "deepseek", "qwen", "gemini"];
9
9
  //# sourceMappingURL=index.d.ts.map
@@ -69,20 +69,27 @@ class OpenAICompatibleProvider {
69
69
  throw new Error('No API key configured. Set OPC_LLM_API_KEY or OPENAI_API_KEY environment variable.');
70
70
  }
71
71
  const url = new URL(`${this.baseUrl}/chat/completions`);
72
+ const isGemini = url.hostname.includes('googleapis.com');
73
+ if (isGemini) {
74
+ url.searchParams.set('key', this.apiKey);
75
+ }
72
76
  const isHttps = url.protocol === 'https:';
73
77
  const lib = isHttps ? https : http;
74
78
  const postData = JSON.stringify(body);
79
+ const headers = {
80
+ 'Content-Type': 'application/json',
81
+ 'Content-Length': String(Buffer.byteLength(postData)),
82
+ };
83
+ if (!isGemini) {
84
+ headers['Authorization'] = `Bearer ${this.apiKey}`;
85
+ }
75
86
  return new Promise((resolve, reject) => {
76
87
  const req = lib.request({
77
88
  hostname: url.hostname,
78
89
  port: url.port || (isHttps ? 443 : 80),
79
- path: url.pathname,
90
+ path: url.pathname + url.search,
80
91
  method: 'POST',
81
- headers: {
82
- 'Content-Type': 'application/json',
83
- Authorization: `Bearer ${this.apiKey}`,
84
- 'Content-Length': Buffer.byteLength(postData),
85
- },
92
+ headers,
86
93
  }, (res) => {
87
94
  let data = '';
88
95
  res.on('data', (chunk) => (data += chunk.toString()));
@@ -127,6 +134,10 @@ class OpenAICompatibleProvider {
127
134
  }
128
135
  const formatted = this.formatMessages(messages, systemPrompt);
129
136
  const url = new URL(`${this.baseUrl}/chat/completions`);
137
+ const isGemini = url.hostname.includes('googleapis.com');
138
+ if (isGemini) {
139
+ url.searchParams.set('key', this.apiKey);
140
+ }
130
141
  const isHttps = url.protocol === 'https:';
131
142
  const lib = isHttps ? https : http;
132
143
  const postData = JSON.stringify({
@@ -136,17 +147,20 @@ class OpenAICompatibleProvider {
136
147
  max_tokens: 2048,
137
148
  stream: true,
138
149
  });
150
+ const streamHeaders = {
151
+ 'Content-Type': 'application/json',
152
+ 'Content-Length': String(Buffer.byteLength(postData)),
153
+ };
154
+ if (!isGemini) {
155
+ streamHeaders['Authorization'] = `Bearer ${this.apiKey}`;
156
+ }
139
157
  const response = await new Promise((resolve, reject) => {
140
158
  const req = lib.request({
141
159
  hostname: url.hostname,
142
160
  port: url.port || (isHttps ? 443 : 80),
143
- path: url.pathname,
161
+ path: url.pathname + url.search,
144
162
  method: 'POST',
145
- headers: {
146
- 'Content-Type': 'application/json',
147
- Authorization: `Bearer ${this.apiKey}`,
148
- 'Content-Length': Buffer.byteLength(postData),
149
- },
163
+ headers: streamHeaders,
150
164
  }, resolve);
151
165
  req.on('error', reject);
152
166
  req.write(postData);
@@ -183,9 +197,130 @@ class OpenAICompatibleProvider {
183
197
  }
184
198
  }
185
199
  }
200
+ class GeminiNativeProvider {
201
+ name = 'gemini';
202
+ model;
203
+ apiKey;
204
+ constructor(model, apiKey) {
205
+ this.model = model;
206
+ this.apiKey = apiKey || getApiKey();
207
+ }
208
+ buildUrl(stream) {
209
+ const action = stream ? 'streamGenerateContent?alt=sse&' : 'generateContent?';
210
+ return `https://generativelanguage.googleapis.com/v1beta/models/${this.model}:${action}key=${this.apiKey}`;
211
+ }
212
+ formatContents(messages, systemPrompt) {
213
+ const contents = [];
214
+ for (const m of messages) {
215
+ contents.push({ role: m.role === 'assistant' ? 'model' : 'user', parts: [{ text: m.content }] });
216
+ }
217
+ const result = { contents };
218
+ if (systemPrompt) {
219
+ result.systemInstruction = { parts: [{ text: systemPrompt }] };
220
+ }
221
+ return result;
222
+ }
223
+ async chat(messages, systemPrompt) {
224
+ if (!this.apiKey) {
225
+ const last = messages[messages.length - 1];
226
+ return `[gemini/${this.model} - no API key] Echo: ${last?.content ?? ''}`;
227
+ }
228
+ const body = this.formatContents(messages, systemPrompt);
229
+ const url = this.buildUrl(false);
230
+ const postData = JSON.stringify(body);
231
+ return new Promise((resolve, reject) => {
232
+ const parsedUrl = new URL(url);
233
+ const req = https.request({
234
+ hostname: parsedUrl.hostname,
235
+ path: parsedUrl.pathname + parsedUrl.search,
236
+ method: 'POST',
237
+ headers: { 'Content-Type': 'application/json', 'Content-Length': String(Buffer.byteLength(postData)) },
238
+ }, (res) => {
239
+ let data = '';
240
+ res.on('data', (chunk) => (data += chunk.toString()));
241
+ res.on('end', () => {
242
+ if (res.statusCode && res.statusCode >= 400) {
243
+ reject(new Error(`Gemini API error ${res.statusCode}: ${data}`));
244
+ return;
245
+ }
246
+ try {
247
+ const parsed = JSON.parse(data);
248
+ resolve(parsed.candidates?.[0]?.content?.parts?.[0]?.text ?? '');
249
+ }
250
+ catch {
251
+ reject(new Error(`Invalid Gemini response: ${data.slice(0, 200)}`));
252
+ }
253
+ });
254
+ });
255
+ req.on('error', reject);
256
+ req.write(postData);
257
+ req.end();
258
+ });
259
+ }
260
+ async *chatStream(messages, systemPrompt) {
261
+ if (!this.apiKey) {
262
+ const last = messages[messages.length - 1];
263
+ yield `[gemini/${this.model} - no API key] Echo: ${last?.content ?? ''}`;
264
+ return;
265
+ }
266
+ const body = this.formatContents(messages, systemPrompt);
267
+ const url = this.buildUrl(true);
268
+ const postData = JSON.stringify(body);
269
+ const parsedUrl = new URL(url);
270
+ const response = await new Promise((resolve, reject) => {
271
+ const req = https.request({
272
+ hostname: parsedUrl.hostname,
273
+ path: parsedUrl.pathname + parsedUrl.search,
274
+ method: 'POST',
275
+ headers: { 'Content-Type': 'application/json', 'Content-Length': String(Buffer.byteLength(postData)) },
276
+ }, resolve);
277
+ req.on('error', reject);
278
+ req.write(postData);
279
+ req.end();
280
+ });
281
+ if (response.statusCode && response.statusCode >= 400) {
282
+ let data = '';
283
+ for await (const chunk of response)
284
+ data += chunk.toString();
285
+ throw new Error(`Gemini API error ${response.statusCode}: ${data}`);
286
+ }
287
+ let buffer = '';
288
+ for await (const chunk of response) {
289
+ buffer += chunk.toString();
290
+ const lines = buffer.split('\n');
291
+ buffer = lines.pop() ?? '';
292
+ for (const line of lines) {
293
+ const trimmed = line.trim();
294
+ if (!trimmed.startsWith('data: '))
295
+ continue;
296
+ const data = trimmed.slice(6);
297
+ if (data === '[DONE]')
298
+ return;
299
+ try {
300
+ const parsed = JSON.parse(data);
301
+ const text = parsed.candidates?.[0]?.content?.parts?.[0]?.text;
302
+ if (text)
303
+ yield text;
304
+ }
305
+ catch { }
306
+ }
307
+ }
308
+ }
309
+ }
310
// Decide from environment configuration alone whether the native Gemini
// transport should be used.
function isGeminiNative() {
    // New-format Google keys carry the "AQ." prefix and only work against the
    // native API.
    if (getApiKey().startsWith('AQ.')) {
        return true;
    }
    // A googleapis base URL also forces native mode, unless it points at the
    // OpenAI-compatibility path.
    const configuredBase = process.env.OPC_LLM_BASE_URL || '';
    return configuredBase.includes('googleapis.com') && !configuredBase.includes('/openai');
}
186
316
// Factory: routes to the native Gemini transport for new-format keys or
// googleapis endpoints, otherwise to the OpenAI-compatible transport.
function createProvider(name = 'openai', model, baseUrl, apiKey) {
    const resolvedModel = model || process.env.OPC_LLM_MODEL || 'gpt-4o-mini';
    const resolvedKey = apiKey || getApiKey();
    // An explicitly passed "AQ."-prefixed key selects native Gemini even when
    // the environment-based detection would not.
    const wantsNativeGemini = resolvedKey.startsWith('AQ.') || isGeminiNative();
    return wantsNativeGemini
        ? new GeminiNativeProvider(resolvedModel, resolvedKey)
        : new OpenAICompatibleProvider(name, resolvedModel, baseUrl, apiKey);
}
190
- exports.SUPPORTED_PROVIDERS = ['openai', 'deepseek', 'qwen'];
325
+ exports.SUPPORTED_PROVIDERS = ['openai', 'deepseek', 'qwen', 'gemini'];
191
326
  //# sourceMappingURL=index.js.map
@@ -577,6 +577,7 @@ export declare const SpecSchema: z.ZodObject<{
577
577
  config?: Record<string, unknown> | undefined;
578
578
  }[] | undefined;
579
579
  }, {
580
+ model?: string | undefined;
580
581
  auth?: {
581
582
  enabled?: boolean | undefined;
582
583
  apiKeys?: string[] | undefined;
@@ -597,7 +598,6 @@ export declare const SpecSchema: z.ZodObject<{
597
598
  default?: string | undefined;
598
599
  allowed?: string[] | undefined;
599
600
  } | undefined;
600
- model?: string | undefined;
601
601
  systemPrompt?: string | undefined;
602
602
  skills?: {
603
603
  name: string;
@@ -995,6 +995,7 @@ export declare const OADSchema: z.ZodObject<{
995
995
  config?: Record<string, unknown> | undefined;
996
996
  }[] | undefined;
997
997
  }, {
998
+ model?: string | undefined;
998
999
  auth?: {
999
1000
  enabled?: boolean | undefined;
1000
1001
  apiKeys?: string[] | undefined;
@@ -1015,7 +1016,6 @@ export declare const OADSchema: z.ZodObject<{
1015
1016
  default?: string | undefined;
1016
1017
  allowed?: string[] | undefined;
1017
1018
  } | undefined;
1018
- model?: string | undefined;
1019
1019
  systemPrompt?: string | undefined;
1020
1020
  skills?: {
1021
1021
  name: string;
@@ -1183,6 +1183,7 @@ export declare const OADSchema: z.ZodObject<{
1183
1183
  } | undefined;
1184
1184
  };
1185
1185
  spec: {
1186
+ model?: string | undefined;
1186
1187
  auth?: {
1187
1188
  enabled?: boolean | undefined;
1188
1189
  apiKeys?: string[] | undefined;
@@ -1203,7 +1204,6 @@ export declare const OADSchema: z.ZodObject<{
1203
1204
  default?: string | undefined;
1204
1205
  allowed?: string[] | undefined;
1205
1206
  } | undefined;
1206
- model?: string | undefined;
1207
1207
  systemPrompt?: string | undefined;
1208
1208
  skills?: {
1209
1209
  name: string;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "opc-agent",
3
- "version": "1.1.2",
3
+ "version": "1.1.3",
4
4
  "description": "Open Agent Framework — Build, test, and run AI Agents for business workstations",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -51,23 +51,31 @@ class OpenAICompatibleProvider implements LLMProvider {
51
51
  }
52
52
 
53
53
  const url = new URL(`${this.baseUrl}/chat/completions`);
54
+ const isGemini = url.hostname.includes('googleapis.com');
55
+ if (isGemini) {
56
+ url.searchParams.set('key', this.apiKey);
57
+ }
54
58
  const isHttps = url.protocol === 'https:';
55
59
  const lib = isHttps ? https : http;
56
60
 
57
61
  const postData = JSON.stringify(body);
58
62
 
63
+ const headers: Record<string, string> = {
64
+ 'Content-Type': 'application/json',
65
+ 'Content-Length': String(Buffer.byteLength(postData)),
66
+ };
67
+ if (!isGemini) {
68
+ headers['Authorization'] = `Bearer ${this.apiKey}`;
69
+ }
70
+
59
71
  return new Promise((resolve, reject) => {
60
72
  const req = lib.request(
61
73
  {
62
74
  hostname: url.hostname,
63
75
  port: url.port || (isHttps ? 443 : 80),
64
- path: url.pathname,
76
+ path: url.pathname + url.search,
65
77
  method: 'POST',
66
- headers: {
67
- 'Content-Type': 'application/json',
68
- Authorization: `Bearer ${this.apiKey}`,
69
- 'Content-Length': Buffer.byteLength(postData),
70
- },
78
+ headers,
71
79
  },
72
80
  (res) => {
73
81
  let data = '';
@@ -116,6 +124,10 @@ class OpenAICompatibleProvider implements LLMProvider {
116
124
 
117
125
  const formatted = this.formatMessages(messages, systemPrompt);
118
126
  const url = new URL(`${this.baseUrl}/chat/completions`);
127
+ const isGemini = url.hostname.includes('googleapis.com');
128
+ if (isGemini) {
129
+ url.searchParams.set('key', this.apiKey);
130
+ }
119
131
  const isHttps = url.protocol === 'https:';
120
132
  const lib = isHttps ? https : http;
121
133
  const postData = JSON.stringify({
@@ -126,18 +138,22 @@ class OpenAICompatibleProvider implements LLMProvider {
126
138
  stream: true,
127
139
  });
128
140
 
141
+ const streamHeaders: Record<string, string> = {
142
+ 'Content-Type': 'application/json',
143
+ 'Content-Length': String(Buffer.byteLength(postData)),
144
+ };
145
+ if (!isGemini) {
146
+ streamHeaders['Authorization'] = `Bearer ${this.apiKey}`;
147
+ }
148
+
129
149
  const response = await new Promise<http.IncomingMessage>((resolve, reject) => {
130
150
  const req = lib.request(
131
151
  {
132
152
  hostname: url.hostname,
133
153
  port: url.port || (isHttps ? 443 : 80),
134
- path: url.pathname,
154
+ path: url.pathname + url.search,
135
155
  method: 'POST',
136
- headers: {
137
- 'Content-Type': 'application/json',
138
- Authorization: `Bearer ${this.apiKey}`,
139
- 'Content-Length': Buffer.byteLength(postData),
140
- },
156
+ headers: streamHeaders,
141
157
  },
142
158
  resolve,
143
159
  );
@@ -175,9 +191,132 @@ class OpenAICompatibleProvider implements LLMProvider {
175
191
  }
176
192
  }
177
193
 
194
+ class GeminiNativeProvider implements LLMProvider {
195
+ name = 'gemini';
196
+ private model: string;
197
+ private apiKey: string;
198
+
199
+ constructor(model: string, apiKey?: string) {
200
+ this.model = model;
201
+ this.apiKey = apiKey || getApiKey();
202
+ }
203
+
204
+ private buildUrl(stream: boolean): string {
205
+ const action = stream ? 'streamGenerateContent?alt=sse&' : 'generateContent?';
206
+ return `https://generativelanguage.googleapis.com/v1beta/models/${this.model}:${action}key=${this.apiKey}`;
207
+ }
208
+
209
+ private formatContents(messages: Message[], systemPrompt?: string): { contents: any[]; systemInstruction?: any } {
210
+ const contents: any[] = [];
211
+ for (const m of messages) {
212
+ contents.push({ role: m.role === 'assistant' ? 'model' : 'user', parts: [{ text: m.content }] });
213
+ }
214
+ const result: any = { contents };
215
+ if (systemPrompt) {
216
+ result.systemInstruction = { parts: [{ text: systemPrompt }] };
217
+ }
218
+ return result;
219
+ }
220
+
221
+ async chat(messages: Message[], systemPrompt?: string): Promise<string> {
222
+ if (!this.apiKey) {
223
+ const last = messages[messages.length - 1];
224
+ return `[gemini/${this.model} - no API key] Echo: ${last?.content ?? ''}`;
225
+ }
226
+ const body = this.formatContents(messages, systemPrompt);
227
+ const url = this.buildUrl(false);
228
+ const postData = JSON.stringify(body);
229
+
230
+ return new Promise((resolve, reject) => {
231
+ const parsedUrl = new URL(url);
232
+ const req = https.request({
233
+ hostname: parsedUrl.hostname,
234
+ path: parsedUrl.pathname + parsedUrl.search,
235
+ method: 'POST',
236
+ headers: { 'Content-Type': 'application/json', 'Content-Length': String(Buffer.byteLength(postData)) },
237
+ }, (res) => {
238
+ let data = '';
239
+ res.on('data', (chunk: Buffer) => (data += chunk.toString()));
240
+ res.on('end', () => {
241
+ if (res.statusCode && res.statusCode >= 400) { reject(new Error(`Gemini API error ${res.statusCode}: ${data}`)); return; }
242
+ try {
243
+ const parsed = JSON.parse(data);
244
+ resolve(parsed.candidates?.[0]?.content?.parts?.[0]?.text ?? '');
245
+ } catch { reject(new Error(`Invalid Gemini response: ${data.slice(0, 200)}`)); }
246
+ });
247
+ });
248
+ req.on('error', reject);
249
+ req.write(postData);
250
+ req.end();
251
+ });
252
+ }
253
+
254
+ async *chatStream(messages: Message[], systemPrompt?: string): AsyncIterable<string> {
255
+ if (!this.apiKey) {
256
+ const last = messages[messages.length - 1];
257
+ yield `[gemini/${this.model} - no API key] Echo: ${last?.content ?? ''}`;
258
+ return;
259
+ }
260
+ const body = this.formatContents(messages, systemPrompt);
261
+ const url = this.buildUrl(true);
262
+ const postData = JSON.stringify(body);
263
+ const parsedUrl = new URL(url);
264
+
265
+ const response = await new Promise<http.IncomingMessage>((resolve, reject) => {
266
+ const req = https.request({
267
+ hostname: parsedUrl.hostname,
268
+ path: parsedUrl.pathname + parsedUrl.search,
269
+ method: 'POST',
270
+ headers: { 'Content-Type': 'application/json', 'Content-Length': String(Buffer.byteLength(postData)) },
271
+ }, resolve);
272
+ req.on('error', reject);
273
+ req.write(postData);
274
+ req.end();
275
+ });
276
+
277
+ if (response.statusCode && response.statusCode >= 400) {
278
+ let data = '';
279
+ for await (const chunk of response) data += chunk.toString();
280
+ throw new Error(`Gemini API error ${response.statusCode}: ${data}`);
281
+ }
282
+
283
+ let buffer = '';
284
+ for await (const chunk of response) {
285
+ buffer += chunk.toString();
286
+ const lines = buffer.split('\n');
287
+ buffer = lines.pop() ?? '';
288
+ for (const line of lines) {
289
+ const trimmed = line.trim();
290
+ if (!trimmed.startsWith('data: ')) continue;
291
+ const data = trimmed.slice(6);
292
+ if (data === '[DONE]') return;
293
+ try {
294
+ const parsed = JSON.parse(data);
295
+ const text = parsed.candidates?.[0]?.content?.parts?.[0]?.text;
296
+ if (text) yield text;
297
+ } catch {}
298
+ }
299
+ }
300
+ }
301
+ }
302
+
303
+ function isGeminiNative(): boolean {
304
+ const baseUrl = process.env.OPC_LLM_BASE_URL || '';
305
+ const key = getApiKey();
306
+ // Use native Gemini API when: key starts with AQ. (new format) OR base URL points to googleapis
307
+ return key.startsWith('AQ.') || (baseUrl.includes('googleapis.com') && !baseUrl.includes('/openai'));
308
+ }
309
+
178
310
  export function createProvider(name: string = 'openai', model?: string, baseUrl?: string, apiKey?: string): LLMProvider {
179
311
  const finalModel = model || process.env.OPC_LLM_MODEL || 'gpt-4o-mini';
312
+ const finalKey = apiKey || getApiKey();
313
+
314
+ // Auto-detect Gemini native when key is new format
315
+ if (finalKey.startsWith('AQ.') || isGeminiNative()) {
316
+ return new GeminiNativeProvider(finalModel, finalKey);
317
+ }
318
+
180
319
  return new OpenAICompatibleProvider(name, finalModel, baseUrl, apiKey);
181
320
  }
182
321
 
183
- export const SUPPORTED_PROVIDERS = ['openai', 'deepseek', 'qwen'] as const;
322
+ export const SUPPORTED_PROVIDERS = ['openai', 'deepseek', 'qwen', 'gemini'] as const;