@imisbahk/hive 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/.gitattributes +7 -0
  2. package/.rocket/README.md +9 -9
  3. package/dist/agent/agent.d.ts +4 -0
  4. package/dist/agent/agent.d.ts.map +1 -1
  5. package/dist/agent/agent.js +40 -4
  6. package/dist/agent/agent.js.map +1 -1
  7. package/dist/cli/commands/chat.d.ts.map +1 -1
  8. package/dist/cli/commands/chat.js +642 -12
  9. package/dist/cli/commands/chat.js.map +1 -1
  10. package/dist/cli/commands/doctor.d.ts +8 -0
  11. package/dist/cli/commands/doctor.d.ts.map +1 -0
  12. package/dist/cli/commands/doctor.js +503 -0
  13. package/dist/cli/commands/doctor.js.map +1 -0
  14. package/dist/cli/commands/memory.d.ts +3 -0
  15. package/dist/cli/commands/memory.d.ts.map +1 -0
  16. package/dist/cli/commands/memory.js +104 -0
  17. package/dist/cli/commands/memory.js.map +1 -0
  18. package/dist/cli/index.js +5 -1
  19. package/dist/cli/index.js.map +1 -1
  20. package/dist/providers/api-key.js +1 -1
  21. package/dist/providers/api-key.js.map +1 -1
  22. package/dist/providers/base.js +1 -1
  23. package/dist/providers/base.js.map +1 -1
  24. package/dist/providers/openai-compatible.js +2 -2
  25. package/dist/providers/openai-compatible.js.map +1 -1
  26. package/dist/storage/db.d.ts +31 -2
  27. package/dist/storage/db.d.ts.map +1 -1
  28. package/dist/storage/db.js +165 -3
  29. package/dist/storage/db.js.map +1 -1
  30. package/dist/storage/schema.d.ts +12 -1
  31. package/dist/storage/schema.d.ts.map +1 -1
  32. package/dist/storage/schema.js +29 -1
  33. package/dist/storage/schema.js.map +1 -1
  34. package/package.json +8 -2
  35. package/.github/workflows/publish.yml +0 -31
  36. package/.rocket/ARCHITECTURE.md +0 -7
  37. package/.rocket/SYMBOLS.md +0 -425
  38. package/001-local-first-storage.md +0 -43
  39. package/003-memory-architechture.md +0 -71
  40. package/CONTRIBUTING.md +0 -150
  41. package/FEATURES.md +0 -55
  42. package/index.md +0 -16
  43. package/prompts/Behaviour.md +0 -23
  44. package/prompts/Browser.md +0 -13
  45. package/prompts/Code.md +0 -12
  46. package/prompts/Debugging.md +0 -15
  47. package/prompts/Execution.md +0 -13
  48. package/prompts/Memory.md +0 -11
  49. package/prompts/Planning.md +0 -13
  50. package/prompts/Product.md +0 -14
  51. package/prompts/Review.md +0 -15
  52. package/prompts/Safety.md +0 -12
  53. package/prompts/Search.md +0 -14
  54. package/prompts/System.md +0 -6
  55. package/prompts/Tools.md +0 -14
  56. package/prompts/Writing.md +0 -13
  57. package/releases/v1/v0.1/RELEASE-NOTES.md +0 -46
  58. package/src/agent/agent.ts +0 -595
  59. package/src/agent/index.ts +0 -2
  60. package/src/browser/browser.ts +0 -410
  61. package/src/cli/commands/chat.ts +0 -864
  62. package/src/cli/commands/config.ts +0 -610
  63. package/src/cli/commands/init.ts +0 -288
  64. package/src/cli/commands/nuke.ts +0 -64
  65. package/src/cli/commands/status.ts +0 -170
  66. package/src/cli/helpers/providerPrompts.ts +0 -192
  67. package/src/cli/index.ts +0 -66
  68. package/src/cli/theme.ts +0 -88
  69. package/src/cli/ui.ts +0 -127
  70. package/src/providers/anthropic.ts +0 -146
  71. package/src/providers/api-key.ts +0 -23
  72. package/src/providers/base.ts +0 -409
  73. package/src/providers/google.ts +0 -21
  74. package/src/providers/groq.ts +0 -21
  75. package/src/providers/index.ts +0 -65
  76. package/src/providers/mistral.ts +0 -21
  77. package/src/providers/ollama.ts +0 -22
  78. package/src/providers/openai-compatible.ts +0 -82
  79. package/src/providers/openai.ts +0 -21
  80. package/src/providers/openrouter.ts +0 -21
  81. package/src/providers/together.ts +0 -21
  82. package/src/storage/db.ts +0 -476
  83. package/src/storage/schema.ts +0 -116
  84. package/tsconfig.json +0 -51
@@ -1,409 +0,0 @@
1
- import fetch, { type Response } from "node-fetch";
2
-
3
- export const SUPPORTED_PROVIDER_NAMES = [
4
- "openai",
5
- "anthropic",
6
- "ollama",
7
- "groq",
8
- "mistral",
9
- "google",
10
- "openrouter",
11
- "together",
12
- ] as const;
13
-
14
- export type ProviderName = (typeof SUPPORTED_PROVIDER_NAMES)[number];
15
- export type ProviderMessageRole = "system" | "user" | "assistant" | "tool";
16
-
17
- export interface ProviderToolDefinition {
18
- type: "function";
19
- function: {
20
- name: string;
21
- description: string;
22
- parameters: Record<string, unknown>;
23
- };
24
- }
25
-
26
- export interface ProviderToolCallPayload {
27
- id: string;
28
- type: "function";
29
- function: {
30
- name: string;
31
- arguments: string;
32
- };
33
- }
34
-
35
- export interface ProviderToolCall {
36
- id: string;
37
- name: string;
38
- arguments: string;
39
- }
40
-
41
- export interface ProviderMessage {
42
- role: ProviderMessageRole;
43
- content: string;
44
- name?: string;
45
- tool_call_id?: string;
46
- tool_calls?: ProviderToolCallPayload[];
47
- }
48
-
49
- export interface StreamChatRequest {
50
- messages: ProviderMessage[];
51
- model?: string;
52
- temperature?: number;
53
- maxTokens?: number;
54
- }
55
-
56
- export interface CompleteChatRequest extends StreamChatRequest {
57
- tools?: ProviderToolDefinition[];
58
- }
59
-
60
- export interface CompleteChatResponse {
61
- content: string;
62
- toolCalls: ProviderToolCall[];
63
- }
64
-
65
- export interface Provider {
66
- readonly name: ProviderName;
67
- readonly defaultModel: string;
68
- streamChat(request: StreamChatRequest): AsyncGenerator<string>;
69
- completeChat?(request: CompleteChatRequest): Promise<CompleteChatResponse>;
70
- }
71
-
72
- export class ProviderConfigurationError extends Error {
73
- constructor(message: string) {
74
- super(message);
75
- this.name = "ProviderConfigurationError";
76
- }
77
- }
78
-
79
- export class ProviderRequestError extends Error {
80
- constructor(message: string) {
81
- super(message);
82
- this.name = "ProviderRequestError";
83
- }
84
- }
85
-
86
- export interface OpenAICompatibleStreamInput {
87
- provider: ProviderName;
88
- baseUrl: string;
89
- apiKey?: string;
90
- model: string;
91
- messages: ProviderMessage[];
92
- temperature?: number;
93
- maxTokens?: number;
94
- extraHeaders?: Record<string, string>;
95
- extraBody?: Record<string, unknown>;
96
- }
97
-
98
- export interface OpenAICompatibleCompleteInput extends OpenAICompatibleStreamInput {
99
- tools?: ProviderToolDefinition[];
100
- }
101
-
102
- export function normalizeProviderName(raw?: string): ProviderName {
103
- if (!raw) {
104
- return "openai";
105
- }
106
-
107
- const normalized = raw.trim().toLowerCase();
108
- if ((SUPPORTED_PROVIDER_NAMES as readonly string[]).includes(normalized)) {
109
- return normalized as ProviderName;
110
- }
111
-
112
- throw new ProviderConfigurationError(
113
- `Unsupported provider \"${raw}\". Supported providers: ${SUPPORTED_PROVIDER_NAMES.join(", ")}.`,
114
- );
115
- }
116
-
117
- export async function* streamOpenAICompatibleChat(
118
- input: OpenAICompatibleStreamInput,
119
- ): AsyncGenerator<string> {
120
- const endpoint = `${input.baseUrl.replace(/\/$/, "")}/chat/completions`;
121
-
122
- const headers: Record<string, string> = {
123
- "content-type": "application/json",
124
- ...(input.extraHeaders ?? {}),
125
- };
126
-
127
- if (input.apiKey) {
128
- headers.authorization = `Bearer ${input.apiKey}`;
129
- }
130
-
131
- const body: Record<string, unknown> = {
132
- model: input.model,
133
- messages: input.messages,
134
- stream: true,
135
- ...(input.extraBody ?? {}),
136
- };
137
-
138
- if (input.temperature !== undefined) {
139
- body.temperature = input.temperature;
140
- }
141
-
142
- if (input.maxTokens !== undefined) {
143
- body.max_tokens = input.maxTokens;
144
- }
145
-
146
- const response = await fetch(endpoint, {
147
- method: "POST",
148
- headers,
149
- body: JSON.stringify(body),
150
- });
151
-
152
- await ensureOk(response, `${input.provider} request failed`);
153
-
154
- for await (const data of iterateSseData(response)) {
155
- if (data === "[DONE]") {
156
- return;
157
- }
158
-
159
- const payload = parseJson<Record<string, unknown>>(data);
160
- if (!payload) {
161
- continue;
162
- }
163
-
164
- const errorMessage = pickErrorMessage(payload);
165
- if (errorMessage) {
166
- throw new ProviderRequestError(`${input.provider} error: ${errorMessage}`);
167
- }
168
-
169
- const maybeChoices = payload.choices;
170
- if (!Array.isArray(maybeChoices) || maybeChoices.length === 0) {
171
- continue;
172
- }
173
-
174
- const firstChoice = maybeChoices[0] as Record<string, unknown>;
175
- const delta = firstChoice.delta as Record<string, unknown> | undefined;
176
-
177
- const text =
178
- typeof delta?.content === "string"
179
- ? delta.content
180
- : typeof firstChoice.text === "string"
181
- ? firstChoice.text
182
- : "";
183
-
184
- if (text.length > 0) {
185
- yield text;
186
- }
187
- }
188
- }
189
-
190
- export async function completeOpenAICompatibleChat(
191
- input: OpenAICompatibleCompleteInput,
192
- ): Promise<CompleteChatResponse> {
193
- const endpoint = `${input.baseUrl.replace(/\/$/, "")}/chat/completions`;
194
-
195
- const headers: Record<string, string> = {
196
- "content-type": "application/json",
197
- ...(input.extraHeaders ?? {}),
198
- };
199
-
200
- if (input.apiKey) {
201
- headers.authorization = `Bearer ${input.apiKey}`;
202
- }
203
-
204
- const body: Record<string, unknown> = {
205
- model: input.model,
206
- messages: input.messages,
207
- stream: false,
208
- ...(input.extraBody ?? {}),
209
- };
210
-
211
- if (input.temperature !== undefined) {
212
- body.temperature = input.temperature;
213
- }
214
-
215
- if (input.maxTokens !== undefined) {
216
- body.max_tokens = input.maxTokens;
217
- }
218
-
219
- if (input.tools && input.tools.length > 0) {
220
- body.tools = input.tools;
221
- body.tool_choice = "auto";
222
- }
223
-
224
- const response = await fetch(endpoint, {
225
- method: "POST",
226
- headers,
227
- body: JSON.stringify(body),
228
- });
229
-
230
- await ensureOk(response, `${input.provider} request failed`);
231
-
232
- const payload = (await response.json()) as Record<string, unknown>;
233
- const errorMessage = pickErrorMessage(payload);
234
- if (errorMessage) {
235
- throw new ProviderRequestError(`${input.provider} error: ${errorMessage}`);
236
- }
237
-
238
- const maybeChoices = payload.choices;
239
- if (!Array.isArray(maybeChoices) || maybeChoices.length === 0) {
240
- throw new ProviderRequestError(`${input.provider} response did not include choices.`);
241
- }
242
-
243
- const firstChoice = maybeChoices[0] as Record<string, unknown>;
244
- const message = firstChoice.message as Record<string, unknown> | undefined;
245
-
246
- const content = pickMessageContent(message);
247
- const toolCalls = pickToolCalls(message);
248
-
249
- return {
250
- content,
251
- toolCalls,
252
- };
253
- }
254
-
255
- export async function* iterateSseData(response: Response): AsyncGenerator<string> {
256
- if (!response.body) {
257
- return;
258
- }
259
-
260
- let buffer = "";
261
-
262
- for await (const chunk of response.body) {
263
- buffer += chunk.toString("utf8").replace(/\r\n/g, "\n");
264
-
265
- let eventBoundary = buffer.indexOf("\n\n");
266
- while (eventBoundary !== -1) {
267
- const rawEvent = buffer.slice(0, eventBoundary);
268
- buffer = buffer.slice(eventBoundary + 2);
269
-
270
- const data = parseSseData(rawEvent);
271
- if (data !== null) {
272
- yield data;
273
- }
274
-
275
- eventBoundary = buffer.indexOf("\n\n");
276
- }
277
- }
278
-
279
- const remaining = parseSseData(buffer);
280
- if (remaining !== null) {
281
- yield remaining;
282
- }
283
- }
284
-
285
- export async function* chunkText(text: string, chunkSize = 32): AsyncGenerator<string> {
286
- for (let start = 0; start < text.length; start += chunkSize) {
287
- yield text.slice(start, start + chunkSize);
288
- }
289
- }
290
-
291
- function parseSseData(rawEvent: string): string | null {
292
- const lines = rawEvent
293
- .split("\n")
294
- .map((line) => line.trimEnd())
295
- .filter((line) => line.startsWith("data:"))
296
- .map((line) => line.slice(5).trimStart());
297
-
298
- if (lines.length === 0) {
299
- return null;
300
- }
301
-
302
- return lines.join("\n").trim();
303
- }
304
-
305
- function parseJson<T>(value: string): T | null {
306
- try {
307
- return JSON.parse(value) as T;
308
- } catch {
309
- return null;
310
- }
311
- }
312
-
313
- function pickErrorMessage(payload: Record<string, unknown>): string | null {
314
- const maybeError = payload.error;
315
- if (typeof maybeError === "string") {
316
- return maybeError;
317
- }
318
-
319
- if (maybeError && typeof maybeError === "object") {
320
- const message = (maybeError as Record<string, unknown>).message;
321
- if (typeof message === "string") {
322
- return message;
323
- }
324
- }
325
-
326
- return null;
327
- }
328
-
329
- function pickMessageContent(message: Record<string, unknown> | undefined): string {
330
- if (!message) {
331
- return "";
332
- }
333
-
334
- const rawContent = message.content;
335
- if (typeof rawContent === "string") {
336
- return rawContent;
337
- }
338
-
339
- if (Array.isArray(rawContent)) {
340
- return rawContent
341
- .map((part) => {
342
- if (typeof part === "string") {
343
- return part;
344
- }
345
-
346
- if (part && typeof part === "object") {
347
- const asRecord = part as Record<string, unknown>;
348
- if (typeof asRecord.text === "string") {
349
- return asRecord.text;
350
- }
351
- }
352
-
353
- return "";
354
- })
355
- .join("");
356
- }
357
-
358
- return "";
359
- }
360
-
361
- function pickToolCalls(message: Record<string, unknown> | undefined): ProviderToolCall[] {
362
- if (!message) {
363
- return [];
364
- }
365
-
366
- const rawToolCalls = message.tool_calls;
367
- if (!Array.isArray(rawToolCalls)) {
368
- return [];
369
- }
370
-
371
- const calls: ProviderToolCall[] = [];
372
- for (const toolCall of rawToolCalls) {
373
- if (!toolCall || typeof toolCall !== "object") {
374
- continue;
375
- }
376
-
377
- const callRecord = toolCall as Record<string, unknown>;
378
- const callId = typeof callRecord.id === "string" ? callRecord.id : "";
379
- const callFunction = callRecord.function as Record<string, unknown> | undefined;
380
- const callName = typeof callFunction?.name === "string" ? callFunction.name : "";
381
- const callArguments =
382
- typeof callFunction?.arguments === "string" ? callFunction.arguments : "{}";
383
-
384
- if (callId.length === 0 || callName.length === 0) {
385
- continue;
386
- }
387
-
388
- calls.push({
389
- id: callId,
390
- name: callName,
391
- arguments: callArguments,
392
- });
393
- }
394
-
395
- return calls;
396
- }
397
-
398
- async function ensureOk(response: Response, fallbackMessage: string): Promise<void> {
399
- if (response.ok) {
400
- return;
401
- }
402
-
403
- const bodyText = (await response.text()).trim();
404
- const details = bodyText.length > 0 ? ` ${bodyText}` : "";
405
-
406
- throw new ProviderRequestError(
407
- `${fallbackMessage}: HTTP ${response.status} ${response.statusText}${details}`,
408
- );
409
- }
@@ -1,21 +0,0 @@
1
- import { resolveProviderApiKey } from "./api-key.js";
2
- import { OpenAICompatibleProvider } from "./openai-compatible.js";
3
-
4
- const DEFAULT_GOOGLE_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
5
- const DEFAULT_GOOGLE_MODEL = "gemini-3.0-flash";
6
-
7
- export class GoogleProvider extends OpenAICompatibleProvider {
8
- constructor(apiKey?: string) {
9
- super({
10
- name: "google",
11
- baseUrl: process.env.GOOGLE_BASE_URL ?? DEFAULT_GOOGLE_BASE_URL,
12
- apiKey,
13
- defaultModel: process.env.GOOGLE_MODEL ?? DEFAULT_GOOGLE_MODEL,
14
- });
15
- }
16
- }
17
-
18
- export async function createGoogleProvider(): Promise<GoogleProvider> {
19
- const apiKey = await resolveProviderApiKey("google", "GOOGLE_API_KEY");
20
- return new GoogleProvider(apiKey);
21
- }
@@ -1,21 +0,0 @@
1
- import { resolveProviderApiKey } from "./api-key.js";
2
- import { OpenAICompatibleProvider } from "./openai-compatible.js";
3
-
4
- const DEFAULT_GROQ_BASE_URL = "https://api.groq.com/openai/v1";
5
- const DEFAULT_GROQ_MODEL = "llama-3.3-70b-versatile";
6
-
7
- export class GroqProvider extends OpenAICompatibleProvider {
8
- constructor(apiKey?: string) {
9
- super({
10
- name: "groq",
11
- baseUrl: process.env.GROQ_BASE_URL ?? DEFAULT_GROQ_BASE_URL,
12
- apiKey,
13
- defaultModel: process.env.GROQ_MODEL ?? DEFAULT_GROQ_MODEL,
14
- });
15
- }
16
- }
17
-
18
- export async function createGroqProvider(): Promise<GroqProvider> {
19
- const apiKey = await resolveProviderApiKey("groq", "GROQ_API_KEY");
20
- return new GroqProvider(apiKey);
21
- }
@@ -1,65 +0,0 @@
1
- import {
2
- normalizeProviderName,
3
- type Provider,
4
- type ProviderName,
5
- } from "./base.js";
6
- import { createAnthropicProvider } from "./anthropic.js";
7
- import { createGoogleProvider } from "./google.js";
8
- import { createGroqProvider } from "./groq.js";
9
- import { createMistralProvider } from "./mistral.js";
10
- import { createOllamaProvider } from "./ollama.js";
11
- import { createOpenAIProvider } from "./openai.js";
12
- import { createOpenRouterProvider } from "./openrouter.js";
13
- import { createTogetherProvider } from "./together.js";
14
-
15
- export async function createProvider(name?: string): Promise<Provider> {
16
- const resolvedName = normalizeProviderName(name ?? process.env.HIVE_PROVIDER);
17
-
18
- switch (resolvedName) {
19
- case "openai":
20
- return createOpenAIProvider();
21
- case "anthropic":
22
- return createAnthropicProvider();
23
- case "ollama":
24
- return createOllamaProvider();
25
- case "groq":
26
- return createGroqProvider();
27
- case "mistral":
28
- return createMistralProvider();
29
- case "google":
30
- return createGoogleProvider();
31
- case "openrouter":
32
- return createOpenRouterProvider();
33
- case "together":
34
- return createTogetherProvider();
35
- default:
36
- return assertNever(resolvedName);
37
- }
38
- }
39
-
40
- export function getDefaultModelForProvider(name: ProviderName): string {
41
- switch (name) {
42
- case "openai":
43
- return process.env.OPENAI_MODEL ?? "gpt-4o-mini";
44
- case "anthropic":
45
- return process.env.ANTHROPIC_MODEL ?? "claude-3-5-haiku-latest";
46
- case "ollama":
47
- return process.env.OLLAMA_MODEL ?? "llama3.2";
48
- case "groq":
49
- return process.env.GROQ_MODEL ?? "llama-3.3-70b-versatile";
50
- case "mistral":
51
- return process.env.MISTRAL_MODEL ?? "mistral-small-latest";
52
- case "google":
53
- return process.env.GOOGLE_MODEL ?? "gemini-2.0-flash";
54
- case "openrouter":
55
- return process.env.OPENROUTER_MODEL ?? "openai/gpt-4o-mini";
56
- case "together":
57
- return process.env.TOGETHER_MODEL ?? "meta-llama/Llama-3.3-70B-Instruct-Turbo";
58
- default:
59
- return assertNever(name);
60
- }
61
- }
62
-
63
- function assertNever(value: never): never {
64
- throw new Error(`Unsupported provider: ${String(value)}`);
65
- }
@@ -1,21 +0,0 @@
1
- import { resolveProviderApiKey } from "./api-key.js";
2
- import { OpenAICompatibleProvider } from "./openai-compatible.js";
3
-
4
- const DEFAULT_MISTRAL_BASE_URL = "https://api.mistral.ai/v1";
5
- const DEFAULT_MISTRAL_MODEL = "mistral-small-latest";
6
-
7
- export class MistralProvider extends OpenAICompatibleProvider {
8
- constructor(apiKey?: string) {
9
- super({
10
- name: "mistral",
11
- baseUrl: process.env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL,
12
- apiKey,
13
- defaultModel: process.env.MISTRAL_MODEL ?? DEFAULT_MISTRAL_MODEL,
14
- });
15
- }
16
- }
17
-
18
- export async function createMistralProvider(): Promise<MistralProvider> {
19
- const apiKey = await resolveProviderApiKey("mistral", "MISTRAL_API_KEY");
20
- return new MistralProvider(apiKey);
21
- }
@@ -1,22 +0,0 @@
1
- import { resolveProviderApiKey } from "./api-key.js";
2
- import { OpenAICompatibleProvider } from "./openai-compatible.js";
3
-
4
- const DEFAULT_OLLAMA_BASE_URL = "http://localhost:11434/v1";
5
- const DEFAULT_OLLAMA_MODEL = "llama3.2";
6
-
7
- export class OllamaProvider extends OpenAICompatibleProvider {
8
- constructor(apiKey?: string) {
9
- super({
10
- name: "ollama",
11
- baseUrl: process.env.OLLAMA_BASE_URL ?? DEFAULT_OLLAMA_BASE_URL,
12
- apiKey,
13
- defaultModel: process.env.OLLAMA_MODEL ?? DEFAULT_OLLAMA_MODEL,
14
- allowMissingApiKey: true,
15
- });
16
- }
17
- }
18
-
19
- export async function createOllamaProvider(): Promise<OllamaProvider> {
20
- const apiKey = await resolveProviderApiKey("ollama", "OLLAMA_API_KEY");
21
- return new OllamaProvider(apiKey);
22
- }
@@ -1,82 +0,0 @@
1
- import {
2
- completeOpenAICompatibleChat,
3
- type CompleteChatRequest,
4
- type CompleteChatResponse,
5
- ProviderConfigurationError,
6
- type Provider,
7
- type ProviderName,
8
- type StreamChatRequest,
9
- streamOpenAICompatibleChat,
10
- } from "./base.js";
11
-
12
- export interface OpenAICompatibleProviderConfig {
13
- name: ProviderName;
14
- baseUrl: string;
15
- defaultModel: string;
16
- apiKey?: string;
17
- allowMissingApiKey?: boolean;
18
- extraHeaders?: Record<string, string>;
19
- extraBody?: Record<string, unknown>;
20
- }
21
-
22
- export class OpenAICompatibleProvider implements Provider {
23
- readonly name: ProviderName;
24
- readonly defaultModel: string;
25
-
26
- private readonly baseUrl: string;
27
- private readonly apiKey?: string;
28
- private readonly allowMissingApiKey: boolean;
29
- private readonly extraHeaders?: Record<string, string>;
30
- private readonly extraBody?: Record<string, unknown>;
31
-
32
- constructor(config: OpenAICompatibleProviderConfig) {
33
- this.name = config.name;
34
- this.baseUrl = config.baseUrl;
35
- this.apiKey = config.apiKey;
36
- this.defaultModel = config.defaultModel;
37
- this.allowMissingApiKey = config.allowMissingApiKey ?? false;
38
- this.extraHeaders = config.extraHeaders;
39
- this.extraBody = config.extraBody;
40
- }
41
-
42
- async *streamChat(request: StreamChatRequest): AsyncGenerator<string> {
43
- if (!this.allowMissingApiKey && !this.apiKey) {
44
- throw new ProviderConfigurationError(
45
- `Provider \"${this.name}\" is missing an API key.`,
46
- );
47
- }
48
-
49
- yield* streamOpenAICompatibleChat({
50
- provider: this.name,
51
- baseUrl: this.baseUrl,
52
- apiKey: this.apiKey,
53
- model: request.model ?? this.defaultModel,
54
- messages: request.messages,
55
- temperature: request.temperature,
56
- maxTokens: request.maxTokens,
57
- extraHeaders: this.extraHeaders,
58
- extraBody: this.extraBody,
59
- });
60
- }
61
-
62
- async completeChat(request: CompleteChatRequest): Promise<CompleteChatResponse> {
63
- if (!this.allowMissingApiKey && !this.apiKey) {
64
- throw new ProviderConfigurationError(
65
- `Provider \"${this.name}\" is missing an API key.`,
66
- );
67
- }
68
-
69
- return completeOpenAICompatibleChat({
70
- provider: this.name,
71
- baseUrl: this.baseUrl,
72
- apiKey: this.apiKey,
73
- model: request.model ?? this.defaultModel,
74
- messages: request.messages,
75
- temperature: request.temperature,
76
- maxTokens: request.maxTokens,
77
- tools: request.tools,
78
- extraHeaders: this.extraHeaders,
79
- extraBody: this.extraBody,
80
- });
81
- }
82
- }
@@ -1,21 +0,0 @@
1
- import { resolveProviderApiKey } from "./api-key.js";
2
- import { OpenAICompatibleProvider } from "./openai-compatible.js";
3
-
4
- const DEFAULT_OPENAI_BASE_URL = "https://api.openai.com/v1";
5
- const DEFAULT_OPENAI_MODEL = "gpt-4o-mini";
6
-
7
- export class OpenAIProvider extends OpenAICompatibleProvider {
8
- constructor(apiKey?: string) {
9
- super({
10
- name: "openai",
11
- baseUrl: process.env.OPENAI_BASE_URL ?? DEFAULT_OPENAI_BASE_URL,
12
- apiKey,
13
- defaultModel: process.env.OPENAI_MODEL ?? DEFAULT_OPENAI_MODEL,
14
- });
15
- }
16
- }
17
-
18
- export async function createOpenAIProvider(): Promise<OpenAIProvider> {
19
- const apiKey = await resolveProviderApiKey("openai", "OPENAI_API_KEY");
20
- return new OpenAIProvider(apiKey);
21
- }
@@ -1,21 +0,0 @@
1
- import { resolveProviderApiKey } from "./api-key.js";
2
- import { OpenAICompatibleProvider } from "./openai-compatible.js";
3
-
4
- const DEFAULT_OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
5
- const DEFAULT_OPENROUTER_MODEL = "openai/gpt-4o-mini";
6
-
7
- export class OpenRouterProvider extends OpenAICompatibleProvider {
8
- constructor(apiKey?: string) {
9
- super({
10
- name: "openrouter",
11
- baseUrl: process.env.OPENROUTER_BASE_URL ?? DEFAULT_OPENROUTER_BASE_URL,
12
- apiKey,
13
- defaultModel: process.env.OPENROUTER_MODEL ?? DEFAULT_OPENROUTER_MODEL,
14
- });
15
- }
16
- }
17
-
18
- export async function createOpenRouterProvider(): Promise<OpenRouterProvider> {
19
- const apiKey = await resolveProviderApiKey("openrouter", "OPENROUTER_API_KEY");
20
- return new OpenRouterProvider(apiKey);
21
- }