@imisbahk/hive 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.rocket/ARCHITECTURE.md +7 -0
- package/.rocket/README.md +31 -0
- package/.rocket/SYMBOLS.md +282 -0
- package/.rocket/config.json +18 -0
- package/001-local-first-storage.md +43 -0
- package/003-memory-architechture.md +71 -0
- package/CONTRIBUTING.md +149 -0
- package/LICENSE.md +21 -0
- package/README.md +146 -0
- package/dist/agent/agent.d.ts +32 -0
- package/dist/agent/agent.d.ts.map +1 -0
- package/dist/agent/agent.js +103 -0
- package/dist/agent/agent.js.map +1 -0
- package/dist/agent/index.d.ts +3 -0
- package/dist/agent/index.d.ts.map +1 -0
- package/dist/agent/index.js +2 -0
- package/dist/agent/index.js.map +1 -0
- package/dist/cli/commands/chat.d.ts +12 -0
- package/dist/cli/commands/chat.d.ts.map +1 -0
- package/dist/cli/commands/chat.js +117 -0
- package/dist/cli/commands/chat.js.map +1 -0
- package/dist/cli/commands/config.d.ts +7 -0
- package/dist/cli/commands/config.d.ts.map +1 -0
- package/dist/cli/commands/config.js +234 -0
- package/dist/cli/commands/config.js.map +1 -0
- package/dist/cli/commands/init.d.ts +8 -0
- package/dist/cli/commands/init.d.ts.map +1 -0
- package/dist/cli/commands/init.js +186 -0
- package/dist/cli/commands/init.js.map +1 -0
- package/dist/cli/commands/nuke.d.ts +4 -0
- package/dist/cli/commands/nuke.d.ts.map +1 -0
- package/dist/cli/commands/nuke.js +47 -0
- package/dist/cli/commands/nuke.js.map +1 -0
- package/dist/cli/commands/status.d.ts +4 -0
- package/dist/cli/commands/status.d.ts.map +1 -0
- package/dist/cli/commands/status.js +114 -0
- package/dist/cli/commands/status.js.map +1 -0
- package/dist/cli/helpers/providerPrompts.d.ts +13 -0
- package/dist/cli/helpers/providerPrompts.d.ts.map +1 -0
- package/dist/cli/helpers/providerPrompts.js +138 -0
- package/dist/cli/helpers/providerPrompts.js.map +1 -0
- package/dist/cli/index.d.ts +3 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +31 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/providers/anthropic.d.ts +10 -0
- package/dist/providers/anthropic.d.ts.map +1 -0
- package/dist/providers/anthropic.js +108 -0
- package/dist/providers/anthropic.js.map +1 -0
- package/dist/providers/api-key.d.ts +3 -0
- package/dist/providers/api-key.d.ts.map +1 -0
- package/dist/providers/api-key.js +15 -0
- package/dist/providers/api-key.js.map +1 -0
- package/dist/providers/base.d.ts +41 -0
- package/dist/providers/base.d.ts.map +1 -0
- package/dist/providers/base.js +157 -0
- package/dist/providers/base.js.map +1 -0
- package/dist/providers/google.d.ts +6 -0
- package/dist/providers/google.d.ts.map +1 -0
- package/dist/providers/google.js +19 -0
- package/dist/providers/google.js.map +1 -0
- package/dist/providers/groq.d.ts +6 -0
- package/dist/providers/groq.d.ts.map +1 -0
- package/dist/providers/groq.js +19 -0
- package/dist/providers/groq.js.map +1 -0
- package/dist/providers/index.d.ts +4 -0
- package/dist/providers/index.d.ts.map +1 -0
- package/dist/providers/index.js +58 -0
- package/dist/providers/index.js.map +1 -0
- package/dist/providers/mistral.d.ts +6 -0
- package/dist/providers/mistral.d.ts.map +1 -0
- package/dist/providers/mistral.js +19 -0
- package/dist/providers/mistral.js.map +1 -0
- package/dist/providers/ollama.d.ts +6 -0
- package/dist/providers/ollama.d.ts.map +1 -0
- package/dist/providers/ollama.js +20 -0
- package/dist/providers/ollama.js.map +1 -0
- package/dist/providers/openai-compatible.d.ts +22 -0
- package/dist/providers/openai-compatible.d.ts.map +1 -0
- package/dist/providers/openai-compatible.js +36 -0
- package/dist/providers/openai-compatible.js.map +1 -0
- package/dist/providers/openai.d.ts +6 -0
- package/dist/providers/openai.d.ts.map +1 -0
- package/dist/providers/openai.js +19 -0
- package/dist/providers/openai.js.map +1 -0
- package/dist/providers/openrouter.d.ts +6 -0
- package/dist/providers/openrouter.d.ts.map +1 -0
- package/dist/providers/openrouter.js +19 -0
- package/dist/providers/openrouter.js.map +1 -0
- package/dist/providers/together.d.ts +6 -0
- package/dist/providers/together.d.ts.map +1 -0
- package/dist/providers/together.js +19 -0
- package/dist/providers/together.js.map +1 -0
- package/dist/storage/db.d.ts +48 -0
- package/dist/storage/db.d.ts.map +1 -0
- package/dist/storage/db.js +298 -0
- package/dist/storage/db.js.map +1 -0
- package/dist/storage/schema.d.ts +43 -0
- package/dist/storage/schema.d.ts.map +1 -0
- package/dist/storage/schema.js +69 -0
- package/dist/storage/schema.js.map +1 -0
- package/index.md +16 -0
- package/package.json +48 -0
- package/prompts/Behaviour.md +23 -0
- package/prompts/Code.md +12 -0
- package/prompts/Memory.md +11 -0
- package/prompts/System.md +6 -0
- package/releases/v1/v0.1/RELEASE-NOTES.md +0 -0
- package/src/agent/agent.ts +155 -0
- package/src/agent/index.ts +2 -0
- package/src/cli/commands/chat.ts +169 -0
- package/src/cli/commands/config.ts +282 -0
- package/src/cli/commands/init.ts +242 -0
- package/src/cli/commands/nuke.ts +60 -0
- package/src/cli/commands/status.ts +147 -0
- package/src/cli/helpers/providerPrompts.ts +192 -0
- package/src/cli/index.ts +38 -0
- package/src/providers/anthropic.ts +146 -0
- package/src/providers/api-key.ts +23 -0
- package/src/providers/base.ts +234 -0
- package/src/providers/google.ts +21 -0
- package/src/providers/groq.ts +21 -0
- package/src/providers/index.ts +65 -0
- package/src/providers/mistral.ts +21 -0
- package/src/providers/ollama.ts +22 -0
- package/src/providers/openai-compatible.ts +58 -0
- package/src/providers/openai.ts +21 -0
- package/src/providers/openrouter.ts +21 -0
- package/src/providers/together.ts +21 -0
- package/src/storage/db.ts +476 -0
- package/src/storage/schema.ts +116 -0
- package/tsconfig.json +51 -0
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import fetch from "node-fetch";
|
|
2
|
+
|
|
3
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
4
|
+
import {
|
|
5
|
+
ProviderConfigurationError,
|
|
6
|
+
ProviderRequestError,
|
|
7
|
+
iterateSseData,
|
|
8
|
+
type Provider,
|
|
9
|
+
type ProviderMessage,
|
|
10
|
+
type StreamChatRequest,
|
|
11
|
+
} from "./base.js";
|
|
12
|
+
|
|
13
|
+
// Anthropic Messages API endpoint and the model used when ANTHROPIC_MODEL is unset.
const ANTHROPIC_API_URL = "https://api.anthropic.com/v1/messages";
const DEFAULT_ANTHROPIC_MODEL = "claude-3-5-haiku-latest";
|
|
15
|
+
|
|
16
|
+
// Message shape accepted by the Anthropic Messages API. There is no "system"
// role here: system text is sent in the request's top-level `system` field.
interface AnthropicMessage {
  role: "user" | "assistant";
  content: string;
}
|
|
20
|
+
|
|
21
|
+
export class AnthropicProvider implements Provider {
|
|
22
|
+
readonly name = "anthropic" as const;
|
|
23
|
+
readonly defaultModel: string;
|
|
24
|
+
|
|
25
|
+
private readonly apiKey?: string;
|
|
26
|
+
|
|
27
|
+
constructor(apiKey?: string) {
|
|
28
|
+
this.apiKey = apiKey;
|
|
29
|
+
this.defaultModel = process.env.ANTHROPIC_MODEL ?? DEFAULT_ANTHROPIC_MODEL;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
async *streamChat(request: StreamChatRequest): AsyncGenerator<string> {
|
|
33
|
+
if (!this.apiKey) {
|
|
34
|
+
throw new ProviderConfigurationError(
|
|
35
|
+
'Provider "anthropic" is missing ANTHROPIC_API_KEY.',
|
|
36
|
+
);
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
const system = request.messages
|
|
40
|
+
.filter((message) => message.role === "system")
|
|
41
|
+
.map((message) => message.content)
|
|
42
|
+
.join("\n\n");
|
|
43
|
+
|
|
44
|
+
const messages = toAnthropicMessages(request.messages);
|
|
45
|
+
|
|
46
|
+
const response = await fetch(ANTHROPIC_API_URL, {
|
|
47
|
+
method: "POST",
|
|
48
|
+
headers: {
|
|
49
|
+
"content-type": "application/json",
|
|
50
|
+
"x-api-key": this.apiKey,
|
|
51
|
+
"anthropic-version": "2023-06-01",
|
|
52
|
+
},
|
|
53
|
+
body: JSON.stringify({
|
|
54
|
+
model: request.model ?? this.defaultModel,
|
|
55
|
+
stream: true,
|
|
56
|
+
max_tokens: request.maxTokens ?? 1024,
|
|
57
|
+
temperature: request.temperature,
|
|
58
|
+
system: system.length > 0 ? system : undefined,
|
|
59
|
+
messages,
|
|
60
|
+
}),
|
|
61
|
+
});
|
|
62
|
+
|
|
63
|
+
if (!response.ok) {
|
|
64
|
+
const bodyText = await response.text();
|
|
65
|
+
throw new ProviderRequestError(
|
|
66
|
+
`anthropic request failed: HTTP ${response.status} ${response.statusText} ${bodyText}`,
|
|
67
|
+
);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
for await (const data of iterateSseData(response)) {
|
|
71
|
+
if (data === "[DONE]") {
|
|
72
|
+
return;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
const payload = parseJson<Record<string, unknown>>(data);
|
|
76
|
+
if (!payload) {
|
|
77
|
+
continue;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
if (payload.type === "error") {
|
|
81
|
+
const error = payload.error as Record<string, unknown> | undefined;
|
|
82
|
+
const message =
|
|
83
|
+
typeof error?.message === "string"
|
|
84
|
+
? error.message
|
|
85
|
+
: "anthropic stream error";
|
|
86
|
+
|
|
87
|
+
throw new ProviderRequestError(message);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
const text = pickAnthropicDelta(payload);
|
|
91
|
+
if (text.length > 0) {
|
|
92
|
+
yield text;
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
export async function createAnthropicProvider(): Promise<AnthropicProvider> {
|
|
99
|
+
const apiKey = await resolveProviderApiKey("anthropic", "ANTHROPIC_API_KEY");
|
|
100
|
+
return new AnthropicProvider(apiKey);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
function toAnthropicMessages(messages: ProviderMessage[]): AnthropicMessage[] {
|
|
104
|
+
const filtered = messages.filter((message) => message.role !== "system");
|
|
105
|
+
if (filtered.length === 0) {
|
|
106
|
+
return [{ role: "user", content: "Hello." }];
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
return filtered.map((message) => ({
|
|
110
|
+
role: message.role === "assistant" ? "assistant" : "user",
|
|
111
|
+
content: message.content,
|
|
112
|
+
}));
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
function pickAnthropicDelta(payload: Record<string, unknown>): string {
|
|
116
|
+
if (payload.type === "content_block_start") {
|
|
117
|
+
const contentBlock = payload.content_block as Record<string, unknown> | undefined;
|
|
118
|
+
if (contentBlock?.type === "text" && typeof contentBlock.text === "string") {
|
|
119
|
+
return contentBlock.text;
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
if (payload.type === "content_block_delta") {
|
|
124
|
+
const delta = payload.delta as Record<string, unknown> | undefined;
|
|
125
|
+
if (delta?.type === "text_delta" && typeof delta.text === "string") {
|
|
126
|
+
return delta.text;
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
if (payload.type === "message_delta") {
|
|
131
|
+
const delta = payload.delta as Record<string, unknown> | undefined;
|
|
132
|
+
if (typeof delta?.text === "string") {
|
|
133
|
+
return delta.text;
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
return "";
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
function parseJson<T>(value: string): T | null {
|
|
141
|
+
try {
|
|
142
|
+
return JSON.parse(value) as T;
|
|
143
|
+
} catch {
|
|
144
|
+
return null;
|
|
145
|
+
}
|
|
146
|
+
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import keytar from "keytar";
|
|
2
|
+
|
|
3
|
+
import type { ProviderName } from "./base.js";
|
|
4
|
+
|
|
5
|
+
// OS-keychain service name under which hive stores per-provider API keys.
const KEYCHAIN_SERVICE = "hive";
|
|
6
|
+
|
|
7
|
+
export async function resolveProviderApiKey(
|
|
8
|
+
providerName: ProviderName,
|
|
9
|
+
envVarName: string,
|
|
10
|
+
): Promise<string | undefined> {
|
|
11
|
+
let keychainValue: string | null = null;
|
|
12
|
+
try {
|
|
13
|
+
keychainValue = await keytar.getPassword(KEYCHAIN_SERVICE, providerName);
|
|
14
|
+
} catch {
|
|
15
|
+
keychainValue = null;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
return (
|
|
19
|
+
keychainValue ??
|
|
20
|
+
process.env[envVarName] ??
|
|
21
|
+
undefined
|
|
22
|
+
);
|
|
23
|
+
}
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
import fetch, { type Response } from "node-fetch";
|
|
2
|
+
|
|
3
|
+
// Canonical provider identifiers the CLI accepts; `as const` keeps each entry
// as a literal type so ProviderName can be derived below.
export const SUPPORTED_PROVIDER_NAMES = [
  "openai",
  "anthropic",
  "ollama",
  "groq",
  "mistral",
  "google",
  "openrouter",
  "together",
] as const;

// Union of the literal names above, e.g. "openai" | "anthropic" | ...
export type ProviderName = (typeof SUPPORTED_PROVIDER_NAMES)[number];
// Chat roles shared by all providers; some backends handle "system" specially
// (e.g. Anthropic folds it into a top-level `system` field).
export type ProviderMessageRole = "system" | "user" | "assistant";
|
|
16
|
+
|
|
17
|
+
/** A single chat turn in provider-agnostic form. */
export interface ProviderMessage {
  role: ProviderMessageRole;
  content: string;
}
|
|
21
|
+
|
|
22
|
+
/** Parameters for a streaming chat call; unset fields fall back to provider defaults. */
export interface StreamChatRequest {
  messages: ProviderMessage[];
  model?: string; // overrides the provider's defaultModel when set
  temperature?: number; // passed through verbatim when set
  maxTokens?: number; // response token cap; providers may apply their own default
}
|
|
28
|
+
|
|
29
|
+
/**
 * Contract every chat backend implements: an identifying name, a default
 * model, and a streaming chat call yielding text deltas.
 */
export interface Provider {
  readonly name: ProviderName;
  readonly defaultModel: string;
  streamChat(request: StreamChatRequest): AsyncGenerator<string>;
}
|
|
34
|
+
|
|
35
|
+
/** Setup problem (unknown provider name, missing API key) detected before any request is sent. */
export class ProviderConfigurationError extends Error {
  constructor(message: string) {
    super(message);
    this.name = "ProviderConfigurationError";
  }
}
|
|
41
|
+
|
|
42
|
+
/** Failure while talking to a provider: non-2xx HTTP status or an in-stream error event. */
export class ProviderRequestError extends Error {
  constructor(message: string) {
    super(message);
    this.name = "ProviderRequestError";
  }
}
|
|
48
|
+
|
|
49
|
+
/** Inputs for streamOpenAICompatibleChat: endpoint, auth, model, and optional extras merged into headers/body. */
export interface OpenAICompatibleStreamInput {
  provider: ProviderName; // used for error-message prefixes
  baseUrl: string; // endpoint root; "/chat/completions" is appended
  apiKey?: string; // sent as a Bearer token when present
  model: string;
  messages: ProviderMessage[];
  temperature?: number;
  maxTokens?: number; // serialized as max_tokens
  extraHeaders?: Record<string, string>; // merged over the default headers
  extraBody?: Record<string, unknown>; // merged into the JSON request body
}
|
|
60
|
+
|
|
61
|
+
export function normalizeProviderName(raw?: string): ProviderName {
|
|
62
|
+
if (!raw) {
|
|
63
|
+
return "openai";
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
const normalized = raw.trim().toLowerCase();
|
|
67
|
+
if ((SUPPORTED_PROVIDER_NAMES as readonly string[]).includes(normalized)) {
|
|
68
|
+
return normalized as ProviderName;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
throw new ProviderConfigurationError(
|
|
72
|
+
`Unsupported provider \"${raw}\". Supported providers: ${SUPPORTED_PROVIDER_NAMES.join(", ")}.`,
|
|
73
|
+
);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
 * POSTs a streaming request to an OpenAI-compatible `/chat/completions`
 * endpoint and yields assistant text deltas parsed from the SSE stream.
 *
 * @throws ProviderRequestError on a non-2xx response (via ensureOk) or when
 *         the stream carries an error payload.
 */
export async function* streamOpenAICompatibleChat(
  input: OpenAICompatibleStreamInput,
): AsyncGenerator<string> {
  // Strip one trailing slash so we never build ".../v1//chat/completions".
  const endpoint = `${input.baseUrl.replace(/\/$/, "")}/chat/completions`;

  // Caller-supplied headers may override the default content-type.
  const headers: Record<string, string> = {
    "content-type": "application/json",
    ...(input.extraHeaders ?? {}),
  };

  if (input.apiKey) {
    headers.authorization = `Bearer ${input.apiKey}`;
  }

  const body: Record<string, unknown> = {
    model: input.model,
    messages: input.messages,
    stream: true,
    ...(input.extraBody ?? {}),
  };

  // Only include optional knobs when set, so backend defaults apply otherwise.
  if (input.temperature !== undefined) {
    body.temperature = input.temperature;
  }

  if (input.maxTokens !== undefined) {
    body.max_tokens = input.maxTokens;
  }

  const response = await fetch(endpoint, {
    method: "POST",
    headers,
    body: JSON.stringify(body),
  });

  await ensureOk(response, `${input.provider} request failed`);

  for await (const data of iterateSseData(response)) {
    // OpenAI-style streams terminate with a literal "[DONE]" sentinel.
    if (data === "[DONE]") {
      return;
    }

    const payload = parseJson<Record<string, unknown>>(data);
    if (!payload) {
      continue; // skip malformed / non-JSON events
    }

    const errorMessage = pickErrorMessage(payload);
    if (errorMessage) {
      throw new ProviderRequestError(`${input.provider} error: ${errorMessage}`);
    }

    const maybeChoices = payload.choices;
    if (!Array.isArray(maybeChoices) || maybeChoices.length === 0) {
      continue;
    }

    const firstChoice = maybeChoices[0] as Record<string, unknown>;
    const delta = firstChoice.delta as Record<string, unknown> | undefined;

    // Chat-completions streams carry text in choices[0].delta.content;
    // legacy completions use choices[0].text.
    const text =
      typeof delta?.content === "string"
        ? delta.content
        : typeof firstChoice.text === "string"
          ? firstChoice.text
          : "";

    if (text.length > 0) {
      yield text;
    }
  }
}
|
|
148
|
+
|
|
149
|
+
export async function* iterateSseData(response: Response): AsyncGenerator<string> {
|
|
150
|
+
if (!response.body) {
|
|
151
|
+
return;
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
let buffer = "";
|
|
155
|
+
|
|
156
|
+
for await (const chunk of response.body) {
|
|
157
|
+
buffer += chunk.toString("utf8").replace(/\r\n/g, "\n");
|
|
158
|
+
|
|
159
|
+
let eventBoundary = buffer.indexOf("\n\n");
|
|
160
|
+
while (eventBoundary !== -1) {
|
|
161
|
+
const rawEvent = buffer.slice(0, eventBoundary);
|
|
162
|
+
buffer = buffer.slice(eventBoundary + 2);
|
|
163
|
+
|
|
164
|
+
const data = parseSseData(rawEvent);
|
|
165
|
+
if (data !== null) {
|
|
166
|
+
yield data;
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
eventBoundary = buffer.indexOf("\n\n");
|
|
170
|
+
}
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
const remaining = parseSseData(buffer);
|
|
174
|
+
if (remaining !== null) {
|
|
175
|
+
yield remaining;
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
export async function* chunkText(text: string, chunkSize = 32): AsyncGenerator<string> {
|
|
180
|
+
for (let start = 0; start < text.length; start += chunkSize) {
|
|
181
|
+
yield text.slice(start, start + chunkSize);
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
function parseSseData(rawEvent: string): string | null {
|
|
186
|
+
const lines = rawEvent
|
|
187
|
+
.split("\n")
|
|
188
|
+
.map((line) => line.trimEnd())
|
|
189
|
+
.filter((line) => line.startsWith("data:"))
|
|
190
|
+
.map((line) => line.slice(5).trimStart());
|
|
191
|
+
|
|
192
|
+
if (lines.length === 0) {
|
|
193
|
+
return null;
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
return lines.join("\n").trim();
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
function parseJson<T>(value: string): T | null {
|
|
200
|
+
try {
|
|
201
|
+
return JSON.parse(value) as T;
|
|
202
|
+
} catch {
|
|
203
|
+
return null;
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
function pickErrorMessage(payload: Record<string, unknown>): string | null {
|
|
208
|
+
const maybeError = payload.error;
|
|
209
|
+
if (typeof maybeError === "string") {
|
|
210
|
+
return maybeError;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
if (maybeError && typeof maybeError === "object") {
|
|
214
|
+
const message = (maybeError as Record<string, unknown>).message;
|
|
215
|
+
if (typeof message === "string") {
|
|
216
|
+
return message;
|
|
217
|
+
}
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
return null;
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
async function ensureOk(response: Response, fallbackMessage: string): Promise<void> {
|
|
224
|
+
if (response.ok) {
|
|
225
|
+
return;
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
const bodyText = (await response.text()).trim();
|
|
229
|
+
const details = bodyText.length > 0 ? ` ${bodyText}` : "";
|
|
230
|
+
|
|
231
|
+
throw new ProviderRequestError(
|
|
232
|
+
`${fallbackMessage}: HTTP ${response.status} ${response.statusText}${details}`,
|
|
233
|
+
);
|
|
234
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
2
|
+
import { OpenAICompatibleProvider } from "./openai-compatible.js";
|
|
3
|
+
|
|
4
|
+
const DEFAULT_GOOGLE_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
|
|
5
|
+
const DEFAULT_GOOGLE_MODEL = "gemini-3.0-flash";
|
|
6
|
+
|
|
7
|
+
export class GoogleProvider extends OpenAICompatibleProvider {
|
|
8
|
+
constructor(apiKey?: string) {
|
|
9
|
+
super({
|
|
10
|
+
name: "google",
|
|
11
|
+
baseUrl: process.env.GOOGLE_BASE_URL ?? DEFAULT_GOOGLE_BASE_URL,
|
|
12
|
+
apiKey,
|
|
13
|
+
defaultModel: process.env.GOOGLE_MODEL ?? DEFAULT_GOOGLE_MODEL,
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function createGoogleProvider(): Promise<GoogleProvider> {
|
|
19
|
+
const apiKey = await resolveProviderApiKey("google", "GOOGLE_API_KEY");
|
|
20
|
+
return new GoogleProvider(apiKey);
|
|
21
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
2
|
+
import { OpenAICompatibleProvider } from "./openai-compatible.js";
|
|
3
|
+
|
|
4
|
+
// Groq's OpenAI-compatible endpoint and default model; overridable via env.
const DEFAULT_GROQ_BASE_URL = "https://api.groq.com/openai/v1";
const DEFAULT_GROQ_MODEL = "llama-3.3-70b-versatile";
|
|
6
|
+
|
|
7
|
+
export class GroqProvider extends OpenAICompatibleProvider {
|
|
8
|
+
constructor(apiKey?: string) {
|
|
9
|
+
super({
|
|
10
|
+
name: "groq",
|
|
11
|
+
baseUrl: process.env.GROQ_BASE_URL ?? DEFAULT_GROQ_BASE_URL,
|
|
12
|
+
apiKey,
|
|
13
|
+
defaultModel: process.env.GROQ_MODEL ?? DEFAULT_GROQ_MODEL,
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function createGroqProvider(): Promise<GroqProvider> {
|
|
19
|
+
const apiKey = await resolveProviderApiKey("groq", "GROQ_API_KEY");
|
|
20
|
+
return new GroqProvider(apiKey);
|
|
21
|
+
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import {
|
|
2
|
+
normalizeProviderName,
|
|
3
|
+
type Provider,
|
|
4
|
+
type ProviderName,
|
|
5
|
+
} from "./base.js";
|
|
6
|
+
import { createAnthropicProvider } from "./anthropic.js";
|
|
7
|
+
import { createGoogleProvider } from "./google.js";
|
|
8
|
+
import { createGroqProvider } from "./groq.js";
|
|
9
|
+
import { createMistralProvider } from "./mistral.js";
|
|
10
|
+
import { createOllamaProvider } from "./ollama.js";
|
|
11
|
+
import { createOpenAIProvider } from "./openai.js";
|
|
12
|
+
import { createOpenRouterProvider } from "./openrouter.js";
|
|
13
|
+
import { createTogetherProvider } from "./together.js";
|
|
14
|
+
|
|
15
|
+
export async function createProvider(name?: string): Promise<Provider> {
|
|
16
|
+
const resolvedName = normalizeProviderName(name ?? process.env.HIVE_PROVIDER);
|
|
17
|
+
|
|
18
|
+
switch (resolvedName) {
|
|
19
|
+
case "openai":
|
|
20
|
+
return createOpenAIProvider();
|
|
21
|
+
case "anthropic":
|
|
22
|
+
return createAnthropicProvider();
|
|
23
|
+
case "ollama":
|
|
24
|
+
return createOllamaProvider();
|
|
25
|
+
case "groq":
|
|
26
|
+
return createGroqProvider();
|
|
27
|
+
case "mistral":
|
|
28
|
+
return createMistralProvider();
|
|
29
|
+
case "google":
|
|
30
|
+
return createGoogleProvider();
|
|
31
|
+
case "openrouter":
|
|
32
|
+
return createOpenRouterProvider();
|
|
33
|
+
case "together":
|
|
34
|
+
return createTogetherProvider();
|
|
35
|
+
default:
|
|
36
|
+
return assertNever(resolvedName);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export function getDefaultModelForProvider(name: ProviderName): string {
|
|
41
|
+
switch (name) {
|
|
42
|
+
case "openai":
|
|
43
|
+
return process.env.OPENAI_MODEL ?? "gpt-4o-mini";
|
|
44
|
+
case "anthropic":
|
|
45
|
+
return process.env.ANTHROPIC_MODEL ?? "claude-3-5-haiku-latest";
|
|
46
|
+
case "ollama":
|
|
47
|
+
return process.env.OLLAMA_MODEL ?? "llama3.2";
|
|
48
|
+
case "groq":
|
|
49
|
+
return process.env.GROQ_MODEL ?? "llama-3.3-70b-versatile";
|
|
50
|
+
case "mistral":
|
|
51
|
+
return process.env.MISTRAL_MODEL ?? "mistral-small-latest";
|
|
52
|
+
case "google":
|
|
53
|
+
return process.env.GOOGLE_MODEL ?? "gemini-2.0-flash";
|
|
54
|
+
case "openrouter":
|
|
55
|
+
return process.env.OPENROUTER_MODEL ?? "openai/gpt-4o-mini";
|
|
56
|
+
case "together":
|
|
57
|
+
return process.env.TOGETHER_MODEL ?? "meta-llama/Llama-3.3-70B-Instruct-Turbo";
|
|
58
|
+
default:
|
|
59
|
+
return assertNever(name);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
function assertNever(value: never): never {
|
|
64
|
+
throw new Error(`Unsupported provider: ${String(value)}`);
|
|
65
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
2
|
+
import { OpenAICompatibleProvider } from "./openai-compatible.js";
|
|
3
|
+
|
|
4
|
+
// Mistral's OpenAI-compatible endpoint and default model; overridable via env.
const DEFAULT_MISTRAL_BASE_URL = "https://api.mistral.ai/v1";
const DEFAULT_MISTRAL_MODEL = "mistral-small-latest";
|
|
6
|
+
|
|
7
|
+
export class MistralProvider extends OpenAICompatibleProvider {
|
|
8
|
+
constructor(apiKey?: string) {
|
|
9
|
+
super({
|
|
10
|
+
name: "mistral",
|
|
11
|
+
baseUrl: process.env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL,
|
|
12
|
+
apiKey,
|
|
13
|
+
defaultModel: process.env.MISTRAL_MODEL ?? DEFAULT_MISTRAL_MODEL,
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function createMistralProvider(): Promise<MistralProvider> {
|
|
19
|
+
const apiKey = await resolveProviderApiKey("mistral", "MISTRAL_API_KEY");
|
|
20
|
+
return new MistralProvider(apiKey);
|
|
21
|
+
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
2
|
+
import { OpenAICompatibleProvider } from "./openai-compatible.js";
|
|
3
|
+
|
|
4
|
+
// Local Ollama server's OpenAI-compatible endpoint and default model; overridable via env.
const DEFAULT_OLLAMA_BASE_URL = "http://localhost:11434/v1";
const DEFAULT_OLLAMA_MODEL = "llama3.2";
|
|
6
|
+
|
|
7
|
+
export class OllamaProvider extends OpenAICompatibleProvider {
|
|
8
|
+
constructor(apiKey?: string) {
|
|
9
|
+
super({
|
|
10
|
+
name: "ollama",
|
|
11
|
+
baseUrl: process.env.OLLAMA_BASE_URL ?? DEFAULT_OLLAMA_BASE_URL,
|
|
12
|
+
apiKey,
|
|
13
|
+
defaultModel: process.env.OLLAMA_MODEL ?? DEFAULT_OLLAMA_MODEL,
|
|
14
|
+
allowMissingApiKey: true,
|
|
15
|
+
});
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
export async function createOllamaProvider(): Promise<OllamaProvider> {
|
|
20
|
+
const apiKey = await resolveProviderApiKey("ollama", "OLLAMA_API_KEY");
|
|
21
|
+
return new OllamaProvider(apiKey);
|
|
22
|
+
}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ProviderConfigurationError,
|
|
3
|
+
type Provider,
|
|
4
|
+
type ProviderName,
|
|
5
|
+
type StreamChatRequest,
|
|
6
|
+
streamOpenAICompatibleChat,
|
|
7
|
+
} from "./base.js";
|
|
8
|
+
|
|
9
|
+
/** Construction options for OpenAICompatibleProvider. */
export interface OpenAICompatibleProviderConfig {
  name: ProviderName;
  baseUrl: string; // endpoint root; "/chat/completions" is appended downstream
  defaultModel: string;
  apiKey?: string;
  allowMissingApiKey?: boolean; // e.g. Ollama runs locally without a key
  extraHeaders?: Record<string, string>; // merged over the default headers
  extraBody?: Record<string, unknown>; // merged into the JSON request body
}
|
|
18
|
+
|
|
19
|
+
export class OpenAICompatibleProvider implements Provider {
|
|
20
|
+
readonly name: ProviderName;
|
|
21
|
+
readonly defaultModel: string;
|
|
22
|
+
|
|
23
|
+
private readonly baseUrl: string;
|
|
24
|
+
private readonly apiKey?: string;
|
|
25
|
+
private readonly allowMissingApiKey: boolean;
|
|
26
|
+
private readonly extraHeaders?: Record<string, string>;
|
|
27
|
+
private readonly extraBody?: Record<string, unknown>;
|
|
28
|
+
|
|
29
|
+
constructor(config: OpenAICompatibleProviderConfig) {
|
|
30
|
+
this.name = config.name;
|
|
31
|
+
this.baseUrl = config.baseUrl;
|
|
32
|
+
this.apiKey = config.apiKey;
|
|
33
|
+
this.defaultModel = config.defaultModel;
|
|
34
|
+
this.allowMissingApiKey = config.allowMissingApiKey ?? false;
|
|
35
|
+
this.extraHeaders = config.extraHeaders;
|
|
36
|
+
this.extraBody = config.extraBody;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
async *streamChat(request: StreamChatRequest): AsyncGenerator<string> {
|
|
40
|
+
if (!this.allowMissingApiKey && !this.apiKey) {
|
|
41
|
+
throw new ProviderConfigurationError(
|
|
42
|
+
`Provider \"${this.name}\" is missing an API key.`,
|
|
43
|
+
);
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
yield* streamOpenAICompatibleChat({
|
|
47
|
+
provider: this.name,
|
|
48
|
+
baseUrl: this.baseUrl,
|
|
49
|
+
apiKey: this.apiKey,
|
|
50
|
+
model: request.model ?? this.defaultModel,
|
|
51
|
+
messages: request.messages,
|
|
52
|
+
temperature: request.temperature,
|
|
53
|
+
maxTokens: request.maxTokens,
|
|
54
|
+
extraHeaders: this.extraHeaders,
|
|
55
|
+
extraBody: this.extraBody,
|
|
56
|
+
});
|
|
57
|
+
}
|
|
58
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
2
|
+
import { OpenAICompatibleProvider } from "./openai-compatible.js";
|
|
3
|
+
|
|
4
|
+
// OpenAI endpoint and default model; overridable via OPENAI_BASE_URL / OPENAI_MODEL.
const DEFAULT_OPENAI_BASE_URL = "https://api.openai.com/v1";
const DEFAULT_OPENAI_MODEL = "gpt-4o-mini";
|
|
6
|
+
|
|
7
|
+
export class OpenAIProvider extends OpenAICompatibleProvider {
|
|
8
|
+
constructor(apiKey?: string) {
|
|
9
|
+
super({
|
|
10
|
+
name: "openai",
|
|
11
|
+
baseUrl: process.env.OPENAI_BASE_URL ?? DEFAULT_OPENAI_BASE_URL,
|
|
12
|
+
apiKey,
|
|
13
|
+
defaultModel: process.env.OPENAI_MODEL ?? DEFAULT_OPENAI_MODEL,
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function createOpenAIProvider(): Promise<OpenAIProvider> {
|
|
19
|
+
const apiKey = await resolveProviderApiKey("openai", "OPENAI_API_KEY");
|
|
20
|
+
return new OpenAIProvider(apiKey);
|
|
21
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
2
|
+
import { OpenAICompatibleProvider } from "./openai-compatible.js";
|
|
3
|
+
|
|
4
|
+
// OpenRouter aggregator endpoint and default (namespaced) model; overridable via env.
const DEFAULT_OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
const DEFAULT_OPENROUTER_MODEL = "openai/gpt-4o-mini";
|
|
6
|
+
|
|
7
|
+
export class OpenRouterProvider extends OpenAICompatibleProvider {
|
|
8
|
+
constructor(apiKey?: string) {
|
|
9
|
+
super({
|
|
10
|
+
name: "openrouter",
|
|
11
|
+
baseUrl: process.env.OPENROUTER_BASE_URL ?? DEFAULT_OPENROUTER_BASE_URL,
|
|
12
|
+
apiKey,
|
|
13
|
+
defaultModel: process.env.OPENROUTER_MODEL ?? DEFAULT_OPENROUTER_MODEL,
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function createOpenRouterProvider(): Promise<OpenRouterProvider> {
|
|
19
|
+
const apiKey = await resolveProviderApiKey("openrouter", "OPENROUTER_API_KEY");
|
|
20
|
+
return new OpenRouterProvider(apiKey);
|
|
21
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { resolveProviderApiKey } from "./api-key.js";
|
|
2
|
+
import { OpenAICompatibleProvider } from "./openai-compatible.js";
|
|
3
|
+
|
|
4
|
+
// Together AI endpoint and default model; overridable via env.
const DEFAULT_TOGETHER_BASE_URL = "https://api.together.xyz/v1";
const DEFAULT_TOGETHER_MODEL = "meta-llama/Llama-3.3-70B-Instruct-Turbo";
|
|
6
|
+
|
|
7
|
+
export class TogetherProvider extends OpenAICompatibleProvider {
|
|
8
|
+
constructor(apiKey?: string) {
|
|
9
|
+
super({
|
|
10
|
+
name: "together",
|
|
11
|
+
baseUrl: process.env.TOGETHER_BASE_URL ?? DEFAULT_TOGETHER_BASE_URL,
|
|
12
|
+
apiKey,
|
|
13
|
+
defaultModel: process.env.TOGETHER_MODEL ?? DEFAULT_TOGETHER_MODEL,
|
|
14
|
+
});
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function createTogetherProvider(): Promise<TogetherProvider> {
|
|
19
|
+
const apiKey = await resolveProviderApiKey("together", "TOGETHER_API_KEY");
|
|
20
|
+
return new TogetherProvider(apiKey);
|
|
21
|
+
}
|