iris-chatbot 0.2.4
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/LICENSE +21 -0
- package/README.md +49 -0
- package/bin/iris.mjs +267 -0
- package/package.json +61 -0
- package/template/LICENSE +21 -0
- package/template/README.md +49 -0
- package/template/eslint.config.mjs +18 -0
- package/template/next.config.ts +7 -0
- package/template/package-lock.json +9193 -0
- package/template/package.json +46 -0
- package/template/postcss.config.mjs +7 -0
- package/template/public/file.svg +1 -0
- package/template/public/globe.svg +1 -0
- package/template/public/next.svg +1 -0
- package/template/public/vercel.svg +1 -0
- package/template/public/window.svg +1 -0
- package/template/src/app/api/chat/route.ts +2445 -0
- package/template/src/app/api/connections/models/route.ts +255 -0
- package/template/src/app/api/connections/test/route.ts +124 -0
- package/template/src/app/api/local-sync/route.ts +74 -0
- package/template/src/app/api/tool-approval/route.ts +47 -0
- package/template/src/app/favicon.ico +0 -0
- package/template/src/app/globals.css +808 -0
- package/template/src/app/layout.tsx +74 -0
- package/template/src/app/page.tsx +444 -0
- package/template/src/components/ChatView.tsx +1537 -0
- package/template/src/components/Composer.tsx +160 -0
- package/template/src/components/MapView.tsx +244 -0
- package/template/src/components/MessageCard.tsx +955 -0
- package/template/src/components/SearchModal.tsx +72 -0
- package/template/src/components/SettingsModal.tsx +1257 -0
- package/template/src/components/Sidebar.tsx +153 -0
- package/template/src/components/TopBar.tsx +164 -0
- package/template/src/lib/connections.ts +275 -0
- package/template/src/lib/data.ts +324 -0
- package/template/src/lib/db.ts +49 -0
- package/template/src/lib/hooks.ts +76 -0
- package/template/src/lib/local-sync.ts +192 -0
- package/template/src/lib/memory.ts +695 -0
- package/template/src/lib/model-presets.ts +251 -0
- package/template/src/lib/store.ts +36 -0
- package/template/src/lib/tooling/approvals.ts +78 -0
- package/template/src/lib/tooling/providers/anthropic.ts +155 -0
- package/template/src/lib/tooling/providers/ollama.ts +73 -0
- package/template/src/lib/tooling/providers/openai.ts +267 -0
- package/template/src/lib/tooling/providers/openai_compatible.ts +16 -0
- package/template/src/lib/tooling/providers/types.ts +44 -0
- package/template/src/lib/tooling/registry.ts +103 -0
- package/template/src/lib/tooling/runtime.ts +189 -0
- package/template/src/lib/tooling/safety.ts +165 -0
- package/template/src/lib/tooling/tools/apps.ts +108 -0
- package/template/src/lib/tooling/tools/apps_plus.ts +153 -0
- package/template/src/lib/tooling/tools/communication.ts +883 -0
- package/template/src/lib/tooling/tools/files.ts +395 -0
- package/template/src/lib/tooling/tools/music.ts +988 -0
- package/template/src/lib/tooling/tools/notes.ts +461 -0
- package/template/src/lib/tooling/tools/notes_plus.ts +294 -0
- package/template/src/lib/tooling/tools/numbers.ts +175 -0
- package/template/src/lib/tooling/tools/schedule.ts +579 -0
- package/template/src/lib/tooling/tools/system.ts +142 -0
- package/template/src/lib/tooling/tools/web.ts +212 -0
- package/template/src/lib/tooling/tools/workflow.ts +218 -0
- package/template/src/lib/tooling/types.ts +27 -0
- package/template/src/lib/types.ts +309 -0
- package/template/src/lib/utils.ts +108 -0
- package/template/tsconfig.json +34 -0

package/template/src/lib/model-presets.ts
@@ -0,0 +1,251 @@
import type { BuiltInProvider, ModelConnection } from "./types";

export const OPENAI_FRONTIER_MODEL_PRESETS = [
  "gpt-5.2",
  "gpt-5-mini",
  "gpt-5-nano",
  "gpt-5.2-pro",
  "gpt-5",
  "gpt-4.1",
];

const OPENAI_FRONTIER_MODEL_SET = new Set(OPENAI_FRONTIER_MODEL_PRESETS);

export const BUILTIN_MODEL_PRESETS: Record<BuiltInProvider, string[]> = {
  openai: [
    "gpt-5.2",
    "gpt-5.2-pro",
    "gpt-5.2-chat-latest",
    "gpt-5.2-codex",
    "gpt-5.1",
    "gpt-5.1-chat-latest",
    "gpt-5.1-codex",
    "gpt-5.1-codex-max",
    "gpt-5",
    "gpt-5-chat-latest",
    "gpt-5-mini",
    "gpt-5-nano",
    "gpt-5-codex",
    "gpt-5-pro",
    "gpt-4.1",
    "gpt-4.1-mini",
    "gpt-4.1-nano",
    "gpt-4o",
    "gpt-4o-mini",
    "o3-pro",
    "o3",
    "o4-mini",
    "o3-mini",
  ],
  anthropic: [
    "claude-opus-4-61",
    "claude-sonnet-4-51",
    "claude-haiku-4-51",
    "claude-opus-4-5-20251101",
    "claude-opus-4-51",
    "claude-opus-4-11",
    "claude-opus-4-1-20250805",
    "claude-sonnet-4-01",
    "claude-sonnet-4-20250514",
    "claude-3-7-sonnet-latest",
    "claude-3-7-sonnet-20250219",
    "claude-opus-4-01",
    "claude-opus-4-20250514",
    "claude-3-haiku-20240307",
  ],
  google: [
    "gemini-3-pro-preview",
    "gemini-3-flash-preview",
    "gemini-3-pro-image-preview",
    "gemini-2.5-flash",
    "gemini-2.5-flash-preview-09-2025",
    "gemini-2.5-pro",
    "gemini-2.5-flash-lite",
    "gemini-2.5-flash-lite-preview-09-2025",
    "gemini-2.5-flash-preview-tts",
    "gemini-2.5-pro-preview-tts",
    "gemini-2.0-flash",
    "gemini-2.0-flash-001",
    "gemini-2.0-flash-exp",
    "gemini-2.0-flash-lite",
    "gemini-flash-latest",
  ],
};

export const OPENAI_COMPAT_MODEL_PRESETS = [
  "gpt-4o-mini",
  "gpt-4o",
  "gpt-4.1-mini",
  "gpt-4.1",
];

export const OLLAMA_MODEL_PRESETS = [
  "llama3.2:latest",
  "qwen2.5:latest",
  "mistral:latest",
  "gemma2:latest",
];

export function getBuiltinModelPresets(provider: BuiltInProvider) {
  return BUILTIN_MODEL_PRESETS[provider] ?? [];
}

export function getConnectionModelPresets(connection: ModelConnection | null): string[] {
  if (!connection) {
    return [];
  }
  if (connection.kind === "builtin") {
    if (!connection.provider) {
      return [];
    }
    return getBuiltinModelPresets(connection.provider);
  }
  if (connection.kind === "ollama") {
    return OLLAMA_MODEL_PRESETS;
  }
  return OPENAI_COMPAT_MODEL_PRESETS;
}

export function isOpenAIFrontierModel(modelId: string): boolean {
  return OPENAI_FRONTIER_MODEL_SET.has(modelId);
}

export function filterModelIdsForConnection(params: {
  connection: ModelConnection | null;
  modelIds: string[];
  includeExtendedOpenAI?: boolean;
}): string[] {
  const ids = [...new Set(params.modelIds.filter(Boolean))];
  if (ids.length === 0) {
    return ids;
  }

  if (
    params.connection?.kind !== "builtin" ||
    params.connection.provider !== "openai" ||
    params.includeExtendedOpenAI
  ) {
    return ids;
  }

  const frontierOnly = OPENAI_FRONTIER_MODEL_PRESETS.filter((modelId) => ids.includes(modelId));
  if (frontierOnly.length > 0) {
    return frontierOnly;
  }

  return ids.slice(0, Math.min(ids.length, OPENAI_FRONTIER_MODEL_PRESETS.length));
}

const ANTHROPIC_MODEL_LABELS: Record<string, string> = {
  "claude-opus-4-61": "Claude Opus 4.6",
  "claude-sonnet-4-51": "Claude Sonnet 4.5",
  "claude-haiku-4-51": "Claude Haiku 4.5",
  "claude-opus-4-5-20251101": "Claude Opus 4.5",
  "claude-opus-4-51": "Claude Opus 4.5",
  "claude-opus-4-11": "Claude Opus 4.1",
  "claude-opus-4-1-20250805": "Claude Opus 4.1",
  "claude-sonnet-4-01": "Claude Sonnet 4",
  "claude-sonnet-4-20250514": "Claude Sonnet 4",
  "claude-3-7-sonnet-latest": "Claude 3.7 Sonnet (Latest)",
  "claude-3-7-sonnet-20250219": "Claude 3.7 Sonnet",
  "claude-opus-4-01": "Claude Opus 4",
  "claude-opus-4-20250514": "Claude Opus 4",
  "claude-3-haiku-20240307": "Claude 3 Haiku",
};

const GOOGLE_MODEL_LABELS: Record<string, string> = {
  "gemini-3-pro-preview": "Gemini 3 Pro (Preview)",
  "gemini-3-flash-preview": "Gemini 3 Flash (Preview)",
  "gemini-3-pro-image-preview": "Gemini 3 Pro Image (Preview)",
  "gemini-2.5-flash": "Gemini 2.5 Flash",
  "gemini-2.5-flash-preview-09-2025": "Gemini 2.5 Flash (Preview Sep 2025)",
  "gemini-2.5-pro": "Gemini 2.5 Pro",
  "gemini-2.5-flash-lite": "Gemini 2.5 Flash Lite",
  "gemini-2.5-flash-lite-preview-09-2025": "Gemini 2.5 Flash Lite (Preview Sep 2025)",
  "gemini-2.5-flash-preview-tts": "Gemini 2.5 Flash TTS (Preview)",
  "gemini-2.5-pro-preview-tts": "Gemini 2.5 Pro TTS (Preview)",
  "gemini-2.0-flash": "Gemini 2.0 Flash",
  "gemini-2.0-flash-001": "Gemini 2.0 Flash 001",
  "gemini-2.0-flash-exp": "Gemini 2.0 Flash Experimental",
  "gemini-2.0-flash-lite": "Gemini 2.0 Flash Lite",
  "gemini-flash-latest": "Gemini Flash (Latest Alias)",
};

function formatAnthropicMinor(minorToken: string): string {
  if (/^\d1$/.test(minorToken)) {
    return minorToken[0];
  }
  if (/^\d$/.test(minorToken)) {
    return minorToken;
  }
  return minorToken.replace(/^0+/, "") || "0";
}

function anthropicLabelFromId(modelId: string): string | null {
  const base = modelId.replace(/-\d{8}$/g, "");
  const familyMatch = base.match(/^claude-(opus|sonnet|haiku)-(\d+)-(\d+)$/);
  if (!familyMatch) {
    return null;
  }
  const family = familyMatch[1];
  const major = familyMatch[2];
  const minor = formatAnthropicMinor(familyMatch[3]);
  const familyTitle = family.charAt(0).toUpperCase() + family.slice(1);
  return `Claude ${familyTitle} ${major}.${minor}`;
}

function humanizeModelId(modelId: string): string {
  const withoutDateSuffix = modelId.replace(/-\d{8}$/g, "");
  return withoutDateSuffix.replace(/[-_]+/g, " ").replace(/\b\w/g, (char) => char.toUpperCase());
}

export function getModelDisplayLabel(modelId: string, connection: ModelConnection | null): string {
  if (!connection || connection.kind !== "builtin" || !connection.provider) {
    return modelId;
  }
  if (connection.provider === "anthropic") {
    return ANTHROPIC_MODEL_LABELS[modelId] ?? anthropicLabelFromId(modelId) ?? humanizeModelId(modelId);
  }
  if (connection.provider === "google") {
    return GOOGLE_MODEL_LABELS[modelId] ?? humanizeModelId(modelId);
  }
  return modelId;
}

export function sortModelIdsForConnection(connection: ModelConnection | null, modelIds: string[]): string[] {
  const ids = [...new Set(modelIds.filter(Boolean))];
  if (!connection || connection.kind !== "builtin" || connection.provider !== "anthropic") {
    return ids;
  }

  const familyRank = (value: string): number => {
    if (value.includes("-opus-")) return 0;
    if (value.includes("-sonnet-")) return 1;
    if (value.includes("-haiku-")) return 2;
    return 3;
  };

  const parseVersion = (value: string): { major: number; minor: number } => {
    const normalized = value.replace(/-\d{8}$/g, "");
    const match = normalized.match(/^claude-(?:opus|sonnet|haiku)-(\d+)-(\d+)$/);
    if (!match) {
      return { major: 0, minor: 0 };
    }
    const major = Number(match[1]) || 0;
    const minor = Number(formatAnthropicMinor(match[2])) || 0;
    return { major, minor };
  };

  return ids.sort((a, b) => {
    const familyDelta = familyRank(a) - familyRank(b);
    if (familyDelta !== 0) return familyDelta;
    const va = parseVersion(a);
    const vb = parseVersion(b);
    if (va.major !== vb.major) return vb.major - va.major;
    if (va.minor !== vb.minor) return vb.minor - va.minor;
    const aHasDate = /-\d{8}$/.test(a);
    const bHasDate = /-\d{8}$/.test(b);
    if (aHasDate !== bHasDate) return aHasDate ? 1 : -1;
    return a.localeCompare(b);
  });
}
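
Note (not part of the package): a sketch of how these preset helpers might be called from picker UI code. The exact shape of ModelConnection lives in src/lib/types.ts, which is not shown in this diff, so the cast below is an assumption based only on the fields this module reads (kind, provider).

// Hypothetical usage sketch — connection shape is assumed, not copied from types.ts.
import {
  filterModelIdsForConnection,
  getConnectionModelPresets,
  getModelDisplayLabel,
} from "./model-presets";
import type { ModelConnection } from "./types";

const connection = { kind: "builtin", provider: "openai" } as unknown as ModelConnection;

// Presets to offer before a live model list has been fetched.
const presets = getConnectionModelPresets(connection);

// A fetched OpenAI model list collapses to the frontier presets unless extended models are requested.
const visible = filterModelIdsForConnection({
  connection,
  modelIds: ["gpt-5.2", "gpt-4o", "o3"],
});

// Display labels for the picker (pass-through for OpenAI ids, prettified for Anthropic/Google).
const labels = visible.map((id) => getModelDisplayLabel(id, connection));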

package/template/src/lib/store.ts
@@ -0,0 +1,36 @@
import { create } from "zustand";
export type ViewMode = "chat" | "map";

type UIState = {
  viewMode: ViewMode;
  activeThreadId: string | null;
  focusedMessageId: string | null;
  settingsOpen: boolean;
  connectionOverrideId: string | null;
  modelOverride: string;
  sidebarCollapsed: boolean;
  setViewMode: (view: ViewMode) => void;
  setActiveThreadId: (id: string | null) => void;
  setFocusedMessageId: (id: string | null) => void;
  setSettingsOpen: (open: boolean) => void;
  setConnectionOverrideId: (id: string | null) => void;
  setModelOverride: (model: string) => void;
  setSidebarCollapsed: (collapsed: boolean) => void;
};

export const useUIStore = create<UIState>((set) => ({
  viewMode: "chat",
  activeThreadId: null,
  focusedMessageId: null,
  settingsOpen: false,
  connectionOverrideId: null,
  modelOverride: "",
  sidebarCollapsed: false,
  setViewMode: (viewMode) => set({ viewMode }),
  setActiveThreadId: (activeThreadId) => set({ activeThreadId }),
  setFocusedMessageId: (focusedMessageId) => set({ focusedMessageId }),
  setSettingsOpen: (settingsOpen) => set({ settingsOpen }),
  setConnectionOverrideId: (connectionOverrideId) => set({ connectionOverrideId }),
  setModelOverride: (modelOverride) => set({ modelOverride }),
  setSidebarCollapsed: (sidebarCollapsed) => set({ sidebarCollapsed }),
}));
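
Note (illustrative, not from the package): components consume this store with standard zustand selectors; the import path below is an assumption about the template's module layout.

// Hypothetical consumer of the UI store.
import { useUIStore } from "./store";

// Inside a React component: subscribe with selectors so only the used slices trigger re-renders.
export function useViewToggle() {
  const viewMode = useUIStore((state) => state.viewMode);
  const setViewMode = useUIStore((state) => state.setViewMode);
  return () => setViewMode(viewMode === "chat" ? "map" : "chat");
}

// Outside React (e.g. keyboard shortcuts): read or update the store imperatively.
export function collapseSidebar() {
  useUIStore.getState().setSidebarCollapsed(true);
}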

package/template/src/lib/tooling/approvals.ts
@@ -0,0 +1,78 @@
import { nanoid } from "nanoid";

export type ApprovalDecision = "approve" | "deny" | "timeout";

type PendingApproval = {
  resolve: (decision: ApprovalDecision) => void;
  timeout: NodeJS.Timeout;
};

type ApprovalStore = {
  pendingApprovals: Map<string, PendingApproval>;
};

declare global {
  var __zenithApprovalStore: ApprovalStore | undefined;
}

function getApprovalStore(): ApprovalStore {
  if (!globalThis.__zenithApprovalStore) {
    globalThis.__zenithApprovalStore = {
      pendingApprovals: new Map<string, PendingApproval>(),
    };
  }
  return globalThis.__zenithApprovalStore;
}

export function createApprovalRequest(params?: { timeoutMs?: number }) {
  const store = getApprovalStore();
  const approvalId = nanoid();
  const timeoutMs = params?.timeoutMs ?? 5 * 60_000;

  let resolveRef: ((decision: ApprovalDecision) => void) | null = null;
  const promise = new Promise<ApprovalDecision>((resolve) => {
    resolveRef = resolve;
  });

  const timeout = setTimeout(() => {
    const pending = store.pendingApprovals.get(approvalId);
    if (!pending) {
      return;
    }
    store.pendingApprovals.delete(approvalId);
    pending.resolve("timeout");
  }, timeoutMs);

  store.pendingApprovals.set(approvalId, {
    resolve: (decision) => {
      clearTimeout(timeout);
      resolveRef?.(decision);
    },
    timeout,
  });

  return {
    approvalId,
    promise,
  };
}

export function resolveApprovalDecision(
  approvalId: string,
  decision: "approve" | "deny",
): boolean {
  const store = getApprovalStore();
  const pending = store.pendingApprovals.get(approvalId);
  if (!pending) {
    return false;
  }

  store.pendingApprovals.delete(approvalId);
  clearTimeout(pending.timeout);
  pending.resolve(decision);
  return true;
}

export function hasPendingApproval(approvalId: string): boolean {
  return getApprovalStore().pendingApprovals.has(approvalId);
}
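
Note (illustrative, not from the package): the approval lifecycle pairs a waiting tool run with a later request that resolves it. The real callers are presumably the chat and api/tool-approval routes listed above (their code is not shown here), so the wiring below is an assumption; only the two exported functions are taken from this module.

// Hypothetical approval flow.
import { createApprovalRequest, resolveApprovalDecision } from "./approvals";

// Server side of a sensitive tool call: park the run until a decision (or timeout) arrives.
async function runGuardedTool(): Promise<boolean> {
  const { approvalId, promise } = createApprovalRequest({ timeoutMs: 60_000 });
  // approvalId would be surfaced to the UI so the user can approve or deny.
  const decision = await promise; // "approve" | "deny" | "timeout"
  return decision === "approve";
}

// A separate request resolves the pending promise by id; returns false if nothing is pending.
function handleDecision(approvalId: string, approved: boolean): boolean {
  return resolveApprovalDecision(approvalId, approved ? "approve" : "deny");
}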

package/template/src/lib/tooling/providers/anthropic.ts
@@ -0,0 +1,155 @@
import Anthropic from "@anthropic-ai/sdk";
import type { ProviderToolSchema } from "../registry";
import type {
  CreateProviderSessionArgs,
  NormalizedToolCall,
  ProviderStep,
  ToolLoopMessage,
  ToolProviderSession,
} from "./types";

type AnthropicMessage = {
  role: "user" | "assistant";
  content: unknown;
};

function mapMessages(messages: ToolLoopMessage[]): AnthropicMessage[] {
  const mapped: AnthropicMessage[] = [];

  for (const message of messages) {
    if (message.role === "system") {
      continue;
    }

    if (message.role === "tool") {
      if (!message.toolCallId) {
        continue;
      }
      mapped.push({
        role: "user",
        content: [
          {
            type: "tool_result",
            tool_use_id: message.toolCallId,
            content: message.content,
            is_error: false,
          },
        ],
      });
      continue;
    }

    mapped.push({
      role: message.role,
      content: message.content,
    });
  }

  return mapped;
}

function mapTools(tools: ProviderToolSchema[]) {
  return tools.map((tool) => ({
    name: tool.name,
    description: tool.description,
    input_schema: tool.input_schema,
  }));
}

function parseContentBlocks(content: unknown): {
  text: string;
  toolCalls: NormalizedToolCall[];
} {
  if (!Array.isArray(content)) {
    return { text: "", toolCalls: [] };
  }

  let text = "";
  const toolCalls: NormalizedToolCall[] = [];

  for (const block of content) {
    if (!block || typeof block !== "object") {
      continue;
    }

    const candidate = block as {
      type?: unknown;
      text?: unknown;
      id?: unknown;
      name?: unknown;
      input?: unknown;
    };

    if (candidate.type === "text" && typeof candidate.text === "string") {
      text += candidate.text;
    }

    if (
      candidate.type === "tool_use" &&
      typeof candidate.id === "string" &&
      typeof candidate.name === "string"
    ) {
      const parsedInput =
        candidate.input && typeof candidate.input === "object"
          ? (candidate.input as Record<string, unknown>)
          : {};

      toolCalls.push({
        id: candidate.id,
        name: candidate.name,
        input: parsedInput,
      });
    }
  }

  return { text, toolCalls };
}

export function createAnthropicSession(
  args: CreateProviderSessionArgs,
): ToolProviderSession {
  const client = new Anthropic({ apiKey: args.apiKey });
  const history = mapMessages(args.messages);
  const tools = mapTools(args.tools);
  const system = args.system?.trim() || undefined;

  return {
    async step(signal?: AbortSignal): Promise<ProviderStep> {
      const response = await client.messages.create(
        {
          model: args.model,
          max_tokens: 1024,
          system,
          messages: history as Anthropic.MessageParam[],
          tools: tools.length > 0 ? (tools as Anthropic.Tool[]) : undefined,
        },
        { signal },
      );

      history.push({
        role: "assistant",
        content: response.content,
      });

      const { text, toolCalls } = parseContentBlocks(response.content);
      return {
        text,
        toolCalls,
      };
    },

    addToolResult(params) {
      history.push({
        role: "user",
        content: [
          {
            type: "tool_result",
            tool_use_id: params.callId,
            content: params.result,
            is_error: Boolean(params.isError),
          },
        ],
      });
    },
  };
}
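
Note (illustrative, not from the package): the actual tool loop lives in tooling/runtime.ts, which this diff section does not show, so the driver below is an assumption about the calling pattern implied by the ToolProviderSession surface used here (step, addToolResult); executeTool is a hypothetical stand-in for the local tool executor.

// Hypothetical driver for a provider session such as the one created above.
import type { ToolProviderSession } from "./types";

async function driveSession(
  session: ToolProviderSession,
  executeTool: (name: string, input: Record<string, unknown>) => Promise<string>,
): Promise<string> {
  for (let turn = 0; turn < 8; turn++) {
    const step = await session.step();
    if (step.toolCalls.length === 0) {
      return step.text; // no further tool use: final assistant text
    }
    for (const call of step.toolCalls) {
      const result = await executeTool(call.name, call.input);
      session.addToolResult({ callId: call.id, result });
    }
  }
  throw new Error("Tool loop did not converge");
}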

package/template/src/lib/tooling/providers/ollama.ts
@@ -0,0 +1,73 @@
import type { ChatMessageInput } from "../../types";

type OllamaStreamEvent = {
  model?: string;
  created_at?: string;
  message?: { role?: string; content?: string };
  done?: boolean;
  error?: string;
};

export async function streamOllamaChat(params: {
  baseUrl: string;
  model: string;
  messages: ChatMessageInput[];
  signal?: AbortSignal;
  onToken: (value: string) => void;
}): Promise<void> {
  const response = await fetch(`${params.baseUrl.replace(/\/$/, "")}/api/chat`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      model: params.model,
      messages: params.messages.map((message) => ({
        role: message.role === "system" ? "user" : message.role,
        content: message.content,
      })),
      stream: true,
    }),
    signal: params.signal,
  });

  if (!response.ok || !response.body) {
    const text = await response.text().catch(() => "");
    throw new Error(text || `Ollama request failed (${response.status})`);
  }

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done) {
      break;
    }
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split("\n");
    buffer = lines.pop() || "";

    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed) {
        continue;
      }
      let event: OllamaStreamEvent;
      try {
        event = JSON.parse(trimmed) as OllamaStreamEvent;
      } catch {
        continue;
      }
      if (event.error) {
        throw new Error(event.error);
      }
      const content = event.message?.content;
      if (typeof content === "string" && content.length > 0) {
        params.onToken(content);
      }
    }
  }
}
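
Note (illustrative, not from the package): a minimal call of the streaming helper above. The ChatMessageInput shape is assumed from how the function reads it (role, content), and the base URL is simply the usual local Ollama address.

// Hypothetical caller: accumulate streamed tokens into a single string.
import { streamOllamaChat } from "./ollama";

async function demo(): Promise<string> {
  let answer = "";
  await streamOllamaChat({
    baseUrl: "http://localhost:11434",
    model: "llama3.2:latest",
    messages: [{ role: "user", content: "Say hello in one sentence." }],
    onToken: (token) => {
      answer += token;
    },
  });
  return answer;
}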