fullstacked 0.12.1-1354 → 0.12.1-1356
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/fullstacked_modules/@fullstacked/ai-agent/ai-agent.js +73 -63
- package/fullstacked_modules/@fullstacked/ai-agent/index.ts +5 -3
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/anthropic.ts +11 -7
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/deepseek.ts +11 -7
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/google.ts +11 -7
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/index.ts +10 -12
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/interface.ts +10 -6
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/mistral.ts +11 -7
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/ollama.ts +19 -14
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/openai.ts +11 -7
- package/fullstacked_modules/@fullstacked/ai-agent/src/providers/xai.ts +11 -7
- package/package.json +1 -1
@@ -3377,9 +3377,9 @@ function ensureAll(state, addrs) {
 changed = true;
 return changed;
 }
-function dynamicFacetSlot(addresses, facet,
-let providerAddrs =
-let providerTypes =
+function dynamicFacetSlot(addresses, facet, providers) {
+let providerAddrs = providers.map((p) => addresses[p.id]);
+let providerTypes = providers.map((p) => p.type);
 let dynamic = providerAddrs.filter((p) => !(p & 1));
 let idx = addresses[facet.id] >> 1;
 function get2(state) {
@@ -3413,7 +3413,7 @@ function dynamicFacetSlot(addresses, facet, providers2) {
 reconfigure(state, oldState) {
 let depChanged = ensureAll(state, providerAddrs);
 let oldProviders = oldState.config.facets[facet.id], oldValue = oldState.facet(facet);
-if (oldProviders && !depChanged && sameArray(
+if (oldProviders && !depChanged && sameArray(providers, oldProviders)) {
 state.values[idx] = oldValue;
 return 0;
 }
@@ -5254,21 +5254,21 @@ var init_dist = __esm({
 }
 let oldFacets = oldState === null || oldState === void 0 ? void 0 : oldState.config.facets;
 for (let id4 in facets) {
-let
+let providers = facets[id4], facet = providers[0].facet;
 let oldProviders = oldFacets && oldFacets[id4] || [];
-if (
+if (providers.every(
 (p) => p.type == 0
 /* Provider.Static */
 )) {
 address[facet.id] = staticValues.length << 1 | 1;
-if (sameArray(oldProviders,
+if (sameArray(oldProviders, providers)) {
 staticValues.push(oldState.facet(facet));
 } else {
-let value = facet.combine(
+let value = facet.combine(providers.map((p) => p.value));
 staticValues.push(oldState && facet.compare(value, oldState.facet(facet)) ? oldState.facet(facet) : value);
 }
 } else {
-for (let p of
+for (let p of providers) {
 if (p.type == 0) {
 address[p.id] = staticValues.length << 1 | 1;
 staticValues.push(p.value);
@@ -5278,7 +5278,7 @@ var init_dist = __esm({
 }
 }
 address[facet.id] = dynamicSlots.length << 1;
-dynamicSlots.push((a) => dynamicFacetSlot(a, facet,
+dynamicSlots.push((a) => dynamicFacetSlot(a, facet, providers));
 }
 }
 let dynamic = dynamicSlots.map((f5) => f5(address));
@@ -143038,6 +143038,8 @@ function createCodeMirrorView(opts) {
 lintersCompartment.of([...loadedLinters])
 ]
 });
+editorView.dom.style.height = "100%";
+editorView.dom.style.width = "100%";
 const reloadExtensions = () => {
 const effects = compartment.reconfigure([...loadedExtensions]);
 editorView.dispatch({ effects });
@@ -143142,12 +143144,32 @@ function createCodeMirrorView(opts) {
 scrollIntoView: true
 });
 };
+const getVisibleLines = () => {
+const height = editorView.dom.getBoundingClientRect().height;
+const scrollTop = editorView.scrollDOM.scrollTop;
+const visibleLines = [
+editorView.lineBlockAtHeight(scrollTop),
+editorView.lineBlockAtHeight(height + scrollTop)
+];
+const firstVisibleLine = editorView.state.doc.lineAt(
+visibleLines.at(0).from
+).number;
+const lastVisibleLine = editorView.state.doc.lineAt(
+visibleLines.at(-1).from
+).number;
+return [firstVisibleLine, lastVisibleLine];
+};
 const goTo = (pos) => {
 const position = typeof pos === "number" ? pos : editorView.state.doc.line(pos.line)?.from + pos.character;
 editorView.dispatch({
 selection: { anchor: position, head: position }
 });
 setTimeout(() => {
+const line = editorView.state.doc.lineAt(position).number;
+const [firstLine, lastLine] = getVisibleLines();
+if (line > firstLine && line < lastLine) {
+return;
+}
 const { top: top2 } = editorView.lineBlockAt(position);
 editorView.scrollDOM.scrollTo({ top: top2 });
 });
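The two additions above keep goTo from scrolling when the target is already on screen: getVisibleLines maps the scroll offset to the first and last visible document lines via lineBlockAtHeight, and goTo bails out early when the destination falls inside that range. A minimal sketch of the same idea against CodeMirror's public view API (the view argument and the jumpTo name are illustrative, not taken from the package):

    import { EditorView } from "@codemirror/view";

    // First and last document line numbers currently in the viewport,
    // derived from the scroll offset and the rendered block geometry.
    function visibleLineRange(view: EditorView): [number, number] {
        const viewportHeight = view.dom.getBoundingClientRect().height;
        const scrollTop = view.scrollDOM.scrollTop;
        const first = view.lineBlockAtHeight(scrollTop);
        const last = view.lineBlockAtHeight(scrollTop + viewportHeight);
        return [
            view.state.doc.lineAt(first.from).number,
            view.state.doc.lineAt(last.from).number
        ];
    }

    // Move the cursor, but only scroll when the target line is off screen.
    function jumpTo(view: EditorView, pos: number) {
        view.dispatch({ selection: { anchor: pos, head: pos } });
        const line = view.state.doc.lineAt(pos).number;
        const [first, last] = visibleLineRange(view);
        if (line > first && line < last) return; // already visible
        view.scrollDOM.scrollTo({ top: view.lineBlockAt(pos).top });
    }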
@@ -162706,14 +162728,13 @@ Object.defineProperty(DallEAPIWrapper, "toolName", {
 var OpenAIInfo = {
 id: "openai",
 title: "OpenAI",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: ""
 }
-
+}
 };
 async function models(apiKey) {
 const openAIClient = new OpenAI({
@@ -162725,7 +162746,7 @@ async function models(apiKey) {
 return models8.data.map(({ id: id4 }) => id4);
 }
 function createOpenAI(opts) {
-const apiKey = opts?.
+const apiKey = opts?.apiKey?.value || "";
 return {
 models: () => models(apiKey),
 client: (model) => new ChatOpenAI({
@@ -164357,20 +164378,18 @@ var ChatOllama = class extends BaseChatModel {
 var OllamaInfo = {
 id: "ollama",
 title: "Ollama",
-configs:
-{
-id: "host",
+configs: {
+host: {
 title: "Host",
 type: "string",
-value: "
+value: ""
 },
-{
-id: "headers",
+headers: {
 title: "Custom Headers",
 type: "key-value",
 value: []
 }
-
+}
 };
 async function models2(opts) {
 const ollamaClient = new Ollama$1({
@@ -164386,10 +164405,8 @@ function keyValueArrToObject(arr2) {
 return obj;
 }
 function createOllama(opts) {
-const baseUrl = opts?.
-const headers = keyValueArrToObject(
-opts?.find(({ id: id4 }) => id4 === "headers")?.value || []
-);
+const baseUrl = opts?.host?.value || "http://localhost:11434";
+const headers = keyValueArrToObject(opts?.headers?.value || []);
 return {
 models: () => models2({
 host: baseUrl,
@@ -174205,14 +174222,13 @@ import { core_fetch2 as core_fetch23 } from "fetch";
 var AnthropicInfo = {
 id: "anthropic",
 title: "Anthropic",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: ""
 }
-
+}
 };
 async function models3(apiKey) {
 const anthropicClient = new Anthropic({
@@ -174224,7 +174240,7 @@ async function models3(apiKey) {
 return models8.data.map(({ id: id4 }) => id4);
 }
 function createClaude(opts) {
-const apiKey = opts?.
+const apiKey = opts?.apiKey?.value || "";
 return {
 models: () => models3(apiKey),
 client: (model) => new ChatAnthropic({
@@ -189551,14 +189567,13 @@ var GoogleGenAI = class {
 var GoogleInfo = {
 id: "google",
 title: "Google",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: ""
 }
-
+}
 };
 async function models4(apiKey) {
 const googleClient = new GoogleGenAI({
@@ -189568,7 +189583,7 @@ async function models4(apiKey) {
 return models8.page.map(({ name: name2 }) => name2.slice("models/".length));
 }
 function createGemini(opts) {
-const apiKey = opts?.
+const apiKey = opts?.apiKey?.value || "";
 return {
 models: () => models4(apiKey),
 client: (model) => new ChatGoogleGenerativeAI({
@@ -189691,14 +189706,13 @@ var ChatXAI = class extends ChatOpenAICompletions {
 var xAIInfo = {
 id: "xai",
 title: "xAI",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: ""
 }
-
+}
 };
 async function models5(apiKey) {
 const client2 = new OpenAI({
@@ -189711,7 +189725,7 @@ async function models5(apiKey) {
 return models8.data.map(({ id: id4 }) => id4);
 }
 function createGrok(opts) {
-const apiKey = opts?.
+const apiKey = opts?.apiKey?.value || "";
 return {
 models: () => models5(apiKey),
 client: (model) => new ChatXAI({
@@ -189787,14 +189801,13 @@ var ChatDeepSeek = class extends ChatOpenAICompletions {
 var DeepSeekInfo = {
 id: "deepseek",
 title: "DeepSeek",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: ""
 }
-
+}
 };
 async function models6(apiKey) {
 const client2 = new OpenAI({
@@ -189807,7 +189820,7 @@ async function models6(apiKey) {
 return models8.data.map(({ id: id4 }) => id4);
 }
 function createDeepSeek(opts) {
-const apiKey = opts?.
+const apiKey = opts?.apiKey?.value || "";
 return {
 models: () => models6(apiKey),
 client: (model) => new ChatDeepSeek({
@@ -190645,14 +190658,13 @@ var import_mistralai3 = __toESM(require_mistralai());
 var MistralInfo = {
 id: "mistral",
 title: "Mistral AI",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: ""
 }
-
+}
 };
 async function models7(apiKey) {
 const mistralClient = new import_mistralai3.Mistral({
@@ -190665,7 +190677,7 @@ async function models7(apiKey) {
 return models8.data.map(({ id: id4 }) => id4);
 }
 function createMistral(opts) {
-const apiKey = opts?.
+const apiKey = opts?.apiKey?.value || "";
 return {
 models: () => models7(apiKey),
 client: (model) => new ChatMistralAI({
@@ -190679,17 +190691,15 @@ function createMistral(opts) {
 }
 
 // node_modules/@fullstacked/ai-agent/src/providers/index.ts
-
-
-
-
-
-
-
-
-
-];
-}
+var providersInfo = {
+ollama: OllamaInfo,
+openai: OpenAIInfo,
+antropic: AnthropicInfo,
+google: GoogleInfo,
+xai: xAIInfo,
+deepseek: DeepSeekInfo,
+mistral: MistralInfo
+};
 function getProvider(providerInfo) {
 switch (providerInfo.id) {
 case "ollama":
@@ -190713,7 +190723,7 @@ export {
 createConversation,
 createTool,
 getProvider,
-
+providersInfo
 };
 /*! Bundled license information:
 
@@ -3,7 +3,7 @@ import { z } from "zod";
 import fs from "fs";
 import { createOllama } from "./src/providers/ollama";
 import { StoredMessage } from "@langchain/core/messages";
-import { getProvider,
+import { getProvider, providersInfo } from "./src";
 import { oneDark } from "@codemirror/theme-one-dark";
 import {} from "./src/providers/google";
 
@@ -11,7 +11,7 @@ document.title = "FullStacked AI Agent";
 
 const controls = document.createElement("div");
 
-const provider = getProvider(
+const provider = getProvider(providersInfo.ollama);
 const models = await provider.models();
 
 const select = document.createElement("select");
@@ -103,7 +103,9 @@ async function createChat() {
 };
 
 button4.onclick = () => {
-conversation
+conversation
+.generateConversationTitle()
+.then((t) => (title.innerText = t));
 };
 }
 
@@ -4,17 +4,22 @@ import { ProviderInfo } from "./interface";
 import { core_fetch2 } from "fetch";
 import { Provider } from "@fullstacked/ai-agent/src/providers/interface";
 
-export const AnthropicInfo: ProviderInfo
+export const AnthropicInfo: ProviderInfo<{
+apiKey: {
+title: "API Key";
+type: "string";
+value: string;
+};
+}> = {
 id: "anthropic",
 title: "Anthropic",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: "",
 },
-
+},
 };
 
 async function models(apiKey: string) {
@@ -29,8 +34,7 @@ async function models(apiKey: string) {
 }
 
 export function createClaude(opts?: typeof AnthropicInfo.configs): Provider {
-const apiKey =
-(opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+const apiKey = opts?.apiKey?.value || "";
 
 return {
 models: () => models(apiKey),
@@ -3,17 +3,22 @@ import { Provider, ProviderInfo } from "./interface";
 import openai from "openai";
 import { ChatDeepSeek } from "@langchain/deepseek";
 
-export const DeepSeekInfo: ProviderInfo
+export const DeepSeekInfo: ProviderInfo<{
+apiKey: {
+title: "API Key";
+type: "string";
+value: string;
+};
+}> = {
 id: "deepseek",
 title: "DeepSeek",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: "",
 },
-
+},
 };
 
 async function models(apiKey: string) {
@@ -29,8 +34,7 @@ async function models(apiKey: string) {
 }
 
 export function createDeepSeek(opts?: typeof DeepSeekInfo.configs): Provider {
-const apiKey =
-(opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+const apiKey = opts?.apiKey?.value || "";
 
 return {
 models: () => models(apiKey),
@@ -3,17 +3,22 @@ import { GoogleGenAI } from "@google/genai";
 import { Provider } from "@fullstacked/ai-agent/src/providers/interface";
 import { ProviderInfo } from "./interface";
 
-export const GoogleInfo: ProviderInfo
+export const GoogleInfo: ProviderInfo<{
+apiKey: {
+title: "API Key";
+type: "string";
+value: string;
+};
+}> = {
 id: "google",
 title: "Google",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: "",
 },
-
+},
 };
 
 async function models(apiKey: string) {
@@ -26,8 +31,7 @@ async function models(apiKey: string) {
 }
 
 export function createGemini(opts?: typeof GoogleInfo.configs): Provider {
-const apiKey =
-(opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+const apiKey = opts?.apiKey?.value || "";
 
 return {
 models: () => models(apiKey),
@@ -8,19 +8,17 @@ import { createGrok, xAIInfo } from "./xai";
 import { DeepSeekInfo, createDeepSeek } from "./deepseek";
 import { MistralInfo, createMistral } from "./mistral";
 
-export
-
-
-
-
-
-
-
-
-];
-}
+export const providersInfo = {
+ollama: OllamaInfo,
+openai: OpenAIInfo,
+antropic: AnthropicInfo,
+google: GoogleInfo,
+xai: xAIInfo,
+deepseek: DeepSeekInfo,
+mistral: MistralInfo,
+} as const;
 
-export function getProvider(providerInfo: ProviderInfo) {
+export function getProvider(providerInfo: ProviderInfo<any>) {
 switch (providerInfo.id) {
 case "ollama":
 return createOllama(providerInfo.configs);
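Exporting providersInfo as a keyed object (with as const) replaces the old array-returning helper, so callers select a provider by property access and get its exact config shape. A short usage sketch following the pattern the package's own index.ts uses; the import path mirrors that file and the API-key value is a placeholder:

    import { getProvider, providersInfo } from "./src";

    // Property access instead of searching an array of provider infos.
    const info = providersInfo.openai;
    info.configs.apiKey.value = "sk-placeholder"; // typed as string

    // getProvider switches on info.id and returns the matching Provider
    // with models() and client(model).
    const provider = getProvider(info);
    const modelIds = await provider.models();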
@@ -10,13 +10,17 @@ type ConfigType =
 value: [string, string][];
 };
 
-export type ProviderInfo
+export type ProviderInfo<
+C extends Record<
+string,
+ConfigType & {
+title: string;
+}
+>,
+> = {
 id: string;
-title
-configs:
-id: string;
-title: string;
-})[];
+title?: string;
+configs: C;
 };
 
 export interface Provider {
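ProviderInfo is now generic over a record of named configs rather than an array of { id, ... } entries, which is what lets every provider drop its .find() lookups. A self-contained sketch of the shape (ConfigType is reduced to the string variant here; the ExampleInfo provider is illustrative, not part of the package):

    // Simplified: the real ConfigType is a union that also covers "key-value" configs.
    type ConfigType = { type: "string"; value: string };

    type ProviderInfo<C extends Record<string, ConfigType & { title: string }>> = {
        id: string;
        title?: string;
        configs: C;
    };

    // A provider spells out its config keys in the type argument...
    const ExampleInfo: ProviderInfo<{
        apiKey: { title: "API Key"; type: "string"; value: string };
    }> = {
        id: "example",
        title: "Example",
        configs: { apiKey: { title: "API Key", type: "string", value: "" } },
    };

    // ...so consumers read configs by property instead of an array .find():
    const apiKey = ExampleInfo.configs.apiKey?.value || "";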
@@ -4,17 +4,22 @@ import { ChatMistralAI } from "@langchain/mistralai";
 import { HTTPClient } from "@mistralai/mistralai/lib/http";
 import { Mistral } from "@mistralai/mistralai";
 
-export const MistralInfo: ProviderInfo
+export const MistralInfo: ProviderInfo<{
+apiKey: {
+title: "API Key";
+type: "string";
+value: string;
+};
+}> = {
 id: "mistral",
 title: "Mistral AI",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: "",
 },
-
+},
 };
 
 async function models(apiKey: string) {
@@ -30,8 +35,7 @@ async function models(apiKey: string) {
 }
 
 export function createMistral(opts?: typeof MistralInfo.configs): Provider {
-const apiKey =
-(opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+const apiKey = opts?.apiKey?.value || "";
 
 return {
 models: () => models(apiKey),
@@ -5,23 +5,32 @@ import { Provider, ProviderInfo } from "./interface";
 import { core_fetch2 } from "fetch";
 import { ChatOllama } from "@langchain/ollama";
 
-export const OllamaInfo: ProviderInfo
+export const OllamaInfo: ProviderInfo<{
+host: {
+title: "Host";
+type: "string";
+value: string;
+};
+headers: {
+title: "Custom Headers";
+type: "key-value";
+value: [string, string][];
+};
+}> = {
 id: "ollama",
 title: "Ollama",
-configs:
-{
-id: "host",
+configs: {
+host: {
 title: "Host",
 type: "string",
-value: "
+value: "",
 },
-{
-id: "headers",
+headers: {
 title: "Custom Headers",
 type: "key-value",
 value: [],
 },
-
+},
 };
 
 async function models(opts: ollama.Config) {
@@ -42,12 +51,8 @@ function keyValueArrToObject(arr: [string, string][]) {
 }
 
 export function createOllama(opts?: typeof OllamaInfo.configs): Provider {
-const baseUrl =
-
-"http://localhost:11434";
-const headers = keyValueArrToObject(
-(opts?.find(({ id }) => id === "headers")?.value as []) || [],
-);
+const baseUrl = opts?.host?.value || "http://localhost:11434";
+const headers = keyValueArrToObject(opts?.headers?.value || []);
 return {
 models: () =>
 models({
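createOllama still routes the headers config through keyValueArrToObject; only its return statement appears in this hunk, but the conversion it performs is just [key, value] pairs into a plain object. A plausible sketch of such a helper (this implementation is an assumption, not copied from the package):

    // Turn [["Authorization", "Bearer x"], ...] into { Authorization: "Bearer x", ... }.
    function keyValueArrToObject(arr: [string, string][]): Record<string, string> {
        const obj: Record<string, string> = {};
        for (const [key, value] of arr) {
            obj[key] = value;
        }
        return obj;
    }

    // Matches how createOllama feeds custom headers to the Ollama client.
    const headers = keyValueArrToObject([["Authorization", "Bearer token"]]);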
@@ -3,17 +3,22 @@ import { Provider, ProviderInfo } from "./interface";
 import openai from "openai";
 import { ChatOpenAI } from "@langchain/openai";
 
-export const OpenAIInfo: ProviderInfo
+export const OpenAIInfo: ProviderInfo<{
+apiKey: {
+title: "API Key";
+type: "string";
+value: string;
+};
+}> = {
 id: "openai",
 title: "OpenAI",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: "",
 },
-
+},
 };
 
 async function models(apiKey: string) {
@@ -28,8 +33,7 @@ async function models(apiKey: string) {
 }
 
 export function createOpenAI(opts?: typeof OpenAIInfo.configs): Provider {
-const apiKey =
-(opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+const apiKey = opts?.apiKey?.value || "";
 
 return {
 models: () => models(apiKey),
@@ -3,17 +3,22 @@ import { Provider, ProviderInfo } from "./interface";
 import openai from "openai";
 import { ChatXAI } from "@langchain/xai";
 
-export const xAIInfo: ProviderInfo
+export const xAIInfo: ProviderInfo<{
+apiKey: {
+title: "API Key";
+type: "string";
+value: string;
+};
+}> = {
 id: "xai",
 title: "xAI",
-configs:
-{
-id: "apiKey",
+configs: {
+apiKey: {
 title: "API Key",
 type: "string",
 value: "",
 },
-
+},
 };
 
 async function models(apiKey: string) {
@@ -29,8 +34,7 @@ async function models(apiKey: string) {
 }
 
 export function createGrok(opts?: typeof xAIInfo.configs): Provider {
-const apiKey =
-(opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+const apiKey = opts?.apiKey?.value || "";
 
 return {
 models: () => models(apiKey),