fullstacked 0.12.1-1354 → 0.12.1-1355

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3377,9 +3377,9 @@ function ensureAll(state, addrs) {
  changed = true;
  return changed;
  }
- function dynamicFacetSlot(addresses, facet, providers2) {
- let providerAddrs = providers2.map((p) => addresses[p.id]);
- let providerTypes = providers2.map((p) => p.type);
+ function dynamicFacetSlot(addresses, facet, providers) {
+ let providerAddrs = providers.map((p) => addresses[p.id]);
+ let providerTypes = providers.map((p) => p.type);
  let dynamic = providerAddrs.filter((p) => !(p & 1));
  let idx = addresses[facet.id] >> 1;
  function get2(state) {
@@ -3413,7 +3413,7 @@ function dynamicFacetSlot(addresses, facet, providers2) {
  reconfigure(state, oldState) {
  let depChanged = ensureAll(state, providerAddrs);
  let oldProviders = oldState.config.facets[facet.id], oldValue = oldState.facet(facet);
- if (oldProviders && !depChanged && sameArray(providers2, oldProviders)) {
+ if (oldProviders && !depChanged && sameArray(providers, oldProviders)) {
  state.values[idx] = oldValue;
  return 0;
  }
@@ -5254,21 +5254,21 @@ var init_dist = __esm({
  }
  let oldFacets = oldState === null || oldState === void 0 ? void 0 : oldState.config.facets;
  for (let id4 in facets) {
- let providers2 = facets[id4], facet = providers2[0].facet;
+ let providers = facets[id4], facet = providers[0].facet;
  let oldProviders = oldFacets && oldFacets[id4] || [];
- if (providers2.every(
+ if (providers.every(
  (p) => p.type == 0
  /* Provider.Static */
  )) {
  address[facet.id] = staticValues.length << 1 | 1;
- if (sameArray(oldProviders, providers2)) {
+ if (sameArray(oldProviders, providers)) {
  staticValues.push(oldState.facet(facet));
  } else {
- let value = facet.combine(providers2.map((p) => p.value));
+ let value = facet.combine(providers.map((p) => p.value));
  staticValues.push(oldState && facet.compare(value, oldState.facet(facet)) ? oldState.facet(facet) : value);
  }
  } else {
- for (let p of providers2) {
+ for (let p of providers) {
  if (p.type == 0) {
  address[p.id] = staticValues.length << 1 | 1;
  staticValues.push(p.value);
@@ -5278,7 +5278,7 @@ var init_dist = __esm({
  }
  }
  address[facet.id] = dynamicSlots.length << 1;
- dynamicSlots.push((a) => dynamicFacetSlot(a, facet, providers2));
+ dynamicSlots.push((a) => dynamicFacetSlot(a, facet, providers));
  }
  }
  let dynamic = dynamicSlots.map((f5) => f5(address));
@@ -162706,14 +162706,13 @@ Object.defineProperty(DallEAPIWrapper, "toolName", {
  var OpenAIInfo = {
  id: "openai",
  title: "OpenAI",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: ""
  }
- ]
+ }
  };
  async function models(apiKey) {
  const openAIClient = new OpenAI({
@@ -162725,7 +162724,7 @@ async function models(apiKey) {
  return models8.data.map(({ id: id4 }) => id4);
  }
  function createOpenAI(opts) {
- const apiKey = opts?.find(({ id: id4 }) => id4 === "apiKey")?.value || "";
+ const apiKey = opts?.apiKey?.value || "";
  return {
  models: () => models(apiKey),
  client: (model) => new ChatOpenAI({
@@ -164357,20 +164356,18 @@ var ChatOllama = class extends BaseChatModel {
  var OllamaInfo = {
  id: "ollama",
  title: "Ollama",
- configs: [
- {
- id: "host",
+ configs: {
+ host: {
  title: "Host",
  type: "string",
- value: "http://localhost:11434"
+ value: ""
  },
- {
- id: "headers",
+ headers: {
  title: "Custom Headers",
  type: "key-value",
  value: []
  }
- ]
+ }
  };
  async function models2(opts) {
  const ollamaClient = new Ollama$1({
@@ -164386,10 +164383,8 @@ function keyValueArrToObject(arr2) {
  return obj;
  }
  function createOllama(opts) {
- const baseUrl = opts?.find(({ id: id4 }) => id4 === "host")?.value || "http://localhost:11434";
- const headers = keyValueArrToObject(
- opts?.find(({ id: id4 }) => id4 === "headers")?.value || []
- );
+ const baseUrl = opts?.host?.value || "http://localhost:11434";
+ const headers = keyValueArrToObject(opts?.headers?.value || []);
  return {
  models: () => models2({
  host: baseUrl,
@@ -174205,14 +174200,13 @@ import { core_fetch2 as core_fetch23 } from "fetch";
  var AnthropicInfo = {
  id: "anthropic",
  title: "Anthropic",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: ""
  }
- ]
+ }
  };
  async function models3(apiKey) {
  const anthropicClient = new Anthropic({
@@ -174224,7 +174218,7 @@ async function models3(apiKey) {
  return models8.data.map(({ id: id4 }) => id4);
  }
  function createClaude(opts) {
- const apiKey = opts?.find(({ id: id4 }) => id4 === "apiKey")?.value || "";
+ const apiKey = opts?.apiKey?.value || "";
  return {
  models: () => models3(apiKey),
  client: (model) => new ChatAnthropic({
@@ -189551,14 +189545,13 @@ var GoogleGenAI = class {
  var GoogleInfo = {
  id: "google",
  title: "Google",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: ""
  }
- ]
+ }
  };
  async function models4(apiKey) {
  const googleClient = new GoogleGenAI({
@@ -189568,7 +189561,7 @@ async function models4(apiKey) {
  return models8.page.map(({ name: name2 }) => name2.slice("models/".length));
  }
  function createGemini(opts) {
- const apiKey = opts?.find(({ id: id4 }) => id4 === "apiKey")?.value || "";
+ const apiKey = opts?.apiKey?.value || "";
  return {
  models: () => models4(apiKey),
  client: (model) => new ChatGoogleGenerativeAI({
@@ -189691,14 +189684,13 @@ var ChatXAI = class extends ChatOpenAICompletions {
  var xAIInfo = {
  id: "xai",
  title: "xAI",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: ""
  }
- ]
+ }
  };
  async function models5(apiKey) {
  const client2 = new OpenAI({
@@ -189711,7 +189703,7 @@ async function models5(apiKey) {
  return models8.data.map(({ id: id4 }) => id4);
  }
  function createGrok(opts) {
- const apiKey = opts?.find(({ id: id4 }) => id4 === "apiKey")?.value || "";
+ const apiKey = opts?.apiKey?.value || "";
  return {
  models: () => models5(apiKey),
  client: (model) => new ChatXAI({
@@ -189787,14 +189779,13 @@ var ChatDeepSeek = class extends ChatOpenAICompletions {
  var DeepSeekInfo = {
  id: "deepseek",
  title: "DeepSeek",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: ""
  }
- ]
+ }
  };
  async function models6(apiKey) {
  const client2 = new OpenAI({
@@ -189807,7 +189798,7 @@ async function models6(apiKey) {
  return models8.data.map(({ id: id4 }) => id4);
  }
  function createDeepSeek(opts) {
- const apiKey = opts?.find(({ id: id4 }) => id4 === "apiKey")?.value || "";
+ const apiKey = opts?.apiKey?.value || "";
  return {
  models: () => models6(apiKey),
  client: (model) => new ChatDeepSeek({
@@ -190645,14 +190636,13 @@ var import_mistralai3 = __toESM(require_mistralai());
  var MistralInfo = {
  id: "mistral",
  title: "Mistral AI",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: ""
  }
- ]
+ }
  };
  async function models7(apiKey) {
  const mistralClient = new import_mistralai3.Mistral({
@@ -190665,7 +190655,7 @@ async function models7(apiKey) {
  return models8.data.map(({ id: id4 }) => id4);
  }
  function createMistral(opts) {
- const apiKey = opts?.find(({ id: id4 }) => id4 === "apiKey")?.value || "";
+ const apiKey = opts?.apiKey?.value || "";
  return {
  models: () => models7(apiKey),
  client: (model) => new ChatMistralAI({
@@ -190679,17 +190669,15 @@ function createMistral(opts) {
  }

  // node_modules/@fullstacked/ai-agent/src/providers/index.ts
- function providers() {
- return [
- OllamaInfo,
- OpenAIInfo,
- AnthropicInfo,
- GoogleInfo,
- xAIInfo,
- DeepSeekInfo,
- MistralInfo
- ];
- }
+ var providersInfo = {
+ ollama: OllamaInfo,
+ openai: OpenAIInfo,
+ antropic: AnthropicInfo,
+ google: GoogleInfo,
+ xai: xAIInfo,
+ deepseek: DeepSeekInfo,
+ mistral: MistralInfo
+ };
  function getProvider(providerInfo) {
  switch (providerInfo.id) {
  case "ollama":
@@ -190713,7 +190701,7 @@ export {
  createConversation,
  createTool,
  getProvider,
- providers
+ providersInfo
  };
  /*! Bundled license information:

@@ -3,7 +3,7 @@ import { z } from "zod";
  import fs from "fs";
  import { createOllama } from "./src/providers/ollama";
  import { StoredMessage } from "@langchain/core/messages";
- import { getProvider, providers } from "./src";
+ import { getProvider, providersInfo } from "./src";
  import { oneDark } from "@codemirror/theme-one-dark";
  import {} from "./src/providers/google";

@@ -11,7 +11,7 @@ document.title = "FullStacked AI Agent";

  const controls = document.createElement("div");

- const provider = getProvider(providers().at(0));
+ const provider = getProvider(providersInfo.ollama);
  const models = await provider.models();

  const select = document.createElement("select");
@@ -103,7 +103,9 @@ async function createChat() {
  };

  button4.onclick = () => {
- conversation.generateConversationTitle().then(t => title.innerText = t);
+ conversation
+ .generateConversationTitle()
+ .then((t) => (title.innerText = t));
  };
  }

@@ -4,17 +4,22 @@ import { ProviderInfo } from "./interface";
  import { core_fetch2 } from "fetch";
  import { Provider } from "@fullstacked/ai-agent/src/providers/interface";

- export const AnthropicInfo: ProviderInfo = {
+ export const AnthropicInfo: ProviderInfo<{
+ apiKey: {
+ title: "API Key";
+ type: "string";
+ value: string;
+ };
+ }> = {
  id: "anthropic",
  title: "Anthropic",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: "",
  },
- ],
+ },
  };

  async function models(apiKey: string) {
@@ -29,8 +34,7 @@ async function models(apiKey: string) {
  }

  export function createClaude(opts?: typeof AnthropicInfo.configs): Provider {
- const apiKey =
- (opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+ const apiKey = opts?.apiKey?.value || "";

  return {
  models: () => models(apiKey),
@@ -3,17 +3,22 @@ import { Provider, ProviderInfo } from "./interface";
  import openai from "openai";
  import { ChatDeepSeek } from "@langchain/deepseek";

- export const DeepSeekInfo: ProviderInfo = {
+ export const DeepSeekInfo: ProviderInfo<{
+ apiKey: {
+ title: "API Key";
+ type: "string";
+ value: string;
+ };
+ }> = {
  id: "deepseek",
  title: "DeepSeek",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: "",
  },
- ],
+ },
  };

  async function models(apiKey: string) {
@@ -29,8 +34,7 @@ async function models(apiKey: string) {
  }

  export function createDeepSeek(opts?: typeof DeepSeekInfo.configs): Provider {
- const apiKey =
- (opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+ const apiKey = opts?.apiKey?.value || "";

  return {
  models: () => models(apiKey),
@@ -3,17 +3,22 @@ import { GoogleGenAI } from "@google/genai";
  import { Provider } from "@fullstacked/ai-agent/src/providers/interface";
  import { ProviderInfo } from "./interface";

- export const GoogleInfo: ProviderInfo = {
+ export const GoogleInfo: ProviderInfo<{
+ apiKey: {
+ title: "API Key";
+ type: "string";
+ value: string;
+ };
+ }> = {
  id: "google",
  title: "Google",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: "",
  },
- ],
+ },
  };

  async function models(apiKey: string) {
@@ -26,8 +31,7 @@ async function models(apiKey: string) {
  }

  export function createGemini(opts?: typeof GoogleInfo.configs): Provider {
- const apiKey =
- (opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+ const apiKey = opts?.apiKey?.value || "";

  return {
  models: () => models(apiKey),
@@ -8,19 +8,17 @@ import { createGrok, xAIInfo } from "./xai";
  import { DeepSeekInfo, createDeepSeek } from "./deepseek";
  import { MistralInfo, createMistral } from "./mistral";

- export function providers(): ProviderInfo[] {
- return [
- OllamaInfo,
- OpenAIInfo,
- AnthropicInfo,
- GoogleInfo,
- xAIInfo,
- DeepSeekInfo,
- MistralInfo,
- ];
- }
+ export const providersInfo = {
+ ollama: OllamaInfo,
+ openai: OpenAIInfo,
+ antropic: AnthropicInfo,
+ google: GoogleInfo,
+ xai: xAIInfo,
+ deepseek: DeepSeekInfo,
+ mistral: MistralInfo,
+ } as const;

- export function getProvider(providerInfo: ProviderInfo) {
+ export function getProvider(providerInfo: ProviderInfo<any>) {
  switch (providerInfo.id) {
  case "ollama":
  return createOllama(providerInfo.configs);
@@ -10,13 +10,17 @@ type ConfigType =
  value: [string, string][];
  };

- export type ProviderInfo = {
+ export type ProviderInfo<
+ C extends Record<
+ string,
+ ConfigType & {
+ title: string;
+ }
+ >,
+ > = {
  id: string;
- title: string;
- configs: (ConfigType & {
- id: string;
- title: string;
- })[];
+ title?: string;
+ configs: C;
  };

  export interface Provider {
@@ -4,17 +4,22 @@ import { ChatMistralAI } from "@langchain/mistralai";
  import { HTTPClient } from "@mistralai/mistralai/lib/http";
  import { Mistral } from "@mistralai/mistralai";

- export const MistralInfo: ProviderInfo = {
+ export const MistralInfo: ProviderInfo<{
+ apiKey: {
+ title: "API Key";
+ type: "string";
+ value: string;
+ };
+ }> = {
  id: "mistral",
  title: "Mistral AI",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: "",
  },
- ],
+ },
  };

  async function models(apiKey: string) {
@@ -30,8 +35,7 @@ async function models(apiKey: string) {
  }

  export function createMistral(opts?: typeof MistralInfo.configs): Provider {
- const apiKey =
- (opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+ const apiKey = opts?.apiKey?.value || "";

  return {
  models: () => models(apiKey),
@@ -5,23 +5,32 @@ import { Provider, ProviderInfo } from "./interface";
  import { core_fetch2 } from "fetch";
  import { ChatOllama } from "@langchain/ollama";

- export const OllamaInfo: ProviderInfo = {
+ export const OllamaInfo: ProviderInfo<{
+ host: {
+ title: "Host";
+ type: "string";
+ value: string;
+ };
+ headers: {
+ title: "Custom Headers";
+ type: "key-value";
+ value: [string, string][];
+ };
+ }> = {
  id: "ollama",
  title: "Ollama",
- configs: [
- {
- id: "host",
+ configs: {
+ host: {
  title: "Host",
  type: "string",
- value: "http://localhost:11434",
+ value: "",
  },
- {
- id: "headers",
+ headers: {
  title: "Custom Headers",
  type: "key-value",
  value: [],
  },
- ],
+ },
  };

  async function models(opts: ollama.Config) {
@@ -42,12 +51,8 @@ function keyValueArrToObject(arr: [string, string][]) {
  }

  export function createOllama(opts?: typeof OllamaInfo.configs): Provider {
- const baseUrl =
- (opts?.find(({ id }) => id === "host")?.value as string) ||
- "http://localhost:11434";
- const headers = keyValueArrToObject(
- (opts?.find(({ id }) => id === "headers")?.value as []) || [],
- );
+ const baseUrl = opts?.host?.value || "http://localhost:11434";
+ const headers = keyValueArrToObject(opts?.headers?.value || []);
  return {
  models: () =>
  models({
@@ -3,17 +3,22 @@ import { Provider, ProviderInfo } from "./interface";
  import openai from "openai";
  import { ChatOpenAI } from "@langchain/openai";

- export const OpenAIInfo: ProviderInfo = {
+ export const OpenAIInfo: ProviderInfo<{
+ apiKey: {
+ title: "API Key";
+ type: "string";
+ value: string;
+ };
+ }> = {
  id: "openai",
  title: "OpenAI",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: "",
  },
- ],
+ },
  };

  async function models(apiKey: string) {
@@ -28,8 +33,7 @@ async function models(apiKey: string) {
  }

  export function createOpenAI(opts?: typeof OpenAIInfo.configs): Provider {
- const apiKey =
- (opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+ const apiKey = opts?.apiKey?.value || "";

  return {
  models: () => models(apiKey),
@@ -3,17 +3,22 @@ import { Provider, ProviderInfo } from "./interface";
  import openai from "openai";
  import { ChatXAI } from "@langchain/xai";

- export const xAIInfo: ProviderInfo = {
+ export const xAIInfo: ProviderInfo<{
+ apiKey: {
+ title: "API Key";
+ type: "string";
+ value: string;
+ };
+ }> = {
  id: "xai",
  title: "xAI",
- configs: [
- {
- id: "apiKey",
+ configs: {
+ apiKey: {
  title: "API Key",
  type: "string",
  value: "",
  },
- ],
+ },
  };

  async function models(apiKey: string) {
@@ -29,8 +34,7 @@ async function models(apiKey: string) {
  }

  export function createGrok(opts?: typeof xAIInfo.configs): Provider {
- const apiKey =
- (opts?.find(({ id }) => id === "apiKey")?.value as string) || "";
+ const apiKey = opts?.apiKey?.value || "";

  return {
  models: () => models(apiKey),
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "fullstacked",
- "version": "0.12.1-1354",
+ "version": "0.12.1-1355",
  "scripts": {
  "build": "node build.js",
  "postbuild": "node build.js --binding",