beeai-framework 0.1.26 → 0.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/dist/adapters/google-vertex/backend/chat.d.cts +1 -1
  2. package/dist/adapters/google-vertex/backend/chat.d.ts +1 -1
  3. package/dist/adapters/groq/backend/chat.cjs +44 -1
  4. package/dist/adapters/groq/backend/chat.cjs.map +1 -1
  5. package/dist/adapters/groq/backend/chat.d.cts +12 -14
  6. package/dist/adapters/groq/backend/chat.d.ts +12 -14
  7. package/dist/adapters/groq/backend/chat.js +44 -1
  8. package/dist/adapters/groq/backend/chat.js.map +1 -1
  9. package/dist/adapters/vercel/backend/chat.cjs +57 -18
  10. package/dist/adapters/vercel/backend/chat.cjs.map +1 -1
  11. package/dist/adapters/vercel/backend/chat.d.cts +2 -2
  12. package/dist/adapters/vercel/backend/chat.d.ts +2 -2
  13. package/dist/adapters/vercel/backend/chat.js +59 -20
  14. package/dist/adapters/vercel/backend/chat.js.map +1 -1
  15. package/dist/adapters/vercel/backend/embedding.cjs.map +1 -1
  16. package/dist/adapters/vercel/backend/embedding.d.cts +1 -1
  17. package/dist/adapters/vercel/backend/embedding.d.ts +1 -1
  18. package/dist/adapters/vercel/backend/embedding.js.map +1 -1
  19. package/dist/adapters/watsonx/backend/chat.cjs +3 -4
  20. package/dist/adapters/watsonx/backend/chat.cjs.map +1 -1
  21. package/dist/adapters/watsonx/backend/chat.js +4 -5
  22. package/dist/adapters/watsonx/backend/chat.js.map +1 -1
  23. package/dist/agents/react/prompts.d.cts +1 -1
  24. package/dist/agents/react/prompts.d.ts +1 -1
  25. package/dist/agents/react/runners/deep-think/prompts.d.cts +1 -1
  26. package/dist/agents/react/runners/deep-think/prompts.d.ts +1 -1
  27. package/dist/agents/react/runners/deep-think/runner.d.cts +1 -1
  28. package/dist/agents/react/runners/deep-think/runner.d.ts +1 -1
  29. package/dist/agents/react/runners/default/runner.d.cts +1 -1
  30. package/dist/agents/react/runners/default/runner.d.ts +1 -1
  31. package/dist/agents/react/runners/granite/prompts.d.cts +1 -1
  32. package/dist/agents/react/runners/granite/prompts.d.ts +1 -1
  33. package/dist/agents/react/runners/granite/runner.d.cts +1 -1
  34. package/dist/agents/react/runners/granite/runner.d.ts +1 -1
  35. package/dist/agents/requirement/agent.d.cts +1 -1
  36. package/dist/agents/requirement/agent.d.ts +1 -1
  37. package/dist/agents/requirement/prompts.d.cts +2 -2
  38. package/dist/agents/requirement/prompts.d.ts +2 -2
  39. package/dist/agents/requirement/requirements/conditional.d.cts +1 -1
  40. package/dist/agents/requirement/requirements/conditional.d.ts +1 -1
  41. package/dist/agents/requirement/requirements/requirement.d.cts +1 -1
  42. package/dist/agents/requirement/requirements/requirement.d.ts +1 -1
  43. package/dist/agents/requirement/runner.d.cts +1 -1
  44. package/dist/agents/requirement/runner.d.ts +1 -1
  45. package/dist/agents/requirement/types.d.cts +1 -1
  46. package/dist/agents/requirement/types.d.ts +1 -1
  47. package/dist/agents/requirement/utils/llm.d.cts +1 -1
  48. package/dist/agents/requirement/utils/llm.d.ts +1 -1
  49. package/dist/agents/requirement/utils/tool.d.cts +1 -1
  50. package/dist/agents/requirement/utils/tool.d.ts +1 -1
  51. package/dist/backend/chat.cjs +8 -1
  52. package/dist/backend/chat.cjs.map +1 -1
  53. package/dist/backend/chat.js +8 -1
  54. package/dist/backend/chat.js.map +1 -1
  55. package/dist/backend/core.d.cts +1 -1
  56. package/dist/backend/core.d.ts +1 -1
  57. package/dist/backend/errors.cjs.map +1 -1
  58. package/dist/backend/errors.d.cts +8 -11
  59. package/dist/backend/errors.d.ts +8 -11
  60. package/dist/backend/errors.js.map +1 -1
  61. package/dist/backend/message.cjs +4 -1
  62. package/dist/backend/message.cjs.map +1 -1
  63. package/dist/backend/message.js +4 -1
  64. package/dist/backend/message.js.map +1 -1
  65. package/dist/version.cjs +1 -1
  66. package/dist/version.js +1 -1
  67. package/dist/workflows/agent.d.cts +37 -37
  68. package/dist/workflows/agent.d.ts +37 -37
  69. package/package.json +19 -19
  70. package/dist/{types-BQ-3LdkE.d.ts → types-C3Fnr6yY.d.ts} +8 -8
  71. package/dist/{types-CkEtV8R9.d.cts → types-DepshYVR.d.cts} +8 -8
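
One headline change in the hunks below is the Groq adapter's default chat model moving from "gemma2-9b-it" to "openai/gpt-oss-20b", alongside new tool-call error handling in GroqChatModel and reworked structured-output handling in VercelChatModel. A minimal sketch of pinning a Groq model explicitly instead of relying on the new default, assuming the public subpath import "beeai-framework/adapters/groq/backend/chat" and a GROQ_API_KEY in the environment:

// Sketch only: keep a specific model rather than the new "openai/gpt-oss-20b" default.
import { GroqChatModel } from "beeai-framework/adapters/groq/backend/chat";

// Option 1: pass the model id directly (constructor: modelId?, parameters?, client?).
const pinned = new GroqChatModel("gemma2-9b-it");

// Option 2: set the environment variable read via getEnv("GROQ_CHAT_MODEL", "openai/gpt-oss-20b").
process.env.GROQ_CHAT_MODEL = "gemma2-9b-it";
const fromEnv = new GroqChatModel();

console.log(pinned.modelId, fromEnv.modelId);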

package/dist/adapters/google-vertex/backend/chat.d.cts

@@ -13,7 +13,6 @@ import '../../../emitter-jN55XZZq.cjs';
  import '../../../internals/helpers/promise.cjs';
  import 'ai';
  import '../../../backend/message.cjs';
- import '../../../backend/client.cjs';
  import 'promise-based-task';
  import '../../../cache/base.cjs';
  import '../../../backend/constants.cjs';
@@ -23,6 +22,7 @@ import '../../../internals/helpers/schema.cjs';
  import 'zod';
  import 'zod-to-json-schema';
  import '../../../template.cjs';
+ import '../../../backend/client.cjs';

  /**
  * Copyright 2025 © BeeAI a Series of LF Projects, LLC

package/dist/adapters/google-vertex/backend/chat.d.ts

@@ -13,7 +13,6 @@ import '../../../emitter-36-9MnvA.js';
  import '../../../internals/helpers/promise.js';
  import 'ai';
  import '../../../backend/message.js';
- import '../../../backend/client.js';
  import 'promise-based-task';
  import '../../../cache/base.js';
  import '../../../backend/constants.js';
@@ -23,6 +22,7 @@ import '../../../internals/helpers/schema.js';
  import 'zod';
  import 'zod-to-json-schema';
  import '../../../template.js';
+ import '../../../backend/client.js';

  /**
  * Copyright 2025 © BeeAI a Series of LF Projects, LLC

package/dist/adapters/groq/backend/chat.cjs

@@ -3,6 +3,10 @@
  var chat_cjs = require('../../vercel/backend/chat.cjs');
  var client_cjs = require('./client.cjs');
  var env_cjs = require('../../../internals/env.cjs');
+ var ai = require('ai');
+ var errors_cjs = require('../../../backend/errors.cjs');
+ var schema_cjs = require('../../../internals/helpers/schema.cjs');
+ var remeda = require('remeda');

  var __defProp = Object.defineProperty;
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
@@ -10,7 +14,7 @@ class GroqChatModel extends chat_cjs.VercelChatModel {
  static {
  __name(this, "GroqChatModel");
  }
- constructor(modelId = env_cjs.getEnv("GROQ_CHAT_MODEL", "gemma2-9b-it"), parameters = {}, client) {
+ constructor(modelId = env_cjs.getEnv("GROQ_CHAT_MODEL", "openai/gpt-oss-20b"), parameters = {}, client) {
  const model = client_cjs.GroqClient.ensure(client).instance.languageModel(modelId);
  super(model);
  Object.assign(this.parameters, parameters ?? {});
@@ -18,6 +22,45 @@ class GroqChatModel extends chat_cjs.VercelChatModel {
  static {
  this.register();
  }
+ async *_createStream(input, run) {
+ try {
+ for await (const chunk of super._createStream(input, run)) {
+ yield chunk;
+ }
+ } catch (error) {
+ this.handleError(input, error);
+ }
+ }
+ async _create(input, run) {
+ return await super._create(input, run).catch((e) => this.handleError(input, e));
+ }
+ handleError(input, error) {
+ const matchedErrorMessages = [
+ "model did not call a tool",
+ "tool call validation failed",
+ "tool choice is required",
+ "Parsing failed"
+ ];
+ const cause = error instanceof errors_cjs.ChatModelError ? error.getCause() : error;
+ const responseBodyRaw = ai.APICallError.isInstance(cause) ? cause.responseBody : remeda.isPlainObject(cause) ? JSON.stringify(cause) : void 0;
+ if (responseBodyRaw && matchedErrorMessages.some((message) => responseBodyRaw.includes(message))) {
+ const responseBody = schema_cjs.parseBrokenJson(responseBodyRaw, {
+ pair: [
+ "{",
+ "}"
+ ]
+ });
+ if (!responseBody) {
+ throw cause;
+ }
+ const tools = (input.tools || []).map((t) => t.name).join(", ");
+ throw new errors_cjs.ChatModelToolCallError(responseBody?.error?.message || responseBody?.message || String(responseBody), [], {
+ generatedContent: responseBody?.error?.failed_generation || "empty",
+ generatedError: `Invalid response. Use one of the following tools: ${tools}. `
+ });
+ }
+ throw cause;
+ }
  }

  exports.GroqChatModel = GroqChatModel;
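
The handleError method added above converts Groq tool-call failures (for example "tool call validation failed") into a ChatModelToolCallError carrying the model's failed generation, while any other error is rethrown unchanged. A minimal sketch of catching it on the caller side, assuming the subpath imports "beeai-framework/backend/errors" and "beeai-framework/backend/message" and the usual awaitable ChatModel.create():

// Sketch only: observing the ChatModelToolCallError surfaced by handleError() above.
import { GroqChatModel } from "beeai-framework/adapters/groq/backend/chat";
import { ChatModelToolCallError } from "beeai-framework/backend/errors";
import { UserMessage } from "beeai-framework/backend/message";

const model = new GroqChatModel(); // 0.1.27 default: "openai/gpt-oss-20b"

try {
  await model.create({
    messages: [new UserMessage("What is the weather in Prague?")],
    tools: [], // your Tool instances here
  });
} catch (error) {
  if (error instanceof ChatModelToolCallError) {
    // Thrown when the Groq response body matches one of matchedErrorMessages;
    // the error context carries the failed generation.
    console.error("Tool call failed:", error.message);
  } else {
    throw error;
  }
}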

package/dist/adapters/groq/backend/chat.cjs.map

@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/adapters/groq/backend/chat.ts"],"names":["GroqChatModel","VercelChatModel","modelId","getEnv","parameters","client","model","GroqClient","ensure","instance","languageModel","Object","assign","register"],"mappings":";;;;;;;;AAcO,MAAMA,sBAAsBC,wBAAAA,CAAAA;EAdnC;;;EAeE,WAAA,CACEC,OAAAA,GAA2BC,eAAO,iBAAA,EAAmB,cAAA,GACrDC,UAAAA,GAAkC,IAClCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,qBAAAA,CAAWC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,cAAcR,OAAAA,CAAAA;AAC/D,IAAA,KAAA,CAAMI,KAAAA,CAAAA;AACNK,IAAAA,MAAAA,CAAOC,MAAAA,CAAO,IAAA,CAAKR,UAAAA,EAAYA,UAAAA,IAAc,EAAC,CAAA;AAChD;EAEA;AACE,IAAA,IAAA,CAAKS,QAAAA,EAAQ;AACf;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { GroqClient, GroqClientSettings } from \"@/adapters/groq/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { GroqProvider } from \"@ai-sdk/groq\";\nimport { ChatModelParameters } from \"@/backend/chat.js\";\n\ntype GroqParameters = Parameters<GroqProvider[\"languageModel\"]>;\nexport type GroqChatModelId = NonNullable<GroqParameters[0]>;\n\nexport class GroqChatModel extends VercelChatModel {\n constructor(\n modelId: GroqChatModelId = getEnv(\"GROQ_CHAT_MODEL\", \"gemma2-9b-it\"),\n parameters: ChatModelParameters = {},\n client?: GroqClientSettings | GroqClient,\n ) {\n const model = GroqClient.ensure(client).instance.languageModel(modelId);\n super(model);\n Object.assign(this.parameters, parameters ?? {});\n }\n\n static {\n this.register();\n }\n}\n"]}
+ {"version":3,"sources":["../../../../src/adapters/groq/backend/chat.ts"],"names":["GroqChatModel","VercelChatModel","modelId","getEnv","parameters","client","model","GroqClient","ensure","instance","languageModel","Object","assign","register","_createStream","input","run","chunk","error","handleError","_create","catch","e","matchedErrorMessages","cause","ChatModelError","getCause","responseBodyRaw","APICallError","isInstance","responseBody","isPlainObject","JSON","stringify","undefined","some","message","includes","parseBrokenJson","pair","tools","map","t","name","join","ChatModelToolCallError","String","generatedContent","failed_generation","generatedError"],"mappings":";;;;;;;;;;;;AAmBO,MAAMA,sBAAsBC,wBAAAA,CAAAA;EAnBnC;;;EAoBE,WAAA,CACEC,OAAAA,GAA2BC,eAAO,iBAAA,EAAmB,oBAAA,GACrDC,UAAAA,GAAkC,IAClCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,qBAAAA,CAAWC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,cAAcR,OAAAA,CAAAA;AAC/D,IAAA,KAAA,CAAMI,KAAAA,CAAAA;AACNK,IAAAA,MAAAA,CAAOC,MAAAA,CAAO,IAAA,CAAKR,UAAAA,EAAYA,UAAAA,IAAc,EAAC,CAAA;AAChD;EAEA;AACE,IAAA,IAAA,CAAKS,QAAAA,EAAQ;AACf;EAEA,OAAOC,aAAAA,CAAcC,OAAuBC,GAAAA,EAA0B;AACpE,IAAA,IAAI;AACF,MAAA,WAAA,MAAiBC,KAAAA,IAAS,KAAA,CAAMH,aAAAA,CAAcC,KAAAA,EAAOC,GAAAA,CAAAA,EAAM;AACzD,QAAA,MAAMC,KAAAA;AACR;AACF,KAAA,CAAA,OAASC,KAAAA,EAAO;AACd,MAAA,IAAA,CAAKC,WAAAA,CAAYJ,OAAOG,KAAAA,CAAAA;AAC1B;AACF;EAEA,MAAgBE,OAAAA,CAAQL,OAAuBC,GAAAA,EAA0B;AACvE,IAAA,OAAO,MAAM,KAAA,CAAMI,OAAAA,CAAQL,KAAAA,EAAOC,GAAAA,CAAAA,CAAKK,KAAAA,CAAM,CAACC,CAAAA,KAAM,IAAA,CAAKH,WAAAA,CAAYJ,KAAAA,EAAOO,CAAAA,CAAAA,CAAAA;AAC9E;AAEUH,EAAAA,WAAAA,CAAYJ,OAAuBG,KAAAA,EAAqB;AAChE,IAAA,MAAMK,oBAAAA,GAAuB;AAC3B,MAAA,2BAAA;AACA,MAAA,6BAAA;AACA,MAAA,yBAAA;AACA,MAAA;;AAGF,IAAA,MAAMC,KAAAA,GACJN,KAAAA,YAAiBO,yBAAAA,GAAiBP,KAAAA,CAAMQ,UAAQ,GAAKR,KAAAA;AACvD,IAAA,MAAMS,eAAAA,GAAkBC,eAAAA,CAAaC,UAAAA,CAAWL,KAAAA,CAAAA,GAC5CA,KAAAA,CAAMM,YAAAA,GACNC,oBAAAA,CAAcP,KAAAA,CAAAA,GACZQ,IAAAA,CAAKC,SAAAA,CAAUT,KAAAA,CAAAA,GACfU,MAAAA;AAEN,IAAA,IACEP,eAAAA,IACAJ,qBAAqBY,IAAAA,CAAK,CAACC,YAAYT,eAAAA,CAAgBU,QAAAA,CAASD,OAAAA,CAAAA,CAAAA,EAChE;AACA,MAAA,MAAMN,YAAAA,GAAeQ,2BAAgBX,eAAAA,EAAiB;QAAEY,IAAAA,EAAM;AAAC,UAAA,GAAA;AAAK,UAAA;;OAAK,CAAA;AACzE,MAAA,IAAI,CAACT,YAAAA,EAAc;AACjB,QAAA,MAAMN,KAAAA;AACR;AAEA,MAAA,MAAMgB,KAAAA,GAAAA,CAASzB,KAAAA,CAAMyB,KAAAA,IAAS,EAAA,EAAIC,GAAAA,CAAI,CAACC,CAAAA,KAAMA,CAAAA,CAAEC,IAAI,CAAA,CAAEC,IAAAA,CAAK,IAAA,CAAA;AAC1D,MAAA,MAAM,IAAIC,iCAAAA,CACRf,YAAAA,EAAcZ,KAAAA,EAAOkB,OAAAA,IAAWN,YAAAA,EAAcM,OAAAA,IAAWU,MAAAA,CAAOhB,YAAAA,CAAAA,EAChE,EAAA,EACA;QACEiB,gBAAAA,EAAkBjB,YAAAA,EAAcZ,OAAO8B,iBAAAA,IAAqB,OAAA;AAC5DC,QAAAA,cAAAA,EAAgB,qDAAqDT,KAAAA,CAAAA,EAAAA;OACvE,CAAA;AAEJ;AACA,IAAA,MAAMhB,KAAAA;AACR;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { GroqClient, GroqClientSettings } from \"@/adapters/groq/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { GroqProvider } from \"@ai-sdk/groq\";\nimport { ChatModelInput, ChatModelParameters } from \"@/backend/chat.js\";\nimport type { GetRunContext } from \"@/context.js\";\nimport { APICallError } from \"ai\";\nimport { ChatModelError, ChatModelToolCallError } from \"@/backend/errors.js\";\nimport { parseBrokenJson } from \"@/internals/helpers/schema.js\";\nimport { isPlainObject } from \"remeda\";\n\ntype GroqParameters = Parameters<GroqProvider[\"languageModel\"]>;\nexport type GroqChatModelId = 
NonNullable<GroqParameters[0]>;\n\nexport class GroqChatModel extends VercelChatModel {\n constructor(\n modelId: GroqChatModelId = getEnv(\"GROQ_CHAT_MODEL\", \"openai/gpt-oss-20b\"),\n parameters: ChatModelParameters = {},\n client?: GroqClientSettings | GroqClient,\n ) {\n const model = GroqClient.ensure(client).instance.languageModel(modelId);\n super(model);\n Object.assign(this.parameters, parameters ?? {});\n }\n\n static {\n this.register();\n }\n\n async *_createStream(input: ChatModelInput, run: GetRunContext<this>) {\n try {\n for await (const chunk of super._createStream(input, run)) {\n yield chunk;\n }\n } catch (error) {\n this.handleError(input, error);\n }\n }\n\n protected async _create(input: ChatModelInput, run: GetRunContext<this>) {\n return await super._create(input, run).catch((e) => this.handleError(input, e));\n }\n\n protected handleError(input: ChatModelInput, error: Error): never {\n const matchedErrorMessages = [\n \"model did not call a tool\",\n \"tool call validation failed\",\n \"tool choice is required\",\n \"Parsing failed\",\n ];\n\n const cause: Error | Record<string, any> =\n error instanceof ChatModelError ? error.getCause() : error;\n const responseBodyRaw = APICallError.isInstance(cause)\n ? cause.responseBody\n : isPlainObject(cause)\n ? JSON.stringify(cause)\n : undefined;\n\n if (\n responseBodyRaw &&\n matchedErrorMessages.some((message) => responseBodyRaw.includes(message))\n ) {\n const responseBody = parseBrokenJson(responseBodyRaw, { pair: [\"{\", \"}\"] });\n if (!responseBody) {\n throw cause;\n }\n\n const tools = (input.tools || []).map((t) => t.name).join(\", \");\n throw new ChatModelToolCallError(\n responseBody?.error?.message || responseBody?.message || String(responseBody),\n [],\n {\n generatedContent: responseBody?.error?.failed_generation || \"empty\",\n generatedError: `Invalid response. Use one of the following tools: ${tools}. `,\n },\n );\n }\n throw cause;\n }\n}\n"]}

package/dist/adapters/groq/backend/chat.d.cts

@@ -1,18 +1,15 @@
+ import { C as ChatModelParameters, d as ChatModelInput, n as ChatModelOutput } from '../../../chat-BFl85cqt.cjs';
  import { VercelChatModel } from '../../vercel/backend/chat.cjs';
  import { GroqClientSettings, GroqClient } from './client.cjs';
  import { GroqProvider } from '@ai-sdk/groq';
- import { C as ChatModelParameters } from '../../../chat-BFl85cqt.cjs';
- import '../../../logger/logger.cjs';
- import 'pino';
- import '../../../errors.cjs';
+ import { GetRunContext } from '../../../context.cjs';
+ import '../../../backend/message.cjs';
+ import '../../../internals/serializable.cjs';
  import '../../../internals/types.cjs';
  import '../../../internals/helpers/guards.cjs';
- import '../../../internals/serializable.cjs';
- import '../../../context.cjs';
- import '../../../emitter-jN55XZZq.cjs';
- import '../../../internals/helpers/promise.cjs';
  import 'ai';
- import '../../../backend/message.cjs';
+ import '../../../emitter-jN55XZZq.cjs';
+ import '../../../errors.cjs';
  import 'promise-based-task';
  import '../../../cache/base.cjs';
  import '../../../backend/constants.cjs';
@@ -21,18 +18,19 @@ import 'ajv';
  import '../../../internals/helpers/schema.cjs';
  import 'zod';
  import 'zod-to-json-schema';
+ import '../../../internals/helpers/promise.cjs';
  import '../../../template.cjs';
+ import '../../../logger/logger.cjs';
+ import 'pino';
  import '../../../backend/client.cjs';

- /**
- * Copyright 2025 © BeeAI a Series of LF Projects, LLC
- * SPDX-License-Identifier: Apache-2.0
- */
-
  type GroqParameters = Parameters<GroqProvider["languageModel"]>;
  type GroqChatModelId = NonNullable<GroqParameters[0]>;
  declare class GroqChatModel extends VercelChatModel {
  constructor(modelId?: GroqChatModelId, parameters?: ChatModelParameters, client?: GroqClientSettings | GroqClient);
+ _createStream(input: ChatModelInput, run: GetRunContext<this>): AsyncGenerator<ChatModelOutput, void, unknown>;
+ protected _create(input: ChatModelInput, run: GetRunContext<this>): Promise<ChatModelOutput>;
+ protected handleError(input: ChatModelInput, error: Error): never;
  }

  export { GroqChatModel, type GroqChatModelId };

package/dist/adapters/groq/backend/chat.d.ts

@@ -1,18 +1,15 @@
+ import { C as ChatModelParameters, d as ChatModelInput, n as ChatModelOutput } from '../../../chat-BRuyDeKR.js';
  import { VercelChatModel } from '../../vercel/backend/chat.js';
  import { GroqClientSettings, GroqClient } from './client.js';
  import { GroqProvider } from '@ai-sdk/groq';
- import { C as ChatModelParameters } from '../../../chat-BRuyDeKR.js';
- import '../../../logger/logger.js';
- import 'pino';
- import '../../../errors.js';
+ import { GetRunContext } from '../../../context.js';
+ import '../../../backend/message.js';
+ import '../../../internals/serializable.js';
  import '../../../internals/types.js';
  import '../../../internals/helpers/guards.js';
- import '../../../internals/serializable.js';
- import '../../../context.js';
- import '../../../emitter-36-9MnvA.js';
- import '../../../internals/helpers/promise.js';
  import 'ai';
- import '../../../backend/message.js';
+ import '../../../emitter-36-9MnvA.js';
+ import '../../../errors.js';
  import 'promise-based-task';
  import '../../../cache/base.js';
  import '../../../backend/constants.js';
@@ -21,18 +18,19 @@ import 'ajv';
  import '../../../internals/helpers/schema.js';
  import 'zod';
  import 'zod-to-json-schema';
+ import '../../../internals/helpers/promise.js';
  import '../../../template.js';
+ import '../../../logger/logger.js';
+ import 'pino';
  import '../../../backend/client.js';

- /**
- * Copyright 2025 © BeeAI a Series of LF Projects, LLC
- * SPDX-License-Identifier: Apache-2.0
- */
-
  type GroqParameters = Parameters<GroqProvider["languageModel"]>;
  type GroqChatModelId = NonNullable<GroqParameters[0]>;
  declare class GroqChatModel extends VercelChatModel {
  constructor(modelId?: GroqChatModelId, parameters?: ChatModelParameters, client?: GroqClientSettings | GroqClient);
+ _createStream(input: ChatModelInput, run: GetRunContext<this>): AsyncGenerator<ChatModelOutput, void, unknown>;
+ protected _create(input: ChatModelInput, run: GetRunContext<this>): Promise<ChatModelOutput>;
+ protected handleError(input: ChatModelInput, error: Error): never;
  }

  export { GroqChatModel, type GroqChatModelId };

package/dist/adapters/groq/backend/chat.js

@@ -1,6 +1,10 @@
  import { VercelChatModel } from '../../vercel/backend/chat.js';
  import { GroqClient } from './client.js';
  import { getEnv } from '../../../internals/env.js';
+ import { APICallError } from 'ai';
+ import { ChatModelError, ChatModelToolCallError } from '../../../backend/errors.js';
+ import { parseBrokenJson } from '../../../internals/helpers/schema.js';
+ import { isPlainObject } from 'remeda';

  var __defProp = Object.defineProperty;
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
@@ -8,7 +12,7 @@ class GroqChatModel extends VercelChatModel {
  static {
  __name(this, "GroqChatModel");
  }
- constructor(modelId = getEnv("GROQ_CHAT_MODEL", "gemma2-9b-it"), parameters = {}, client) {
+ constructor(modelId = getEnv("GROQ_CHAT_MODEL", "openai/gpt-oss-20b"), parameters = {}, client) {
  const model = GroqClient.ensure(client).instance.languageModel(modelId);
  super(model);
  Object.assign(this.parameters, parameters ?? {});
@@ -16,6 +20,45 @@ class GroqChatModel extends VercelChatModel {
  static {
  this.register();
  }
+ async *_createStream(input, run) {
+ try {
+ for await (const chunk of super._createStream(input, run)) {
+ yield chunk;
+ }
+ } catch (error) {
+ this.handleError(input, error);
+ }
+ }
+ async _create(input, run) {
+ return await super._create(input, run).catch((e) => this.handleError(input, e));
+ }
+ handleError(input, error) {
+ const matchedErrorMessages = [
+ "model did not call a tool",
+ "tool call validation failed",
+ "tool choice is required",
+ "Parsing failed"
+ ];
+ const cause = error instanceof ChatModelError ? error.getCause() : error;
+ const responseBodyRaw = APICallError.isInstance(cause) ? cause.responseBody : isPlainObject(cause) ? JSON.stringify(cause) : void 0;
+ if (responseBodyRaw && matchedErrorMessages.some((message) => responseBodyRaw.includes(message))) {
+ const responseBody = parseBrokenJson(responseBodyRaw, {
+ pair: [
+ "{",
+ "}"
+ ]
+ });
+ if (!responseBody) {
+ throw cause;
+ }
+ const tools = (input.tools || []).map((t) => t.name).join(", ");
+ throw new ChatModelToolCallError(responseBody?.error?.message || responseBody?.message || String(responseBody), [], {
+ generatedContent: responseBody?.error?.failed_generation || "empty",
+ generatedError: `Invalid response. Use one of the following tools: ${tools}. `
+ });
+ }
+ throw cause;
+ }
  }

  export { GroqChatModel };

package/dist/adapters/groq/backend/chat.js.map

@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/adapters/groq/backend/chat.ts"],"names":["GroqChatModel","VercelChatModel","modelId","getEnv","parameters","client","model","GroqClient","ensure","instance","languageModel","Object","assign","register"],"mappings":";;;;;;AAcO,MAAMA,sBAAsBC,eAAAA,CAAAA;EAdnC;;;EAeE,WAAA,CACEC,OAAAA,GAA2BC,OAAO,iBAAA,EAAmB,cAAA,GACrDC,UAAAA,GAAkC,IAClCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,UAAAA,CAAWC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,cAAcR,OAAAA,CAAAA;AAC/D,IAAA,KAAA,CAAMI,KAAAA,CAAAA;AACNK,IAAAA,MAAAA,CAAOC,MAAAA,CAAO,IAAA,CAAKR,UAAAA,EAAYA,UAAAA,IAAc,EAAC,CAAA;AAChD;EAEA;AACE,IAAA,IAAA,CAAKS,QAAAA,EAAQ;AACf;AACF","file":"chat.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { GroqClient, GroqClientSettings } from \"@/adapters/groq/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { GroqProvider } from \"@ai-sdk/groq\";\nimport { ChatModelParameters } from \"@/backend/chat.js\";\n\ntype GroqParameters = Parameters<GroqProvider[\"languageModel\"]>;\nexport type GroqChatModelId = NonNullable<GroqParameters[0]>;\n\nexport class GroqChatModel extends VercelChatModel {\n constructor(\n modelId: GroqChatModelId = getEnv(\"GROQ_CHAT_MODEL\", \"gemma2-9b-it\"),\n parameters: ChatModelParameters = {},\n client?: GroqClientSettings | GroqClient,\n ) {\n const model = GroqClient.ensure(client).instance.languageModel(modelId);\n super(model);\n Object.assign(this.parameters, parameters ?? {});\n }\n\n static {\n this.register();\n }\n}\n"]}
+ {"version":3,"sources":["../../../../src/adapters/groq/backend/chat.ts"],"names":["GroqChatModel","VercelChatModel","modelId","getEnv","parameters","client","model","GroqClient","ensure","instance","languageModel","Object","assign","register","_createStream","input","run","chunk","error","handleError","_create","catch","e","matchedErrorMessages","cause","ChatModelError","getCause","responseBodyRaw","APICallError","isInstance","responseBody","isPlainObject","JSON","stringify","undefined","some","message","includes","parseBrokenJson","pair","tools","map","t","name","join","ChatModelToolCallError","String","generatedContent","failed_generation","generatedError"],"mappings":";;;;;;;;;;AAmBO,MAAMA,sBAAsBC,eAAAA,CAAAA;EAnBnC;;;EAoBE,WAAA,CACEC,OAAAA,GAA2BC,OAAO,iBAAA,EAAmB,oBAAA,GACrDC,UAAAA,GAAkC,IAClCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,UAAAA,CAAWC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,cAAcR,OAAAA,CAAAA;AAC/D,IAAA,KAAA,CAAMI,KAAAA,CAAAA;AACNK,IAAAA,MAAAA,CAAOC,MAAAA,CAAO,IAAA,CAAKR,UAAAA,EAAYA,UAAAA,IAAc,EAAC,CAAA;AAChD;EAEA;AACE,IAAA,IAAA,CAAKS,QAAAA,EAAQ;AACf;EAEA,OAAOC,aAAAA,CAAcC,OAAuBC,GAAAA,EAA0B;AACpE,IAAA,IAAI;AACF,MAAA,WAAA,MAAiBC,KAAAA,IAAS,KAAA,CAAMH,aAAAA,CAAcC,KAAAA,EAAOC,GAAAA,CAAAA,EAAM;AACzD,QAAA,MAAMC,KAAAA;AACR;AACF,KAAA,CAAA,OAASC,KAAAA,EAAO;AACd,MAAA,IAAA,CAAKC,WAAAA,CAAYJ,OAAOG,KAAAA,CAAAA;AAC1B;AACF;EAEA,MAAgBE,OAAAA,CAAQL,OAAuBC,GAAAA,EAA0B;AACvE,IAAA,OAAO,MAAM,KAAA,CAAMI,OAAAA,CAAQL,KAAAA,EAAOC,GAAAA,CAAAA,CAAKK,KAAAA,CAAM,CAACC,CAAAA,KAAM,IAAA,CAAKH,WAAAA,CAAYJ,KAAAA,EAAOO,CAAAA,CAAAA,CAAAA;AAC9E;AAEUH,EAAAA,WAAAA,CAAYJ,OAAuBG,KAAAA,EAAqB;AAChE,IAAA,MAAMK,oBAAAA,GAAuB;AAC3B,MAAA,2BAAA;AACA,MAAA,6BAAA;AACA,MAAA,yBAAA;AACA,MAAA;;AAGF,IAAA,MAAMC,KAAAA,GACJN,KAAAA,YAAiBO,cAAAA,GAAiBP,KAAAA,CAAMQ,UAAQ,GAAKR,KAAAA;AACvD,IAAA,MAAMS,eAAAA,GAAkBC,YAAAA,CAAaC,UAAAA,CAAWL,KAAAA,CAAAA,GAC5CA,KAAAA,CAAMM,YAAAA,GACNC,aAAAA,CAAcP,KAAAA,CAAAA,GACZQ,IAAAA,CAAKC,SAAAA,CAAUT,KAAAA,CAAAA,GACfU,MAAAA;AAEN,IAAA,IACEP,eAAAA,IACAJ,qBAAqBY,IAAAA,CAAK,CAACC,YAAYT,eAAAA,CAAgBU,QAAAA,CAASD,OAAAA,CAAAA,CAAAA,EAChE;AACA,MAAA,MAAMN,YAAAA,GAAeQ,gBAAgBX,eAAAA,EAAiB;QAAEY,IAAAA,EAAM;AAAC,UAAA,GAAA;AAAK,UAAA;;OAAK,CAAA;AACzE,MAAA,IAAI,CAACT,YAAAA,EAAc;AACjB,QAAA,MAAMN,KAAAA;AACR;AAEA,MAAA,MAAMgB,KAAAA,GAAAA,CAASzB,KAAAA,CAAMyB,KAAAA,IAAS,EAAA,EAAIC,GAAAA,CAAI,CAACC,CAAAA,KAAMA,CAAAA,CAAEC,IAAI,CAAA,CAAEC,IAAAA,CAAK,IAAA,CAAA;AAC1D,MAAA,MAAM,IAAIC,sBAAAA,CACRf,YAAAA,EAAcZ,KAAAA,EAAOkB,OAAAA,IAAWN,YAAAA,EAAcM,OAAAA,IAAWU,MAAAA,CAAOhB,YAAAA,CAAAA,EAChE,EAAA,EACA;QACEiB,gBAAAA,EAAkBjB,YAAAA,EAAcZ,OAAO8B,iBAAAA,IAAqB,OAAA;AAC5DC,QAAAA,cAAAA,EAAgB,qDAAqDT,KAAAA,CAAAA,EAAAA;OACvE,CAAA;AAEJ;AACA,IAAA,MAAMhB,KAAAA;AACR;AACF","file":"chat.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { GroqClient, GroqClientSettings } from \"@/adapters/groq/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { GroqProvider } from \"@ai-sdk/groq\";\nimport { ChatModelInput, ChatModelParameters } from \"@/backend/chat.js\";\nimport type { GetRunContext } from \"@/context.js\";\nimport { APICallError } from \"ai\";\nimport { ChatModelError, ChatModelToolCallError } from \"@/backend/errors.js\";\nimport { parseBrokenJson } from \"@/internals/helpers/schema.js\";\nimport { isPlainObject } from \"remeda\";\n\ntype GroqParameters = Parameters<GroqProvider[\"languageModel\"]>;\nexport type GroqChatModelId = NonNullable<GroqParameters[0]>;\n\nexport 
class GroqChatModel extends VercelChatModel {\n constructor(\n modelId: GroqChatModelId = getEnv(\"GROQ_CHAT_MODEL\", \"openai/gpt-oss-20b\"),\n parameters: ChatModelParameters = {},\n client?: GroqClientSettings | GroqClient,\n ) {\n const model = GroqClient.ensure(client).instance.languageModel(modelId);\n super(model);\n Object.assign(this.parameters, parameters ?? {});\n }\n\n static {\n this.register();\n }\n\n async *_createStream(input: ChatModelInput, run: GetRunContext<this>) {\n try {\n for await (const chunk of super._createStream(input, run)) {\n yield chunk;\n }\n } catch (error) {\n this.handleError(input, error);\n }\n }\n\n protected async _create(input: ChatModelInput, run: GetRunContext<this>) {\n return await super._create(input, run).catch((e) => this.handleError(input, e));\n }\n\n protected handleError(input: ChatModelInput, error: Error): never {\n const matchedErrorMessages = [\n \"model did not call a tool\",\n \"tool call validation failed\",\n \"tool choice is required\",\n \"Parsing failed\",\n ];\n\n const cause: Error | Record<string, any> =\n error instanceof ChatModelError ? error.getCause() : error;\n const responseBodyRaw = APICallError.isInstance(cause)\n ? cause.responseBody\n : isPlainObject(cause)\n ? JSON.stringify(cause)\n : undefined;\n\n if (\n responseBodyRaw &&\n matchedErrorMessages.some((message) => responseBodyRaw.includes(message))\n ) {\n const responseBody = parseBrokenJson(responseBodyRaw, { pair: [\"{\", \"}\"] });\n if (!responseBody) {\n throw cause;\n }\n\n const tools = (input.tools || []).map((t) => t.name).join(\", \");\n throw new ChatModelToolCallError(\n responseBody?.error?.message || responseBody?.message || String(responseBody),\n [],\n {\n generatedContent: responseBody?.error?.failed_generation || \"empty\",\n generatedError: `Invalid response. Use one of the following tools: ${tools}. `,\n },\n );\n }\n throw cause;\n }\n}\n"]}

package/dist/adapters/vercel/backend/chat.cjs

@@ -13,6 +13,10 @@ var utils_cjs = require('./utils.cjs');

  var __defProp = Object.defineProperty;
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+ try {
+ globalThis.AI_SDK_LOG_WARNINGS = false;
+ } catch {
+ }
  class VercelChatModel extends chat_cjs.ChatModel {
  static {
  __name(this, "VercelChatModel");
@@ -50,32 +54,63 @@ class VercelChatModel extends chat_cjs.ChatModel {
  }, run);
  return output;
  }
- const { finishReason, usage, response: { messages } } = await ai.generateText({
+ const { finishReason, usage, response: { messages, id } } = await ai.generateText({
  temperature: 0,
  ...await this.transformInput(input),
  abortSignal: run.signal
  });
- return new chat_cjs.ChatModelOutput(this.transformMessages(messages), utils_cjs.extractTokenUsage(usage), finishReason);
+ return new chat_cjs.ChatModelOutput(this.transformMessages(messages, id), utils_cjs.extractTokenUsage(usage), finishReason);
  }
  async _createStructure({ schema, ...input }, run) {
- const response = await ai.generateObject({
+ const { output, response, finishReason, usage } = await ai.generateText({
  temperature: 0,
  ...await this.transformInput(input),
  abortSignal: run.signal,
- ...schema instanceof zod.ZodSchema ? {
- schema,
- output: (schema._input || schema) instanceof zod.ZodArray ? "array" : (schema._input || schema) instanceof zod.ZodEnum ? "enum" : "object"
- } : {
- schema: schema.schema ? ai.jsonSchema(schema.schema) : zod.z.any(),
- schemaName: schema.name,
- schemaDescription: schema.description
- }
+ output: (() => {
+ if (schema instanceof zod.ZodSchema) {
+ const [name, description] = [
+ "Schema",
+ schema.description
+ ];
+ const target = schema._input || schema;
+ if (target instanceof zod.ZodArray) {
+ return ai.Output.array({
+ element: schema,
+ name,
+ description
+ });
+ }
+ if (target instanceof zod.ZodEnum) {
+ return ai.Output.choice({
+ options: target.options,
+ name: "",
+ description: schema.description
+ });
+ }
+ return ai.Output.object({
+ schema,
+ name,
+ description
+ });
+ }
+ if (schema.schema) {
+ return ai.Output.object({
+ schema: ai.jsonSchema(schema.schema),
+ name: schema.name,
+ description: schema.description
+ });
+ }
+ return ai.Output.json({
+ name: schema.name,
+ description: schema.description
+ });
+ })()
  });
  return {
- object: response.object,
+ object: output,
  output: new chat_cjs.ChatModelOutput([
- new message_cjs.AssistantMessage(JSON.stringify(response.object, null, 2))
- ], utils_cjs.extractTokenUsage(response.usage), response.finishReason)
+ new message_cjs.AssistantMessage(JSON.stringify(output, null, 2), void 0, response.id)
+ ], utils_cjs.extractTokenUsage(usage), finishReason)
  };
  }
  async *_createStream(input, run) {
@@ -94,6 +129,7 @@ class VercelChatModel extends chat_cjs.ChatModel {
  return;
  }
  const { fullStream, usage: usagePromise, finishReason: finishReasonPromise, response: responsePromise } = ai.streamText({
+ temperature: 0,
  ...await this.transformInput(input),
  abortSignal: run.signal
  });
@@ -181,7 +217,7 @@ class VercelChatModel extends chat_cjs.ChatModel {
  break;
  }
  }
- if (streamEmpty) {
+ if (streamEmpty && !run.signal.aborted) {
  throw new errors_cjs$1.ChatModelError("No chunks have been received!");
  }
  try {
@@ -265,12 +301,15 @@ class VercelChatModel extends chat_cjs.ChatModel {
  messages
  };
  }
- transformMessages(messages) {
+ transformMessages(messages, id) {
+ if (messages.length > 1) {
+ id = void 0;
+ }
  return messages.flatMap((msg) => {
  if (msg.role === "tool") {
- return new message_cjs.ToolMessage(msg.content, msg.providerOptions);
+ return new message_cjs.ToolMessage(msg.content.filter((part) => part.type === "tool-result"), msg.providerOptions, id);
  }
- return new message_cjs.AssistantMessage(msg.content, msg.providerOptions);
+ return new message_cjs.AssistantMessage(msg.content, msg.providerOptions, id);
  });
  }
  createSnapshot() {
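
In the _createStructure rewrite above, ai.generateObject is replaced by ai.generateText combined with the AI SDK's Output helpers (Output.object, Output.array, Output.choice, Output.json), chosen from the shape of the supplied schema. A minimal sketch of requesting structured output through this path, assuming the public ChatModel.createStructure() API and a Zod object schema (which takes the Output.object branch):

// Sketch only: structured output that now routes through generateText + Output.*.
import { z } from "zod";
import { GroqChatModel } from "beeai-framework/adapters/groq/backend/chat";
import { UserMessage } from "beeai-framework/backend/message";

const model = new GroqChatModel();

const { object } = await model.createStructure({
  // z.object(...) hits the Output.object branch; z.array(...) would hit
  // Output.array and z.enum(...) Output.choice.
  schema: z.object({
    city: z.string(),
    country: z.string(),
  }),
  messages: [new UserMessage("Extract the location from: 'I live in Brno, Czechia.'")],
});

console.log(object.city, object.country);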

package/dist/adapters/vercel/backend/chat.cjs.map

@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/adapters/vercel/backend/chat.ts"],"names":["VercelChatModel","ChatModel","emitter","supportsToolStreaming","model","modelId","ValueError","Emitter","root","child","namespace","providerId","creator","provider","split","toCamelCase","_create","input","run","responseFormat","ZodSchema","schema","output","_createStructure","finishReason","usage","response","messages","generateText","temperature","transformInput","abortSignal","signal","ChatModelOutput","transformMessages","extractTokenUsage","generateObject","_input","ZodArray","ZodEnum","jsonSchema","z","any","schemaName","name","schemaDescription","description","object","AssistantMessage","JSON","stringify","_createStream","isEmpty","tools","fullStream","usagePromise","finishReasonPromise","responsePromise","streamText","streamEmpty","streamedToolCalls","Map","event","message","type","text","id","streamPartialToolCalls","chunk","toolName","toolCallId","set","delta","get","existingToolCall","delete","ChatModelError","error","ToolMessage","_","Promise","all","lastChunk","e","aborted","map","tool","inputSchema","getInputJsonSchema","msg","CustomMessage","encodeCustomMessage","role","content","UserMessage","SystemMessage","part","join","toolChoice","Tool","toolChoiceSupport","includes","logger","warn","parameters","mapToObj","flatMap","providerOptions","createSnapshot","loadSnapshot","snapshot","instance","fromName","Error","destroy","Object","assign"],"mappings":";;;;;;;;;;;;;;;AA6CO,MAAeA,wBAEZC,kBAAAA,CAAAA;EA/CV;;;;AAgDkBC,EAAAA,OAAAA;EACAC,qBAAAA,GAAiC,IAAA;AAEjD,EAAA,WAAA,CAA6BC,KAAAA,EAAU;AACrC,IAAA,KAAA,EAAK,EAAA,KADsBA,KAAAA,GAAAA,KAAAA;AAE3B,IAAA,IAAI,CAAC,KAAKC,OAAAA,EAAS;AACjB,MAAA,MAAM,IAAIC,sBAAW,+BAAA,CAAA;AACvB;AACA,IAAA,IAAA,CAAKJ,OAAAA,GAAUK,mBAAAA,CAAQC,IAAAA,CAAKC,KAAAA,CAAM;MAChCC,SAAAA,EAAW;AAAC,QAAA,SAAA;QAAW,IAAA,CAAKC,UAAAA;AAAY,QAAA;;MACxCC,OAAAA,EAAS;KACX,CAAA;AACF;AAEA,EAAA,IAAIP,OAAAA,GAAkB;AACpB,IAAA,OAAO,KAAKD,KAAAA,CAAMC,OAAAA;AACpB;AAEA,EAAA,IAAIM,UAAAA,GAAqB;AACvB,IAAA,MAAME,QAAAA,GAAW,IAAA,CAAKT,KAAAA,CAAMS,QAAAA,CAASC,KAAAA,CAAM,GAAA,CAAA,CAAK,CAAA,CAAA,CAAGA,KAAAA,CAAM,GAAA,CAAA,CAAK,CAAA,CAAA;AAC9D,IAAA,OAAOC,mBAAYF,QAAAA,CAAAA;AACrB;EAEA,MAAgBG,OAAAA,CAAQC,OAAuBC,GAAAA,EAA0B;AACvE,IAAA,MAAMC,iBAAiBF,KAAAA,CAAME,cAAAA;AAC7B,IAAA,IAAIA,cAAAA,KAAmBA,cAAAA,YAA0BC,aAAAA,IAAaD,cAAAA,CAAeE,MAAAA,CAAAA,EAAS;AACpF,MAAA,MAAM,EAAEC,MAAAA,EAAM,GAAK,MAAM,KAAKC,gBAAAA,CAC5B;QACE,GAAGN,KAAAA;QACHI,MAAAA,EAAQF;AACV,OAAA,EACAD,GAAAA,CAAAA;AAEF,MAAA,OAAOI,MAAAA;AACT;AAEA,IAAA,MAAM,EACJE,cACAC,KAAAA,EACAC,QAAAA,EAAU,EAAEC,QAAAA,EAAQ,EAAE,GACpB,MAAMC,eAAAA,CAAa;MACrBC,WAAAA,EAAa,CAAA;MACb,GAAI,MAAM,IAAA,CAAKC,cAAAA,CAAeb,KAAAA,CAAAA;AAC9Bc,MAAAA,WAAAA,EAAab,GAAAA,CAAIc;KACnB,CAAA;AAEA,IAAA,OAAO,IAAIC,yBACT,IAAA,CAAKC,iBAAAA,CAAkBP,QAAAA,CAAAA,EACvBQ,2BAAAA,CAAkBV,KAAAA,CAAAA,EAClBD,YAAAA,CAAAA;AAEJ;AAEA,EAAA,MAAgBD,iBACd,EAAEF,MAAAA,EAAQ,GAAGJ,KAAAA,IACbC,GAAAA,EACmC;AACnC,IAAA,MAAMQ,QAAAA,GAAW,MAAMU,iBAAAA,CAAe;MACpCP,WAAAA,EAAa,CAAA;MACb,GAAI,MAAM,IAAA,CAAKC,cAAAA,CAAeb,KAAAA,CAAAA;AAC9Bc,MAAAA,WAAAA,EAAab,GAAAA,CAAIc,MAAAA;AACjB,MAAA,GAAIX,kBAAkBD,aAAAA,GAClB;AACEC,QAAAA,MAAAA;QACAC,MAAAA,EAAAA,CAAUD,MAAAA,CAAOgB,UAAUhB,MAAAA,aAAmBiB,YAAAA,GAC1C,WACCjB,MAAAA,CAAOgB,MAAAA,IAAUhB,MAAAA,aAAmBkB,WAAAA,GACnC,MAAA,GACA;OACR,GACA;AACElB,QAAAA,MAAAA,EAAQA,OAAOA,MAAAA,GAASmB,aAAAA,CAAcnB,OAAOA,MAAM,CAAA,GAAIoB,MAAEC,GAAAA,EAAG;AAC5DC,QAAAA,UAAAA,EAAYtB,MAAAA,CAAOuB,IAAAA;AACnBC,QAAAA,iBAAAA,EAAmBxB,MAAAA,CAAOyB;AAC5B;KACN,CAAA;AAEA,IAAA,OAAO;AACLC,MAAAA,MAAAA,EAAQrB,QAAAA,CAASqB,MAAAA;AACjBz
B,MAAAA,MAAAA,EAAQ,IAAIW,wBAAAA,CACV;AAAC,QAAA,IAAIe,6BAAiBC,IAAAA,CAAKC,SAAAA,CAAUxB,SAASqB,MAAAA,EAAQ,IAAA,EAAM,CAAA,CAAA;AAC5DZ,OAAAA,EAAAA,2BAAAA,CAAkBT,QAAAA,CAASD,KAAK,CAAA,EAChCC,QAAAA,CAASF,YAAY;AAEzB,KAAA;AACF;EAEA,OAAO2B,aAAAA,CAAclC,OAAuBC,GAAAA,EAA0B;AACpE,IAAA,MAAMC,iBAAiBF,KAAAA,CAAME,cAAAA;AAC7B,IAAA,IAAIA,cAAAA,KAAmBA,cAAAA,YAA0BC,aAAAA,IAAaD,cAAAA,CAAeE,MAAAA,CAAAA,EAAS;AACpF,MAAA,MAAM,EAAEC,MAAAA,EAAM,GAAK,MAAM,KAAKC,gBAAAA,CAC5B;QACE,GAAGN,KAAAA;QACHI,MAAAA,EAAQF;AACV,OAAA,EACAD,GAAAA,CAAAA;AAEF,MAAA,MAAMI,MAAAA;AACN,MAAA;AACF;AAEA,IAAA,IAAI,CAAC,KAAKnB,qBAAAA,IAAyB,CAACiD,eAAQnC,KAAAA,CAAMoC,KAAAA,IAAS,EAAE,CAAA,EAAG;AAC9D,MAAA,MAAM3B,QAAAA,GAAW,MAAM,IAAA,CAAKV,OAAAA,CAAQC,OAAOC,GAAAA,CAAAA;AAC3C,MAAA,MAAMQ,QAAAA;AACN,MAAA;AACF;AAEA,IAAA,MAAM,EACJ4B,YACA7B,KAAAA,EAAO8B,YAAAA,EACP/B,cAAcgC,mBAAAA,EACd9B,QAAAA,EAAU+B,eAAAA,EAAe,GACvBC,aAAAA,CAAW;MACb,GAAI,MAAM,IAAA,CAAK5B,cAAAA,CAAeb,KAAAA,CAAAA;AAC9Bc,MAAAA,WAAAA,EAAab,GAAAA,CAAIc;KACnB,CAAA;AAEA,IAAA,IAAI2B,WAAAA,GAAc,IAAA;AAClB,IAAA,MAAMC,iBAAAA,uBAAwBC,GAAAA,EAAAA;AAC9B,IAAA,WAAA,MAAiBC,SAASR,UAAAA,EAAY;AACpC,MAAA,IAAIS,OAAAA;AACJ,MAAA,QAAQD,MAAME,IAAAA;QACZ,KAAK,YAAA;AACHL,UAAAA,WAAAA,GAAc,KAAA;AACdI,UAAAA,OAAAA,GAAU,IAAIf,4BAAAA,CAAiBc,KAAAA,CAAMG,MAAM,EAAC,EAAGH,MAAMI,EAAE,CAAA;AACvD,UAAA,MAAM,IAAIjC,wBAAAA,CAAgB;AAAC8B,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;QACF,KAAK,UAAA;AACHJ,UAAAA,WAAAA,GAAc,KAAA;AACd,UAAA;AACF,QAAA,KAAK,kBAAA,EAAoB;AACvB,UAAA,IAAI,CAAC1C,MAAMkD,sBAAAA,EAAwB;AACjC,YAAA;AACF;AAEA,UAAA,MAAMC,KAAAA,GAAsB;YAC1BJ,IAAAA,EAAM,WAAA;AACNK,YAAAA,QAAAA,EAAUP,KAAAA,CAAMO,QAAAA;AAChBC,YAAAA,UAAAA,EAAYR,KAAAA,CAAMI,EAAAA;YAClBjD,KAAAA,EAAO;AACT,WAAA;AACA2C,UAAAA,iBAAAA,CAAkBW,GAAAA,CAAIT,KAAAA,CAAMI,EAAAA,EAAIE,KAAAA,CAAAA;AAChC,UAAA,MAAML,WAAU,IAAIf,4BAAAA,CAAiBoB,OAAO,EAAC,EAAGN,MAAMI,EAAE,CAAA;AACxD,UAAA,MAAM,IAAIjC,wBAAAA,CAAgB;AAAC8B,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;AACF;AACA,QAAA,KAAK,kBAAA,EAAoB;AACvB,UAAA,IAAI,CAAC9C,MAAMkD,sBAAAA,EAAwB;AACjC,YAAA;AACF;AAEA,UAAA,IAAIL,MAAMU,KAAAA,EAAO;AACf,YAAA,MAAMJ,KAAAA,GAAQR,iBAAAA,CAAkBa,GAAAA,CAAIX,KAAAA,CAAMI,EAAE,CAAA;AAC5C,YAAA,MAAMH,QAAAA,GAAU,IAAIf,4BAAAA,CAAiB;cAAE,GAAGoB,KAAAA;AAAOnD,cAAAA,KAAAA,EAAO6C,KAAAA,CAAMU;aAAM,EAAG,EAAC,EAAGV,KAAAA,CAAMI,EAAE,CAAA;AACnF,YAAA,MAAM,IAAIjC,wBAAAA,CAAgB;AAAC8B,cAAAA;AAAQ,aAAA,CAAA;AACrC;AACA,UAAA;AACF;AACA,QAAA,KAAK,WAAA,EAAa;AAChBJ,UAAAA,WAAAA,GAAc,KAAA;AACd,UAAA,MAAMe,gBAAAA,GAAmBd,iBAAAA,CAAkBa,GAAAA,CAAIX,KAAAA,CAAMQ,UAAU,CAAA;AAC/D,UAAA,IAAII,gBAAAA,EAAkB;AACpBd,YAAAA,iBAAAA,CAAkBe,MAAAA,CAAOb,MAAMQ,UAAU,CAAA;AACzC,YAAA;AACF;AACAP,UAAAA,OAAAA,GAAU,IAAIf,4BAAAA,CACZ;AACEgB,YAAAA,IAAAA,EAAMF,KAAAA,CAAME,IAAAA;AACZM,YAAAA,UAAAA,EAAYR,KAAAA,CAAMQ,UAAAA;AAClBD,YAAAA,QAAAA,EAAUP,KAAAA,CAAMO,QAAAA;AAChBpD,YAAAA,KAAAA,EAAO6C,KAAAA,CAAM7C;WACf,EACA,EAAC,EACD6C,KAAAA,CAAMQ,UAAU,CAAA;AAElB,UAAA,MAAM,IAAIrC,wBAAAA,CAAgB;AAAC8B,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;AACF;QACA,KAAK,OAAA;AACH,UAAA,MAAM,IAAIa,4BAAe,iBAAA,EAAmB;YAACd,KAAAA,CAAMe;AAAe,WAAA,CAAA;QACpE,KAAK,aAAA;AACHlB,UAAAA,WAAAA,GAAc,KAAA;AACdI,UAAAA,OAAAA,GAAU,IAAIe,uBAAAA,CACZ;AACEd,YAAAA,IAAAA,EAAMF,KAAAA,CAAME,IAAAA;AACZM,YAAAA,UAAAA,EAAYR,KAAAA,CAAMQ,UAAAA;AAClBD,YAAAA,QAAAA,EAAUP,KAAAA,CAAMO,QAAAA;AAChB/C,YAAAA,MAAAA,EAAQwC,KAAAA,CAAMxC;AAChB,WAAA,EACA,EAAC,EACD,CAAA,YAAA,EAAewC,KAAAA,CAAMQ,UAAU,CAAA,CAAE,CAAA;AAEnC,UAAA,MAAM,IAAIrC,wBAAAA,CAAgB;AAAC8B,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;AAKJ;AACF;AAEA,IAAA,IAAIJ,WAAAA,EAAa;AACf,MAAA,MAAM,IAAIiB,4BAAe,+BAAA,CAAA;AAC3B;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,CAACnD,KAAAA,EAAOD,YAAAA,EAAcuD,CAAAA,CAAAA,GAAK,MAAMC,QAAQC,GAAAA,CAAI;AACjD1B
,QAAAA,YAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA;AACD,OAAA,CAAA;AACD,MAAA,MAAMyB,SAAAA,GAAY,IAAIjD,wBAAAA,CAAgB,EAAE,CAAA;AACxCiD,MAAAA,SAAAA,CAAUzD,KAAAA,GAAQU,4BAAkBV,KAAAA,CAAAA;AACpCyD,MAAAA,SAAAA,CAAU1D,YAAAA,GAAeA,YAAAA;AACzB,MAAA,MAAM0D,SAAAA;AACR,KAAA,CAAA,OAASC,CAAAA,EAAG;AACV,MAAA,IAAI,CAACjE,GAAAA,CAAIc,MAAAA,CAAOoD,OAAAA,EAAS;AACvB,QAAA,MAAMD,CAAAA;AACR;AACF;AACF;AAEA,EAAA,MAAgBrD,eACdb,KAAAA,EACkE;AAClE,IAAA,MAAMoC,KAAAA,GAAQ,MAAM2B,OAAAA,CAAQC,GAAAA,CAAAA,CACzBhE,KAAAA,CAAMoC,SAAS,EAAA,EAAIgC,GAAAA,CAAI,OAAOC,IAAAA,MAAU;AACvC1C,MAAAA,IAAAA,EAAM0C,IAAAA,CAAK1C,IAAAA;AACXE,MAAAA,WAAAA,EAAawC,IAAAA,CAAKxC,WAAAA;AAClByC,MAAAA,WAAAA,EAAa/C,aAAAA,CAAW,MAAM8C,IAAAA,CAAKE,kBAAAA,EAAkB;AACvD,KAAA,CAAA,CAAA,CAAA;AAGF,IAAA,MAAM7D,QAAAA,GAAWV,KAAAA,CAAMU,QAAAA,CAAS0D,GAAAA,CAAI,CAACI,GAAAA,KAAAA;AACnC,MAAA,IAAIA,eAAeC,yBAAAA,EAAe;AAChCD,QAAAA,GAAAA,GAAME,8BAAoBF,GAAAA,CAAAA;AAC5B;AAEA,MAAA,IAAIA,eAAezC,4BAAAA,EAAkB;AACnC,QAAA,OAAO;UAAE4C,IAAAA,EAAM,WAAA;AAAaC,UAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,SAAA;AACnD,OAAA,MAAA,IAAWJ,eAAeX,uBAAAA,EAAa;AACrC,QAAA,OAAO;UAAEc,IAAAA,EAAM,MAAA;AAAQC,UAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,SAAA;AAC9C,OAAA,MAAA,IAAWJ,eAAeK,uBAAAA,EAAa;AACrC,QAAA,OAAO;UAAEF,IAAAA,EAAM,MAAA;AAAQC,UAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,SAAA;AAC9C,OAAA,MAAA,IAAWJ,eAAeM,yBAAAA,EAAe;AACvC,QAAA,OAAO;UAAEH,IAAAA,EAAM,QAAA;UAAUC,OAAAA,EAASJ,GAAAA,CAAII,QAAQR,GAAAA,CAAI,CAACW,SAASA,IAAAA,CAAK/B,IAAI,CAAA,CAAEgC,IAAAA,CAAK,IAAA;AAAM,SAAA;AACpF;AACA,MAAA,OAAO;AAAEL,QAAAA,IAAAA,EAAMH,GAAAA,CAAIG,IAAAA;AAAMC,QAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,OAAA;KAChD,CAAA;AAEA,IAAA,IAAIK,UAAAA;AACJ,IAAA,IAAIjF,KAAAA,CAAMiF,UAAAA,IAAcjF,KAAAA,CAAMiF,UAAAA,YAAsBC,aAAAA,EAAM;AACxD,MAAA,IAAI,IAAA,CAAKC,iBAAAA,CAAkBC,QAAAA,CAAS,QAAA,CAAA,EAAW;AAC7CH,QAAAA,UAAAA,GAAa;UACXlC,IAAAA,EAAM,MAAA;AACNK,UAAAA,QAAAA,EAAUpD,MAAMiF,UAAAA,CAAWtD;AAC7B,SAAA;OACF,MAAO;AACL,QAAA,IAAA,CAAK0D,MAAAA,CAAOC,KAAK,CAAA,wCAAA,CAA0C,CAAA;AAC7D;AACF,KAAA,MAAA,IAAWtF,MAAMiF,UAAAA,EAAY;AAC3B,MAAA,IAAI,IAAA,CAAKE,iBAAAA,CAAkBC,QAAAA,CAASpF,KAAAA,CAAMiF,UAAU,CAAA,EAAG;AACrDA,QAAAA,UAAAA,GAAajF,KAAAA,CAAMiF,UAAAA;OACrB,MAAO;AACL,QAAA,IAAA,CAAKI,MAAAA,CAAOC,IAAAA,CAAK,CAAA,iCAAA,EAAoCtF,KAAAA,CAAMiF,UAAU,CAAA,mBAAA,CAAqB,CAAA;AAC5F;AACF;AAEA,IAAA,OAAO;AACL,MAAA,GAAG,IAAA,CAAKM,UAAAA;MACR,GAAGvF,KAAAA;AACHiF,MAAAA,UAAAA;AACA9F,MAAAA,KAAAA,EAAO,IAAA,CAAKA,KAAAA;AACZiD,MAAAA,KAAAA,EAAOoD,gBAASpD,KAAAA,EAAO,CAAC,EAAET,IAAAA,EAAM,GAAG0C,MAAAA,KAAW;AAAC1C,QAAAA,IAAAA;AAAM0C,QAAAA;AAAK,OAAA,CAAA;AAC1D3D,MAAAA;AACF,KAAA;AACF;AAEUO,EAAAA,iBAAAA,CAAkBP,QAAAA,EAAiE;AAC3F,IAAA,OAAOA,QAAAA,CAAS+E,OAAAA,CAAQ,CAACjB,GAAAA,KAAAA;AACvB,MAAA,IAAIA,GAAAA,CAAIG,SAAS,MAAA,EAAQ;AACvB,QAAA,OAAO,IAAId,uBAAAA,CAAYW,GAAAA,CAAII,OAAAA,EAASJ,IAAIkB,eAAe,CAAA;AACzD;AACA,MAAA,OAAO,IAAI3D,4BAAAA,CACTyC,GAAAA,CAAII,OAAAA,EACJJ,IAAIkB,eAAe,CAAA;KAEvB,CAAA;AACF;EAEAC,cAAAA,GAAiB;AACf,IAAA,OAAO;AACL,MAAA,GAAG,MAAMA,cAAAA,EAAAA;AACTjG,MAAAA,UAAAA,EAAY,IAAA,CAAKA,UAAAA;AACjBN,MAAAA,OAAAA,EAAS,IAAA,CAAKA,OAAAA;AACdF,MAAAA,qBAAAA,EAAuB,IAAA,CAAKA;AAC9B,KAAA;AACF;AAEA,EAAA,MAAM0G,aAAa,EAAElG,UAAAA,EAAYN,OAAAA,EAAS,GAAGyG,UAAAA,EAAoD;AAC/F,IAAA,MAAMC,QAAAA,GAAW,MAAM9G,kBAAAA,CAAU+G,QAAAA,CAAS,GAAGrG,UAAAA,CAAAA,CAAAA,EAAcN,OAAAA,CAAAA,CAAS,CAAA;AACpE,IAAA,IAAI,EAAE0G,oBAAoB/G,eAAAA,CAAAA,EAAkB;AAC1C,MAAA,MAAM,IAAIiH,MAAM,4BAAA,CAAA;AAClB;AACAF,IAAAA,QAAAA,CAASG,OAAAA,EAAO;AAChBC,IAAAA,MAAAA,CAAOC,OAAO,IAAA,EAAM;MAClB,GAAGN,QAAAA;AACH1G,MAAAA,KAAAA,EAAO2G,QAAAA,CAAS3G;KAClB,CAAA;AACF;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * 
SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n ChatModelInput,\n ChatModel,\n ChatModelOutput,\n ChatModelEvents,\n ChatModelObjectInput,\n ChatModelObjectOutput,\n} from \"@/backend/chat.js\";\nimport {\n CoreAssistantMessage,\n ModelMessage,\n CoreToolMessage,\n generateObject,\n generateText,\n jsonSchema,\n LanguageModel as _LanguageModel,\n streamText,\n TextPart,\n ToolCallPart,\n ToolChoice,\n} from \"ai\";\ntype LanguageModelV2 = Exclude<_LanguageModel, string>;\nimport { Emitter } from \"@/emitter/emitter.js\";\nimport {\n AssistantMessage,\n CustomMessage,\n Message,\n SystemMessage,\n ToolMessage,\n UserMessage,\n} from \"@/backend/message.js\";\nimport { GetRunContext } from \"@/context.js\";\nimport { ValueError } from \"@/errors.js\";\nimport { isEmpty, mapToObj, toCamelCase } from \"remeda\";\nimport { FullModelName } from \"@/backend/utils.js\";\nimport { ChatModelError } from \"@/backend/errors.js\";\nimport { z, ZodArray, ZodEnum, ZodSchema } from \"zod\";\nimport { Tool } from \"@/tools/base.js\";\nimport { encodeCustomMessage, extractTokenUsage } from \"@/adapters/vercel/backend/utils.js\";\n\nexport abstract class VercelChatModel<\n M extends LanguageModelV2 = LanguageModelV2,\n> extends ChatModel {\n public readonly emitter: Emitter<ChatModelEvents>;\n public readonly supportsToolStreaming: boolean = true;\n\n constructor(private readonly model: M) {\n super();\n if (!this.modelId) {\n throw new ValueError(\"No modelId has been provided!\");\n }\n this.emitter = Emitter.root.child({\n namespace: [\"backend\", this.providerId, \"chat\"],\n creator: this,\n });\n }\n\n get modelId(): string {\n return this.model.modelId;\n }\n\n get providerId(): string {\n const provider = this.model.provider.split(\".\")[0].split(\"-\")[0];\n return toCamelCase(provider);\n }\n\n protected async _create(input: ChatModelInput, run: GetRunContext<this>) {\n const responseFormat = input.responseFormat;\n if (responseFormat && (responseFormat instanceof ZodSchema || responseFormat.schema)) {\n const { output } = await this._createStructure(\n {\n ...input,\n schema: responseFormat,\n },\n run,\n );\n return output;\n }\n\n const {\n finishReason,\n usage,\n response: { messages },\n } = await generateText({\n temperature: 0,\n ...(await this.transformInput(input)),\n abortSignal: run.signal,\n });\n\n return new ChatModelOutput(\n this.transformMessages(messages),\n extractTokenUsage(usage),\n finishReason,\n );\n }\n\n protected async _createStructure<T>(\n { schema, ...input }: ChatModelObjectInput<T>,\n run: GetRunContext<this>,\n ): Promise<ChatModelObjectOutput<T>> {\n const response = await generateObject({\n temperature: 0,\n ...(await this.transformInput(input)),\n abortSignal: run.signal,\n ...(schema instanceof ZodSchema\n ? {\n schema,\n output: ((schema._input || schema) instanceof ZodArray\n ? \"array\"\n : (schema._input || schema) instanceof ZodEnum\n ? \"enum\"\n : \"object\") as any,\n }\n : {\n schema: schema.schema ? 
jsonSchema<T>(schema.schema) : z.any(),\n schemaName: schema.name,\n schemaDescription: schema.description,\n }),\n });\n\n return {\n object: response.object as T,\n output: new ChatModelOutput(\n [new AssistantMessage(JSON.stringify(response.object, null, 2))],\n extractTokenUsage(response.usage),\n response.finishReason,\n ),\n };\n }\n\n async *_createStream(input: ChatModelInput, run: GetRunContext<this>) {\n const responseFormat = input.responseFormat;\n if (responseFormat && (responseFormat instanceof ZodSchema || responseFormat.schema)) {\n const { output } = await this._createStructure(\n {\n ...input,\n schema: responseFormat,\n },\n run,\n );\n yield output;\n return;\n }\n\n if (!this.supportsToolStreaming && !isEmpty(input.tools ?? [])) {\n const response = await this._create(input, run);\n yield response;\n return;\n }\n\n const {\n fullStream,\n usage: usagePromise,\n finishReason: finishReasonPromise,\n response: responsePromise,\n } = streamText({\n ...(await this.transformInput(input)),\n abortSignal: run.signal,\n });\n\n let streamEmpty = true;\n const streamedToolCalls = new Map<string, ToolCallPart>();\n for await (const event of fullStream) {\n let message: Message;\n switch (event.type) {\n case \"text-delta\":\n streamEmpty = false;\n message = new AssistantMessage(event.text, {}, event.id);\n yield new ChatModelOutput([message]);\n break;\n case \"text-end\":\n streamEmpty = false;\n break;\n case \"tool-input-start\": {\n if (!input.streamPartialToolCalls) {\n break;\n }\n\n const chunk: ToolCallPart = {\n type: \"tool-call\",\n toolName: event.toolName,\n toolCallId: event.id,\n input: \"\",\n };\n streamedToolCalls.set(event.id, chunk);\n const message = new AssistantMessage(chunk, {}, event.id);\n yield new ChatModelOutput([message]);\n break;\n }\n case \"tool-input-delta\": {\n if (!input.streamPartialToolCalls) {\n break;\n }\n\n if (event.delta) {\n const chunk = streamedToolCalls.get(event.id)!;\n const message = new AssistantMessage({ ...chunk, input: event.delta }, {}, event.id);\n yield new ChatModelOutput([message]);\n }\n break;\n }\n case \"tool-call\": {\n streamEmpty = false;\n const existingToolCall = streamedToolCalls.get(event.toolCallId);\n if (existingToolCall) {\n streamedToolCalls.delete(event.toolCallId);\n break;\n }\n message = new AssistantMessage(\n {\n type: event.type,\n toolCallId: event.toolCallId,\n toolName: event.toolName,\n input: event.input,\n },\n {},\n event.toolCallId,\n );\n yield new ChatModelOutput([message]);\n break;\n }\n case \"error\":\n throw new ChatModelError(\"Unhandled error\", [event.error as Error]);\n case \"tool-result\":\n streamEmpty = false;\n message = new ToolMessage(\n {\n type: event.type,\n toolCallId: event.toolCallId,\n toolName: event.toolName,\n output: event.output as any,\n },\n {},\n `tool_result_${event.toolCallId}`,\n );\n yield new ChatModelOutput([message]);\n break;\n case \"abort\":\n break;\n default:\n break;\n }\n }\n\n if (streamEmpty) {\n throw new ChatModelError(\"No chunks have been received!\");\n }\n\n try {\n const [usage, finishReason, _] = await Promise.all([\n usagePromise,\n finishReasonPromise,\n responsePromise,\n ]);\n const lastChunk = new ChatModelOutput([]);\n lastChunk.usage = extractTokenUsage(usage);\n lastChunk.finishReason = finishReason;\n yield lastChunk;\n } catch (e) {\n if (!run.signal.aborted) {\n throw e;\n }\n }\n }\n\n protected async transformInput(\n input: ChatModelInput,\n ): Promise<Parameters<typeof generateText<Record<string, any>>>[0]> {\n 
const tools = await Promise.all(\n (input.tools ?? []).map(async (tool) => ({\n name: tool.name,\n description: tool.description,\n inputSchema: jsonSchema(await tool.getInputJsonSchema()),\n })),\n );\n\n const messages = input.messages.map((msg): ModelMessage => {\n if (msg instanceof CustomMessage) {\n msg = encodeCustomMessage(msg);\n }\n\n if (msg instanceof AssistantMessage) {\n return { role: \"assistant\", content: msg.content };\n } else if (msg instanceof ToolMessage) {\n return { role: \"tool\", content: msg.content };\n } else if (msg instanceof UserMessage) {\n return { role: \"user\", content: msg.content };\n } else if (msg instanceof SystemMessage) {\n return { role: \"system\", content: msg.content.map((part) => part.text).join(\"\\n\") };\n }\n return { role: msg.role, content: msg.content } as ModelMessage;\n });\n\n let toolChoice: ToolChoice<Record<string, any>> | undefined;\n if (input.toolChoice && input.toolChoice instanceof Tool) {\n if (this.toolChoiceSupport.includes(\"single\")) {\n toolChoice = {\n type: \"tool\",\n toolName: input.toolChoice.name,\n };\n } else {\n this.logger.warn(`The single tool choice is not supported.`);\n }\n } else if (input.toolChoice) {\n if (this.toolChoiceSupport.includes(input.toolChoice)) {\n toolChoice = input.toolChoice;\n } else {\n this.logger.warn(`The following tool choice value '${input.toolChoice}' is not supported.`);\n }\n }\n\n return {\n ...this.parameters,\n ...input,\n toolChoice,\n model: this.model,\n tools: mapToObj(tools, ({ name, ...tool }) => [name, tool]),\n messages,\n };\n }\n\n protected transformMessages(messages: (CoreAssistantMessage | CoreToolMessage)[]): Message[] {\n return messages.flatMap((msg) => {\n if (msg.role === \"tool\") {\n return new ToolMessage(msg.content, msg.providerOptions);\n }\n return new AssistantMessage(\n msg.content as TextPart | ToolCallPart | string,\n msg.providerOptions,\n );\n });\n }\n\n createSnapshot() {\n return {\n ...super.createSnapshot(),\n providerId: this.providerId,\n modelId: this.modelId,\n supportsToolStreaming: this.supportsToolStreaming,\n };\n }\n\n async loadSnapshot({ providerId, modelId, ...snapshot }: ReturnType<typeof this.createSnapshot>) {\n const instance = await ChatModel.fromName(`${providerId}:${modelId}` as FullModelName);\n if (!(instance instanceof VercelChatModel)) {\n throw new Error(\"Incorrect deserialization!\");\n }\n instance.destroy();\n Object.assign(this, {\n ...snapshot,\n model: instance.model,\n });\n }\n}\n"]}
+ {"version":3,"sources":["../../../../src/adapters/vercel/backend/chat.ts"],"names":["globalThis","AI_SDK_LOG_WARNINGS","VercelChatModel","ChatModel","emitter","supportsToolStreaming","model","modelId","ValueError","Emitter","root","child","namespace","providerId","creator","provider","split","toCamelCase","_create","input","run","responseFormat","ZodSchema","schema","output","_createStructure","finishReason","usage","response","messages","id","generateText","temperature","transformInput","abortSignal","signal","ChatModelOutput","transformMessages","extractTokenUsage","name","description","target","_input","ZodArray","Output","array","element","ZodEnum","choice","options","object","jsonSchema","json","AssistantMessage","JSON","stringify","undefined","_createStream","isEmpty","tools","fullStream","usagePromise","finishReasonPromise","responsePromise","streamText","streamEmpty","streamedToolCalls","Map","event","message","type","text","streamPartialToolCalls","chunk","toolName","toolCallId","set","delta","get","existingToolCall","delete","ChatModelError","error","ToolMessage","aborted","_","Promise","all","lastChunk","e","map","tool","inputSchema","getInputJsonSchema","msg","CustomMessage","encodeCustomMessage","role","content","UserMessage","SystemMessage","part","join","toolChoice","Tool","toolChoiceSupport","includes","logger","warn","parameters","mapToObj","length","flatMap","filter","providerOptions","createSnapshot","loadSnapshot","snapshot","instance","fromName","Error","destroy","Object","assign"],"mappings":";;;;;;;;;;;;;;;AA6CA,IAAI;AACFA,EAAAA,UAAAA,CAAWC,mBAAAA,GAAsB,KAAA;AACnC,CAAA,CAAA,MAAQ;AAER;AAEO,MAAeC,wBAEZC,kBAAAA,CAAAA;EArDV;;;;AAsDkBC,EAAAA,OAAAA;EACAC,qBAAAA,GAAiC,IAAA;AAEjD,EAAA,WAAA,CAA6BC,KAAAA,EAAU;AACrC,IAAA,KAAA,EAAK,EAAA,KADsBA,KAAAA,GAAAA,KAAAA;AAE3B,IAAA,IAAI,CAAC,KAAKC,OAAAA,EAAS;AACjB,MAAA,MAAM,IAAIC,sBAAW,+BAAA,CAAA;AACvB;AACA,IAAA,IAAA,CAAKJ,OAAAA,GAAUK,mBAAAA,CAAQC,IAAAA,CAAKC,KAAAA,CAAM;MAChCC,SAAAA,EAAW;AAAC,QAAA,SAAA;QAAW,IAAA,CAAKC,UAAAA;AAAY,QAAA;;MACxCC,OAAAA,EAAS;KACX,CAAA;AACF;AAEA,EAAA,IAAIP,OAAAA,GAAkB;AACpB,IAAA,OAAO,KAAKD,KAAAA,CAAMC,OAAAA;AACpB;AAEA,EAAA,IAAIM,UAAAA,GAAqB;AACvB,IAAA,MAAME,QAAAA,GAAW,IAAA,CAAKT,KAAAA,CAAMS,QAAAA,CAASC,KAAAA,CAAM,GAAA,CAAA,CAAK,CAAA,CAAA,CAAGA,KAAAA,CAAM,GAAA,CAAA,CAAK,CAAA,CAAA;AAC9D,IAAA,OAAOC,mBAAYF,QAAAA,CAAAA;AACrB;EAEA,MAAgBG,OAAAA,CAAQC,OAAuBC,GAAAA,EAA0B;AACvE,IAAA,MAAMC,iBAAiBF,KAAAA,CAAME,cAAAA;AAC7B,IAAA,IAAIA,cAAAA,KAAmBA,cAAAA,YAA0BC,aAAAA,IAAaD,cAAAA,CAAeE,MAAAA,CAAAA,EAAS;AACpF,MAAA,MAAM,EAAEC,MAAAA,EAAM,GAAK,MAAM,KAAKC,gBAAAA,CAC5B;QACE,GAAGN,KAAAA;QACHI,MAAAA,EAAQF;AACV,OAAA,EACAD,GAAAA,CAAAA;AAEF,MAAA,OAAOI,MAAAA;AACT;AAEA,IAAA,MAAM,EACJE,YAAAA,EACAC,KAAAA,EACAC,QAAAA,EAAU,EAAEC,UAAUC,EAAAA,EAAE,EAAE,GACxB,MAAMC,eAAAA,CAAa;MACrBC,WAAAA,EAAa,CAAA;MACb,GAAI,MAAM,IAAA,CAAKC,cAAAA,CAAed,KAAAA,CAAAA;AAC9Be,MAAAA,WAAAA,EAAad,GAAAA,CAAIe;KACnB,CAAA;AAEA,IAAA,OAAO,IAAIC,wBAAAA,CACT,IAAA,CAAKC,iBAAAA,CAAkBR,QAAAA,EAAUC,EAAAA,CAAAA,EACjCQ,2BAAAA,CAAkBX,KAAAA,CAAAA,EAClBD,YAAAA,CAAAA;AAEJ;AAEA,EAAA,MAAgBD,iBACd,EAAEF,MAAAA,EAAQ,GAAGJ,KAAAA,IACbC,GAAAA,EACmC;AACnC,IAAA,MAAM,EAAEI,MAAAA,EAAQI,QAAAA,EAAUF,cAAcC,KAAAA,EAAK,GAAK,MAAMI,eAAAA,CAAa;MACnEC,WAAAA,EAAa,CAAA;MACb,GAAI,MAAM,IAAA,CAAKC,cAAAA,CAAed,KAAAA,CAAAA;AAC9Be,MAAAA,WAAAA,EAAad,GAAAA,CAAIe,MAAAA;AACjBX,MAAAA,MAAAA,EAAAA,CAAS,MAAA;AACP,QAAA,IAAID,kBAAkBD,aAAAA,EAAW;AAC/B,UAAA,MAAM,CAACiB,IAAAA,EAAMC,WAAAA,CAAAA,GAAe;AAAC,YAAA,QAAA;YAAUjB,MAAAA,CAAOiB;;AAC9C,UAAA,MAAMC,MAAAA,GAASlB,OAAOmB,MAAAA,IAAUnB,MAAAA;AAChC,UAAA,IAAIkB,kBAAkBE,YAAAA,EAAU;AAC
9B,YAAA,OAAOC,UAAOC,KAAAA,CAAM;cAAEC,OAAAA,EAASvB,MAAAA;AAAQgB,cAAAA,IAAAA;AAAMC,cAAAA;aAAY,CAAA;AAC3D;AACA,UAAA,IAAIC,kBAAkBM,WAAAA,EAAS;AAC7B,YAAA,OAAOH,UAAOI,MAAAA,CAAO;AACnBC,cAAAA,OAAAA,EAASR,MAAAA,CAAOQ,OAAAA;cAChBV,IAAAA,EAAM,EAAA;AACNC,cAAAA,WAAAA,EAAajB,MAAAA,CAAOiB;aACtB,CAAA;AACF;AACA,UAAA,OAAOI,UAAOM,MAAAA,CAAO;AAAE3B,YAAAA,MAAAA;AAAQgB,YAAAA,IAAAA;AAAMC,YAAAA;WAAY,CAAA;AACnD;AACA,QAAA,IAAIjB,OAAOA,MAAAA,EAAQ;AACjB,UAAA,OAAOqB,UAAOM,MAAAA,CAAO;YACnB3B,MAAAA,EAAQ4B,aAAAA,CAAc5B,OAAOA,MAAM,CAAA;AACnCgB,YAAAA,IAAAA,EAAMhB,MAAAA,CAAOgB,IAAAA;AACbC,YAAAA,WAAAA,EAAajB,MAAAA,CAAOiB;WACtB,CAAA;AACF;AACA,QAAA,OAAOI,UAAOQ,IAAAA,CAAK;AAAEb,UAAAA,IAAAA,EAAMhB,MAAAA,CAAOgB,IAAAA;AAAMC,UAAAA,WAAAA,EAAajB,MAAAA,CAAOiB;SAAY,CAAA;OAC1E;KACF,CAAA;AAEA,IAAA,OAAO;MACLU,MAAAA,EAAQ1B,MAAAA;AACRA,MAAAA,MAAAA,EAAQ,IAAIY,wBAAAA,CACV;QAAC,IAAIiB,4BAAAA,CAAiBC,KAAKC,SAAAA,CAAU/B,MAAAA,EAAQ,MAAM,CAAA,CAAA,EAAIgC,MAAAA,EAAW5B,QAAAA,CAASE,EAAE;SAC7EQ,2BAAAA,CAAkBX,KAAAA,GAClBD,YAAAA;AAEJ,KAAA;AACF;EAEA,OAAO+B,aAAAA,CAActC,OAAuBC,GAAAA,EAA0B;AACpE,IAAA,MAAMC,iBAAiBF,KAAAA,CAAME,cAAAA;AAC7B,IAAA,IAAIA,cAAAA,KAAmBA,cAAAA,YAA0BC,aAAAA,IAAaD,cAAAA,CAAeE,MAAAA,CAAAA,EAAS;AACpF,MAAA,MAAM,EAAEC,MAAAA,EAAM,GAAK,MAAM,KAAKC,gBAAAA,CAC5B;QACE,GAAGN,KAAAA;QACHI,MAAAA,EAAQF;AACV,OAAA,EACAD,GAAAA,CAAAA;AAEF,MAAA,MAAMI,MAAAA;AACN,MAAA;AACF;AAEA,IAAA,IAAI,CAAC,KAAKnB,qBAAAA,IAAyB,CAACqD,eAAQvC,KAAAA,CAAMwC,KAAAA,IAAS,EAAE,CAAA,EAAG;AAC9D,MAAA,MAAM/B,QAAAA,GAAW,MAAM,IAAA,CAAKV,OAAAA,CAAQC,OAAOC,GAAAA,CAAAA;AAC3C,MAAA,MAAMQ,QAAAA;AACN,MAAA;AACF;AAEA,IAAA,MAAM,EACJgC,YACAjC,KAAAA,EAAOkC,YAAAA,EACPnC,cAAcoC,mBAAAA,EACdlC,QAAAA,EAAUmC,eAAAA,EAAe,GACvBC,aAAAA,CAAW;MACbhC,WAAAA,EAAa,CAAA;MACb,GAAI,MAAM,IAAA,CAAKC,cAAAA,CAAed,KAAAA,CAAAA;AAC9Be,MAAAA,WAAAA,EAAad,GAAAA,CAAIe;KACnB,CAAA;AAEA,IAAA,IAAI8B,WAAAA,GAAc,IAAA;AAClB,IAAA,MAAMC,iBAAAA,uBAAwBC,GAAAA,EAAAA;AAC9B,IAAA,WAAA,MAAiBC,SAASR,UAAAA,EAAY;AACpC,MAAA,IAAIS,OAAAA;AACJ,MAAA,QAAQD,MAAME,IAAAA;QACZ,KAAK,YAAA;AACHL,UAAAA,WAAAA,GAAc,KAAA;AACdI,UAAAA,OAAAA,GAAU,IAAIhB,4BAAAA,CAAiBe,KAAAA,CAAMG,MAAM,EAAC,EAAGH,MAAMtC,EAAE,CAAA;AACvD,UAAA,MAAM,IAAIM,wBAAAA,CAAgB;AAACiC,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;QACF,KAAK,UAAA;AACHJ,UAAAA,WAAAA,GAAc,KAAA;AACd,UAAA;AACF,QAAA,KAAK,kBAAA,EAAoB;AACvB,UAAA,IAAI,CAAC9C,MAAMqD,sBAAAA,EAAwB;AACjC,YAAA;AACF;AAEA,UAAA,MAAMC,KAAAA,GAAsB;YAC1BH,IAAAA,EAAM,WAAA;AACNI,YAAAA,QAAAA,EAAUN,KAAAA,CAAMM,QAAAA;AAChBC,YAAAA,UAAAA,EAAYP,KAAAA,CAAMtC,EAAAA;YAClBX,KAAAA,EAAO;AACT,WAAA;AACA+C,UAAAA,iBAAAA,CAAkBU,GAAAA,CAAIR,KAAAA,CAAMtC,EAAAA,EAAI2C,KAAAA,CAAAA;AAChC,UAAA,MAAMJ,WAAU,IAAIhB,4BAAAA,CAAiBoB,OAAO,EAAC,EAAGL,MAAMtC,EAAE,CAAA;AACxD,UAAA,MAAM,IAAIM,wBAAAA,CAAgB;AAACiC,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;AACF;AACA,QAAA,KAAK,kBAAA,EAAoB;AACvB,UAAA,IAAI,CAAClD,MAAMqD,sBAAAA,EAAwB;AACjC,YAAA;AACF;AAEA,UAAA,IAAIJ,MAAMS,KAAAA,EAAO;AACf,YAAA,MAAMJ,KAAAA,GAAQP,iBAAAA,CAAkBY,GAAAA,CAAIV,KAAAA,CAAMtC,EAAE,CAAA;AAC5C,YAAA,MAAMuC,QAAAA,GAAU,IAAIhB,4BAAAA,CAAiB;cAAE,GAAGoB,KAAAA;AAAOtD,cAAAA,KAAAA,EAAOiD,KAAAA,CAAMS;aAAM,EAAG,EAAC,EAAGT,KAAAA,CAAMtC,EAAE,CAAA;AACnF,YAAA,MAAM,IAAIM,wBAAAA,CAAgB;AAACiC,cAAAA;AAAQ,aAAA,CAAA;AACrC;AACA,UAAA;AACF;AACA,QAAA,KAAK,WAAA,EAAa;AAChBJ,UAAAA,WAAAA,GAAc,KAAA;AACd,UAAA,MAAMc,gBAAAA,GAAmBb,iBAAAA,CAAkBY,GAAAA,CAAIV,KAAAA,CAAMO,UAAU,CAAA;AAC/D,UAAA,IAAII,gBAAAA,EAAkB;AACpBb,YAAAA,iBAAAA,CAAkBc,MAAAA,CAAOZ,MAAMO,UAAU,CAAA;AACzC,YAAA;AACF;AACAN,UAAAA,OAAAA,GAAU,IAAIhB,4BAAAA,CACZ;AACEiB,YAAAA,IAAAA,EAAMF,KAAAA,CAAME,IAAAA;AACZK,YAAAA,UAAAA,EAAYP,KAAAA,CAAMO,UAAAA;AAClBD,YAAAA,QAAAA,EAAUN,KAAAA,CAAMM,QAAAA;AAChBvD,YAAAA,KAAAA,EAAOiD,KAAAA,CAAMjD
;WACf,EACA,EAAC,EACDiD,KAAAA,CAAMO,UAAU,CAAA;AAElB,UAAA,MAAM,IAAIvC,wBAAAA,CAAgB;AAACiC,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;AACF;QACA,KAAK,OAAA;AACH,UAAA,MAAM,IAAIY,4BAAe,iBAAA,EAAmB;YAACb,KAAAA,CAAMc;AAAe,WAAA,CAAA;QACpE,KAAK,aAAA;AACHjB,UAAAA,WAAAA,GAAc,KAAA;AACdI,UAAAA,OAAAA,GAAU,IAAIc,uBAAAA,CACZ;AACEb,YAAAA,IAAAA,EAAMF,KAAAA,CAAME,IAAAA;AACZK,YAAAA,UAAAA,EAAYP,KAAAA,CAAMO,UAAAA;AAClBD,YAAAA,QAAAA,EAAUN,KAAAA,CAAMM,QAAAA;AAChBlD,YAAAA,MAAAA,EAAQ4C,KAAAA,CAAM5C;AAChB,WAAA,EACA,EAAC,EACD,CAAA,YAAA,EAAe4C,KAAAA,CAAMO,UAAU,CAAA,CAAE,CAAA;AAEnC,UAAA,MAAM,IAAIvC,wBAAAA,CAAgB;AAACiC,YAAAA;AAAQ,WAAA,CAAA;AACnC,UAAA;AAKJ;AACF;AAEA,IAAA,IAAIJ,WAAAA,IAAe,CAAC7C,GAAAA,CAAIe,MAAAA,CAAOiD,OAAAA,EAAS;AACtC,MAAA,MAAM,IAAIH,4BAAe,+BAAA,CAAA;AAC3B;AAEA,IAAA,IAAI;AACF,MAAA,MAAM,CAACtD,KAAAA,EAAOD,YAAAA,EAAc2D,CAAAA,CAAAA,GAAK,MAAMC,QAAQC,GAAAA,CAAI;AACjD1B,QAAAA,YAAAA;AACAC,QAAAA,mBAAAA;AACAC,QAAAA;AACD,OAAA,CAAA;AACD,MAAA,MAAMyB,SAAAA,GAAY,IAAIpD,wBAAAA,CAAgB,EAAE,CAAA;AACxCoD,MAAAA,SAAAA,CAAU7D,KAAAA,GAAQW,4BAAkBX,KAAAA,CAAAA;AACpC6D,MAAAA,SAAAA,CAAU9D,YAAAA,GAAeA,YAAAA;AACzB,MAAA,MAAM8D,SAAAA;AACR,KAAA,CAAA,OAASC,CAAAA,EAAG;AACV,MAAA,IAAI,CAACrE,GAAAA,CAAIe,MAAAA,CAAOiD,OAAAA,EAAS;AACvB,QAAA,MAAMK,CAAAA;AACR;AACF;AACF;AAEA,EAAA,MAAgBxD,eACdd,KAAAA,EACkE;AAClE,IAAA,MAAMwC,KAAAA,GAAQ,MAAM2B,OAAAA,CAAQC,GAAAA,CAAAA,CACzBpE,KAAAA,CAAMwC,SAAS,EAAA,EAAI+B,GAAAA,CAAI,OAAOC,IAAAA,MAAU;AACvCpD,MAAAA,IAAAA,EAAMoD,IAAAA,CAAKpD,IAAAA;AACXC,MAAAA,WAAAA,EAAamD,IAAAA,CAAKnD,WAAAA;AAClBoD,MAAAA,WAAAA,EAAazC,aAAAA,CAAW,MAAMwC,IAAAA,CAAKE,kBAAAA,EAAkB;AACvD,KAAA,CAAA,CAAA,CAAA;AAGF,IAAA,MAAMhE,QAAAA,GAAWV,KAAAA,CAAMU,QAAAA,CAAS6D,GAAAA,CAAI,CAACI,GAAAA,KAAAA;AACnC,MAAA,IAAIA,eAAeC,yBAAAA,EAAe;AAChCD,QAAAA,GAAAA,GAAME,8BAAoBF,GAAAA,CAAAA;AAC5B;AAEA,MAAA,IAAIA,eAAezC,4BAAAA,EAAkB;AACnC,QAAA,OAAO;UAAE4C,IAAAA,EAAM,WAAA;AAAaC,UAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,SAAA;AACnD,OAAA,MAAA,IAAWJ,eAAeX,uBAAAA,EAAa;AACrC,QAAA,OAAO;UAAEc,IAAAA,EAAM,MAAA;AAAQC,UAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,SAAA;AAC9C,OAAA,MAAA,IAAWJ,eAAeK,uBAAAA,EAAa;AACrC,QAAA,OAAO;UAAEF,IAAAA,EAAM,MAAA;AAAQC,UAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,SAAA;AAC9C,OAAA,MAAA,IAAWJ,eAAeM,yBAAAA,EAAe;AACvC,QAAA,OAAO;UAAEH,IAAAA,EAAM,QAAA;UAAUC,OAAAA,EAASJ,GAAAA,CAAII,QAAQR,GAAAA,CAAI,CAACW,SAASA,IAAAA,CAAK9B,IAAI,CAAA,CAAE+B,IAAAA,CAAK,IAAA;AAAM,SAAA;AACpF;AACA,MAAA,OAAO;AAAEL,QAAAA,IAAAA,EAAMH,GAAAA,CAAIG,IAAAA;AAAMC,QAAAA,OAAAA,EAASJ,GAAAA,CAAII;AAAQ,OAAA;KAChD,CAAA;AAEA,IAAA,IAAIK,UAAAA;AACJ,IAAA,IAAIpF,KAAAA,CAAMoF,UAAAA,IAAcpF,KAAAA,CAAMoF,UAAAA,YAAsBC,aAAAA,EAAM;AACxD,MAAA,IAAI,IAAA,CAAKC,iBAAAA,CAAkBC,QAAAA,CAAS,QAAA,CAAA,EAAW;AAC7CH,QAAAA,UAAAA,GAAa;UACXjC,IAAAA,EAAM,MAAA;AACNI,UAAAA,QAAAA,EAAUvD,MAAMoF,UAAAA,CAAWhE;AAC7B,SAAA;OACF,MAAO;AACL,QAAA,IAAA,CAAKoE,MAAAA,CAAOC,KAAK,CAAA,wCAAA,CAA0C,CAAA;AAC7D;AACF,KAAA,MAAA,IAAWzF,MAAMoF,UAAAA,EAAY;AAC3B,MAAA,IAAI,IAAA,CAAKE,iBAAAA,CAAkBC,QAAAA,CAASvF,KAAAA,CAAMoF,UAAU,CAAA,EAAG;AACrDA,QAAAA,UAAAA,GAAapF,KAAAA,CAAMoF,UAAAA;OACrB,MAAO;AACL,QAAA,IAAA,CAAKI,MAAAA,CAAOC,IAAAA,CAAK,CAAA,iCAAA,EAAoCzF,KAAAA,CAAMoF,UAAU,CAAA,mBAAA,CAAqB,CAAA;AAC5F;AACF;AAEA,IAAA,OAAO;AACL,MAAA,GAAG,IAAA,CAAKM,UAAAA;MACR,GAAG1F,KAAAA;AACHoF,MAAAA,UAAAA;AACAjG,MAAAA,KAAAA,EAAO,IAAA,CAAKA,KAAAA;AACZqD,MAAAA,KAAAA,EAAOmD,gBAASnD,KAAAA,EAAO,CAAC,EAAEpB,IAAAA,EAAM,GAAGoD,MAAAA,KAAW;AAACpD,QAAAA,IAAAA;AAAMoD,QAAAA;AAAK,OAAA,CAAA;AAC1D9D,MAAAA;AACF,KAAA;AACF;AAEUQ,EAAAA,iBAAAA,CACRR,UACAC,EAAAA,EACW;AACX,IAAA,IAAID,QAAAA,CAASkF,SAAS,CAAA,EAAG;AACvBjF,MAAAA,EAAAA,GAAK0B,MAAAA;AACP;AACA,IAAA,OAAO3B,QAAAA,CAASmF,OAAAA,CAAQ,CAAClB,GAAAA,KAAAA;AACvB,MAAA,IAAIA,GAAAA
,CAAIG,SAAS,MAAA,EAAQ;AACvB,QAAA,OAAO,IAAId,uBAAAA,CACTW,GAAAA,CAAII,OAAAA,CAAQe,MAAAA,CAAO,CAACZ,IAAAA,KAASA,IAAAA,CAAK/B,IAAAA,KAAS,aAAA,CAAA,EAC3CwB,GAAAA,CAAIoB,iBACJpF,EAAAA,CAAAA;AAEJ;AACA,MAAA,OAAO,IAAIuB,4BAAAA,CACTyC,GAAAA,CAAII,OAAAA,EACJJ,GAAAA,CAAIoB,iBACJpF,EAAAA,CAAAA;KAEJ,CAAA;AACF;EAEAqF,cAAAA,GAAiB;AACf,IAAA,OAAO;AACL,MAAA,GAAG,MAAMA,cAAAA,EAAAA;AACTtG,MAAAA,UAAAA,EAAY,IAAA,CAAKA,UAAAA;AACjBN,MAAAA,OAAAA,EAAS,IAAA,CAAKA,OAAAA;AACdF,MAAAA,qBAAAA,EAAuB,IAAA,CAAKA;AAC9B,KAAA;AACF;AAEA,EAAA,MAAM+G,aAAa,EAAEvG,UAAAA,EAAYN,OAAAA,EAAS,GAAG8G,UAAAA,EAAoD;AAC/F,IAAA,MAAMC,QAAAA,GAAW,MAAMnH,kBAAAA,CAAUoH,QAAAA,CAAS,GAAG1G,UAAAA,CAAAA,CAAAA,EAAcN,OAAAA,CAAAA,CAAS,CAAA;AACpE,IAAA,IAAI,EAAE+G,oBAAoBpH,eAAAA,CAAAA,EAAkB;AAC1C,MAAA,MAAM,IAAIsH,MAAM,4BAAA,CAAA;AAClB;AACAF,IAAAA,QAAAA,CAASG,OAAAA,EAAO;AAChBC,IAAAA,MAAAA,CAAOC,OAAO,IAAA,EAAM;MAClB,GAAGN,QAAAA;AACH/G,MAAAA,KAAAA,EAAOgH,QAAAA,CAAShH;KAClB,CAAA;AACF;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n ChatModelInput,\n ChatModel,\n ChatModelOutput,\n ChatModelEvents,\n ChatModelObjectInput,\n ChatModelObjectOutput,\n} from \"@/backend/chat.js\";\nimport {\n AssistantModelMessage,\n ModelMessage,\n ToolModelMessage,\n generateText,\n jsonSchema,\n LanguageModel as _LanguageModel,\n streamText,\n TextPart,\n ToolCallPart,\n ToolChoice,\n Output,\n} from \"ai\";\ntype LanguageModelV2 = Exclude<_LanguageModel, string>;\nimport { Emitter } from \"@/emitter/emitter.js\";\nimport {\n AssistantMessage,\n CustomMessage,\n Message,\n SystemMessage,\n ToolMessage,\n UserMessage,\n} from \"@/backend/message.js\";\nimport { GetRunContext } from \"@/context.js\";\nimport { ValueError } from \"@/errors.js\";\nimport { isEmpty, mapToObj, toCamelCase } from \"remeda\";\nimport { FullModelName } from \"@/backend/utils.js\";\nimport { ChatModelError } from \"@/backend/errors.js\";\nimport { ZodArray, ZodEnum, ZodSchema } from \"zod\";\nimport { Tool } from \"@/tools/base.js\";\nimport { encodeCustomMessage, extractTokenUsage } from \"@/adapters/vercel/backend/utils.js\";\n\ntry {\n globalThis.AI_SDK_LOG_WARNINGS = false;\n} catch {\n /* empty */\n}\n\nexport abstract class VercelChatModel<\n M extends LanguageModelV2 = LanguageModelV2,\n> extends ChatModel {\n public readonly emitter: Emitter<ChatModelEvents>;\n public readonly supportsToolStreaming: boolean = true;\n\n constructor(private readonly model: M) {\n super();\n if (!this.modelId) {\n throw new ValueError(\"No modelId has been provided!\");\n }\n this.emitter = Emitter.root.child({\n namespace: [\"backend\", this.providerId, \"chat\"],\n creator: this,\n });\n }\n\n get modelId(): string {\n return this.model.modelId;\n }\n\n get providerId(): string {\n const provider = this.model.provider.split(\".\")[0].split(\"-\")[0];\n return toCamelCase(provider);\n }\n\n protected async _create(input: ChatModelInput, run: GetRunContext<this>) {\n const responseFormat = input.responseFormat;\n if (responseFormat && (responseFormat instanceof ZodSchema || responseFormat.schema)) {\n const { output } = await this._createStructure(\n {\n ...input,\n schema: responseFormat,\n },\n run,\n );\n return output;\n }\n\n const {\n finishReason,\n usage,\n response: { messages, id },\n } = await generateText({\n temperature: 0,\n ...(await this.transformInput(input)),\n abortSignal: run.signal,\n });\n\n return new ChatModelOutput(\n this.transformMessages(messages, id),\n 
extractTokenUsage(usage),\n finishReason,\n );\n }\n\n protected async _createStructure<T>(\n { schema, ...input }: ChatModelObjectInput<T>,\n run: GetRunContext<this>,\n ): Promise<ChatModelObjectOutput<T>> {\n const { output, response, finishReason, usage } = await generateText({\n temperature: 0,\n ...(await this.transformInput(input)),\n abortSignal: run.signal,\n output: ((): Output.Output => {\n if (schema instanceof ZodSchema) {\n const [name, description] = [\"Schema\", schema.description];\n const target = schema._input || schema;\n if (target instanceof ZodArray) {\n return Output.array({ element: schema, name, description });\n }\n if (target instanceof ZodEnum) {\n return Output.choice({\n options: target.options,\n name: \"\",\n description: schema.description,\n });\n }\n return Output.object({ schema, name, description });\n }\n if (schema.schema) {\n return Output.object({\n schema: jsonSchema<T>(schema.schema),\n name: schema.name,\n description: schema.description,\n });\n }\n return Output.json({ name: schema.name, description: schema.description });\n })(),\n });\n\n return {\n object: output as T,\n output: new ChatModelOutput(\n [new AssistantMessage(JSON.stringify(output, null, 2), undefined, response.id)],\n extractTokenUsage(usage),\n finishReason,\n ),\n };\n }\n\n async *_createStream(input: ChatModelInput, run: GetRunContext<this>) {\n const responseFormat = input.responseFormat;\n if (responseFormat && (responseFormat instanceof ZodSchema || responseFormat.schema)) {\n const { output } = await this._createStructure(\n {\n ...input,\n schema: responseFormat,\n },\n run,\n );\n yield output;\n return;\n }\n\n if (!this.supportsToolStreaming && !isEmpty(input.tools ?? [])) {\n const response = await this._create(input, run);\n yield response;\n return;\n }\n\n const {\n fullStream,\n usage: usagePromise,\n finishReason: finishReasonPromise,\n response: responsePromise,\n } = streamText({\n temperature: 0,\n ...(await this.transformInput(input)),\n abortSignal: run.signal,\n });\n\n let streamEmpty = true;\n const streamedToolCalls = new Map<string, ToolCallPart>();\n for await (const event of fullStream) {\n let message: Message;\n switch (event.type) {\n case \"text-delta\":\n streamEmpty = false;\n message = new AssistantMessage(event.text, {}, event.id);\n yield new ChatModelOutput([message]);\n break;\n case \"text-end\":\n streamEmpty = false;\n break;\n case \"tool-input-start\": {\n if (!input.streamPartialToolCalls) {\n break;\n }\n\n const chunk: ToolCallPart = {\n type: \"tool-call\",\n toolName: event.toolName,\n toolCallId: event.id,\n input: \"\",\n };\n streamedToolCalls.set(event.id, chunk);\n const message = new AssistantMessage(chunk, {}, event.id);\n yield new ChatModelOutput([message]);\n break;\n }\n case \"tool-input-delta\": {\n if (!input.streamPartialToolCalls) {\n break;\n }\n\n if (event.delta) {\n const chunk = streamedToolCalls.get(event.id)!;\n const message = new AssistantMessage({ ...chunk, input: event.delta }, {}, event.id);\n yield new ChatModelOutput([message]);\n }\n break;\n }\n case \"tool-call\": {\n streamEmpty = false;\n const existingToolCall = streamedToolCalls.get(event.toolCallId);\n if (existingToolCall) {\n streamedToolCalls.delete(event.toolCallId);\n break;\n }\n message = new AssistantMessage(\n {\n type: event.type,\n toolCallId: event.toolCallId,\n toolName: event.toolName,\n input: event.input,\n },\n {},\n event.toolCallId,\n );\n yield new ChatModelOutput([message]);\n break;\n }\n case \"error\":\n throw new 
ChatModelError(\"Unhandled error\", [event.error as Error]);\n case \"tool-result\":\n streamEmpty = false;\n message = new ToolMessage(\n {\n type: event.type,\n toolCallId: event.toolCallId,\n toolName: event.toolName,\n output: event.output as any,\n },\n {},\n `tool_result_${event.toolCallId}`,\n );\n yield new ChatModelOutput([message]);\n break;\n case \"abort\":\n break;\n default:\n break;\n }\n }\n\n if (streamEmpty && !run.signal.aborted) {\n throw new ChatModelError(\"No chunks have been received!\");\n }\n\n try {\n const [usage, finishReason, _] = await Promise.all([\n usagePromise,\n finishReasonPromise,\n responsePromise,\n ]);\n const lastChunk = new ChatModelOutput([]);\n lastChunk.usage = extractTokenUsage(usage);\n lastChunk.finishReason = finishReason;\n yield lastChunk;\n } catch (e) {\n if (!run.signal.aborted) {\n throw e;\n }\n }\n }\n\n protected async transformInput(\n input: ChatModelInput,\n ): Promise<Parameters<typeof generateText<Record<string, any>>>[0]> {\n const tools = await Promise.all(\n (input.tools ?? []).map(async (tool) => ({\n name: tool.name,\n description: tool.description,\n inputSchema: jsonSchema(await tool.getInputJsonSchema()),\n })),\n );\n\n const messages = input.messages.map((msg): ModelMessage => {\n if (msg instanceof CustomMessage) {\n msg = encodeCustomMessage(msg);\n }\n\n if (msg instanceof AssistantMessage) {\n return { role: \"assistant\", content: msg.content };\n } else if (msg instanceof ToolMessage) {\n return { role: \"tool\", content: msg.content };\n } else if (msg instanceof UserMessage) {\n return { role: \"user\", content: msg.content };\n } else if (msg instanceof SystemMessage) {\n return { role: \"system\", content: msg.content.map((part) => part.text).join(\"\\n\") };\n }\n return { role: msg.role, content: msg.content } as ModelMessage;\n });\n\n let toolChoice: ToolChoice<Record<string, any>> | undefined;\n if (input.toolChoice && input.toolChoice instanceof Tool) {\n if (this.toolChoiceSupport.includes(\"single\")) {\n toolChoice = {\n type: \"tool\",\n toolName: input.toolChoice.name,\n };\n } else {\n this.logger.warn(`The single tool choice is not supported.`);\n }\n } else if (input.toolChoice) {\n if (this.toolChoiceSupport.includes(input.toolChoice)) {\n toolChoice = input.toolChoice;\n } else {\n this.logger.warn(`The following tool choice value '${input.toolChoice}' is not supported.`);\n }\n }\n\n return {\n ...this.parameters,\n ...input,\n toolChoice,\n model: this.model,\n tools: mapToObj(tools, ({ name, ...tool }) => [name, tool]),\n messages,\n };\n }\n\n protected transformMessages(\n messages: (AssistantModelMessage | ToolModelMessage)[],\n id: string | undefined,\n ): Message[] {\n if (messages.length > 1) {\n id = undefined;\n }\n return messages.flatMap((msg) => {\n if (msg.role === \"tool\") {\n return new ToolMessage(\n msg.content.filter((part) => part.type === \"tool-result\"),\n msg.providerOptions,\n id,\n );\n }\n return new AssistantMessage(\n msg.content as TextPart | ToolCallPart | string,\n msg.providerOptions,\n id,\n );\n });\n }\n\n createSnapshot() {\n return {\n ...super.createSnapshot(),\n providerId: this.providerId,\n modelId: this.modelId,\n supportsToolStreaming: this.supportsToolStreaming,\n };\n }\n\n async loadSnapshot({ providerId, modelId, ...snapshot }: ReturnType<typeof this.createSnapshot>) {\n const instance = await ChatModel.fromName(`${providerId}:${modelId}` as FullModelName);\n if (!(instance instanceof VercelChatModel)) {\n throw new Error(\"Incorrect 
deserialization!\");\n }\n instance.destroy();\n Object.assign(this, {\n ...snapshot,\n model: instance.model,\n });\n }\n}\n"]}
@@ -1,7 +1,7 @@
  import { Logger } from '../../../logger/logger.cjs';
  import { GetRunContext, MiddlewareType } from '../../../context.cjs';
  import { m as ChatModel, g as ChatModelEvents, d as ChatModelInput, n as ChatModelOutput, a as ChatModelObjectInput, b as ChatModelObjectOutput, i as ChatModelCache, C as ChatModelParameters, l as ChatModelToolChoiceSupport } from '../../../chat-BFl85cqt.cjs';
- import { LanguageModel, generateText, CoreAssistantMessage, CoreToolMessage } from 'ai';
+ import { LanguageModel, generateText, AssistantModelMessage, ToolModelMessage } from 'ai';
  import { E as Emitter } from '../../../emitter-jN55XZZq.cjs';
  import { Message } from '../../../backend/message.cjs';
  import 'pino';
@@ -33,7 +33,7 @@ declare abstract class VercelChatModel<M extends LanguageModelV2 = LanguageModel
  protected _createStructure<T>({ schema, ...input }: ChatModelObjectInput<T>, run: GetRunContext<this>): Promise<ChatModelObjectOutput<T>>;
  _createStream(input: ChatModelInput, run: GetRunContext<this>): AsyncGenerator<ChatModelOutput, void, unknown>;
  protected transformInput(input: ChatModelInput): Promise<Parameters<typeof generateText<Record<string, any>>>[0]>;
- protected transformMessages(messages: (CoreAssistantMessage | CoreToolMessage)[]): Message[];
+ protected transformMessages(messages: (AssistantModelMessage | ToolModelMessage)[], id: string | undefined): Message[];
  createSnapshot(): {
  providerId: string;
  modelId: string;
@@ -1,7 +1,7 @@
  import { Logger } from '../../../logger/logger.js';
  import { GetRunContext, MiddlewareType } from '../../../context.js';
  import { m as ChatModel, g as ChatModelEvents, d as ChatModelInput, n as ChatModelOutput, a as ChatModelObjectInput, b as ChatModelObjectOutput, i as ChatModelCache, C as ChatModelParameters, l as ChatModelToolChoiceSupport } from '../../../chat-BRuyDeKR.js';
- import { LanguageModel, generateText, CoreAssistantMessage, CoreToolMessage } from 'ai';
+ import { LanguageModel, generateText, AssistantModelMessage, ToolModelMessage } from 'ai';
  import { E as Emitter } from '../../../emitter-36-9MnvA.js';
  import { Message } from '../../../backend/message.js';
  import 'pino';
@@ -33,7 +33,7 @@ declare abstract class VercelChatModel<M extends LanguageModelV2 = LanguageModel
  protected _createStructure<T>({ schema, ...input }: ChatModelObjectInput<T>, run: GetRunContext<this>): Promise<ChatModelObjectOutput<T>>;
  _createStream(input: ChatModelInput, run: GetRunContext<this>): AsyncGenerator<ChatModelOutput, void, unknown>;
  protected transformInput(input: ChatModelInput): Promise<Parameters<typeof generateText<Record<string, any>>>[0]>;
- protected transformMessages(messages: (CoreAssistantMessage | CoreToolMessage)[]): Message[];
+ protected transformMessages(messages: (AssistantModelMessage | ToolModelMessage)[], id: string | undefined): Message[];
  createSnapshot(): {
  providerId: string;
  modelId: string;