@assistant-ui/react 0.7.57 → 0.7.58
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/runtimes/edge/EdgeChatAdapter.js +3 -3
- package/dist/runtimes/edge/EdgeChatAdapter.js.map +1 -1
- package/dist/runtimes/edge/EdgeChatAdapter.mjs +3 -3
- package/dist/runtimes/edge/EdgeChatAdapter.mjs.map +1 -1
- package/dist/runtimes/edge/converters/toLanguageModelMessages.d.ts +3 -1
- package/dist/runtimes/edge/converters/toLanguageModelMessages.d.ts.map +1 -1
- package/dist/runtimes/edge/converters/toLanguageModelMessages.js +10 -2
- package/dist/runtimes/edge/converters/toLanguageModelMessages.js.map +1 -1
- package/dist/runtimes/edge/converters/toLanguageModelMessages.mjs +10 -2
- package/dist/runtimes/edge/converters/toLanguageModelMessages.mjs.map +1 -1
- package/package.json +3 -3
- package/src/runtimes/edge/EdgeChatAdapter.ts +3 -3
- package/src/runtimes/edge/converters/toLanguageModelMessages.ts +12 -1
package/dist/runtimes/edge/EdgeChatAdapter.js
CHANGED

@@ -63,9 +63,9 @@ var EdgeChatAdapter = class {
       credentials: this.options.credentials ?? "same-origin",
       body: JSON.stringify({
         system: context.system,
-        messages: this.options.unstable_AISDKInterop ? (0, import_converters.toLanguageModelMessages)(
-          messages
-        ) : (0, import_toCoreMessages.toCoreMessages)(messages, {
+        messages: this.options.unstable_AISDKInterop ? (0, import_converters.toLanguageModelMessages)(messages, {
+          unstable_includeId: this.options.unstable_sendMessageIds
+        }) : (0, import_toCoreMessages.toCoreMessages)(messages, {
           unstable_includeId: this.options.unstable_sendMessageIds
         }),
         tools: context.tools ? (0, import_toLanguageModelTools.toLanguageModelTools)(context.tools) : [],
package/dist/runtimes/edge/EdgeChatAdapter.js.map
CHANGED

@@ -1 +1 @@
Single-line source map regenerated: the embedded sourcesContent now reflects the new EdgeChatAdapter.ts (toLanguageModelMessages now receives messages plus an unstable_includeId option) and the mappings were rebuilt accordingly.
package/dist/runtimes/edge/EdgeChatAdapter.mjs
CHANGED

@@ -38,9 +38,9 @@ var EdgeChatAdapter = class {
       credentials: this.options.credentials ?? "same-origin",
       body: JSON.stringify({
         system: context.system,
-        messages: this.options.unstable_AISDKInterop ? toLanguageModelMessages(
-          messages
-        ) : toCoreMessages(messages, {
+        messages: this.options.unstable_AISDKInterop ? toLanguageModelMessages(messages, {
+          unstable_includeId: this.options.unstable_sendMessageIds
+        }) : toCoreMessages(messages, {
           unstable_includeId: this.options.unstable_sendMessageIds
         }),
         tools: context.tools ? toLanguageModelTools(context.tools) : [],
package/dist/runtimes/edge/EdgeChatAdapter.mjs.map
CHANGED

@@ -1 +1 @@
Single-line source map regenerated: the embedded sourcesContent now reflects the new EdgeChatAdapter.ts and the mappings were rebuilt accordingly.
package/dist/runtimes/edge/converters/toLanguageModelMessages.d.ts
CHANGED

@@ -1,4 +1,6 @@
 import { LanguageModelV1Message } from "@ai-sdk/provider";
 import { CoreMessage, ThreadMessage } from "../../../types/AssistantTypes";
-export declare function toLanguageModelMessages(message: readonly CoreMessage[] | readonly ThreadMessage[]): LanguageModelV1Message[];
+export declare function toLanguageModelMessages(message: readonly CoreMessage[] | readonly ThreadMessage[], options?: {
+    unstable_includeId?: boolean | undefined;
+}): LanguageModelV1Message[];
 //# sourceMappingURL=toLanguageModelMessages.d.ts.map
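For illustration, a minimal sketch of calling the widened declaration. The import specifiers mirror the package-internal source layout and the message literal is made up; neither is taken from this diff.

// Sketch only — paths mirror the package source tree; the public entry point may differ.
import { toLanguageModelMessages } from "./runtimes/edge/converters";
import type { CoreMessage } from "./types/AssistantTypes";

const messages: readonly CoreMessage[] = [
  { role: "user", content: [{ type: "text", text: "Hello" }] },
];

// Existing call shape still type-checks: options defaults to {}.
const withoutIds = toLanguageModelMessages(messages);

// New in 0.7.58: ask the converter to carry message ids through as unstable_id
// (meaningful when the input messages are ThreadMessages that actually have ids).
const withIds = toLanguageModelMessages(messages, { unstable_includeId: true });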
package/dist/runtimes/edge/converters/toLanguageModelMessages.d.ts.map
CHANGED

@@ -1 +1 @@
Single-line declaration map regenerated: the mappings now cover the added optional options parameter with unstable_includeId.
package/dist/runtimes/edge/converters/toLanguageModelMessages.js
CHANGED

@@ -72,12 +72,19 @@ var assistantMessageSplitter = () => {
     }
   };
 };
-function toLanguageModelMessages(message) {
+function toLanguageModelMessages(message, options = {}) {
+  const includeId = options.unstable_includeId ?? false;
   return message.flatMap((message2) => {
     const role = message2.role;
     switch (role) {
       case "system": {
-        return [{ role: "system", content: message2.content[0].text }];
+        return [
+          {
+            ...includeId ? { unstable_id: message2.id } : {},
+            role: "system",
+            content: message2.content[0].text
+          }
+        ];
       }
       case "user": {
         const attachments = "attachments" in message2 ? message2.attachments : [];
@@ -86,6 +93,7 @@ function toLanguageModelMessages(message) {
           ...attachments.map((a) => a.content).flat()
         ];
         const msg = {
+          ...includeId ? { unstable_id: message2.id } : {},
           role: "user",
           content: content.map(
             (part) => {
package/dist/runtimes/edge/converters/toLanguageModelMessages.js.map
CHANGED

@@ -1 +1 @@
Single-line source map regenerated: the embedded sourcesContent now reflects the new toLanguageModelMessages.ts (options parameter, unstable_id spreads) and the mappings were rebuilt accordingly.
package/dist/runtimes/edge/converters/toLanguageModelMessages.mjs
CHANGED

@@ -48,12 +48,19 @@ var assistantMessageSplitter = () => {
     }
   };
 };
-function toLanguageModelMessages(message) {
+function toLanguageModelMessages(message, options = {}) {
+  const includeId = options.unstable_includeId ?? false;
   return message.flatMap((message2) => {
     const role = message2.role;
     switch (role) {
       case "system": {
-        return [{ role: "system", content: message2.content[0].text }];
+        return [
+          {
+            ...includeId ? { unstable_id: message2.id } : {},
+            role: "system",
+            content: message2.content[0].text
+          }
+        ];
       }
       case "user": {
         const attachments = "attachments" in message2 ? message2.attachments : [];
@@ -62,6 +69,7 @@ function toLanguageModelMessages(message) {
           ...attachments.map((a) => a.content).flat()
        ];
         const msg = {
+          ...includeId ? { unstable_id: message2.id } : {},
           role: "user",
           content: content.map(
             (part) => {
package/dist/runtimes/edge/converters/toLanguageModelMessages.mjs.map
CHANGED

@@ -1 +1 @@
Single-line source map regenerated: the embedded sourcesContent now reflects the new toLanguageModelMessages.ts and the mappings were rebuilt accordingly.
package/package.json
CHANGED
@@ -29,7 +29,7 @@
     "conversational-ui",
     "conversational-ai"
   ],
-  "version": "0.7.57",
+  "version": "0.7.58",
   "license": "MIT",
   "exports": {
     ".": {
@@ -118,8 +118,8 @@
     "tailwindcss-animate": "^1.0.7",
     "tsx": "^4.19.2",
     "@assistant-ui/tailwindcss-transformer": "0.1.0",
-    "@assistant-ui/
-    "@assistant-ui/
+    "@assistant-ui/tsbuildutils": "^0.0.1",
+    "@assistant-ui/tsconfig": "0.0.0"
   },
   "publishConfig": {
     "access": "public",
package/src/runtimes/edge/EdgeChatAdapter.ts
CHANGED

@@ -68,9 +68,9 @@ export class EdgeChatAdapter implements ChatModelAdapter {
       body: JSON.stringify({
         system: context.system,
         messages: this.options.unstable_AISDKInterop
-          ? (toLanguageModelMessages(
-              messages,
-            ) as EdgeRuntimeRequestOptions["messages"]) // TODO figure out a better way to do this
+          ? (toLanguageModelMessages(messages, {
+              unstable_includeId: this.options.unstable_sendMessageIds,
+            }) as EdgeRuntimeRequestOptions["messages"]) // TODO figure out a better way to do this
           : toCoreMessages(messages, {
              unstable_includeId: this.options.unstable_sendMessageIds,
            }),
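As a hedged usage sketch, this is how the existing adapter options now reach the interop branch above; the endpoint URL is a placeholder and the import path assumes the class is re-exported from the package root.

// Sketch only: with both flags set, the unstable_AISDKInterop branch now passes
// unstable_sendMessageIds through to toLanguageModelMessages, so the POSTed
// messages keep their ids instead of dropping them.
import { EdgeChatAdapter } from "@assistant-ui/react";

const adapter = new EdgeChatAdapter({
  api: "/api/chat",               // placeholder endpoint
  unstable_AISDKInterop: true,    // send AI SDK Core-style messages
  unstable_sendMessageIds: true,  // now honored on the interop path as well
});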
package/src/runtimes/edge/converters/toLanguageModelMessages.ts
CHANGED

@@ -74,12 +74,22 @@ const assistantMessageSplitter = () => {

 export function toLanguageModelMessages(
   message: readonly CoreMessage[] | readonly ThreadMessage[],
+  options: { unstable_includeId?: boolean | undefined } = {},
 ): LanguageModelV1Message[] {
+  const includeId = options.unstable_includeId ?? false;
   return message.flatMap((message) => {
     const role = message.role;
     switch (role) {
       case "system": {
-        return [{ role: "system", content: message.content[0].text }];
+        return [
+          {
+            ...(includeId
+              ? { unstable_id: (message as ThreadMessage).id }
+              : {}),
+            role: "system",
+            content: message.content[0].text,
+          },
+        ];
       }

       case "user": {
@@ -89,6 +99,7 @@ export function toLanguageModelMessages(
           ...attachments.map((a) => a.content).flat(),
         ];
         const msg: LanguageModelV1Message = {
+          ...(includeId ? { unstable_id: (message as ThreadMessage).id } : {}),
           role: "user",
           content: content.map(
             (
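To summarize the converter change, a hedged before/after sketch for a single user message; the id and text below are invented, and ThreadMessage has more required fields than shown.

// Input (abridged ThreadMessage):
//   { id: "msg_123", role: "user", content: [{ type: "text", text: "Hi" }] }
//
// toLanguageModelMessages([input])
//   → [{ role: "user", content: [{ type: "text", text: "Hi" }] }]           // unchanged behavior
//
// toLanguageModelMessages([input], { unstable_includeId: true })            // new in 0.7.58
//   → [{ unstable_id: "msg_123", role: "user", content: [{ type: "text", text: "Hi" }] }]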