@ai-sdk/llamaindex 1.0.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,154 @@
+ # @ai-sdk/llamaindex
+
+ ## 1.0.0-alpha.1
+
+ ### Patch Changes
+
+ - Updated dependencies [b346545]
+ - Updated dependencies [109c0ac]
+ - ai@5.0.0-alpha.1
+
+ ## 1.0.0-canary.12
+
+ ### Patch Changes
+
+ - Updated dependencies [bedb239]
+ - Updated dependencies [507ac1d]
+ - Updated dependencies [2b9bbcd]
+ - Updated dependencies [f7e8bf4]
+ - Updated dependencies [cda32ba]
+ - Updated dependencies [50f0362]
+ - Updated dependencies [ed675de]
+ - Updated dependencies [64f6d64]
+ - ai@5.0.0-canary.24
+
+ ## 1.0.0-canary.11
+
+ ### Patch Changes
+
+ - Updated dependencies [40acf9b]
+ - ai@5.0.0-canary.23
+
+ ## 1.0.0-canary.10
+
+ ### Patch Changes
+
+ - Updated dependencies [e7dc6c7]
+ - Updated dependencies [a34eb39]
+ - Updated dependencies [b33ed7a]
+ - Updated dependencies [765f1cd]
+ - ai@5.0.0-canary.22
+
+ ## 1.0.0-canary.9
+
+ ### Patch Changes
+
+ - Updated dependencies [d964901]
+ - Updated dependencies [0560977]
+ - Updated dependencies [66af894]
+ - Updated dependencies [516be5b]
+ - Updated dependencies [bfbfc4c]
+ - Updated dependencies [ea7a7c9]
+ - Updated dependencies [1409e13]
+ - ai@5.0.0-canary.21
+
+ ## 1.0.0-canary.8
+
+ ### Patch Changes
+
+ - bc3109f: chore (ai): push stream-callbacks into langchain/llamaindex adapters
+ - Updated dependencies [13fef90]
+ - Updated dependencies [e90d45d]
+ - Updated dependencies [bc3109f]
+ - Updated dependencies [496bbc1]
+ - Updated dependencies [da70d79]
+ - Updated dependencies [bcea599]
+ - Updated dependencies [48d675a]
+ - Updated dependencies [c7710a9]
+ - Updated dependencies [35fc02c]
+ - Updated dependencies [b983b51]
+ - ai@5.0.0-canary.20
+
+ ## 1.0.0-canary.7
+
+ ### Patch Changes
+
+ - Updated dependencies [2d03e19]
+ - Updated dependencies [319b989]
+ - Updated dependencies [441d042]
+ - Updated dependencies [dcc549b]
+ - Updated dependencies [cb2b53a]
+ - Updated dependencies [e244a78]
+ - ai@5.0.0-canary.19
+
+ ## 1.0.0-canary.6
+
+ ### Patch Changes
+
+ - Updated dependencies [a571d6e]
+ - Updated dependencies [c60f895]
+ - Updated dependencies [332167b]
+ - Updated dependencies [a8c8bd5]
+ - Updated dependencies [a662dea]
+ - Updated dependencies [41fa418]
+ - @ai-sdk/provider-utils@3.0.0-canary.15
+ - ai@5.0.0-canary.18
+
+ ## 1.0.0-canary.5
+
+ ### Patch Changes
+
+ - Updated dependencies [f04fb4a]
+ - Updated dependencies [fd1924b]
+ - Updated dependencies [957b739]
+ - Updated dependencies [fafc3f2]
+ - Updated dependencies [c9ad635]
+ - Updated dependencies [9bd5ab5]
+ - Updated dependencies [92cb0a2]
+ - ai@5.0.0-canary.17
+ - @ai-sdk/provider-utils@3.0.0-canary.14
+
+ ## 1.0.0-canary.4
+
+ ### Patch Changes
+
+ - Updated dependencies [ec78cdc]
+ - Updated dependencies [9b4d074]
+ - Updated dependencies [8b86e99]
+ - Updated dependencies [28ad69e]
+ - Updated dependencies [2524fc7]
+ - Updated dependencies [ec5933d]
+ - Updated dependencies [175b868]
+ - ai@5.0.0-canary.16
+
+ ## 1.0.0-canary.3
+
+ ### Patch Changes
+
+ - Updated dependencies [d9209ca]
+ - Updated dependencies [ea27cc6]
+ - Updated dependencies [0ff02bb]
+ - Updated dependencies [4bfe9ec]
+ - Updated dependencies [2877a74]
+ - ai@5.0.0-canary.15
+ - @ai-sdk/provider-utils@3.0.0-canary.13
+
+ ## 1.0.0-canary.2
+
+ ### Patch Changes
+
+ - Updated dependencies [9bf7291]
+ - Updated dependencies [4617fab]
+ - Updated dependencies [a76a62b]
+ - ai@5.0.0-canary.14
+ - @ai-sdk/provider-utils@3.0.0-canary.12
+
+ ## 1.0.0-canary.1
+
+ ### Patch Changes
+
+ - Updated dependencies [14cb3be]
+ - Updated dependencies [66962ed]
+ - Updated dependencies [9301f86]
+ - ai@5.0.0-canary.13
+ - @ai-sdk/provider-utils@3.0.0-canary.11
package/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright 2023 Vercel, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
package/README.md ADDED
@@ -0,0 +1,3 @@
+ # AI SDK - LlamaIndex Adapter
+
+ This package contains a LlamaIndex adapter for the AI SDK.
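
For orientation, a minimal usage sketch of the package's single export, based only on the type declarations and compiled output that follow: `toUIMessageStream` takes an `AsyncIterable` of `{ delta: string }` chunks and returns a `ReadableStream<UIMessageStreamPart>`. The hand-written generator is a purely illustrative stand-in for a streaming LlamaIndex engine response.

```ts
import { toUIMessageStream } from '@ai-sdk/llamaindex';

// Illustrative stand-in for a streaming LlamaIndex engine response;
// in a real app this AsyncIterable<{ delta: string }> would come from
// a LlamaIndex chat or query engine call.
async function* engineResponse() {
  yield { delta: 'Hello' };
  yield { delta: ', ' };
  yield { delta: 'world!' };
}

async function main() {
  // Convert the engine stream into a stream of UI message parts.
  const uiStream = toUIMessageStream(engineResponse());

  // Node 18+ web ReadableStreams are async iterable.
  for await (const part of uiStream) {
    console.log(part); // e.g. { type: 'text', text: 'Hello' }
  }
}

main();
```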
@@ -0,0 +1,22 @@
+ import { UIMessageStreamPart } from 'ai';
+
+ /**
+  * Configuration options and helper callback methods for stream lifecycle events.
+  */
+ interface StreamCallbacks {
+   /** `onStart`: Called once when the stream is initialized. */
+   onStart?: () => Promise<void> | void;
+   /** `onFinal`: Called once when the stream is closed with the final completion message. */
+   onFinal?: (completion: string) => Promise<void> | void;
+   /** `onToken`: Called for each tokenized message. */
+   onToken?: (token: string) => Promise<void> | void;
+   /** `onText`: Called for each text chunk. */
+   onText?: (text: string) => Promise<void> | void;
+ }
+
+ type EngineResponse = {
+   delta: string;
+ };
+ declare function toUIMessageStream(stream: AsyncIterable<EngineResponse>, callbacks?: StreamCallbacks): ReadableStream<UIMessageStreamPart>;
+
+ export { toUIMessageStream };
@@ -0,0 +1,22 @@
+ import { UIMessageStreamPart } from 'ai';
+
+ /**
+  * Configuration options and helper callback methods for stream lifecycle events.
+  */
+ interface StreamCallbacks {
+   /** `onStart`: Called once when the stream is initialized. */
+   onStart?: () => Promise<void> | void;
+   /** `onFinal`: Called once when the stream is closed with the final completion message. */
+   onFinal?: (completion: string) => Promise<void> | void;
+   /** `onToken`: Called for each tokenized message. */
+   onToken?: (token: string) => Promise<void> | void;
+   /** `onText`: Called for each text chunk. */
+   onText?: (text: string) => Promise<void> | void;
+ }
+
+ type EngineResponse = {
+   delta: string;
+ };
+ declare function toUIMessageStream(stream: AsyncIterable<EngineResponse>, callbacks?: StreamCallbacks): ReadableStream<UIMessageStreamPart>;
+
+ export { toUIMessageStream };
package/dist/index.js ADDED
@@ -0,0 +1,84 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/index.ts
+ var src_exports = {};
+ __export(src_exports, {
+   toUIMessageStream: () => toUIMessageStream
+ });
+ module.exports = __toCommonJS(src_exports);
+
+ // src/llamaindex-adapter.ts
+ var import_internal = require("ai/internal");
+
+ // src/stream-callbacks.ts
+ function createCallbacksTransformer(callbacks = {}) {
+   let aggregatedResponse = "";
+   return new TransformStream({
+     async start() {
+       if (callbacks.onStart) await callbacks.onStart();
+     },
+     async transform(message, controller) {
+       controller.enqueue(message);
+       aggregatedResponse += message;
+       if (callbacks.onToken) await callbacks.onToken(message);
+       if (callbacks.onText && typeof message === "string") {
+         await callbacks.onText(message);
+       }
+     },
+     async flush() {
+       if (callbacks.onFinal) {
+         await callbacks.onFinal(aggregatedResponse);
+       }
+     }
+   });
+ }
+
+ // src/llamaindex-adapter.ts
+ function toUIMessageStream(stream, callbacks) {
+   const trimStart = trimStartOfStream();
+   return (0, import_internal.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
+     new TransformStream({
+       async transform(message, controller) {
+         controller.enqueue(trimStart(message.delta));
+       }
+     })
+   ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+     new TransformStream({
+       transform: async (chunk, controller) => {
+         controller.enqueue({ type: "text", text: chunk });
+       }
+     })
+   );
+ }
+ function trimStartOfStream() {
+   let isStreamStart = true;
+   return (text) => {
+     if (isStreamStart) {
+       text = text.trimStart();
+       if (text) isStreamStart = false;
+     }
+     return text;
+   };
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   toUIMessageStream
+ });
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/index.ts","../src/llamaindex-adapter.ts","../src/stream-callbacks.ts"],"sourcesContent":["export * from './llamaindex-adapter';\n","import { UIMessageStreamPart } from 'ai';\nimport { convertAsyncIteratorToReadableStream } from 'ai/internal';\nimport {\n createCallbacksTransformer,\n StreamCallbacks,\n} from './stream-callbacks';\n\ntype EngineResponse = {\n delta: string;\n};\n\nexport function toUIMessageStream(\n stream: AsyncIterable<EngineResponse>,\n callbacks?: StreamCallbacks,\n) {\n const trimStart = trimStartOfStream();\n\n return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]())\n .pipeThrough(\n new TransformStream({\n async transform(message, controller): Promise<void> {\n controller.enqueue(trimStart(message.delta));\n },\n }),\n )\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(\n new TransformStream<string, UIMessageStreamPart>({\n transform: async (chunk, controller) => {\n controller.enqueue({ type: 'text', text: chunk });\n },\n }),\n );\n}\n\nfunction trimStartOfStream(): (text: string) => string {\n let isStreamStart = true;\n\n return (text: string): string => {\n if (isStreamStart) {\n text = text.trimStart();\n if (text) isStreamStart = false;\n }\n return text;\n };\n}\n","/**\n * Configuration options and helper callback methods for stream lifecycle events.\n */\nexport interface StreamCallbacks {\n /** `onStart`: Called once when the stream is initialized. */\n onStart?: () => Promise<void> | void;\n\n /** `onFinal`: Called once when the stream is closed with the final completion message. */\n onFinal?: (completion: string) => Promise<void> | void;\n\n /** `onToken`: Called for each tokenized message. */\n onToken?: (token: string) => Promise<void> | void;\n\n /** `onText`: Called for each text chunk. 
*/\n onText?: (text: string) => Promise<void> | void;\n}\n\n/**\n * Creates a transform stream that encodes input messages and invokes optional callback functions.\n * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.\n * - `onStart`: Called once when the stream is initialized.\n * - `onToken`: Called for each tokenized message.\n * - `onFinal`: Called once when the stream is closed with the final completion message.\n *\n * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.\n *\n * @param {StreamCallbacks} [callbacks] - An object containing the callback functions.\n * @return {TransformStream<string, Uint8Array>} A transform stream that encodes input messages as Uint8Array and allows the execution of custom logic through callbacks.\n *\n * @example\n * const callbacks = {\n * onStart: async () => console.log('Stream started'),\n * onToken: async (token) => console.log(`Token: ${token}`),\n * onFinal: async () => data.close()\n * };\n * const transformer = createCallbacksTransformer(callbacks);\n */\nexport function createCallbacksTransformer(\n callbacks: StreamCallbacks | undefined = {},\n): TransformStream<string, string> {\n let aggregatedResponse = '';\n\n return new TransformStream({\n async start(): Promise<void> {\n if (callbacks.onStart) await callbacks.onStart();\n },\n\n async transform(message, controller): Promise<void> {\n controller.enqueue(message);\n\n aggregatedResponse += message;\n\n if (callbacks.onToken) await callbacks.onToken(message);\n if (callbacks.onText && typeof message === 'string') {\n await callbacks.onText(message);\n }\n },\n\n async flush(): Promise<void> {\n if (callbacks.onFinal) {\n await callbacks.onFinal(aggregatedResponse);\n }\n },\n });\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,sBAAqD;;;ACoC9C,SAAS,2BACd,YAAyC,CAAC,GACT;AACjC,MAAI,qBAAqB;AAEzB,SAAO,IAAI,gBAAgB;AAAA,IACzB,MAAM,QAAuB;AAC3B,UAAI,UAAU,QAAS,OAAM,UAAU,QAAQ;AAAA,IACjD;AAAA,IAEA,MAAM,UAAU,SAAS,YAA2B;AAClD,iBAAW,QAAQ,OAAO;AAE1B,4BAAsB;AAEtB,UAAI,UAAU,QAAS,OAAM,UAAU,QAAQ,OAAO;AACtD,UAAI,UAAU,UAAU,OAAO,YAAY,UAAU;AACnD,cAAM,UAAU,OAAO,OAAO;AAAA,MAChC;AAAA,IACF;AAAA,IAEA,MAAM,QAAuB;AAC3B,UAAI,UAAU,SAAS;AACrB,cAAM,UAAU,QAAQ,kBAAkB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ADrDO,SAAS,kBACd,QACA,WACA;AACA,QAAM,YAAY,kBAAkB;AAEpC,aAAO,sDAAqC,OAAO,OAAO,aAAa,EAAE,CAAC,EACvE;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,MAAM,UAAU,SAAS,YAA2B;AAClD,mBAAW,QAAQ,UAAU,QAAQ,KAAK,CAAC;AAAA,MAC7C;AAAA,IACF,CAAC;AAAA,EACH,EACC,YAAY,2BAA2B,SAAS,CAAC,EACjD;AAAA,IACC,IAAI,gBAA6C;AAAA,MAC/C,WAAW,OAAO,OAAO,eAAe;AACtC,mBAAW,QAAQ,EAAE,MAAM,QAAQ,MAAM,MAAM,CAAC;AAAA,MAClD;AAAA,IACF,CAAC;AAAA,EACH;AACJ;AAEA,SAAS,oBAA8C;AACrD,MAAI,gBAAgB;AAEpB,SAAO,CAAC,SAAyB;AAC/B,QAAI,eAAe;AACjB,aAAO,KAAK,UAAU;AACtB,UAAI,KAAM,iBAAgB;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
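
As a companion to the compiled adapter above, a hedged sketch of the callback lifecycle it implements: `onStart` fires once before any chunk, `onToken` and `onText` fire per string chunk (after leading whitespace is trimmed at the very start of the stream), and `onFinal` receives the concatenation of all chunks when the stream closes. The engine stream is again an illustrative stand-in, not a real LlamaIndex API call.

```ts
import { toUIMessageStream } from '@ai-sdk/llamaindex';

// Illustrative stand-in for a streaming LlamaIndex engine response.
async function* engineResponse() {
  yield { delta: '  The ' }; // leading whitespace is trimmed at stream start
  yield { delta: 'answer.' };
}

async function run() {
  const stream = toUIMessageStream(engineResponse(), {
    onStart: () => console.log('stream started'),
    onToken: token => console.log('chunk:', JSON.stringify(token)),
    // receives the aggregated completion, here "The answer."
    onFinal: completion => console.log('final:', JSON.stringify(completion)),
  });

  // Drain the stream so the transformers (and therefore the callbacks) run.
  for await (const part of stream) {
    void part; // each part is { type: 'text', text: string }
  }
}

run();
```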
package/dist/index.mjs ADDED
@@ -0,0 +1,57 @@
+ // src/llamaindex-adapter.ts
+ import { convertAsyncIteratorToReadableStream } from "ai/internal";
+
+ // src/stream-callbacks.ts
+ function createCallbacksTransformer(callbacks = {}) {
+   let aggregatedResponse = "";
+   return new TransformStream({
+     async start() {
+       if (callbacks.onStart) await callbacks.onStart();
+     },
+     async transform(message, controller) {
+       controller.enqueue(message);
+       aggregatedResponse += message;
+       if (callbacks.onToken) await callbacks.onToken(message);
+       if (callbacks.onText && typeof message === "string") {
+         await callbacks.onText(message);
+       }
+     },
+     async flush() {
+       if (callbacks.onFinal) {
+         await callbacks.onFinal(aggregatedResponse);
+       }
+     }
+   });
+ }
+
+ // src/llamaindex-adapter.ts
+ function toUIMessageStream(stream, callbacks) {
+   const trimStart = trimStartOfStream();
+   return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]()).pipeThrough(
+     new TransformStream({
+       async transform(message, controller) {
+         controller.enqueue(trimStart(message.delta));
+       }
+     })
+   ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+     new TransformStream({
+       transform: async (chunk, controller) => {
+         controller.enqueue({ type: "text", text: chunk });
+       }
+     })
+   );
+ }
+ function trimStartOfStream() {
+   let isStreamStart = true;
+   return (text) => {
+     if (isStreamStart) {
+       text = text.trimStart();
+       if (text) isStreamStart = false;
+     }
+     return text;
+   };
+ }
+ export {
+   toUIMessageStream
+ };
+ //# sourceMappingURL=index.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/llamaindex-adapter.ts","../src/stream-callbacks.ts"],"sourcesContent":["import { UIMessageStreamPart } from 'ai';\nimport { convertAsyncIteratorToReadableStream } from 'ai/internal';\nimport {\n createCallbacksTransformer,\n StreamCallbacks,\n} from './stream-callbacks';\n\ntype EngineResponse = {\n delta: string;\n};\n\nexport function toUIMessageStream(\n stream: AsyncIterable<EngineResponse>,\n callbacks?: StreamCallbacks,\n) {\n const trimStart = trimStartOfStream();\n\n return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]())\n .pipeThrough(\n new TransformStream({\n async transform(message, controller): Promise<void> {\n controller.enqueue(trimStart(message.delta));\n },\n }),\n )\n .pipeThrough(createCallbacksTransformer(callbacks))\n .pipeThrough(\n new TransformStream<string, UIMessageStreamPart>({\n transform: async (chunk, controller) => {\n controller.enqueue({ type: 'text', text: chunk });\n },\n }),\n );\n}\n\nfunction trimStartOfStream(): (text: string) => string {\n let isStreamStart = true;\n\n return (text: string): string => {\n if (isStreamStart) {\n text = text.trimStart();\n if (text) isStreamStart = false;\n }\n return text;\n };\n}\n","/**\n * Configuration options and helper callback methods for stream lifecycle events.\n */\nexport interface StreamCallbacks {\n /** `onStart`: Called once when the stream is initialized. */\n onStart?: () => Promise<void> | void;\n\n /** `onFinal`: Called once when the stream is closed with the final completion message. */\n onFinal?: (completion: string) => Promise<void> | void;\n\n /** `onToken`: Called for each tokenized message. */\n onToken?: (token: string) => Promise<void> | void;\n\n /** `onText`: Called for each text chunk. */\n onText?: (text: string) => Promise<void> | void;\n}\n\n/**\n * Creates a transform stream that encodes input messages and invokes optional callback functions.\n * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.\n * - `onStart`: Called once when the stream is initialized.\n * - `onToken`: Called for each tokenized message.\n * - `onFinal`: Called once when the stream is closed with the final completion message.\n *\n * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.\n *\n * @param {StreamCallbacks} [callbacks] - An object containing the callback functions.\n * @return {TransformStream<string, Uint8Array>} A transform stream that encodes input messages as Uint8Array and allows the execution of custom logic through callbacks.\n *\n * @example\n * const callbacks = {\n * onStart: async () => console.log('Stream started'),\n * onToken: async (token) => console.log(`Token: ${token}`),\n * onFinal: async () => data.close()\n * };\n * const transformer = createCallbacksTransformer(callbacks);\n */\nexport function createCallbacksTransformer(\n callbacks: StreamCallbacks | undefined = {},\n): TransformStream<string, string> {\n let aggregatedResponse = '';\n\n return new TransformStream({\n async start(): Promise<void> {\n if (callbacks.onStart) await callbacks.onStart();\n },\n\n async transform(message, controller): Promise<void> {\n controller.enqueue(message);\n\n aggregatedResponse += message;\n\n if (callbacks.onToken) await callbacks.onToken(message);\n if (callbacks.onText && typeof message === 'string') {\n await callbacks.onText(message);\n }\n },\n\n async flush(): Promise<void> {\n if 
(callbacks.onFinal) {\n await callbacks.onFinal(aggregatedResponse);\n }\n },\n });\n}\n"],"mappings":";AACA,SAAS,4CAA4C;;;ACoC9C,SAAS,2BACd,YAAyC,CAAC,GACT;AACjC,MAAI,qBAAqB;AAEzB,SAAO,IAAI,gBAAgB;AAAA,IACzB,MAAM,QAAuB;AAC3B,UAAI,UAAU,QAAS,OAAM,UAAU,QAAQ;AAAA,IACjD;AAAA,IAEA,MAAM,UAAU,SAAS,YAA2B;AAClD,iBAAW,QAAQ,OAAO;AAE1B,4BAAsB;AAEtB,UAAI,UAAU,QAAS,OAAM,UAAU,QAAQ,OAAO;AACtD,UAAI,UAAU,UAAU,OAAO,YAAY,UAAU;AACnD,cAAM,UAAU,OAAO,OAAO;AAAA,MAChC;AAAA,IACF;AAAA,IAEA,MAAM,QAAuB;AAC3B,UAAI,UAAU,SAAS;AACrB,cAAM,UAAU,QAAQ,kBAAkB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;ADrDO,SAAS,kBACd,QACA,WACA;AACA,QAAM,YAAY,kBAAkB;AAEpC,SAAO,qCAAqC,OAAO,OAAO,aAAa,EAAE,CAAC,EACvE;AAAA,IACC,IAAI,gBAAgB;AAAA,MAClB,MAAM,UAAU,SAAS,YAA2B;AAClD,mBAAW,QAAQ,UAAU,QAAQ,KAAK,CAAC;AAAA,MAC7C;AAAA,IACF,CAAC;AAAA,EACH,EACC,YAAY,2BAA2B,SAAS,CAAC,EACjD;AAAA,IACC,IAAI,gBAA6C;AAAA,MAC/C,WAAW,OAAO,OAAO,eAAe;AACtC,mBAAW,QAAQ,EAAE,MAAM,QAAQ,MAAM,MAAM,CAAC;AAAA,MAClD;AAAA,IACF,CAAC;AAAA,EACH;AACJ;AAEA,SAAS,oBAA8C;AACrD,MAAI,gBAAgB;AAEpB,SAAO,CAAC,SAAyB;AAC/B,QAAI,eAAe;AACjB,aAAO,KAAK,UAAU;AACtB,UAAI,KAAM,iBAAgB;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AACF;","names":[]}
package/package.json ADDED
@@ -0,0 +1,60 @@
+ {
+   "name": "@ai-sdk/llamaindex",
+   "version": "1.0.0-alpha.1",
+   "license": "Apache-2.0",
+   "sideEffects": false,
+   "main": "./dist/index.js",
+   "module": "./dist/index.mjs",
+   "types": "./dist/index.d.ts",
+   "files": [
+     "dist/**/*",
+     "CHANGELOG.md"
+   ],
+   "exports": {
+     "./package.json": "./package.json",
+     ".": {
+       "types": "./dist/index.d.ts",
+       "import": "./dist/index.mjs",
+       "require": "./dist/index.js"
+     }
+   },
+   "dependencies": {
+     "ai": "5.0.0-alpha.1"
+   },
+   "devDependencies": {
+     "@types/node": "20.17.24",
+     "tsup": "^8",
+     "typescript": "5.8.3",
+     "@vercel/ai-tsconfig": "0.0.0"
+   },
+   "engines": {
+     "node": ">=18"
+   },
+   "publishConfig": {
+     "access": "public"
+   },
+   "homepage": "https://ai-sdk.dev/docs",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/vercel/ai.git"
+   },
+   "bugs": {
+     "url": "https://github.com/vercel/ai/issues"
+   },
+   "keywords": [
+     "ai"
+   ],
+   "scripts": {
+     "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
+     "build:watch": "pnpm clean && tsup --watch",
+     "clean": "rm -rf dist *.tsbuildinfo",
+     "lint": "eslint \"./**/*.ts*\"",
+     "type-check": "tsc --build",
+     "prettier-check": "prettier --check \"./**/*.ts*\"",
+     "test": "pnpm test:node && pnpm test:edge",
+     "test:update": "pnpm test:node -u",
+     "test:watch": "vitest --config vitest.node.config.js",
+     "test:edge": "vitest --config vitest.edge.config.js --run",
+     "test:node": "vitest --config vitest.node.config.js --run"
+   }
+ }
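
A small illustration of the export map above (a sketch, assuming the package is installed as published): the same specifier resolves to the ESM or CommonJS build depending on how it is loaded.

```ts
// ESM: resolves to ./dist/index.mjs via the "import" condition.
import { toUIMessageStream } from '@ai-sdk/llamaindex';

// CommonJS: resolves to ./dist/index.js via the "require" condition.
// const { toUIMessageStream } = require('@ai-sdk/llamaindex');
```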