@ai-sdk/llamaindex 0.0.0-0219f568-20260113124214 → 0.0.0-17394c74-20260122151521

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,11 +1,118 @@
  # @ai-sdk/llamaindex

- ## 0.0.0-0219f568-20260113124214
+ ## 0.0.0-17394c74-20260122151521

  ### Patch Changes

- - Updated dependencies [eade408]
- - ai@0.0.0-0219f568-20260113124214
+ - Updated dependencies [4caafb2]
+ - ai@0.0.0-17394c74-20260122151521
+
+ ## 2.0.47
+
+ ### Patch Changes
+
+ - ai@6.0.47
+
+ ## 2.0.46
+
+ ### Patch Changes
+
+ - 8dc54db: chore: add src folders to package bundle
+ - ai@6.0.46
+
+ ## 2.0.45
+
+ ### Patch Changes
+
+ - ai@6.0.45
+
+ ## 2.0.44
+
+ ### Patch Changes
+
+ - ai@6.0.44
+
+ ## 2.0.43
+
+ ### Patch Changes
+
+ - Updated dependencies [2dc9bfa]
+ - ai@6.0.43
+
+ ## 2.0.42
+
+ ### Patch Changes
+
+ - ai@6.0.42
+
+ ## 2.0.41
+
+ ### Patch Changes
+
+ - Updated dependencies [84b6e6d]
+ - ai@6.0.41
+
+ ## 2.0.40
+
+ ### Patch Changes
+
+ - Updated dependencies [ab57783]
+ - ai@6.0.40
+
+ ## 2.0.39
+
+ ### Patch Changes
+
+ - Updated dependencies [4e28ba0]
+ - ai@6.0.39
+
+ ## 2.0.38
+
+ ### Patch Changes
+
+ - ai@6.0.38
+
+ ## 2.0.37
+
+ ### Patch Changes
+
+ - Updated dependencies [b5dab9b]
+ - ai@6.0.37
+
+ ## 2.0.36
+
+ ### Patch Changes
+
+ - Updated dependencies [46f46e4]
+ - ai@6.0.36
+
+ ## 2.0.35
+
+ ### Patch Changes
+
+ - Updated dependencies [d7e7f1f]
+ - ai@6.0.35
+
+ ## 2.0.34
+
+ ### Patch Changes
+
+ - Updated dependencies [1b11dcb]
+ - ai@6.0.34
+
+ ## 2.0.33
+
+ ### Patch Changes
+
+ - Updated dependencies [0ca078c]
+ - ai@6.0.33
+
+ ## 2.0.32
+
+ ### Patch Changes
+
+ - Updated dependencies [ec24401]
+ - ai@6.0.32

  ## 2.0.31

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ai-sdk/llamaindex",
- "version": "0.0.0-0219f568-20260113124214",
+ "version": "0.0.0-17394c74-20260122151521",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
@@ -8,6 +8,11 @@
  "types": "./dist/index.d.ts",
  "files": [
  "dist/**/*",
+ "src",
+ "!src/**/*.test.ts",
+ "!src/**/*.test-d.ts",
+ "!src/**/__snapshots__",
+ "!src/**/__fixtures__",
  "CHANGELOG.md",
  "README.md"
  ],
@@ -20,7 +25,7 @@
  }
  },
  "dependencies": {
- "ai": "0.0.0-0219f568-20260113124214"
+ "ai": "0.0.0-17394c74-20260122151521"
  },
  "devDependencies": {
  "@types/node": "20.17.24",
package/src/index.ts ADDED
@@ -0,0 +1 @@
+ export * from './llamaindex-adapter';
package/src/llamaindex-adapter.ts ADDED
@@ -0,0 +1,52 @@
+ import { UIMessageChunk } from 'ai';
+ import { convertAsyncIteratorToReadableStream } from 'ai/internal';
+ import {
+   createCallbacksTransformer,
+   StreamCallbacks,
+ } from './stream-callbacks';
+
+ type EngineResponse = {
+   delta: string;
+ };
+
+ export function toUIMessageStream(
+   stream: AsyncIterable<EngineResponse>,
+   callbacks?: StreamCallbacks,
+ ) {
+   const trimStart = trimStartOfStream();
+
+   return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]())
+     .pipeThrough(
+       new TransformStream({
+         async transform(message, controller): Promise<void> {
+           controller.enqueue(trimStart(message.delta));
+         },
+       }),
+     )
+     .pipeThrough(createCallbacksTransformer(callbacks))
+     .pipeThrough(
+       new TransformStream<string, UIMessageChunk>({
+         start: async controller => {
+           controller.enqueue({ type: 'text-start', id: '1' });
+         },
+         transform: async (chunk, controller) => {
+           controller.enqueue({ type: 'text-delta', delta: chunk, id: '1' });
+         },
+         flush: async controller => {
+           controller.enqueue({ type: 'text-end', id: '1' });
+         },
+       }),
+     );
+ }
+
+ function trimStartOfStream(): (text: string) => string {
+   let isStreamStart = true;
+
+   return (text: string): string => {
+     if (isStreamStart) {
+       text = text.trimStart();
+       if (text) isStreamStart = false;
+     }
+     return text;
+   };
+ }
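
The file above is the core adapter: it turns a LlamaIndex `AsyncIterable<EngineResponse>` into a `ReadableStream<UIMessageChunk>` by trimming leading whitespace, running the optional `StreamCallbacks`, and wrapping each delta in `text-start`/`text-delta`/`text-end` chunks. A minimal usage sketch follows; the `SimpleChatEngine` setup and the `createUIMessageStreamResponse` call are assumptions based on the public LlamaIndexTS and AI SDK APIs, not code shipped in this package.

```ts
// Hypothetical usage sketch; not part of this package's published source.
import { toUIMessageStream } from '@ai-sdk/llamaindex';
import { createUIMessageStreamResponse } from 'ai';
import { SimpleChatEngine } from 'llamaindex';

export async function POST(req: Request) {
  const { prompt } = (await req.json()) as { prompt: string };

  // Assumption: the LLM is configured elsewhere (e.g. via LlamaIndex Settings).
  const chatEngine = new SimpleChatEngine();

  // With stream: true the engine yields EngineResponse chunks whose `delta`
  // field carries the text, which is what toUIMessageStream reads.
  const stream = await chatEngine.chat({ message: prompt, stream: true });

  // Convert to UIMessageChunks and return a UI message stream response
  // that useChat on the client can consume.
  return createUIMessageStreamResponse({
    stream: toUIMessageStream(stream, {
      onFinal: completion => console.log('completion length:', completion.length),
    }),
  });
}
```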
package/src/stream-callbacks.ts ADDED
@@ -0,0 +1,65 @@
+ /**
+  * Configuration options and helper callback methods for stream lifecycle events.
+  */
+ export interface StreamCallbacks {
+   /** `onStart`: Called once when the stream is initialized. */
+   onStart?: () => Promise<void> | void;
+
+   /** `onFinal`: Called once when the stream is closed with the final completion message. */
+   onFinal?: (completion: string) => Promise<void> | void;
+
+   /** `onToken`: Called for each tokenized message. */
+   onToken?: (token: string) => Promise<void> | void;
+
+   /** `onText`: Called for each text chunk. */
+   onText?: (text: string) => Promise<void> | void;
+ }
+
+ /**
+  * Creates a transform stream that forwards input messages unchanged and invokes optional callback functions.
+  * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.
+  * - `onStart`: Called once when the stream is initialized.
+  * - `onToken`: Called for each tokenized message.
+  * - `onFinal`: Called once when the stream is closed with the final completion message.
+  *
+  * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.
+  *
+  * @param {StreamCallbacks} [callbacks] - An object containing the callback functions.
+  * @return {TransformStream<string, string>} A transform stream that passes input messages through and allows the execution of custom logic through callbacks.
+  *
+  * @example
+  * const callbacks = {
+  *   onStart: async () => console.log('Stream started'),
+  *   onToken: async (token) => console.log(`Token: ${token}`),
+  *   onFinal: async (completion) => console.log(`Completion: ${completion}`)
+  * };
+  * const transformer = createCallbacksTransformer(callbacks);
+  */
+ export function createCallbacksTransformer(
+   callbacks: StreamCallbacks | undefined = {},
+ ): TransformStream<string, string> {
+   let aggregatedResponse = '';
+
+   return new TransformStream({
+     async start(): Promise<void> {
+       if (callbacks.onStart) await callbacks.onStart();
+     },
+
+     async transform(message, controller): Promise<void> {
+       controller.enqueue(message);
+
+       aggregatedResponse += message;
+
+       if (callbacks.onToken) await callbacks.onToken(message);
+       if (callbacks.onText && typeof message === 'string') {
+         await callbacks.onText(message);
+       }
+     },
+
+     async flush(): Promise<void> {
+       if (callbacks.onFinal) {
+         await callbacks.onFinal(aggregatedResponse);
+       }
+     },
+   });
+ }
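
`createCallbacksTransformer` is an identity transform over text chunks: it forwards each chunk, accumulates the full response, and fires `onStart`, then `onToken`/`onText` per chunk, and finally `onFinal` with the aggregated completion on flush. Below is a hypothetical standalone sketch of that lifecycle, written as if it sat next to `src/stream-callbacks.ts` (the helper is not re-exported from the package entry point).

```ts
// Hypothetical sketch; not part of the diff. Assumes placement beside
// src/stream-callbacks.ts and a runtime with Web Streams (Node 18+).
import { createCallbacksTransformer } from './stream-callbacks';

async function demo(): Promise<void> {
  // A tiny source stream standing in for trimmed LlamaIndex deltas.
  const source = new ReadableStream<string>({
    start(controller) {
      controller.enqueue('Hello, ');
      controller.enqueue('world!');
      controller.close();
    },
  });

  const transformed = source.pipeThrough(
    createCallbacksTransformer({
      onStart: () => console.log('stream started'),
      onToken: token => console.log('token:', token),
      // flush() delivers the aggregated response: "Hello, world!"
      onFinal: completion => console.log('final:', completion),
    }),
  );

  // Drain the stream so transform() and flush() actually run.
  const reader = transformed.getReader();
  while (!(await reader.read()).done) {
    // discard chunks; the callbacks do the logging
  }
}

void demo();
```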