@ai-sdk/llamaindex 0.0.0-1c33ba03-20260114162300 → 0.0.0-4115c213-20260122152721

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,10 +1,98 @@
  # @ai-sdk/llamaindex

- ## 0.0.0-1c33ba03-20260114162300
+ ## 0.0.0-4115c213-20260122152721

  ### Patch Changes

- - ai@0.0.0-1c33ba03-20260114162300
+ - 4caafb2: chore: excluded tests from src folder in npm package
+ - Updated dependencies [4caafb2]
+ - ai@0.0.0-4115c213-20260122152721
+
+ ## 2.0.47
+
+ ### Patch Changes
+
+ - ai@6.0.47
+
+ ## 2.0.46
+
+ ### Patch Changes
+
+ - 8dc54db: chore: add src folders to package bundle
+ - ai@6.0.46
+
+ ## 2.0.45
+
+ ### Patch Changes
+
+ - ai@6.0.45
+
+ ## 2.0.44
+
+ ### Patch Changes
+
+ - ai@6.0.44
+
+ ## 2.0.43
+
+ ### Patch Changes
+
+ - Updated dependencies [2dc9bfa]
+ - ai@6.0.43
+
+ ## 2.0.42
+
+ ### Patch Changes
+
+ - ai@6.0.42
+
+ ## 2.0.41
+
+ ### Patch Changes
+
+ - Updated dependencies [84b6e6d]
+ - ai@6.0.41
+
+ ## 2.0.40
+
+ ### Patch Changes
+
+ - Updated dependencies [ab57783]
+ - ai@6.0.40
+
+ ## 2.0.39
+
+ ### Patch Changes
+
+ - Updated dependencies [4e28ba0]
+ - ai@6.0.39
+
+ ## 2.0.38
+
+ ### Patch Changes
+
+ - ai@6.0.38
+
+ ## 2.0.37
+
+ ### Patch Changes
+
+ - Updated dependencies [b5dab9b]
+ - ai@6.0.37
+
+ ## 2.0.36
+
+ ### Patch Changes
+
+ - Updated dependencies [46f46e4]
+ - ai@6.0.36
+
+ ## 2.0.35
+
+ ### Patch Changes
+
+ - Updated dependencies [d7e7f1f]
+ - ai@6.0.35

  ## 2.0.34

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@ai-sdk/llamaindex",
-   "version": "0.0.0-1c33ba03-20260114162300",
+   "version": "0.0.0-4115c213-20260122152721",
    "license": "Apache-2.0",
    "sideEffects": false,
    "main": "./dist/index.js",
@@ -8,6 +8,11 @@
    "types": "./dist/index.d.ts",
    "files": [
      "dist/**/*",
+     "src",
+     "!src/**/*.test.ts",
+     "!src/**/*.test-d.ts",
+     "!src/**/__snapshots__",
+     "!src/**/__fixtures__",
      "CHANGELOG.md",
      "README.md"
    ],
@@ -20,7 +25,7 @@
      }
    },
    "dependencies": {
-     "ai": "0.0.0-1c33ba03-20260114162300"
+     "ai": "0.0.0-4115c213-20260122152721"
    },
    "devDependencies": {
      "@types/node": "20.17.24",
package/src/index.ts ADDED
@@ -0,0 +1 @@
+ export * from './llamaindex-adapter';
package/src/llamaindex-adapter.ts ADDED
@@ -0,0 +1,52 @@
+ import { UIMessageChunk } from 'ai';
+ import { convertAsyncIteratorToReadableStream } from 'ai/internal';
+ import {
+   createCallbacksTransformer,
+   StreamCallbacks,
+ } from './stream-callbacks';
+
+ type EngineResponse = {
+   delta: string;
+ };
+
+ export function toUIMessageStream(
+   stream: AsyncIterable<EngineResponse>,
+   callbacks?: StreamCallbacks,
+ ) {
+   const trimStart = trimStartOfStream();
+
+   return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]())
+     .pipeThrough(
+       new TransformStream({
+         async transform(message, controller): Promise<void> {
+           controller.enqueue(trimStart(message.delta));
+         },
+       }),
+     )
+     .pipeThrough(createCallbacksTransformer(callbacks))
+     .pipeThrough(
+       new TransformStream<string, UIMessageChunk>({
+         start: async controller => {
+           controller.enqueue({ type: 'text-start', id: '1' });
+         },
+         transform: async (chunk, controller) => {
+           controller.enqueue({ type: 'text-delta', delta: chunk, id: '1' });
+         },
+         flush: async controller => {
+           controller.enqueue({ type: 'text-end', id: '1' });
+         },
+       }),
+     );
+ }
+
+ function trimStartOfStream(): (text: string) => string {
+   let isStreamStart = true;
+
+   return (text: string): string => {
+     if (isStreamStart) {
+       text = text.trimStart();
+       if (text) isStreamStart = false;
+     }
+     return text;
+   };
+ }
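For orientation, the sketch below shows one way the new `toUIMessageStream` adapter could be exercised. It is a minimal example, not part of the package: the `engineResponses` generator stands in for a LlamaIndex engine stream, and any async iterable of `{ delta: string }` chunks would work the same way.

```ts
import { toUIMessageStream } from '@ai-sdk/llamaindex';

// Hypothetical stand-in for a LlamaIndex engine response stream: the adapter
// only requires an AsyncIterable of objects with a string `delta` field.
async function* engineResponses() {
  yield { delta: '  Hello' }; // leading whitespace is trimmed at stream start
  yield { delta: ', world!' };
}

async function main() {
  const uiStream = toUIMessageStream(engineResponses(), {
    onStart: () => console.log('stream started'),
    onFinal: completion => console.log('final text:', completion),
  });

  // Drain the resulting UI message stream; chunks arrive as
  // text-start, then text-delta(s), then text-end.
  const reader = uiStream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value);
  }
}

main();
```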
package/src/stream-callbacks.ts ADDED
@@ -0,0 +1,65 @@
+ /**
+  * Configuration options and helper callback methods for stream lifecycle events.
+  */
+ export interface StreamCallbacks {
+   /** `onStart`: Called once when the stream is initialized. */
+   onStart?: () => Promise<void> | void;
+
+   /** `onFinal`: Called once when the stream is closed with the final completion message. */
+   onFinal?: (completion: string) => Promise<void> | void;
+
+   /** `onToken`: Called for each tokenized message. */
+   onToken?: (token: string) => Promise<void> | void;
+
+   /** `onText`: Called for each text chunk. */
+   onText?: (text: string) => Promise<void> | void;
+ }
+
+ /**
+  * Creates a transform stream that forwards input messages and invokes optional callback functions.
+  * The transform stream uses the provided callbacks to execute custom logic at different stages of the stream's lifecycle.
+  * - `onStart`: Called once when the stream is initialized.
+  * - `onToken`: Called for each tokenized message.
+  * - `onFinal`: Called once when the stream is closed with the final completion message.
+  *
+  * This function is useful when you want to process a stream of messages and perform specific actions during the stream's lifecycle.
+  *
+  * @param {StreamCallbacks} [callbacks] - An object containing the callback functions.
+  * @return {TransformStream<string, string>} A transform stream that passes input messages through unchanged and allows the execution of custom logic through callbacks.
+  *
+  * @example
+  * const callbacks = {
+  *   onStart: async () => console.log('Stream started'),
+  *   onToken: async (token) => console.log(`Token: ${token}`),
+  *   onFinal: async (completion) => console.log(`Completion: ${completion}`)
+  * };
+  * const transformer = createCallbacksTransformer(callbacks);
+  */
+ export function createCallbacksTransformer(
+   callbacks: StreamCallbacks | undefined = {},
+ ): TransformStream<string, string> {
+   let aggregatedResponse = '';
+
+   return new TransformStream({
+     async start(): Promise<void> {
+       if (callbacks.onStart) await callbacks.onStart();
+     },
+
+     async transform(message, controller): Promise<void> {
+       controller.enqueue(message);
+
+       aggregatedResponse += message;
+
+       if (callbacks.onToken) await callbacks.onToken(message);
+       if (callbacks.onText && typeof message === 'string') {
+         await callbacks.onText(message);
+       }
+     },
+
+     async flush(): Promise<void> {
+       if (callbacks.onFinal) {
+         await callbacks.onFinal(aggregatedResponse);
+       }
+     },
+   });
+ }
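And a similar sketch for the new `stream-callbacks` module in isolation. Note that `createCallbacksTransformer` is not re-exported from the package entry point, so the import path below assumes code living alongside the module inside `src/`; the sample strings are invented for the example.

```ts
import { createCallbacksTransformer } from './stream-callbacks';

// Example-only source of string chunks.
const source = new ReadableStream<string>({
  start(controller) {
    controller.enqueue('Hello, ');
    controller.enqueue('world!');
    controller.close();
  },
});

async function main() {
  const transformed = source.pipeThrough(
    createCallbacksTransformer({
      onStart: () => console.log('started'),
      onToken: token => console.log('token:', token),
      onFinal: full => console.log('aggregated:', full), // "Hello, world!"
    }),
  );

  // Each chunk passes through unchanged; the callbacks observe it on the way.
  const reader = transformed.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log('chunk:', value);
  }
}

main();
```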