ai 2.1.18 → 2.1.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -1,7 +1,11 @@
- import { ServerResponse } from 'node:http';
  import { ChatCompletionRequestMessageFunctionCall, CreateChatCompletionRequestFunctionCall } from 'openai-edge';
  import { ChatCompletionFunctions } from 'openai-edge/types/api';
+ import { ServerResponse } from 'node:http';

+ interface FunctionCallPayload {
+   name: string;
+   arguments: Record<string, unknown>;
+ }
  /**
   * Helper callback methods for AIStream stream lifecycle events
   * @interface
@@ -81,44 +85,6 @@ declare function trimStartOfStreamHelper(): (text: string) => string;
   */
  declare function AIStream(response: Response, customParser: AIStreamParser, callbacks?: AIStreamCallbacks): ReadableStream;

- declare function OpenAIStream(res: Response, cb?: AIStreamCallbacks): ReadableStream;
-
- /**
-  * A utility class for streaming text responses.
-  */
- declare class StreamingTextResponse extends Response {
-   constructor(res: ReadableStream, init?: ResponseInit);
- }
- /**
-  * A utility function to stream a ReadableStream to a Node.js response-like object.
-  */
- declare function streamToResponse(res: ReadableStream, response: ServerResponse, init?: {
-   headers?: Record<string, string>;
-   status?: number;
- }): void;
-
- declare function HuggingFaceStream(res: AsyncGenerator<any>, callbacks?: AIStreamCallbacks): ReadableStream;
-
- declare function CohereStream(reader: Response, callbacks?: AIStreamCallbacks): ReadableStream;
-
- declare function AnthropicStream(res: Response, cb?: AIStreamCallbacks): ReadableStream;
-
- declare function LangChainStream(callbacks?: AIStreamCallbacks): {
-   stream: ReadableStream<Uint8Array>;
-   handlers: {
-     handleLLMNewToken: (token: string) => Promise<void>;
-     handleLLMStart: (_llm: any, _prompts: string[], runId: string) => Promise<void>;
-     handleLLMEnd: (_output: any, runId: string) => Promise<void>;
-     handleLLMError: (e: Error, runId: string) => Promise<void>;
-     handleChainStart: (_chain: any, _inputs: any, runId: string) => Promise<void>;
-     handleChainEnd: (_outputs: any, runId: string) => Promise<void>;
-     handleChainError: (e: Error, runId: string) => Promise<void>;
-     handleToolStart: (_tool: any, _input: string, runId: string) => Promise<void>;
-     handleToolEnd: (_output: string, runId: string) => Promise<void>;
-     handleToolError: (e: Error, runId: string) => Promise<void>;
-   };
- };
-
  /**
   * Shared types between the API and UI packages.
   */
@@ -283,7 +249,78 @@ type UseCompletionOptions = {
    body?: object;
  };

+ type JSONValue = null | string | number | boolean | {
+   [x: string]: JSONValue;
+ } | Array<JSONValue>;
+ type OpenAIStreamCallbacks = AIStreamCallbacks & {
+   /**
+    * @example
+    * ```js
+    * const response = await openai.createChatCompletion({
+    *   model: 'gpt-3.5-turbo-0613',
+    *   stream: true,
+    *   messages,
+    *   functions,
+    * })
+    *
+    * const stream = OpenAIStream(response, {
+    *   experimental_onFunctionCall: async (functionCallPayload, createFunctionCallMessages) => {
+    *     // ... run your custom logic here
+    *     const result = await myFunction(functionCallPayload)
+    *
+    *     // Ask for another completion, or return a string to send to the client as an assistant message.
+    *     return await openai.createChatCompletion({
+    *       model: 'gpt-3.5-turbo-0613',
+    *       stream: true,
+    *       // Append the relevant "assistant" and "function" call messages
+    *       messages: [...messages, ...createFunctionCallMessages(result)],
+    *       functions,
+    *     })
+    *   }
+    * })
+    * ```
+    */
+   experimental_onFunctionCall?: (functionCallPayload: FunctionCallPayload, createFunctionCallMessages: (functionCallResult: JSONValue) => CreateMessage[]) => Promise<Response | undefined | void | string>;
+ };
+ declare function OpenAIStream(res: Response, callbacks?: OpenAIStreamCallbacks): ReadableStream;
+
+ /**
+  * A utility class for streaming text responses.
+  */
+ declare class StreamingTextResponse extends Response {
+   constructor(res: ReadableStream, init?: ResponseInit);
+ }
+ /**
+  * A utility function to stream a ReadableStream to a Node.js response-like object.
+  */
+ declare function streamToResponse(res: ReadableStream, response: ServerResponse, init?: {
+   headers?: Record<string, string>;
+   status?: number;
+ }): void;
+
+ declare function HuggingFaceStream(res: AsyncGenerator<any>, callbacks?: AIStreamCallbacks): ReadableStream;
+
+ declare function CohereStream(reader: Response, callbacks?: AIStreamCallbacks): ReadableStream;
+
+ declare function AnthropicStream(res: Response, cb?: AIStreamCallbacks): ReadableStream;
+
+ declare function LangChainStream(callbacks?: AIStreamCallbacks): {
+   stream: ReadableStream<Uint8Array>;
+   handlers: {
+     handleLLMNewToken: (token: string) => Promise<void>;
+     handleLLMStart: (_llm: any, _prompts: string[], runId: string) => Promise<void>;
+     handleLLMEnd: (_output: any, runId: string) => Promise<void>;
+     handleLLMError: (e: Error, runId: string) => Promise<void>;
+     handleChainStart: (_chain: any, _inputs: any, runId: string) => Promise<void>;
+     handleChainEnd: (_outputs: any, runId: string) => Promise<void>;
+     handleChainError: (e: Error, runId: string) => Promise<void>;
+     handleToolStart: (_tool: any, _input: string, runId: string) => Promise<void>;
+     handleToolEnd: (_output: string, runId: string) => Promise<void>;
+     handleToolError: (e: Error, runId: string) => Promise<void>;
+   };
+ };
+
  declare const nanoid: (size?: number | undefined) => string;
  declare function createChunkDecoder(): (chunk: Uint8Array | undefined) => string;

- export { AIStream, AIStreamCallbacks, AIStreamParser, AnthropicStream, ChatRequest, ChatRequestOptions, CohereStream, CreateMessage, FunctionCallHandler, HuggingFaceStream, LangChainStream, Message, OpenAIStream, RequestOptions, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, nanoid, streamToResponse, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacks, AIStreamParser, AnthropicStream, ChatRequest, ChatRequestOptions, CohereStream, CreateMessage, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, RequestOptions, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, nanoid, streamToResponse, trimStartOfStreamHelper };
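Taken together, the index.d.ts changes add streaming function-call support: OpenAIStream now accepts OpenAIStreamCallbacks, whose experimental_onFunctionCall handler receives a parsed FunctionCallPayload plus a helper for building the follow-up messages. A hedged sketch of a route using the new surface; the Next.js-style POST export, the get_current_weather schema, and the getWeather stub are illustrative assumptions, not part of the package:

```ts
// Sketch only. Assumptions (not from the diff): an edge route with a POST
// export, the get_current_weather schema, and the getWeather stub below.
import { Configuration, OpenAIApi } from 'openai-edge'
import { OpenAIStream, StreamingTextResponse } from 'ai'

const openai = new OpenAIApi(
  new Configuration({ apiKey: process.env.OPENAI_API_KEY }),
)

// Illustrative function schema advertised to the model.
const functions = [
  {
    name: 'get_current_weather',
    description: 'Get the current weather for a location',
    parameters: {
      type: 'object',
      properties: { location: { type: 'string' } },
      required: ['location'],
    },
  },
]

// Hypothetical helper: replace with a real data source.
async function getWeather(location: string) {
  return { location, temperatureC: 21, condition: 'sunny' }
}

export async function POST(req: Request) {
  const { messages } = await req.json()
  const response = await openai.createChatCompletion({
    model: 'gpt-3.5-turbo-0613',
    stream: true,
    messages,
    functions,
  })

  const stream = OpenAIStream(response, {
    experimental_onFunctionCall: async (
      { name, arguments: args },
      createFunctionCallMessages,
    ) => {
      // Returning undefined streams the raw function_call payload through.
      if (name !== 'get_current_weather') return
      const result = await getWeather(args.location as string)
      // Ask for a follow-up completion that sees the result via the
      // generated "assistant" and "function" messages.
      return openai.createChatCompletion({
        model: 'gpt-3.5-turbo-0613',
        stream: true,
        messages: [...messages, ...createFunctionCallMessages(result)],
        functions,
      })
    },
  })

  return new StreamingTextResponse(stream)
}
```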
package/dist/index.js CHANGED
@@ -196,8 +196,94 @@ function parseOpenAIStream() {
      return text;
    };
  }
- function OpenAIStream(res, cb) {
-   return AIStream(res, parseOpenAIStream(), cb);
+ var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
+ function OpenAIStream(res, callbacks) {
+   const cb = callbacks;
+   const stream = AIStream(res, parseOpenAIStream(), cb);
+   if (cb && cb.experimental_onFunctionCall) {
+     const functionCallTransformer = createFunctionCallTransformer(cb);
+     return stream.pipeThrough(functionCallTransformer);
+   } else {
+     return stream;
+   }
+ }
+ function createFunctionCallTransformer(callbacks) {
+   const textEncoder = new TextEncoder();
+   let isFirstChunk = true;
+   let aggregatedResponse = "";
+   let isFunctionStreamingIn = false;
+   let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
+   return new TransformStream({
+     transform(chunk, controller) {
+       return __async(this, null, function* () {
+         const message = new TextDecoder().decode(chunk);
+         const shouldHandleAsFunction = isFirstChunk && message.startsWith('{"function_call":');
+         if (shouldHandleAsFunction) {
+           isFunctionStreamingIn = true;
+           aggregatedResponse += message;
+           isFirstChunk = false;
+           return;
+         }
+         if (!isFunctionStreamingIn) {
+           controller.enqueue(chunk);
+           return;
+         } else {
+           aggregatedResponse += message;
+         }
+       });
+     },
+     flush(controller) {
+       return __async(this, null, function* () {
+         const isEndOfFunction = !isFirstChunk && callbacks.experimental_onFunctionCall && isFunctionStreamingIn;
+         if (isEndOfFunction && callbacks.experimental_onFunctionCall) {
+           isFunctionStreamingIn = false;
+           const payload = JSON.parse(aggregatedResponse);
+           const argumentsPayload = JSON.parse(payload.function_call.arguments);
+           let newFunctionCallMessages = [...functionCallMessages];
+           const functionResponse = yield callbacks.experimental_onFunctionCall(
+             {
+               name: payload.function_call.name,
+               arguments: argumentsPayload
+             },
+             (result) => {
+               newFunctionCallMessages = [
+                 ...functionCallMessages,
+                 {
+                   role: "assistant",
+                   content: "",
+                   function_call: payload.function_call
+                 },
+                 {
+                   role: "function",
+                   name: payload.function_call.name,
+                   content: JSON.stringify(result)
+                 }
+               ];
+               return newFunctionCallMessages;
+             }
+           );
+           if (!functionResponse) {
+             controller.enqueue(textEncoder.encode(aggregatedResponse));
+             return;
+           } else if (typeof functionResponse === "string") {
+             controller.enqueue(textEncoder.encode(functionResponse));
+             return;
+           }
+           const openAIStream = OpenAIStream(functionResponse, __spreadProps(__spreadValues({}, callbacks), {
+             [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+           }));
+           const reader = openAIStream.getReader();
+           while (true) {
+             const { done, value } = yield reader.read();
+             if (done) {
+               break;
+             }
+             controller.enqueue(value);
+           }
+         }
+       });
+     }
+   });
  }

  // streams/streaming-text-response.ts
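The build output shows how the handler works: createFunctionCallTransformer forwards plain text chunks untouched, but when the first chunk starts with '{"function_call":' it buffers the entire stream and only invokes experimental_onFunctionCall in flush(), optionally re-entering OpenAIStream with the accumulated messages carried on an internal Symbol key. A minimal sketch of that buffer-then-flush pattern in isolation; names are illustrative, and the recursive Response case shown above is reduced here to emitting the handler's string:

```ts
// Sketch of the buffering TransformStream pattern, assuming UTF-8 text
// chunks. Plain text passes straight through; a function-call payload is
// held back and handled once the upstream ends.
function bufferFunctionCall(
  onFunctionCall: (raw: string) => Promise<string>, // hypothetical handler
): TransformStream<Uint8Array, Uint8Array> {
  const decoder = new TextDecoder()
  const encoder = new TextEncoder()
  let isFirstChunk = true
  let buffering = false
  let buffer = ''

  return new TransformStream<Uint8Array, Uint8Array>({
    transform(chunk, controller) {
      const text = decoder.decode(chunk)
      if (isFirstChunk && text.startsWith('{"function_call":')) {
        buffering = true
      }
      isFirstChunk = false
      if (buffering) {
        buffer += text // hold the payload until flush()
      } else {
        controller.enqueue(chunk) // ordinary text streams through untouched
      }
    },
    async flush(controller) {
      if (buffering) {
        // The real implementation parses the JSON and may pipe a whole new
        // OpenAIStream here; this sketch just emits the handler's string.
        controller.enqueue(encoder.encode(await onFunctionCall(buffer)))
      }
    },
  })
}

// Usage: upstream.pipeThrough(bufferFunctionCall(async (raw) => `got ${raw}`))
```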
package/dist/index.mjs CHANGED
@@ -163,8 +163,94 @@ function parseOpenAIStream() {
      return text;
    };
  }
- function OpenAIStream(res, cb) {
-   return AIStream(res, parseOpenAIStream(), cb);
+ var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
+ function OpenAIStream(res, callbacks) {
+   const cb = callbacks;
+   const stream = AIStream(res, parseOpenAIStream(), cb);
+   if (cb && cb.experimental_onFunctionCall) {
+     const functionCallTransformer = createFunctionCallTransformer(cb);
+     return stream.pipeThrough(functionCallTransformer);
+   } else {
+     return stream;
+   }
+ }
+ function createFunctionCallTransformer(callbacks) {
+   const textEncoder = new TextEncoder();
+   let isFirstChunk = true;
+   let aggregatedResponse = "";
+   let isFunctionStreamingIn = false;
+   let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
+   return new TransformStream({
+     transform(chunk, controller) {
+       return __async(this, null, function* () {
+         const message = new TextDecoder().decode(chunk);
+         const shouldHandleAsFunction = isFirstChunk && message.startsWith('{"function_call":');
+         if (shouldHandleAsFunction) {
+           isFunctionStreamingIn = true;
+           aggregatedResponse += message;
+           isFirstChunk = false;
+           return;
+         }
+         if (!isFunctionStreamingIn) {
+           controller.enqueue(chunk);
+           return;
+         } else {
+           aggregatedResponse += message;
+         }
+       });
+     },
+     flush(controller) {
+       return __async(this, null, function* () {
+         const isEndOfFunction = !isFirstChunk && callbacks.experimental_onFunctionCall && isFunctionStreamingIn;
+         if (isEndOfFunction && callbacks.experimental_onFunctionCall) {
+           isFunctionStreamingIn = false;
+           const payload = JSON.parse(aggregatedResponse);
+           const argumentsPayload = JSON.parse(payload.function_call.arguments);
+           let newFunctionCallMessages = [...functionCallMessages];
+           const functionResponse = yield callbacks.experimental_onFunctionCall(
+             {
+               name: payload.function_call.name,
+               arguments: argumentsPayload
+             },
+             (result) => {
+               newFunctionCallMessages = [
+                 ...functionCallMessages,
+                 {
+                   role: "assistant",
+                   content: "",
+                   function_call: payload.function_call
+                 },
+                 {
+                   role: "function",
+                   name: payload.function_call.name,
+                   content: JSON.stringify(result)
+                 }
+               ];
+               return newFunctionCallMessages;
+             }
+           );
+           if (!functionResponse) {
+             controller.enqueue(textEncoder.encode(aggregatedResponse));
+             return;
+           } else if (typeof functionResponse === "string") {
+             controller.enqueue(textEncoder.encode(functionResponse));
+             return;
+           }
+           const openAIStream = OpenAIStream(functionResponse, __spreadProps(__spreadValues({}, callbacks), {
+             [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
+           }));
+           const reader = openAIStream.getReader();
+           while (true) {
+             const { done, value } = yield reader.read();
+             if (done) {
+               break;
+             }
+             controller.enqueue(value);
+           }
+         }
+       });
+     }
+   });
  }

  // streams/streaming-text-response.ts
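The .mjs build carries the identical change for ESM consumers. Since the declarations above also re-declare streamToResponse against node:http's ServerResponse, here is a hedged sketch of serving one of these streams from plain Node.js; the openai-edge client, model choice, and port are assumptions:

```ts
// Sketch only: pipe an OpenAIStream into a Node ServerResponse via
// streamToResponse (signature from the .d.ts above). Assumes Node 18+
// for global fetch, which openai-edge relies on.
import { createServer } from 'node:http'
import { Configuration, OpenAIApi } from 'openai-edge'
import { OpenAIStream, streamToResponse } from 'ai'

const openai = new OpenAIApi(
  new Configuration({ apiKey: process.env.OPENAI_API_KEY }),
)

createServer(async (_req, res) => {
  const completion = await openai.createChatCompletion({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages: [{ role: 'user', content: 'Say hello' }],
  })
  // streamToResponse pipes the web ReadableStream into Node's
  // ServerResponse, applying the optional status and headers.
  streamToResponse(OpenAIStream(completion), res, {
    status: 200,
    headers: { 'Content-Type': 'text/plain; charset=utf-8' },
  })
}).listen(3000)
```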
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "ai",
-   "version": "2.1.18",
+   "version": "2.1.20",
    "license": "Apache-2.0",
    "sideEffects": false,
    "main": "./dist/index.js",