ai 2.2.2 → 2.2.3
This diff compares the content of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- package/dist/index.d.ts +40 -2
- package/dist/index.js +10 -4
- package/dist/index.mjs +10 -4
- package/package.json +1 -1
- package/react/dist/index.js +25 -23
- package/react/dist/index.mjs +25 -23
package/dist/index.d.ts
CHANGED
@@ -197,7 +197,7 @@ type OpenAIStreamCallbacks = AIStreamCallbacksAndOptions & {
      * })
      * ```
      */
-    experimental_onFunctionCall?: (functionCallPayload: FunctionCallPayload, createFunctionCallMessages: (functionCallResult: JSONValue) => CreateMessage[]) => Promise<Response | undefined | void | string |
+    experimental_onFunctionCall?: (functionCallPayload: FunctionCallPayload, createFunctionCallMessages: (functionCallResult: JSONValue) => CreateMessage[]) => Promise<Response | undefined | void | string | AsyncIterableOpenAIStreamReturnTypes>;
 };
 interface ChatCompletionChunk {
     id: string;
@@ -239,7 +239,45 @@ interface FunctionCall {
      */
     name?: string;
 }
-
+/**
+ * https://github.com/openai/openai-node/blob/3ec43ee790a2eb6a0ccdd5f25faa23251b0f9b8e/src/resources/completions.ts#L28C1-L64C1
+ * Completions API. Streamed and non-streamed responses are the same.
+ */
+interface Completion {
+    /**
+     * A unique identifier for the completion.
+     */
+    id: string;
+    /**
+     * The list of completion choices the model generated for the input prompt.
+     */
+    choices: Array<CompletionChoice>;
+    /**
+     * The Unix timestamp of when the completion was created.
+     */
+    created: number;
+    /**
+     * The model used for completion.
+     */
+    model: string;
+    /**
+     * The object type, which is always "text_completion"
+     */
+    object: string;
+}
+interface CompletionChoice {
+    /**
+     * The reason the model stopped generating tokens. This will be `stop` if the model
+     * hit a natural stop point or a provided stop sequence, or `length` if the maximum
+     * number of tokens specified in the request was reached.
+     */
+    finish_reason: 'stop' | 'length';
+    index: number;
+    logprobs: any | null;
+    text: string;
+}
+type AsyncIterableOpenAIStreamReturnTypes = AsyncIterable<ChatCompletionChunk> | AsyncIterable<Completion>;
+declare function OpenAIStream(res: Response | AsyncIterableOpenAIStreamReturnTypes, callbacks?: OpenAIStreamCallbacks): ReadableStream;
 
 interface FunctionCallPayload {
     name: string;
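The practical effect of the widened return type: `experimental_onFunctionCall` can now answer a detected function call by returning another streamed OpenAI response directly, from either the Chat Completions or the classic Completions API. A minimal sketch of the pattern, assuming the `openai` v4 client; the route handler, model name, `functions` definition, and the `{ answer: 42 }` result payload are illustrative only, and the message types are mixed loosely for brevity:

import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse } from 'ai';

const openai = new OpenAI();

// Illustrative function definition and conversation (not from the diff).
const functions = [
  {
    name: 'get_answer',
    description: 'Returns the answer to the question.',
    parameters: { type: 'object', properties: {} },
  },
];
const messages = [{ role: 'user' as const, content: 'What is the answer?' }];

export async function POST() {
  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages,
    functions,
  });

  const stream = OpenAIStream(response, {
    experimental_onFunctionCall: async (payload, createFunctionCallMessages) => {
      // As of 2.2.3 this callback may resolve to a raw OpenAI stream
      // (AsyncIterable<ChatCompletionChunk> | AsyncIterable<Completion>)
      // instead of only Response | string | undefined | void.
      return openai.chat.completions.create({
        model: 'gpt-3.5-turbo',
        stream: true,
        messages: [...messages, ...createFunctionCallMessages({ answer: 42 })],
      });
    },
  });

  return new StreamingTextResponse(stream);
}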
package/dist/index.js
CHANGED
@@ -315,11 +315,11 @@ function chunkToText() {
   const trimStartOfStream = trimStartOfStreamHelper();
   let isFunctionStreamingIn;
   return (json) => {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j
-    if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+    if (isChatCompletionChunk(json) && ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name)) {
       isFunctionStreamingIn = true;
       return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
-    } else if ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments) {
+    } else if (isChatCompletionChunk(json) && ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments)) {
       const argumentChunk = json.choices[0].delta.function_call.arguments;
       let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
       return `${escapedPartialJson}`;
@@ -328,12 +328,18 @@ function chunkToText() {
       return '"}}';
     }
     const text = trimStartOfStream(
-      (
+      isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
     );
     return text;
   };
 }
 var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
+function isChatCompletionChunk(data) {
+  return "choices" in data && "delta" in data.choices[0];
+}
+function isCompletion(data) {
+  return "choices" in data && "text" in data.choices[0];
+}
 function OpenAIStream(res, callbacks) {
   const cb = callbacks;
   let stream;
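Both runtime builds now branch on the shape of the incoming JSON instead of assuming a chat chunk. The same discrimination, restated as a standalone TypeScript sketch; `ChatChunk` and `TextCompletion` are simplified stand-ins for the package's `ChatCompletionChunk` and `Completion` interfaces:

// Simplified stand-ins for the two shapes the new guards distinguish.
type ChatChunk = {
  choices: Array<{ delta: { content?: string; function_call?: { name?: string; arguments?: string } } }>;
};
type TextCompletion = {
  choices: Array<{ text: string }>;
};

function isChatCompletionChunk(data: ChatChunk | TextCompletion): data is ChatChunk {
  return 'choices' in data && 'delta' in data.choices[0];
}

function isCompletion(data: ChatChunk | TextCompletion): data is TextCompletion {
  return 'choices' in data && 'text' in data.choices[0];
}

// Mirrors the new chunkToText fallthrough: prefer delta content for chat
// chunks, fall back to `text` for classic completions, else empty string.
function chunkToText(json: ChatChunk | TextCompletion): string {
  if (isChatCompletionChunk(json)) {
    return json.choices[0].delta.content ?? '';
  }
  if (isCompletion(json)) {
    return json.choices[0].text;
  }
  return '';
}

Keying on `delta` versus `text` in the first choice is what lets the single `OpenAIStream` entry point accept both stream flavors at runtime.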
package/dist/index.mjs
CHANGED
@@ -270,11 +270,11 @@ function chunkToText() {
   const trimStartOfStream = trimStartOfStreamHelper();
   let isFunctionStreamingIn;
   return (json) => {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j
-    if ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name) {
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+    if (isChatCompletionChunk(json) && ((_c = (_b = (_a = json.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.function_call) == null ? void 0 : _c.name)) {
       isFunctionStreamingIn = true;
       return `{"function_call": {"name": "${(_e = (_d = json.choices[0]) == null ? void 0 : _d.delta) == null ? void 0 : _e.function_call.name}", "arguments": "`;
-    } else if ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments) {
+    } else if (isChatCompletionChunk(json) && ((_h = (_g = (_f = json.choices[0]) == null ? void 0 : _f.delta) == null ? void 0 : _g.function_call) == null ? void 0 : _h.arguments)) {
       const argumentChunk = json.choices[0].delta.function_call.arguments;
       let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
       return `${escapedPartialJson}`;
@@ -283,12 +283,18 @@ function chunkToText() {
       return '"}}';
     }
     const text = trimStartOfStream(
-      (
+      isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
     );
     return text;
   };
 }
 var __internal__OpenAIFnMessagesSymbol = Symbol("internal_openai_fn_messages");
+function isChatCompletionChunk(data) {
+  return "choices" in data && "delta" in data.choices[0];
+}
+function isCompletion(data) {
+  return "choices" in data && "text" in data.choices[0];
+}
 function OpenAIStream(res, callbacks) {
   const cb = callbacks;
   let stream;
package/package.json
CHANGED
package/react/dist/index.js
CHANGED
@@ -144,14 +144,6 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
   const reader = res.body.getReader();
   const decode = createChunkDecoder(isComplexMode);
   let responseMessages = [];
-  let streamedResponse = "";
-  const replyId = nanoid();
-  let responseMessage = {
-    id: replyId,
-    createdAt,
-    content: "",
-    role: "assistant"
-  };
   let responseData = [];
   const prefixMap = {};
   if (isComplexMode) {
@@ -182,8 +174,22 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
           };
         }
       }
+      let functionCallMessage = null;
       if (type === "function_call") {
         prefixMap["function_call"] = value2;
+        let functionCall = prefixMap["function_call"];
+        if (functionCall && typeof functionCall === "string") {
+          const parsedFunctionCall = JSON.parse(functionCall).function_call;
+          functionCallMessage = {
+            id: nanoid(),
+            role: "assistant",
+            content: "",
+            function_call: parsedFunctionCall,
+            name: parsedFunctionCall.name,
+            createdAt
+          };
+          prefixMap["function_call"] = functionCallMessage;
+        }
       }
       if (type === "data") {
         const parsedValue = JSON.parse(value2);
@@ -194,24 +200,12 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
         }
       }
       const data = prefixMap["data"];
-      const functionCall = prefixMap["function_call"];
-      const responseMessage2 = prefixMap["text"];
-      let functionCallMessage = null;
-      if (functionCall) {
-        const parsedFunctionCall = JSON.parse(functionCall).function_call;
-        functionCallMessage = {
-          id: nanoid(),
-          role: "function",
-          content: "",
-          name: parsedFunctionCall.name,
-          createdAt
-        };
-      }
-      const merged = [functionCallMessage, responseMessage2].filter(
+      const responseMessage = prefixMap["text"];
+      const merged = [functionCallMessage, responseMessage].filter(
         Boolean
       );
       mutate([...chatRequest.messages, ...merged], false);
-      mutateStreamData([...existingData || [], ...data || []]);
+      mutateStreamData([...existingData || [], ...data || []], false);
       if (abortControllerRef.current === null) {
         reader.cancel();
         break;
@@ -230,6 +224,14 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
     }
     return { messages: responseMessages, data: responseData };
   } else {
+    let streamedResponse = "";
+    const replyId = nanoid();
+    let responseMessage = {
+      id: replyId,
+      createdAt,
+      content: "",
+      role: "assistant"
+    };
     while (true) {
       const { done, value } = await reader.read();
       if (done) {
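Two behavioral changes land together in the React bindings. First, the `function_call` prefix part is now parsed the moment it streams in, so the synthesized message (role `"assistant"` rather than 2.2.2's `"function"`, with the parsed call attached) can be merged into chat state mid-stream instead of after the reader loop finishes. Second, `mutateStreamData` gains a `false` second argument, which for the SWR mutate used here disables revalidation, matching the existing `mutate([...], false)` call. A simplified sketch of the new per-part handling; the `Message` shape and the `handleFunctionCallPart` helper are stand-ins, not the package's exact types:

import { nanoid } from 'nanoid';

// Simplified stand-in for the package's Message type.
interface Message {
  id: string;
  role: 'assistant' | 'user' | 'function';
  content: string;
  name?: string;
  function_call?: { name: string; arguments: string };
  createdAt?: Date;
}

// Mirrors the new loop body: parse the function_call payload as soon as it
// arrives, so the UI can render the call while the stream is still open.
function handleFunctionCallPart(value: string, createdAt: Date): Message {
  const parsedFunctionCall = JSON.parse(value).function_call;
  return {
    id: nanoid(),
    role: 'assistant', // 2.2.2 created this message with role "function"
    content: '',
    function_call: parsedFunctionCall, // newly attached to the message
    name: parsedFunctionCall.name,
    createdAt,
  };
}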
package/react/dist/index.mjs
CHANGED
@@ -108,14 +108,6 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
   const reader = res.body.getReader();
   const decode = createChunkDecoder(isComplexMode);
   let responseMessages = [];
-  let streamedResponse = "";
-  const replyId = nanoid();
-  let responseMessage = {
-    id: replyId,
-    createdAt,
-    content: "",
-    role: "assistant"
-  };
   let responseData = [];
   const prefixMap = {};
   if (isComplexMode) {
@@ -146,8 +138,22 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
           };
         }
       }
+      let functionCallMessage = null;
       if (type === "function_call") {
         prefixMap["function_call"] = value2;
+        let functionCall = prefixMap["function_call"];
+        if (functionCall && typeof functionCall === "string") {
+          const parsedFunctionCall = JSON.parse(functionCall).function_call;
+          functionCallMessage = {
+            id: nanoid(),
+            role: "assistant",
+            content: "",
+            function_call: parsedFunctionCall,
+            name: parsedFunctionCall.name,
+            createdAt
+          };
+          prefixMap["function_call"] = functionCallMessage;
+        }
       }
       if (type === "data") {
         const parsedValue = JSON.parse(value2);
@@ -158,24 +164,12 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
         }
       }
       const data = prefixMap["data"];
-      const functionCall = prefixMap["function_call"];
-      const responseMessage2 = prefixMap["text"];
-      let functionCallMessage = null;
-      if (functionCall) {
-        const parsedFunctionCall = JSON.parse(functionCall).function_call;
-        functionCallMessage = {
-          id: nanoid(),
-          role: "function",
-          content: "",
-          name: parsedFunctionCall.name,
-          createdAt
-        };
-      }
-      const merged = [functionCallMessage, responseMessage2].filter(
+      const responseMessage = prefixMap["text"];
+      const merged = [functionCallMessage, responseMessage].filter(
        Boolean
      );
      mutate([...chatRequest.messages, ...merged], false);
-      mutateStreamData([...existingData || [], ...data || []]);
+      mutateStreamData([...existingData || [], ...data || []], false);
      if (abortControllerRef.current === null) {
        reader.cancel();
        break;
@@ -194,6 +188,14 @@ var getStreamedResponse = async (api, chatRequest, mutate, mutateStreamData, exi
     }
     return { messages: responseMessages, data: responseData };
   } else {
+    let streamedResponse = "";
+    const replyId = nanoid();
+    let responseMessage = {
+      id: replyId,
+      createdAt,
+      content: "",
+      role: "assistant"
+    };
     while (true) {
       const { done, value } = await reader.read();
       if (done) {