ai 2.2.32 → 2.2.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -352,6 +352,41 @@ type StreamString = `${(typeof StreamStringPrefixes)[keyof typeof StreamStringPr
   */
  declare const COMPLEX_HEADER = "X-Experimental-Stream-Data";
 
+ declare interface AzureChatCompletions {
+     id: string;
+     created: Date;
+     choices: AzureChatChoice[];
+     systemFingerprint?: string;
+     usage?: AzureCompletionsUsage;
+     promptFilterResults: any[];
+ }
+ declare interface AzureChatChoice {
+     message?: AzureChatResponseMessage;
+     index: number;
+     finishReason: string | null;
+     delta?: AzureChatResponseMessage;
+ }
+ declare interface AzureChatResponseMessage {
+     role: string;
+     content: string | null;
+     toolCalls: AzureChatCompletionsFunctionToolCall[];
+     functionCall?: AzureFunctionCall;
+ }
+ declare interface AzureCompletionsUsage {
+     completionTokens: number;
+     promptTokens: number;
+     totalTokens: number;
+ }
+ declare interface AzureFunctionCall {
+     name: string;
+     arguments: string;
+ }
+ declare interface AzureChatCompletionsFunctionToolCall {
+     type: 'function';
+     function: AzureFunctionCall;
+     id: string;
+ }
+
  type OpenAIStreamCallbacks = AIStreamCallbacksAndOptions & {
      /**
       * @example
@@ -536,7 +571,7 @@ interface CompletionUsage {
       */
      total_tokens: number;
  }
- type AsyncIterableOpenAIStreamReturnTypes = AsyncIterable<ChatCompletionChunk> | AsyncIterable<Completion>;
+ type AsyncIterableOpenAIStreamReturnTypes = AsyncIterable<ChatCompletionChunk> | AsyncIterable<Completion> | AsyncIterable<AzureChatCompletions>;
  declare function OpenAIStream(res: Response | AsyncIterableOpenAIStreamReturnTypes, callbacks?: OpenAIStreamCallbacks): ReadableStream;
 
  interface FunctionCallPayload {
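
Because AsyncIterableOpenAIStreamReturnTypes now also accepts AsyncIterable<AzureChatCompletions>, a chat-completions stream from the Azure OpenAI client can apparently be handed straight to OpenAIStream. The following is a minimal sketch of that usage, assuming an @azure/openai client whose streamChatCompletions(deployment, messages) returns an async iterable; the endpoint, environment variable, and deployment name are placeholders and do not appear in this diff.

import { OpenAIClient, AzureKeyCredential } from "@azure/openai";
import { OpenAIStream, StreamingTextResponse } from "ai";

export async function POST(req: Request) {
  const { messages } = await req.json();

  // Placeholder Azure OpenAI resource settings -- not part of this diff.
  const client = new OpenAIClient(
    "https://my-resource.openai.azure.com/",
    new AzureKeyCredential(process.env.AZURE_OPENAI_API_KEY ?? "")
  );

  // The stream yields chunks shaped like the AzureChatCompletions interface
  // declared above (camelCase fields such as promptFilterResults and finishReason).
  const response = await client.streamChatCompletions("my-gpt-35-turbo-deployment", messages);

  // The widened AsyncIterableOpenAIStreamReturnTypes lets OpenAIStream consume it directly.
  const stream = OpenAIStream(response);
  return new StreamingTextResponse(stream);
}
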
package/dist/index.js CHANGED
@@ -782,7 +782,35 @@ function parseOpenAIStream() {
  }
  async function* streamable3(stream) {
    const extract = chunkToText();
-   for await (const chunk of stream) {
+   for await (let chunk of stream) {
+     if ("promptFilterResults" in chunk) {
+       chunk = {
+         id: chunk.id,
+         created: chunk.created.getDate(),
+         object: chunk.object,
+         // not exposed by Azure API
+         model: chunk.model,
+         // not exposed by Azure API
+         choices: chunk.choices.map((choice) => {
+           var _a, _b, _c, _d, _e, _f, _g;
+           return {
+             delta: {
+               content: (_a = choice.delta) == null ? void 0 : _a.content,
+               function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
+               role: (_c = choice.delta) == null ? void 0 : _c.role,
+               tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
+                 index,
+                 id: toolCall.id,
+                 function: toolCall.function,
+                 type: toolCall.type
+               })) : void 0
+             },
+             finish_reason: choice.finishReason,
+             index: choice.index
+           };
+         })
+       };
+     }
      const text = extract(chunk);
      if (text)
        yield text;
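
The transpiled optional-chaining guards in the added branch are hard to read; the sketch below restates the same Azure-to-OpenAI chunk normalization in source form. The AzureChatCompletions and AzureChatChoice names refer to the interfaces declared in the index.d.ts diff above; normalizeAzureChunk and the loose return shape are illustrative names only, not part of the shipped package.

// Illustrative restatement of the branch added to streamable3; not the published source.
function normalizeAzureChunk(chunk: AzureChatCompletions) {
  return {
    id: chunk.id,
    created: chunk.created.getDate(), // as published: Date#getDate(), i.e. the day of the month
    object: (chunk as any).object, // not exposed by Azure API
    model: (chunk as any).model, // not exposed by Azure API
    choices: chunk.choices.map((choice) => ({
      // camelCase Azure fields are remapped to the snake_case names OpenAIStream expects.
      delta: {
        content: choice.delta?.content,
        function_call: choice.delta?.functionCall,
        role: choice.delta?.role,
        tool_calls: choice.delta?.toolCalls?.length
          ? choice.delta?.toolCalls?.map((toolCall, index) => ({
              index,
              id: toolCall.id,
              function: toolCall.function,
              type: toolCall.type
            }))
          : undefined
      },
      finish_reason: choice.finishReason,
      index: choice.index
    }))
  };
}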