ai 2.2.14 → 2.2.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -284,6 +284,10 @@ interface Completion {
  * The object type, which is always "text_completion"
  */
  object: string;
+ /**
+ * Usage statistics for the completion request.
+ */
+ usage?: CompletionUsage;
  }
  interface CompletionChoice {
  /**
@@ -291,11 +295,28 @@ interface CompletionChoice {
  * hit a natural stop point or a provided stop sequence, or `length` if the maximum
  * number of tokens specified in the request was reached.
  */
- finish_reason: 'stop' | 'length';
+ finish_reason: 'stop' | 'length' | 'content_filter';
  index: number;
  logprobs: any | null;
  text: string;
  }
+ interface CompletionUsage {
+ /**
+ * Usage statistics for the completion request.
+ */
+ /**
+ * Number of tokens in the generated completion.
+ */
+ completion_tokens: number;
+ /**
+ * Number of tokens in the prompt.
+ */
+ prompt_tokens: number;
+ /**
+ * Total number of tokens used in the request (prompt + completion).
+ */
+ total_tokens: number;
+ }
  type AsyncIterableOpenAIStreamReturnTypes = AsyncIterable<ChatCompletionChunk> | AsyncIterable<Completion>;
  declare function OpenAIStream(res: Response | AsyncIterableOpenAIStreamReturnTypes, callbacks?: OpenAIStreamCallbacks): ReadableStream;

@@ -499,7 +520,7 @@ interface Prediction {
  webhook?: string;
  webhook_events_filter?: ('start' | 'output' | 'logs' | 'completed')[];
  created_at: string;
- updated_at: string;
+ updated_at?: string;
  completed_at?: string;
  urls: {
  get: string;
@@ -571,4 +592,4 @@ declare const getStreamStringTypeAndValue: (line: string) => {
  */
  declare const COMPLEX_HEADER = "X-Experimental-Stream-Data";

- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AnthropicStream, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReplicateStream, RequestOptions, StreamString, StreamStringPrefixes, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_StreamData, getStreamString, getStreamStringTypeAndValue, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AnthropicStream, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReplicateStream, RequestOptions, StreamString, StreamStringPrefixes, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_StreamData, getStreamString, getStreamStringTypeAndValue, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
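The typings above add an optional usage field to the Completion interface, introduce and export a CompletionUsage interface, widen CompletionChoice.finish_reason with 'content_filter', and make Prediction.updated_at optional. The sketch below shows how a consumer might read these types; it assumes CompletionUsage is imported from the package (it appears in the new export list), while FinishReason, reportUsage, and describeChoice are illustrative names that are not part of the package.

// Sketch only: reading the new optional `usage` field and the widened
// `finish_reason` union shown in this diff. `CompletionUsage` is exported
// by ai@2.2.16 per the new export list; everything else here is a
// hand-written stand-in, not package API.
import type { CompletionUsage } from 'ai';

type FinishReason = 'stop' | 'length' | 'content_filter';

function reportUsage(usage?: CompletionUsage): string {
  // `usage` is optional on Completion, so guard before reading it.
  if (!usage) return 'usage not reported';
  return `${usage.prompt_tokens} prompt + ${usage.completion_tokens} completion = ${usage.total_tokens} total tokens`;
}

function describeChoice(finishReason: FinishReason, text: string): string {
  // 'content_filter' is the newly added member of the union.
  if (finishReason === 'content_filter') {
    return 'completion was truncated by the provider content filter';
  }
  return text;
}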
package/dist/index.js CHANGED
@@ -84,10 +84,9 @@ function createCallbacksTransformer(cb) {
  },
  async transform(message, controller) {
  controller.enqueue(textEncoder.encode(message));
+ aggregatedResponse += message;
  if (callbacks.onToken)
  await callbacks.onToken(message);
- if (callbacks.onCompletion)
- aggregatedResponse += message;
  },
  async flush() {
  const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
package/dist/index.mjs CHANGED
@@ -39,10 +39,9 @@ function createCallbacksTransformer(cb) {
  },
  async transform(message, controller) {
  controller.enqueue(textEncoder.encode(message));
+ aggregatedResponse += message;
  if (callbacks.onToken)
  await callbacks.onToken(message);
- if (callbacks.onCompletion)
- aggregatedResponse += message;
  },
  async flush() {
  const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
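In both the CJS and ESM builds, createCallbacksTransformer now appends each chunk to aggregatedResponse unconditionally instead of only when callbacks.onCompletion is defined, so the aggregated text is complete for whatever consumes it in flush(). A minimal stand-alone sketch of that accumulate-then-flush pattern follows; it is not the library's actual implementation (the OpenAI-specific branching in flush() is omitted) and only models the onToken and onCompletion callbacks visible in the diff.

// Simplified sketch of the accumulate-then-flush pattern shown in the diff.
// Not the library's real createCallbacksTransformer.
interface SketchCallbacks {
  onToken?: (token: string) => void | Promise<void>;
  onCompletion?: (completion: string) => void | Promise<void>;
}

function createSketchTransformer(callbacks: SketchCallbacks = {}): TransformStream<string, Uint8Array> {
  const textEncoder = new TextEncoder();
  let aggregatedResponse = '';

  return new TransformStream<string, Uint8Array>({
    async transform(message, controller) {
      controller.enqueue(textEncoder.encode(message));
      // Accumulate on every chunk (the 2.2.16 behavior), not only when
      // onCompletion happens to be defined (the 2.2.14 behavior).
      aggregatedResponse += message;
      if (callbacks.onToken) await callbacks.onToken(message);
    },
    async flush() {
      if (callbacks.onCompletion) await callbacks.onCompletion(aggregatedResponse);
    },
  });
}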
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ai",
- "version": "2.2.14",
+ "version": "2.2.16",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",