@firebase/ai 2.3.0-canary.0ffcb26af → 2.3.0-canary.2596dd1b5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/dist/ai-public.d.ts +37 -28
  2. package/dist/ai.d.ts +37 -28
  3. package/dist/esm/index.esm.js +55 -32
  4. package/dist/esm/index.esm.js.map +1 -1
  5. package/dist/esm/src/api.d.ts +1 -1
  6. package/dist/esm/src/methods/live-session-helpers.d.ts +2 -2
  7. package/dist/esm/src/methods/live-session.d.ts +10 -1
  8. package/dist/esm/src/models/imagen-model.d.ts +2 -2
  9. package/dist/esm/src/requests/imagen-image-format.d.ts +3 -3
  10. package/dist/esm/src/types/content.d.ts +4 -4
  11. package/dist/esm/src/types/enums.d.ts +4 -4
  12. package/dist/esm/src/types/imagen/requests.d.ts +9 -9
  13. package/dist/esm/src/types/imagen/responses.d.ts +3 -3
  14. package/dist/esm/src/types/live-responses.d.ts +9 -1
  15. package/dist/esm/src/types/requests.d.ts +1 -1
  16. package/dist/index.cjs.js +55 -32
  17. package/dist/index.cjs.js.map +1 -1
  18. package/dist/index.node.cjs.js +55 -32
  19. package/dist/index.node.cjs.js.map +1 -1
  20. package/dist/index.node.mjs +55 -32
  21. package/dist/index.node.mjs.map +1 -1
  22. package/dist/src/api.d.ts +1 -1
  23. package/dist/src/methods/live-session-helpers.d.ts +2 -2
  24. package/dist/src/methods/live-session.d.ts +10 -1
  25. package/dist/src/models/imagen-model.d.ts +2 -2
  26. package/dist/src/requests/imagen-image-format.d.ts +3 -3
  27. package/dist/src/types/content.d.ts +4 -4
  28. package/dist/src/types/enums.d.ts +4 -4
  29. package/dist/src/types/imagen/requests.d.ts +9 -9
  30. package/dist/src/types/imagen/responses.d.ts +3 -3
  31. package/dist/src/types/live-responses.d.ts +9 -1
  32. package/dist/src/types/requests.d.ts +1 -1
  33. package/package.json +8 -8
@@ -8,7 +8,7 @@ var util = require('@firebase/util');
8
8
  var logger$1 = require('@firebase/logger');
9
9
 
10
10
  var name = "@firebase/ai";
11
- var version = "2.3.0-canary.0ffcb26af";
11
+ var version = "2.3.0-canary.2596dd1b5";
12
12
 
13
13
  /**
14
14
  * @license
@@ -330,7 +330,7 @@ const InferenceMode = {
330
330
  /**
331
331
  * Represents the result of the code execution.
332
332
  *
333
- * @public
333
+ * @beta
334
334
  */
335
335
  const Outcome = {
336
336
  UNSPECIFIED: 'OUTCOME_UNSPECIFIED',
@@ -341,7 +341,7 @@ const Outcome = {
341
341
  /**
342
342
  * The programming language of the code.
343
343
  *
344
- * @public
344
+ * @beta
345
345
  */
346
346
  const Language = {
347
347
  UNSPECIFIED: 'LANGUAGE_UNSPECIFIED',
@@ -529,7 +529,7 @@ const SchemaType = {
529
529
  * and the {@link https://cloud.google.com/vertex-ai/generative-ai/docs/image/responsible-ai-imagen#safety-filters | Responsible AI and usage guidelines}
530
530
  * for more details.
531
531
  *
532
- * @beta
532
+ * @public
533
533
  */
534
534
  const ImagenSafetyFilterLevel = {
535
535
  /**
@@ -558,7 +558,7 @@ const ImagenSafetyFilterLevel = {
558
558
  * See the <a href="http://firebase.google.com/docs/vertex-ai/generate-images">personGeneration</a>
559
559
  * documentation for more details.
560
560
  *
561
- * @beta
561
+ * @public
562
562
  */
563
563
  const ImagenPersonFilterLevel = {
564
564
  /**
@@ -591,7 +591,7 @@ const ImagenPersonFilterLevel = {
591
591
  * See the {@link http://firebase.google.com/docs/vertex-ai/generate-images | documentation }
592
592
  * for more details and examples of the supported aspect ratios.
593
593
  *
594
- * @beta
594
+ * @public
595
595
  */
596
596
  const ImagenAspectRatio = {
597
597
  /**
@@ -1596,7 +1596,7 @@ function mapGenerateContentCandidates(candidates) {
1596
1596
  // videoMetadata is not supported.
1597
1597
  // Throw early since developers may send a long video as input and only expect to pay
1598
1598
  // for inference on a small portion of the video.
1599
- if (candidate.content?.parts.some(part => part?.videoMetadata)) {
1599
+ if (candidate.content?.parts?.some(part => part?.videoMetadata)) {
1600
1600
  throw new AIError(AIErrorCode.UNSUPPORTED, 'Part.videoMetadata is not supported in the Gemini Developer API. Please remove this property.');
1601
1601
  }
1602
1602
  const mappedCandidate = {
@@ -1698,6 +1698,14 @@ async function* generateResponseSequence(stream, apiSettings) {
1698
1698
  else {
1699
1699
  enhancedResponse = createEnhancedContentResponse(value);
1700
1700
  }
1701
+ const firstCandidate = enhancedResponse.candidates?.[0];
1702
+ // Don't yield a response with no useful data for the developer.
1703
+ if (!firstCandidate?.content?.parts &&
1704
+ !firstCandidate?.finishReason &&
1705
+ !firstCandidate?.citationMetadata &&
1706
+ !firstCandidate?.urlContextMetadata) {
1707
+ continue;
1708
+ }
1701
1709
  yield enhancedResponse;
1702
1710
  }
1703
1711
  }
@@ -1792,32 +1800,28 @@ function aggregateResponses(responses) {
1792
1800
  * Candidates should always have content and parts, but this handles
1793
1801
  * possible malformed responses.
1794
1802
  */
1795
- if (candidate.content && candidate.content.parts) {
1803
+ if (candidate.content) {
1804
+ // Skip a candidate without parts.
1805
+ if (!candidate.content.parts) {
1806
+ continue;
1807
+ }
1796
1808
  if (!aggregatedResponse.candidates[i].content) {
1797
1809
  aggregatedResponse.candidates[i].content = {
1798
1810
  role: candidate.content.role || 'user',
1799
1811
  parts: []
1800
1812
  };
1801
1813
  }
1802
- const newPart = {};
1803
1814
  for (const part of candidate.content.parts) {
1804
- if (part.text !== undefined) {
1805
- // The backend can send empty text parts. If these are sent back
1806
- // (e.g. in chat history), the backend will respond with an error.
1807
- // To prevent this, ignore empty text parts.
1808
- if (part.text === '') {
1809
- continue;
1810
- }
1811
- newPart.text = part.text;
1815
+ const newPart = { ...part };
1816
+ // The backend can send empty text parts. If these are sent back
1817
+ // (e.g. in chat history), the backend will respond with an error.
1818
+ // To prevent this, ignore empty text parts.
1819
+ if (part.text === '') {
1820
+ continue;
1812
1821
  }
1813
- if (part.functionCall) {
1814
- newPart.functionCall = part.functionCall;
1822
+ if (Object.keys(newPart).length > 0) {
1823
+ aggregatedResponse.candidates[i].content.parts.push(newPart);
1815
1824
  }
1816
- if (Object.keys(newPart).length === 0) {
1817
- throw new AIError(AIErrorCode.INVALID_CONTENT, 'Part should have at least one property, but there are none. This is likely caused ' +
1818
- 'by a malformed response from the backend.');
1819
- }
1820
- aggregatedResponse.candidates[i].content.parts.push(newPart);
1821
1825
  }
1822
1826
  }
1823
1827
  }
@@ -2523,6 +2527,25 @@ class LiveSession {
2523
2527
  this.webSocketHandler.send(JSON.stringify(message));
2524
2528
  });
2525
2529
  }
2530
+ /**
2531
+ * Sends function responses to the server.
2532
+ *
2533
+ * @param functionResponses - The function responses to send.
2534
+ * @throws If this session has been closed.
2535
+ *
2536
+ * @beta
2537
+ */
2538
+ async sendFunctionResponses(functionResponses) {
2539
+ if (this.isClosed) {
2540
+ throw new AIError(AIErrorCode.REQUEST_ERROR, 'This LiveSession has been closed and cannot be used.');
2541
+ }
2542
+ const message = {
2543
+ toolResponse: {
2544
+ functionResponses
2545
+ }
2546
+ };
2547
+ this.webSocketHandler.send(JSON.stringify(message));
2548
+ }
2526
2549
  /**
2527
2550
  * Sends a stream of {@link GenerativeContentBlob}.
2528
2551
  *
@@ -2737,7 +2760,7 @@ class LiveGenerativeModel extends AIModel {
2737
2760
  * }
2738
2761
  * ```
2739
2762
  *
2740
- * @beta
2763
+ * @public
2741
2764
  */
2742
2765
  class ImagenModel extends AIModel {
2743
2766
  /**
@@ -2773,7 +2796,7 @@ class ImagenModel extends AIModel {
2773
2796
  * returned object will have a `filteredReason` property.
2774
2797
  * If all images are filtered, the `images` array will be empty.
2775
2798
  *
2776
- * @beta
2799
+ * @public
2777
2800
  */
2778
2801
  async generateImages(prompt) {
2779
2802
  const body = createPredictRequestBody(prompt, {
@@ -3236,7 +3259,7 @@ class AnyOfSchema extends Schema {
3236
3259
  * }
3237
3260
  * ```
3238
3261
  *
3239
- * @beta
3262
+ * @public
3240
3263
  */
3241
3264
  class ImagenImageFormat {
3242
3265
  constructor() {
@@ -3248,7 +3271,7 @@ class ImagenImageFormat {
3248
3271
  * @param compressionQuality - The level of compression (a number between 0 and 100).
3249
3272
  * @returns An {@link ImagenImageFormat} object for a JPEG image.
3250
3273
  *
3251
- * @beta
3274
+ * @public
3252
3275
  */
3253
3276
  static jpeg(compressionQuality) {
3254
3277
  if (compressionQuality &&
@@ -3262,7 +3285,7 @@ class ImagenImageFormat {
3262
3285
  *
3263
3286
  * @returns An {@link ImagenImageFormat} object for a PNG image.
3264
3287
  *
3265
- * @beta
3288
+ * @public
3266
3289
  */
3267
3290
  static png() {
3268
3291
  return { mimeType: 'image/png' };
@@ -3504,9 +3527,9 @@ class AudioConversationRunner {
3504
3527
  }
3505
3528
  else {
3506
3529
  try {
3507
- const resultPart = await this.options.functionCallingHandler(message.functionCalls);
3530
+ const functionResponse = await this.options.functionCallingHandler(message.functionCalls);
3508
3531
  if (!this.isStopped) {
3509
- void this.liveSession.send([resultPart]);
3532
+ void this.liveSession.sendFunctionResponses([functionResponse]);
3510
3533
  }
3511
3534
  }
3512
3535
  catch (e) {
@@ -3726,7 +3749,7 @@ function getGenerativeModel(ai, modelParams, requestOptions) {
3726
3749
  * @throws If the `apiKey` or `projectId` fields are missing in your
3727
3750
  * Firebase config.
3728
3751
  *
3729
- * @beta
3752
+ * @public
3730
3753
  */
3731
3754
  function getImagenModel(ai, modelParams, requestOptions) {
3732
3755
  if (!modelParams.model) {