@firebase/ai 2.3.0-canary.0ffcb26af → 2.3.0-canary.2596dd1b5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/dist/ai-public.d.ts +37 -28
  2. package/dist/ai.d.ts +37 -28
  3. package/dist/esm/index.esm.js +55 -32
  4. package/dist/esm/index.esm.js.map +1 -1
  5. package/dist/esm/src/api.d.ts +1 -1
  6. package/dist/esm/src/methods/live-session-helpers.d.ts +2 -2
  7. package/dist/esm/src/methods/live-session.d.ts +10 -1
  8. package/dist/esm/src/models/imagen-model.d.ts +2 -2
  9. package/dist/esm/src/requests/imagen-image-format.d.ts +3 -3
  10. package/dist/esm/src/types/content.d.ts +4 -4
  11. package/dist/esm/src/types/enums.d.ts +4 -4
  12. package/dist/esm/src/types/imagen/requests.d.ts +9 -9
  13. package/dist/esm/src/types/imagen/responses.d.ts +3 -3
  14. package/dist/esm/src/types/live-responses.d.ts +9 -1
  15. package/dist/esm/src/types/requests.d.ts +1 -1
  16. package/dist/index.cjs.js +55 -32
  17. package/dist/index.cjs.js.map +1 -1
  18. package/dist/index.node.cjs.js +55 -32
  19. package/dist/index.node.cjs.js.map +1 -1
  20. package/dist/index.node.mjs +55 -32
  21. package/dist/index.node.mjs.map +1 -1
  22. package/dist/src/api.d.ts +1 -1
  23. package/dist/src/methods/live-session-helpers.d.ts +2 -2
  24. package/dist/src/methods/live-session.d.ts +10 -1
  25. package/dist/src/models/imagen-model.d.ts +2 -2
  26. package/dist/src/requests/imagen-image-format.d.ts +3 -3
  27. package/dist/src/types/content.d.ts +4 -4
  28. package/dist/src/types/enums.d.ts +4 -4
  29. package/dist/src/types/imagen/requests.d.ts +9 -9
  30. package/dist/src/types/imagen/responses.d.ts +3 -3
  31. package/dist/src/types/live-responses.d.ts +9 -1
  32. package/dist/src/types/requests.d.ts +1 -1
  33. package/package.json +8 -8
@@ -4,7 +4,7 @@ import { FirebaseError, Deferred, getModularInstance } from '@firebase/util';
4
4
  import { Logger } from '@firebase/logger';
5
5
 
6
6
  var name = "@firebase/ai";
7
- var version = "2.3.0-canary.0ffcb26af";
7
+ var version = "2.3.0-canary.2596dd1b5";
8
8
 
9
9
  /**
10
10
  * @license
@@ -382,7 +382,7 @@ const InferenceMode = {
382
382
  /**
383
383
  * Represents the result of the code execution.
384
384
  *
385
- * @public
385
+ * @beta
386
386
  */
387
387
  const Outcome = {
388
388
  UNSPECIFIED: 'OUTCOME_UNSPECIFIED',
@@ -393,7 +393,7 @@ const Outcome = {
393
393
  /**
394
394
  * The programming language of the code.
395
395
  *
396
- * @public
396
+ * @beta
397
397
  */
398
398
  const Language = {
399
399
  UNSPECIFIED: 'LANGUAGE_UNSPECIFIED',
@@ -581,7 +581,7 @@ const SchemaType = {
581
581
  * and the {@link https://cloud.google.com/vertex-ai/generative-ai/docs/image/responsible-ai-imagen#safety-filters | Responsible AI and usage guidelines}
582
582
  * for more details.
583
583
  *
584
- * @beta
584
+ * @public
585
585
  */
586
586
  const ImagenSafetyFilterLevel = {
587
587
  /**
@@ -610,7 +610,7 @@ const ImagenSafetyFilterLevel = {
610
610
  * See the <a href="http://firebase.google.com/docs/vertex-ai/generate-images">personGeneration</a>
611
611
  * documentation for more details.
612
612
  *
613
- * @beta
613
+ * @public
614
614
  */
615
615
  const ImagenPersonFilterLevel = {
616
616
  /**
@@ -643,7 +643,7 @@ const ImagenPersonFilterLevel = {
643
643
  * See the {@link http://firebase.google.com/docs/vertex-ai/generate-images | documentation }
644
644
  * for more details and examples of the supported aspect ratios.
645
645
  *
646
- * @beta
646
+ * @public
647
647
  */
648
648
  const ImagenAspectRatio = {
649
649
  /**
@@ -1915,7 +1915,7 @@ function mapGenerateContentCandidates(candidates) {
1915
1915
  // videoMetadata is not supported.
1916
1916
  // Throw early since developers may send a long video as input and only expect to pay
1917
1917
  // for inference on a small portion of the video.
1918
- if (candidate.content?.parts.some(part => part?.videoMetadata)) {
1918
+ if (candidate.content?.parts?.some(part => part?.videoMetadata)) {
1919
1919
  throw new AIError(AIErrorCode.UNSUPPORTED, 'Part.videoMetadata is not supported in the Gemini Developer API. Please remove this property.');
1920
1920
  }
1921
1921
  const mappedCandidate = {
@@ -2017,6 +2017,14 @@ async function* generateResponseSequence(stream, apiSettings) {
2017
2017
  else {
2018
2018
  enhancedResponse = createEnhancedContentResponse(value);
2019
2019
  }
2020
+ const firstCandidate = enhancedResponse.candidates?.[0];
2021
+ // Don't yield a response with no useful data for the developer.
2022
+ if (!firstCandidate?.content?.parts &&
2023
+ !firstCandidate?.finishReason &&
2024
+ !firstCandidate?.citationMetadata &&
2025
+ !firstCandidate?.urlContextMetadata) {
2026
+ continue;
2027
+ }
2020
2028
  yield enhancedResponse;
2021
2029
  }
2022
2030
  }
@@ -2111,32 +2119,28 @@ function aggregateResponses(responses) {
2111
2119
  * Candidates should always have content and parts, but this handles
2112
2120
  * possible malformed responses.
2113
2121
  */
2114
- if (candidate.content && candidate.content.parts) {
2122
+ if (candidate.content) {
2123
+ // Skip a candidate without parts.
2124
+ if (!candidate.content.parts) {
2125
+ continue;
2126
+ }
2115
2127
  if (!aggregatedResponse.candidates[i].content) {
2116
2128
  aggregatedResponse.candidates[i].content = {
2117
2129
  role: candidate.content.role || 'user',
2118
2130
  parts: []
2119
2131
  };
2120
2132
  }
2121
- const newPart = {};
2122
2133
  for (const part of candidate.content.parts) {
2123
- if (part.text !== undefined) {
2124
- // The backend can send empty text parts. If these are sent back
2125
- // (e.g. in chat history), the backend will respond with an error.
2126
- // To prevent this, ignore empty text parts.
2127
- if (part.text === '') {
2128
- continue;
2129
- }
2130
- newPart.text = part.text;
2134
+ const newPart = { ...part };
2135
+ // The backend can send empty text parts. If these are sent back
2136
+ // (e.g. in chat history), the backend will respond with an error.
2137
+ // To prevent this, ignore empty text parts.
2138
+ if (part.text === '') {
2139
+ continue;
2131
2140
  }
2132
- if (part.functionCall) {
2133
- newPart.functionCall = part.functionCall;
2141
+ if (Object.keys(newPart).length > 0) {
2142
+ aggregatedResponse.candidates[i].content.parts.push(newPart);
2134
2143
  }
2135
- if (Object.keys(newPart).length === 0) {
2136
- throw new AIError(AIErrorCode.INVALID_CONTENT, 'Part should have at least one property, but there are none. This is likely caused ' +
2137
- 'by a malformed response from the backend.');
2138
- }
2139
- aggregatedResponse.candidates[i].content.parts.push(newPart);
2140
2144
  }
2141
2145
  }
2142
2146
  }
@@ -2842,6 +2846,25 @@ class LiveSession {
2842
2846
  this.webSocketHandler.send(JSON.stringify(message));
2843
2847
  });
2844
2848
  }
2849
+ /**
2850
+ * Sends function responses to the server.
2851
+ *
2852
+ * @param functionResponses - The function responses to send.
2853
+ * @throws If this session has been closed.
2854
+ *
2855
+ * @beta
2856
+ */
2857
+ async sendFunctionResponses(functionResponses) {
2858
+ if (this.isClosed) {
2859
+ throw new AIError(AIErrorCode.REQUEST_ERROR, 'This LiveSession has been closed and cannot be used.');
2860
+ }
2861
+ const message = {
2862
+ toolResponse: {
2863
+ functionResponses
2864
+ }
2865
+ };
2866
+ this.webSocketHandler.send(JSON.stringify(message));
2867
+ }
2845
2868
  /**
2846
2869
  * Sends a stream of {@link GenerativeContentBlob}.
2847
2870
  *
@@ -3056,7 +3079,7 @@ class LiveGenerativeModel extends AIModel {
3056
3079
  * }
3057
3080
  * ```
3058
3081
  *
3059
- * @beta
3082
+ * @public
3060
3083
  */
3061
3084
  class ImagenModel extends AIModel {
3062
3085
  /**
@@ -3092,7 +3115,7 @@ class ImagenModel extends AIModel {
3092
3115
  * returned object will have a `filteredReason` property.
3093
3116
  * If all images are filtered, the `images` array will be empty.
3094
3117
  *
3095
- * @beta
3118
+ * @public
3096
3119
  */
3097
3120
  async generateImages(prompt) {
3098
3121
  const body = createPredictRequestBody(prompt, {
@@ -3555,7 +3578,7 @@ class AnyOfSchema extends Schema {
3555
3578
  * }
3556
3579
  * ```
3557
3580
  *
3558
- * @beta
3581
+ * @public
3559
3582
  */
3560
3583
  class ImagenImageFormat {
3561
3584
  constructor() {
@@ -3567,7 +3590,7 @@ class ImagenImageFormat {
3567
3590
  * @param compressionQuality - The level of compression (a number between 0 and 100).
3568
3591
  * @returns An {@link ImagenImageFormat} object for a JPEG image.
3569
3592
  *
3570
- * @beta
3593
+ * @public
3571
3594
  */
3572
3595
  static jpeg(compressionQuality) {
3573
3596
  if (compressionQuality &&
@@ -3581,7 +3604,7 @@ class ImagenImageFormat {
3581
3604
  *
3582
3605
  * @returns An {@link ImagenImageFormat} object for a PNG image.
3583
3606
  *
3584
- * @beta
3607
+ * @public
3585
3608
  */
3586
3609
  static png() {
3587
3610
  return { mimeType: 'image/png' };
@@ -3823,9 +3846,9 @@ class AudioConversationRunner {
3823
3846
  }
3824
3847
  else {
3825
3848
  try {
3826
- const resultPart = await this.options.functionCallingHandler(message.functionCalls);
3849
+ const functionResponse = await this.options.functionCallingHandler(message.functionCalls);
3827
3850
  if (!this.isStopped) {
3828
- void this.liveSession.send([resultPart]);
3851
+ void this.liveSession.sendFunctionResponses([functionResponse]);
3829
3852
  }
3830
3853
  }
3831
3854
  catch (e) {
@@ -4045,7 +4068,7 @@ function getGenerativeModel(ai, modelParams, requestOptions) {
4045
4068
  * @throws If the `apiKey` or `projectId` fields are missing in your
4046
4069
  * Firebase config.
4047
4070
  *
4048
- * @beta
4071
+ * @public
4049
4072
  */
4050
4073
  function getImagenModel(ai, modelParams, requestOptions) {
4051
4074
  if (!modelParams.model) {