@firebase/ai 2.1.0-canary.5501791d0 → 2.1.0-canary.984086b0b
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-public.d.ts +570 -4
- package/dist/ai.d.ts +689 -5
- package/dist/esm/index.esm.js +1236 -351
- package/dist/esm/index.esm.js.map +1 -1
- package/dist/esm/src/api.d.ts +18 -3
- package/dist/esm/src/constants.d.ts +1 -1
- package/dist/esm/src/index.d.ts +5 -1
- package/dist/esm/src/methods/chrome-adapter.d.ts +7 -3
- package/dist/esm/src/methods/live-session-helpers.d.ts +154 -0
- package/dist/esm/src/methods/live-session.d.ts +90 -0
- package/dist/esm/src/models/ai-model.d.ts +1 -1
- package/dist/esm/src/models/index.d.ts +1 -0
- package/dist/esm/src/models/live-generative-model.d.ts +55 -0
- package/dist/esm/src/public-types.d.ts +10 -1
- package/dist/esm/src/requests/request.d.ts +6 -0
- package/dist/esm/src/requests/response-helpers.d.ts +9 -5
- package/dist/esm/src/service.d.ts +7 -2
- package/dist/esm/src/types/content.d.ts +42 -0
- package/dist/esm/src/types/enums.d.ts +5 -0
- package/dist/esm/src/types/error.d.ts +2 -0
- package/dist/esm/src/types/imagen/internal.d.ts +10 -0
- package/dist/esm/src/types/live-responses.d.ts +53 -0
- package/dist/esm/src/types/requests.d.ts +105 -0
- package/dist/esm/src/types/responses.d.ts +87 -4
- package/dist/esm/src/websocket.d.ts +67 -0
- package/dist/index.cjs.js +1240 -349
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.node.cjs.js +907 -313
- package/dist/index.node.cjs.js.map +1 -1
- package/dist/index.node.mjs +904 -315
- package/dist/index.node.mjs.map +1 -1
- package/dist/src/api.d.ts +18 -3
- package/dist/src/constants.d.ts +1 -1
- package/dist/src/index.d.ts +5 -1
- package/dist/src/methods/chrome-adapter.d.ts +7 -3
- package/dist/src/methods/live-session-helpers.d.ts +154 -0
- package/dist/src/methods/live-session.d.ts +90 -0
- package/dist/src/models/ai-model.d.ts +1 -1
- package/dist/src/models/index.d.ts +1 -0
- package/dist/src/models/live-generative-model.d.ts +55 -0
- package/dist/src/public-types.d.ts +10 -1
- package/dist/src/requests/request.d.ts +6 -0
- package/dist/src/requests/response-helpers.d.ts +9 -5
- package/dist/src/service.d.ts +7 -2
- package/dist/src/types/content.d.ts +42 -0
- package/dist/src/types/enums.d.ts +5 -0
- package/dist/src/types/error.d.ts +2 -0
- package/dist/src/types/imagen/internal.d.ts +10 -0
- package/dist/src/types/live-responses.d.ts +53 -0
- package/dist/src/types/requests.d.ts +105 -0
- package/dist/src/types/responses.d.ts +87 -4
- package/dist/src/websocket.d.ts +67 -0
- package/package.json +10 -8
package/dist/src/api.d.ts
CHANGED

@@ -18,14 +18,16 @@ import { FirebaseApp } from '@firebase/app';
 import { AI_TYPE } from './constants';
 import { AIService } from './service';
 import { AI, AIOptions } from './public-types';
-import { ImagenModelParams, HybridParams, ModelParams, RequestOptions } from './types';
+import { ImagenModelParams, HybridParams, ModelParams, RequestOptions, LiveModelParams } from './types';
 import { AIError } from './errors';
-import { AIModel, GenerativeModel, ImagenModel } from './models';
+import { AIModel, GenerativeModel, LiveGenerativeModel, ImagenModel } from './models';
 export { ChatSession } from './methods/chat-session';
+export { LiveSession } from './methods/live-session';
 export * from './requests/schema-builder';
 export { ImagenImageFormat } from './requests/imagen-image-format';
-export { AIModel, GenerativeModel, ImagenModel, AIError };
+export { AIModel, GenerativeModel, LiveGenerativeModel, ImagenModel, AIError };
 export { Backend, VertexAIBackend, GoogleAIBackend } from './backend';
+export { startAudioConversation, AudioConversationController, StartAudioConversationOptions } from './methods/live-session-helpers';
 declare module '@firebase/component' {
     interface NameServiceMapping {
         [AI_TYPE]: AIService;
@@ -82,3 +84,16 @@ export declare function getGenerativeModel(ai: AI, modelParams: ModelParams | Hy
  * @beta
  */
 export declare function getImagenModel(ai: AI, modelParams: ImagenModelParams, requestOptions?: RequestOptions): ImagenModel;
+/**
+ * Returns a {@link LiveGenerativeModel} class for real-time, bidirectional communication.
+ *
+ * The Live API is only supported in modern browser windows and Node >= 22.
+ *
+ * @param ai - An {@link AI} instance.
+ * @param modelParams - Parameters to use when setting up a {@link LiveSession}.
+ * @throws If the `apiKey` or `projectId` fields are missing in your
+ * Firebase config.
+ *
+ * @beta
+ */
+export declare function getLiveGenerativeModel(ai: AI, modelParams: LiveModelParams): LiveGenerativeModel;
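For orientation, a minimal usage sketch of the new `getLiveGenerativeModel` entry point, based only on the declarations above. The model name is a placeholder, and the assumption that `LiveModelParams` accepts a `model` field (like the other `*ModelParams` types) is not confirmed by this diff:

```ts
import { initializeApp } from 'firebase/app';
import { getAI, getLiveGenerativeModel } from 'firebase/ai';

async function main(): Promise<void> {
  // The Firebase config must contain `apiKey` and `projectId`, or
  // getLiveGenerativeModel will throw.
  const app = initializeApp({ /* your Firebase config */ });
  const ai = getAI(app);

  // 'example-live-model' is a placeholder model name; `model` is an assumed
  // LiveModelParams field, mirroring ModelParams.
  const liveModel = getLiveGenerativeModel(ai, { model: 'example-live-model' });

  // connect() opens the realtime session (see live-session.d.ts below).
  const session = await liveModel.connect();
  await session.close();
}
```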
package/dist/src/constants.d.ts
CHANGED

@@ -16,7 +16,7 @@
  */
 export declare const AI_TYPE = "AI";
 export declare const DEFAULT_LOCATION = "us-central1";
-export declare const
+export declare const DEFAULT_DOMAIN = "firebasevertexai.googleapis.com";
 export declare const DEFAULT_API_VERSION = "v1beta";
 export declare const PACKAGE_VERSION: string;
 export declare const LANGUAGE_TAG = "gl-js";
package/dist/src/index.d.ts
CHANGED

@@ -3,10 +3,14 @@
  *
  * @packageDocumentation
  */
+import { AIService } from './service';
+import { ComponentContainer, InstanceFactoryOptions } from '@firebase/component';
+import { LanguageModel } from './types/language-model';
 declare global {
     interface Window {
-
+        LanguageModel: LanguageModel;
     }
 }
+export declare function factory(container: ComponentContainer, { instanceIdentifier }: InstanceFactoryOptions): AIService;
 export * from './api';
 export * from './public-types';
package/dist/src/methods/chrome-adapter.d.ts
CHANGED

@@ -23,9 +23,9 @@ import { LanguageModel } from '../types/language-model';
  * possible.
  */
 export declare class ChromeAdapterImpl implements ChromeAdapter {
-
-
-
+    languageModelProvider: LanguageModel;
+    mode: InferenceMode;
+    onDeviceParams: OnDeviceParams;
     static SUPPORTED_MIME_TYPES: string[];
     private isDownloading;
     private downloadPromise;
@@ -118,3 +118,7 @@ export declare class ChromeAdapterImpl implements ChromeAdapter {
      */
     private static toStreamResponse;
 }
+/**
+ * Creates a ChromeAdapterImpl on demand.
+ */
+export declare function chromeAdapterFactory(mode: InferenceMode, window?: Window, params?: OnDeviceParams): ChromeAdapterImpl | undefined;
package/dist/src/methods/live-session-helpers.d.ts
ADDED

@@ -0,0 +1,154 @@
+/**
+ * @license
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { LiveServerToolCall, Part } from '../types';
+import { LiveSession } from './live-session';
+/**
+ * A controller for managing an active audio conversation.
+ *
+ * @beta
+ */
+export interface AudioConversationController {
+    /**
+     * Stops the audio conversation, closes the microphone connection, and
+     * cleans up resources. Returns a promise that resolves when cleanup is complete.
+     */
+    stop: () => Promise<void>;
+}
+/**
+ * Options for {@link startAudioConversation}.
+ *
+ * @beta
+ */
+export interface StartAudioConversationOptions {
+    /**
+     * An async handler that is called when the model requests a function to be executed.
+     * The handler should perform the function call and return the result as a `Part`,
+     * which will then be sent back to the model.
+     */
+    functionCallingHandler?: (functionCalls: LiveServerToolCall['functionCalls']) => Promise<Part>;
+}
+/**
+ * Dependencies needed by the {@link AudioConversationRunner}.
+ *
+ * @internal
+ */
+interface RunnerDependencies {
+    audioContext: AudioContext;
+    mediaStream: MediaStream;
+    sourceNode: MediaStreamAudioSourceNode;
+    workletNode: AudioWorkletNode;
+}
+/**
+ * Encapsulates the core logic of an audio conversation.
+ *
+ * @internal
+ */
+export declare class AudioConversationRunner {
+    private readonly liveSession;
+    private readonly options;
+    private readonly deps;
+    /** A flag to indicate if the conversation has been stopped. */
+    private isStopped;
+    /** A deferred that contains a promise that is resolved when stop() is called, to unblock the receive loop. */
+    private readonly stopDeferred;
+    /** A promise that tracks the lifecycle of the main `runReceiveLoop`. */
+    private readonly receiveLoopPromise;
+    /** A FIFO queue of 24kHz, 16-bit PCM audio chunks received from the server. */
+    private readonly playbackQueue;
+    /** Tracks scheduled audio sources. Used to cancel scheduled audio when the model is interrupted. */
+    private scheduledSources;
+    /** A high-precision timeline pointer for scheduling gapless audio playback. */
+    private nextStartTime;
+    /** A mutex to prevent the playback processing loop from running multiple times concurrently. */
+    private isPlaybackLoopRunning;
+    constructor(liveSession: LiveSession, options: StartAudioConversationOptions, deps: RunnerDependencies);
+    /**
+     * Stops the conversation and unblocks the main receive loop.
+     */
+    stop(): Promise<void>;
+    /**
+     * Cleans up all audio resources (nodes, stream tracks, context) and marks the
+     * session as no longer in a conversation.
+     */
+    private cleanup;
+    /**
+     * Adds audio data to the queue and ensures the playback loop is running.
+     */
+    private enqueueAndPlay;
+    /**
+     * Stops all current and pending audio playback and clears the queue. This is
+     * called when the server indicates the model's speech was interrupted with
+     * `LiveServerContent.modelTurn.interrupted`.
+     */
+    private interruptPlayback;
+    /**
+     * Processes the playback queue in a loop, scheduling each chunk in a gapless sequence.
+     */
+    private processPlaybackQueue;
+    /**
+     * The main loop that listens for and processes messages from the server.
+     */
+    private runReceiveLoop;
+}
+/**
+ * Starts a real-time, bidirectional audio conversation with the model. This helper function manages
+ * the complexities of microphone access, audio recording, playback, and interruptions.
+ *
+ * @remarks Important: This function must be called in response to a user gesture
+ * (for example, a button click) to comply with {@link https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Best_practices#autoplay_policy | browser autoplay policies}.
+ *
+ * @example
+ * ```javascript
+ * const liveSession = await model.connect();
+ * let conversationController;
+ *
+ * // This function must be called from within a click handler.
+ * async function startConversation() {
+ *   try {
+ *     conversationController = await startAudioConversation(liveSession);
+ *   } catch (e) {
+ *     // Handle AI-specific errors
+ *     if (e instanceof AIError) {
+ *       console.error("AI Error:", e.message);
+ *     }
+ *     // Handle microphone permission and hardware errors
+ *     else if (e instanceof DOMException) {
+ *       console.error("Microphone Error:", e.message);
+ *     }
+ *     // Handle other unexpected errors
+ *     else {
+ *       console.error("An unexpected error occurred:", e);
+ *     }
+ *   }
+ * }
+ *
+ * // Later, to stop the conversation:
+ * // if (conversationController) {
+ * //   await conversationController.stop();
+ * // }
+ * ```
+ *
+ * @param liveSession - An active {@link LiveSession} instance.
+ * @param options - Configuration options for the audio conversation.
+ * @returns A `Promise` that resolves with an {@link AudioConversationController}.
+ * @throws `AIError` if the environment does not support required Web APIs (`UNSUPPORTED`), if a conversation is already active (`REQUEST_ERROR`), the session is closed (`SESSION_CLOSED`), or if an unexpected initialization error occurs (`ERROR`).
+ * @throws `DOMException` Thrown by `navigator.mediaDevices.getUserMedia()` if issues occur with microphone access, such as permissions being denied (`NotAllowedError`) or no compatible hardware being found (`NotFoundError`). See the {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#exceptions | MDN documentation} for a full list of exceptions.
+ *
+ * @beta
+ */
+export declare function startAudioConversation(liveSession: LiveSession, options?: StartAudioConversationOptions): Promise<AudioConversationController>;
+export {};
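The JSDoc above already demonstrates the basic call pattern; the sketch below adds the optional `functionCallingHandler`. Here `liveSession` is assumed to come from `LiveGenerativeModel.connect()`, `lookupWeather` is a hypothetical app function, and echoing the call's `name` (and `id`, when present) in the returned `functionResponse` part is an assumption about what the model expects:

```ts
import {
  startAudioConversation,
  AudioConversationController,
  LiveSession,
} from 'firebase/ai';

declare const liveSession: LiveSession;
declare function lookupWeather(args: object): Promise<object>;

let controller: AudioConversationController | undefined;

// Must be called from a user-gesture handler (e.g. a click listener).
async function onStartClick(): Promise<void> {
  controller = await startAudioConversation(liveSession, {
    // Invoked when the model requests a tool call; the returned Part is sent
    // back to the model.
    functionCallingHandler: async functionCalls => {
      const call = functionCalls?.[0];
      const result = call ? await lookupWeather(call.args) : {};
      return {
        functionResponse: { id: call?.id, name: call?.name ?? '', response: result },
      };
    },
  });
}

// Later, from another user action:
async function onStopClick(): Promise<void> {
  await controller?.stop();
}
```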
package/dist/src/methods/live-session.d.ts
ADDED

@@ -0,0 +1,90 @@
+/**
+ * @license
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { GenerativeContentBlob, LiveServerContent, LiveServerToolCall, LiveServerToolCallCancellation, Part } from '../public-types';
+import { WebSocketHandler } from '../websocket';
+/**
+ * Represents an active, real-time, bidirectional conversation with the model.
+ *
+ * This class should only be instantiated by calling {@link LiveGenerativeModel.connect}.
+ *
+ * @beta
+ */
+export declare class LiveSession {
+    private webSocketHandler;
+    private serverMessages;
+    /**
+     * Indicates whether this Live session is closed.
+     *
+     * @beta
+     */
+    isClosed: boolean;
+    /**
+     * Indicates whether this Live session is being controlled by an `AudioConversationController`.
+     *
+     * @beta
+     */
+    inConversation: boolean;
+    /**
+     * @internal
+     */
+    constructor(webSocketHandler: WebSocketHandler, serverMessages: AsyncGenerator<unknown>);
+    /**
+     * Sends content to the server.
+     *
+     * @param request - The message to send to the model.
+     * @param turnComplete - Indicates if the turn is complete. Defaults to false.
+     * @throws If this session has been closed.
+     *
+     * @beta
+     */
+    send(request: string | Array<string | Part>, turnComplete?: boolean): Promise<void>;
+    /**
+     * Sends realtime input to the server.
+     *
+     * @param mediaChunks - The media chunks to send.
+     * @throws If this session has been closed.
+     *
+     * @beta
+     */
+    sendMediaChunks(mediaChunks: GenerativeContentBlob[]): Promise<void>;
+    /**
+     * Sends a stream of {@link GenerativeContentBlob}.
+     *
+     * @param mediaChunkStream - The stream of {@link GenerativeContentBlob} to send.
+     * @throws If this session has been closed.
+     *
+     * @beta
+     */
+    sendMediaStream(mediaChunkStream: ReadableStream<GenerativeContentBlob>): Promise<void>;
+    /**
+     * Yields messages received from the server.
+     * This can only be used by one consumer at a time.
+     *
+     * @returns An `AsyncGenerator` that yields server messages as they arrive.
+     * @throws If the session is already closed, or if we receive a response that we don't support.
+     *
+     * @beta
+     */
+    receive(): AsyncGenerator<LiveServerContent | LiveServerToolCall | LiveServerToolCallCancellation>;
+    /**
+     * Closes this session.
+     * All methods on this session will throw an error once this resolves.
+     *
+     * @beta
+     */
+    close(): Promise<void>;
+}
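A sketch of plain-text use of a `LiveSession`. Only `LiveServerToolCall.functionCalls` is confirmed by this diff as a distinguishing field, so the `in` check below is one assumed way to tell the yielded message types apart:

```ts
import { LiveSession } from 'firebase/ai';

async function textTurn(session: LiveSession): Promise<void> {
  // Send a complete user turn (turnComplete defaults to false).
  await session.send('Give me a one-sentence summary of the Live API.', true);

  // receive() may only be consumed by one reader at a time; the loop ends
  // when the session closes.
  for await (const message of session.receive()) {
    if ('functionCalls' in message) {
      console.log('Tool call requested:', message.functionCalls);
    } else {
      console.log('Server message:', message);
    }
  }
}
```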
package/dist/src/models/live-generative-model.d.ts
ADDED

@@ -0,0 +1,55 @@
+/**
+ * @license
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import { AIModel } from './ai-model';
+import { LiveSession } from '../methods/live-session';
+import { AI, Content, LiveGenerationConfig, LiveModelParams, Tool, ToolConfig } from '../public-types';
+import { WebSocketHandler } from '../websocket';
+/**
+ * Class for Live generative model APIs. The Live API enables low-latency, two-way multimodal
+ * interactions with Gemini.
+ *
+ * This class should only be instantiated with {@link getLiveGenerativeModel}.
+ *
+ * @beta
+ */
+export declare class LiveGenerativeModel extends AIModel {
+    /**
+     * @internal
+     */
+    private _webSocketHandler;
+    generationConfig: LiveGenerationConfig;
+    tools?: Tool[];
+    toolConfig?: ToolConfig;
+    systemInstruction?: Content;
+    /**
+     * @internal
+     */
+    constructor(ai: AI, modelParams: LiveModelParams,
+    /**
+     * @internal
+     */
+    _webSocketHandler: WebSocketHandler);
+    /**
+     * Starts a {@link LiveSession}.
+     *
+     * @returns A {@link LiveSession}.
+     * @throws If the connection failed to be established with the server.
+     *
+     * @beta
+     */
+    connect(): Promise<LiveSession>;
+}
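The public fields above (`generationConfig`, `tools`, `toolConfig`, `systemInstruction`) suggest that `LiveModelParams` carries matching options, but that type is not shown in this diff, so the field names below are assumptions and the model name is a placeholder:

```ts
import { AI, getLiveGenerativeModel } from 'firebase/ai';

declare const ai: AI;

async function openSession() {
  const liveModel = getLiveGenerativeModel(ai, {
    model: 'example-live-model',
    // Assumed LiveModelParams fields, mirroring the class properties above.
    systemInstruction: {
      role: 'system',
      parts: [{ text: 'You are a concise assistant.' }],
    },
    generationConfig: {
      /* LiveGenerationConfig options, e.g. the response modality */
    },
  });

  // connect() throws if the WebSocket connection cannot be established.
  return liveModel.connect();
}
```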
package/dist/src/public-types.d.ts
CHANGED

@@ -35,6 +35,10 @@ export interface AI {
      * Vertex AI Gemini API (using {@link VertexAIBackend}).
      */
     backend: Backend;
+    /**
+     * Options applied to this {@link AI} instance.
+     */
+    options?: AIOptions;
     /**
      * @deprecated use `AI.backend.location` instead.
      *
@@ -83,6 +87,11 @@ export type BackendType = (typeof BackendType)[keyof typeof BackendType];
 export interface AIOptions {
     /**
      * The backend configuration to use for the AI service instance.
+     * Defaults to the Gemini Developer API backend ({@link GoogleAIBackend}).
      */
-    backend
+    backend?: Backend;
+    /**
+     * Whether to use App Check limited use tokens. Defaults to false.
+     */
+    useLimitedUseAppCheckTokens?: boolean;
 }
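Both `AIOptions` fields are now optional; a short sketch of the difference, assuming an already-initialized `FirebaseApp`:

```ts
import { FirebaseApp } from 'firebase/app';
import { getAI, GoogleAIBackend } from 'firebase/ai';

declare const app: FirebaseApp;

// With no options, the backend now defaults to the Gemini Developer API.
const ai = getAI(app);

// Explicit backend plus the new App Check option.
const aiWithOptions = getAI(app, {
  backend: new GoogleAIBackend(),
  useLimitedUseAppCheckTokens: true,
});
```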
package/dist/src/requests/request.d.ts
CHANGED

@@ -35,6 +35,12 @@ export declare class RequestUrl {
     private get modelPath();
     private get queryParams();
 }
+export declare class WebSocketUrl {
+    apiSettings: ApiSettings;
+    constructor(apiSettings: ApiSettings);
+    toString(): string;
+    private get pathname();
+}
 export declare function getHeaders(url: RequestUrl): Promise<Headers>;
 export declare function constructRequest(model: string, task: Task, apiSettings: ApiSettings, stream: boolean, body: string, requestOptions?: RequestOptions): Promise<{
     url: string;
package/dist/src/requests/response-helpers.d.ts
CHANGED

@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import { EnhancedGenerateContentResponse, FunctionCall, GenerateContentResponse, ImagenGCSImage, ImagenInlineImage, InlineDataPart } from '../types';
+import { EnhancedGenerateContentResponse, FunctionCall, GenerateContentResponse, ImagenGCSImage, ImagenInlineImage, InlineDataPart, Part } from '../types';
 /**
  * Creates an EnhancedGenerateContentResponse object that has helper functions and
  * other modifications that improve usability.
@@ -26,15 +26,19 @@ export declare function createEnhancedContentResponse(response: GenerateContentR
  */
 export declare function addHelpers(response: GenerateContentResponse): EnhancedGenerateContentResponse;
 /**
- * Returns all text
+ * Returns all text from the first candidate's parts, filtering by whether
+ * `partFilter()` returns true.
+ *
+ * @param response - The `GenerateContentResponse` from which to extract text.
+ * @param partFilter - Only return `Part`s for which this returns true
  */
-export declare function getText(response: GenerateContentResponse): string;
+export declare function getText(response: GenerateContentResponse, partFilter: (part: Part) => boolean): string;
 /**
- * Returns {@link FunctionCall}
+ * Returns every {@link FunctionCall} associated with first candidate.
  */
 export declare function getFunctionCalls(response: GenerateContentResponse): FunctionCall[] | undefined;
 /**
- * Returns {@link InlineDataPart}
+ * Returns every {@link InlineDataPart} in the first candidate if present.
  *
  * @internal
  */
package/dist/src/service.d.ts
CHANGED

@@ -15,17 +15,22 @@
  * limitations under the License.
  */
 import { FirebaseApp, _FirebaseService } from '@firebase/app';
-import { AI } from './public-types';
+import { AI, AIOptions, InferenceMode, OnDeviceParams } from './public-types';
 import { AppCheckInternalComponentName, FirebaseAppCheckInternal } from '@firebase/app-check-interop-types';
 import { Provider } from '@firebase/component';
 import { FirebaseAuthInternal, FirebaseAuthInternalName } from '@firebase/auth-interop-types';
 import { Backend } from './backend';
+import { ChromeAdapterImpl } from './methods/chrome-adapter';
 export declare class AIService implements AI, _FirebaseService {
     app: FirebaseApp;
     backend: Backend;
+    chromeAdapterFactory?: ((mode: InferenceMode, window?: Window, params?: OnDeviceParams) => ChromeAdapterImpl | undefined) | undefined;
     auth: FirebaseAuthInternal | null;
     appCheck: FirebaseAppCheckInternal | null;
+    _options?: Omit<AIOptions, 'backend'>;
     location: string;
-    constructor(app: FirebaseApp, backend: Backend, authProvider?: Provider<FirebaseAuthInternalName>, appCheckProvider?: Provider<AppCheckInternalComponentName
+    constructor(app: FirebaseApp, backend: Backend, authProvider?: Provider<FirebaseAuthInternalName>, appCheckProvider?: Provider<AppCheckInternalComponentName>, chromeAdapterFactory?: ((mode: InferenceMode, window?: Window, params?: OnDeviceParams) => ChromeAdapterImpl | undefined) | undefined);
     _delete(): Promise<void>;
+    set options(optionsToSet: AIOptions);
+    get options(): AIOptions | undefined;
 }
package/dist/src/types/content.d.ts
CHANGED

@@ -38,6 +38,11 @@ export interface TextPart {
     inlineData?: never;
     functionCall?: never;
     functionResponse?: never;
+    thought?: boolean;
+    /**
+     * @internal
+     */
+    thoughtSignature?: string;
 }
 /**
  * Content part interface if the part represents an image.
@@ -52,6 +57,11 @@ export interface InlineDataPart {
      * Applicable if `inlineData` is a video.
      */
     videoMetadata?: VideoMetadata;
+    thought?: boolean;
+    /**
+     * @internal
+     */
+    thoughtSignature?: never;
 }
 /**
  * Describes the input video content.
@@ -78,6 +88,11 @@ export interface FunctionCallPart {
     inlineData?: never;
     functionCall: FunctionCall;
     functionResponse?: never;
+    thought?: boolean;
+    /**
+     * @internal
+     */
+    thoughtSignature?: never;
 }
 /**
  * Content part interface if the part represents {@link FunctionResponse}.
@@ -88,6 +103,11 @@ export interface FunctionResponsePart {
     inlineData?: never;
     functionCall?: never;
     functionResponse: FunctionResponse;
+    thought?: boolean;
+    /**
+     * @internal
+     */
+    thoughtSignature?: never;
 }
 /**
  * Content part interface if the part represents {@link FileData}
@@ -99,6 +119,11 @@ export interface FileDataPart {
     functionCall?: never;
     functionResponse?: never;
     fileData: FileData;
+    thought?: boolean;
+    /**
+     * @internal
+     */
+    thoughtSignature?: never;
 }
 /**
  * A predicted {@link FunctionCall} returned from the model
@@ -107,6 +132,15 @@ export interface FileDataPart {
  * @public
  */
 export interface FunctionCall {
+    /**
+     * The id of the function call. This must be sent back in the associated {@link FunctionResponse}.
+     *
+     *
+     * @remarks This property is only supported in the Gemini Developer API ({@link GoogleAIBackend}).
+     * When using the Vertex AI Gemini API ({@link VertexAIBackend}), this property will be
+     * `undefined`.
+     */
+    id?: string;
     name: string;
     args: object;
 }
@@ -120,6 +154,14 @@ export interface FunctionCall {
  * @public
  */
 export interface FunctionResponse {
+    /**
+     * The id of the {@link FunctionCall}.
+     *
+     * @remarks This property is only supported in the Gemini Developer API ({@link GoogleAIBackend}).
+     * When using the Vertex AI Gemini API ({@link VertexAIBackend}), this property will be
+     * `undefined`.
+     */
+    id?: string;
     name: string;
     response: object;
 }
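When the Gemini Developer API returns an `id` on a `FunctionCall`, it should be echoed back on the matching `FunctionResponse`. A sketch using the existing chat API; `chat` is assumed to be a started `ChatSession` and `runTool` is a hypothetical app function:

```ts
import { ChatSession } from 'firebase/ai';

declare const chat: ChatSession;
declare function runTool(name: string, args: object): Promise<object>;

async function handleFunctionCalls(): Promise<void> {
  const result = await chat.sendMessage('What is the weather in Paris?');
  const calls = result.response.functionCalls() ?? [];

  for (const call of calls) {
    const response = await runTool(call.name, call.args);
    // Echo `id` back so the model can match the response to its call.
    await chat.sendMessage([
      { functionResponse: { id: call.id, name: call.name, response } },
    ]);
  }
}
```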
package/dist/src/types/error.d.ts
CHANGED

@@ -60,6 +60,8 @@ export declare const AIErrorCode: {
     readonly RESPONSE_ERROR: "response-error";
     /** An error occurred while performing a fetch. */
     readonly FETCH_ERROR: "fetch-error";
+    /** An error occurred because an operation was attempted on a closed session. */
+    readonly SESSION_CLOSED: "session-closed";
     /** An error associated with a Content object. */
     readonly INVALID_CONTENT: "invalid-content";
     /** An error due to the Firebase API not being enabled in the Console. */
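A hedged sketch of reacting to the new error code. This diff doesn't show what string a thrown `AIError` carries in its `code` property, so the direct comparison below is an assumption:

```ts
import { AIError, AIErrorCode, LiveSession } from 'firebase/ai';

declare const session: LiveSession;

async function trySend(): Promise<void> {
  try {
    await session.send('hello');
  } catch (e) {
    // Assumed: the error's `code` is the bare AIErrorCode value.
    if (e instanceof AIError && e.code === AIErrorCode.SESSION_CLOSED) {
      console.warn('Live session already closed; reconnect before sending.');
    } else {
      throw e;
    }
  }
}
```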
package/dist/src/types/imagen/internal.d.ts
CHANGED

@@ -59,6 +59,14 @@ export interface ImagenResponseInternal {
          * The reason why the image was filtered.
          */
         raiFilteredReason?: string;
+        /**
+         * The safety attributes.
+         *
+         * This type is currently unused in the SDK. It is sent back because our requests set
+         * `includeSafetyAttributes`. This property is currently only used to avoid throwing an error
+         * when encountering this unsupported prediction type.
+         */
+        safetyAttributes?: unknown;
     }>;
 }
 /**
@@ -81,6 +89,7 @@ export interface ImagenResponseInternal {
  *     "personGeneration": "allow_all",
  *     "sampleCount": 2,
  *     "includeRaiReason": true,
+ *     "includeSafetyAttributes": true,
  *     "aspectRatio": "9:16"
  *   }
  * }
@@ -108,6 +117,7 @@ export interface PredictRequestBody {
         safetyFilterLevel?: string;
         personGeneration?: string;
         includeRaiReason: boolean;
+        includeSafetyAttributes: boolean;
     };
 }
 /**