@firebase/ai 1.4.1-canary.d91169f06 → 1.4.1-canary.f11b55294
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/{index.esm.js → index.esm2017.js} +92 -131
- package/dist/esm/index.esm2017.js.map +1 -0
- package/dist/index.cjs.js +91 -130
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.node.cjs.js +91 -130
- package/dist/index.node.cjs.js.map +1 -1
- package/dist/index.node.mjs +91 -130
- package/dist/index.node.mjs.map +1 -1
- package/package.json +12 -12
- package/dist/esm/index.esm.js.map +0 -1
package/dist/index.node.mjs
CHANGED
@@ -2,9 +2,10 @@ import { _isFirebaseServerApp, _getProvider, getApp, _registerComponent, registe
  import { Component } from '@firebase/component';
  import { FirebaseError, getModularInstance } from '@firebase/util';
  import { Logger } from '@firebase/logger';
+ import { __asyncGenerator, __await } from 'tslib';

  var name = "@firebase/ai";
- var version = "1.4.1-canary.d91169f06";
+ var version = "1.4.1-canary.f11b55294";

  /**
   * @license
@@ -625,8 +626,8 @@ class AIService {
      constructor(app, backend, authProvider, appCheckProvider) {
          this.app = app;
          this.backend = backend;
-         const appCheck = appCheckProvider?.getImmediate({ optional: true });
-         const auth = authProvider?.getImmediate({ optional: true });
+         const appCheck = appCheckProvider === null || appCheckProvider === void 0 ? void 0 : appCheckProvider.getImmediate({ optional: true });
+         const auth = authProvider === null || authProvider === void 0 ? void 0 : authProvider.getImmediate({ optional: true });
          this.auth = auth || null;
          this.appCheck = appCheck || null;
          if (backend instanceof VertexAIBackend) {
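The only change in this hunk is that optional chaining (`?.`, an ES2020 feature) has been compiled into explicit null/undefined checks so the bundle stays valid for an ES2017 target. A minimal sketch of the equivalence, not taken from the package (the `provider` object below is a stand-in):

    // ES2020+ source: call getImmediate only if provider is non-nullish.
    const provider = { getImmediate: (opts) => ({ optional: opts.optional }) };
    const a = provider?.getImmediate({ optional: true });

    // ES2017 downlevel, as emitted in the hunk above: `void 0` is `undefined`,
    // and the check short-circuits before the property access.
    const b = provider === null || provider === void 0
        ? void 0
        : provider.getImmediate({ optional: true });

    console.log(a, b); // both log { optional: true }

For longer chains the compiler also introduces temporaries (`_a`, `_b`, ...) so each link is evaluated only once, which is the `var _a, _b, ...` pattern visible in the hunks that follow.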
@@ -798,13 +799,14 @@ class AIModel {
       * @internal
       */
      constructor(ai, modelName) {
-         if (!ai.app?.options?.apiKey) {
+         var _a, _b, _c, _d, _e, _f;
+         if (!((_b = (_a = ai.app) === null || _a === void 0 ? void 0 : _a.options) === null || _b === void 0 ? void 0 : _b.apiKey)) {
              throw new AIError(AIErrorCode.NO_API_KEY, `The "apiKey" field is empty in the local Firebase config. Firebase AI requires this field to contain a valid API key.`);
          }
-         else if (!ai.app?.options?.projectId) {
+         else if (!((_d = (_c = ai.app) === null || _c === void 0 ? void 0 : _c.options) === null || _d === void 0 ? void 0 : _d.projectId)) {
              throw new AIError(AIErrorCode.NO_PROJECT_ID, `The "projectId" field is empty in the local Firebase config. Firebase AI requires this field to contain a valid project ID.`);
          }
-         else if (!ai.app?.options?.appId) {
+         else if (!((_f = (_e = ai.app) === null || _e === void 0 ? void 0 : _e.options) === null || _f === void 0 ? void 0 : _f.appId)) {
              throw new AIError(AIErrorCode.NO_APP_ID, `The "appId" field is empty in the local Firebase config. Firebase AI requires this field to contain a valid app ID.`);
          }
          else {
@@ -932,7 +934,8 @@ class RequestUrl {
          return url.toString();
      }
      get baseUrl() {
-         return this.requestOptions?.baseUrl || DEFAULT_BASE_URL;
+         var _a;
+         return ((_a = this.requestOptions) === null || _a === void 0 ? void 0 : _a.baseUrl) || DEFAULT_BASE_URL;
      }
      get apiVersion() {
          return DEFAULT_API_VERSION; // TODO: allow user-set options if that feature becomes available
@@ -1008,7 +1011,7 @@ async function makeRequest(model, task, apiSettings, stream, body, requestOption
      try {
          const request = await constructRequest(model, task, apiSettings, stream, body, requestOptions);
          // Timeout is 180s by default
-         const timeoutMillis = requestOptions?.timeout != null && requestOptions.timeout >= 0
+         const timeoutMillis = (requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeout) != null && requestOptions.timeout >= 0
              ? requestOptions.timeout
              : DEFAULT_FETCH_TIMEOUT_MS;
          const abortController = new AbortController();
@@ -1032,7 +1035,10 @@ async function makeRequest(model, task, apiSettings, stream, body, requestOption
          if (response.status === 403 &&
              errorDetails &&
              errorDetails.some((detail) => detail.reason === 'SERVICE_DISABLED') &&
-             errorDetails.some((detail) => detail.links?.[0]?.description.includes('Google developers console API activation'))) {
+             errorDetails.some((detail) => {
+                 var _a, _b;
+                 return (_b = (_a = detail.links) === null || _a === void 0 ? void 0 : _a[0]) === null || _b === void 0 ? void 0 : _b.description.includes('Google developers console API activation');
+             })) {
              throw new AIError(AIErrorCode.API_NOT_ENABLED, `The Firebase AI SDK requires the Firebase AI ` +
                  `API ('firebasevertexai.googleapis.com') to be enabled in your ` +
                  `Firebase project. Enable this API by visiting the Firebase Console ` +
@@ -1177,9 +1183,10 @@ function addHelpers(response) {
   * Returns all text found in all parts of first candidate.
   */
  function getText(response) {
+     var _a, _b, _c, _d;
      const textStrings = [];
-     if (response.candidates?.[0].content?.parts) {
-         for (const part of response.candidates?.[0].content?.parts) {
+     if ((_b = (_a = response.candidates) === null || _a === void 0 ? void 0 : _a[0].content) === null || _b === void 0 ? void 0 : _b.parts) {
+         for (const part of (_d = (_c = response.candidates) === null || _c === void 0 ? void 0 : _c[0].content) === null || _d === void 0 ? void 0 : _d.parts) {
              if (part.text) {
                  textStrings.push(part.text);
              }
@@ -1196,9 +1203,10 @@ function getText(response) {
   * Returns {@link FunctionCall}s associated with first candidate.
   */
  function getFunctionCalls(response) {
+     var _a, _b, _c, _d;
      const functionCalls = [];
-     if (response.candidates?.[0].content?.parts) {
-         for (const part of response.candidates?.[0].content?.parts) {
+     if ((_b = (_a = response.candidates) === null || _a === void 0 ? void 0 : _a[0].content) === null || _b === void 0 ? void 0 : _b.parts) {
+         for (const part of (_d = (_c = response.candidates) === null || _c === void 0 ? void 0 : _c[0].content) === null || _d === void 0 ? void 0 : _d.parts) {
              if (part.functionCall) {
                  functionCalls.push(part.functionCall);
              }
@@ -1217,9 +1225,10 @@ function getFunctionCalls(response) {
   * @internal
   */
  function getInlineDataParts(response) {
+     var _a, _b, _c, _d;
      const data = [];
-     if (response.candidates?.[0].content?.parts) {
-         for (const part of response.candidates?.[0].content?.parts) {
+     if ((_b = (_a = response.candidates) === null || _a === void 0 ? void 0 : _a[0].content) === null || _b === void 0 ? void 0 : _b.parts) {
+         for (const part of (_d = (_c = response.candidates) === null || _c === void 0 ? void 0 : _c[0].content) === null || _d === void 0 ? void 0 : _d.parts) {
              if (part.inlineData) {
                  data.push(part);
              }
@@ -1238,18 +1247,19 @@ function hadBadFinishReason(candidate) {
          badFinishReasons.some(reason => reason === candidate.finishReason));
  }
  function formatBlockErrorMessage(response) {
+     var _a, _b, _c;
      let message = '';
      if ((!response.candidates || response.candidates.length === 0) &&
          response.promptFeedback) {
          message += 'Response was blocked';
-         if (response.promptFeedback?.blockReason) {
+         if ((_a = response.promptFeedback) === null || _a === void 0 ? void 0 : _a.blockReason) {
              message += ` due to ${response.promptFeedback.blockReason}`;
          }
-         if (response.promptFeedback?.blockReasonMessage) {
+         if ((_b = response.promptFeedback) === null || _b === void 0 ? void 0 : _b.blockReasonMessage) {
              message += `: ${response.promptFeedback.blockReasonMessage}`;
          }
      }
-     else if (response.candidates?.[0]) {
+     else if ((_c = response.candidates) === null || _c === void 0 ? void 0 : _c[0]) {
          const firstCandidate = response.candidates[0];
          if (hadBadFinishReason(firstCandidate)) {
              message += `Candidate was blocked due to ${firstCandidate.finishReason}`;
@@ -1268,11 +1278,12 @@ function formatBlockErrorMessage(response) {
   * @internal
   */
  async function handlePredictResponse(response) {
+     var _a;
      const responseJson = await response.json();
      const images = [];
      let filteredReason = undefined;
      // The backend should always send a non-empty array of predictions if the response was successful.
-     if (!responseJson.predictions || responseJson.predictions?.length === 0) {
+     if (!responseJson.predictions || ((_a = responseJson.predictions) === null || _a === void 0 ? void 0 : _a.length) === 0) {
          throw new AIError(AIErrorCode.RESPONSE_ERROR, 'No predictions or filtered reason received from Vertex AI. Please report this issue with the full error details at https://github.com/firebase/firebase-js-sdk/issues.');
      }
      for (const prediction of responseJson.predictions) {
@@ -1336,12 +1347,13 @@ async function handlePredictResponse(response) {
   * @internal
   */
  function mapGenerateContentRequest(generateContentRequest) {
-     generateContentRequest.safetySettings?.forEach(safetySetting => {
+     var _a, _b;
+     (_a = generateContentRequest.safetySettings) === null || _a === void 0 ? void 0 : _a.forEach(safetySetting => {
          if (safetySetting.method) {
              throw new AIError(AIErrorCode.UNSUPPORTED, 'SafetySetting.method is not supported in the the Gemini Developer API. Please remove this property.');
          }
      });
-     if (generateContentRequest.generationConfig?.topK) {
+     if ((_b = generateContentRequest.generationConfig) === null || _b === void 0 ? void 0 : _b.topK) {
          const roundedTopK = Math.round(generateContentRequest.generationConfig.topK);
          if (roundedTopK !== generateContentRequest.generationConfig.topK) {
              logger.warn('topK in GenerationConfig has been rounded to the nearest integer to match the format for requests to the Gemini Developer API.');
@@ -1382,10 +1394,7 @@ function mapGenerateContentResponse(googleAIResponse) {
   */
  function mapCountTokensRequest(countTokensRequest, model) {
      const mappedCountTokensRequest = {
-         generateContentRequest: {
-             model,
-             ...countTokensRequest
-         }
+         generateContentRequest: Object.assign({ model }, countTokensRequest)
      };
      return mappedCountTokensRequest;
  }
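This hunk, and the later `Object.assign` hunks, are all the same transformation: object spread in the source is rewritten for the ES2017 target. A minimal sketch of the equivalence, not taken from the package (the values below are stand-ins):

    // Stand-in values for illustration only.
    const model = 'models/my-model';
    const countTokensRequest = { contents: [] };

    // ES2018+ source style:
    const viaSpread = { model, ...countTokensRequest };

    // Downleveled form seen in the hunk above; a fresh object literal is the
    // target, so neither input is mutated, and later sources still overwrite
    // earlier keys, matching spread semantics for plain data objects.
    const viaAssign = Object.assign({ model }, countTokensRequest);

    console.log(viaSpread, viaAssign); // structurally identical results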
@@ -1405,6 +1414,7 @@ function mapGenerateContentCandidates(candidates) {
      let mappedSafetyRatings;
      if (mappedCandidates) {
          candidates.forEach(candidate => {
+             var _a;
              // Map citationSources to citations.
              let citationMetadata;
              if (candidate.citationMetadata) {
@@ -1415,18 +1425,14 @@ function mapGenerateContentCandidates(candidates) {
              // Assign missing candidate SafetyRatings properties to their defaults if undefined.
              if (candidate.safetyRatings) {
                  mappedSafetyRatings = candidate.safetyRatings.map(safetyRating => {
-                     return {
-                         ...safetyRating,
-                         severity: safetyRating.severity ?? HarmSeverity.HARM_SEVERITY_UNSUPPORTED,
-                         probabilityScore: safetyRating.probabilityScore ?? 0,
-                         severityScore: safetyRating.severityScore ?? 0
-                     };
+                     var _a, _b, _c;
+                     return Object.assign(Object.assign({}, safetyRating), { severity: (_a = safetyRating.severity) !== null && _a !== void 0 ? _a : HarmSeverity.HARM_SEVERITY_UNSUPPORTED, probabilityScore: (_b = safetyRating.probabilityScore) !== null && _b !== void 0 ? _b : 0, severityScore: (_c = safetyRating.severityScore) !== null && _c !== void 0 ? _c : 0 });
                  });
              }
              // videoMetadata is not supported.
              // Throw early since developers may send a long video as input and only expect to pay
              // for inference on a small portion of the video.
-             if (candidate.content?.parts.some(part => part?.videoMetadata)) {
+             if ((_a = candidate.content) === null || _a === void 0 ? void 0 : _a.parts.some(part => part === null || part === void 0 ? void 0 : part.videoMetadata)) {
                  throw new AIError(AIErrorCode.UNSUPPORTED, 'Part.videoMetadata is not supported in the Gemini Developer API. Please remove this property.');
              }
              const mappedCandidate = {
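The safety-rating hunks above and below replace the nullish-coalescing operator (`??`, ES2020) with an explicit null/undefined ternary. A minimal sketch of that rewrite, not from the package (`safetyRating` here is a stand-in object):

    // Stand-in value; a probabilityScore of 0 is legitimate and must survive.
    const safetyRating = { probabilityScore: 0 };

    // ES2020+ source:
    const scoreA = safetyRating.probabilityScore ?? 1;

    // ES2017 downlevel as seen in the diff: the temporary _a keeps the
    // left-hand side from being evaluated twice, and only null/undefined
    // fall through to the default, so the falsy 0 is preserved (unlike ||).
    var _a;
    const scoreB = (_a = safetyRating.probabilityScore) !== null && _a !== void 0 ? _a : 1;

    console.log(scoreA, scoreB); // 0 0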
@@ -1447,12 +1453,13 @@ function mapPromptFeedback(promptFeedback) {
      // Assign missing SafetyRating properties to their defaults if undefined.
      const mappedSafetyRatings = [];
      promptFeedback.safetyRatings.forEach(safetyRating => {
+         var _a, _b, _c;
          mappedSafetyRatings.push({
              category: safetyRating.category,
              probability: safetyRating.probability,
-             severity: safetyRating.severity ?? HarmSeverity.HARM_SEVERITY_UNSUPPORTED,
-             probabilityScore: safetyRating.probabilityScore ?? 0,
-             severityScore: safetyRating.severityScore ?? 0,
+             severity: (_a = safetyRating.severity) !== null && _a !== void 0 ? _a : HarmSeverity.HARM_SEVERITY_UNSUPPORTED,
+             probabilityScore: (_b = safetyRating.probabilityScore) !== null && _b !== void 0 ? _b : 0,
+             severityScore: (_c = safetyRating.severityScore) !== null && _c !== void 0 ? _c : 0,
              blocked: safetyRating.blocked
          });
      });
@@ -1513,22 +1520,24 @@ async function getResponsePromise(stream, apiSettings) {
          allResponses.push(value);
      }
  }
- async function* generateResponseSequence(stream, apiSettings) {
-     const reader = stream.getReader();
-     while (true) {
-         const { value, done } = await reader.read();
-         if (done) {
-             break;
-         }
-         let enhancedResponse;
-         if (apiSettings.backend.backendType === BackendType.GOOGLE_AI) {
-             enhancedResponse = createEnhancedContentResponse(mapGenerateContentResponse(value));
-         }
-         else {
-             enhancedResponse = createEnhancedContentResponse(value);
+ function generateResponseSequence(stream, apiSettings) {
+     return __asyncGenerator(this, arguments, function* generateResponseSequence_1() {
+         const reader = stream.getReader();
+         while (true) {
+             const { value, done } = yield __await(reader.read());
+             if (done) {
+                 break;
+             }
+             let enhancedResponse;
+             if (apiSettings.backend.backendType === BackendType.GOOGLE_AI) {
+                 enhancedResponse = createEnhancedContentResponse(mapGenerateContentResponse(value));
+             }
+             else {
+                 enhancedResponse = createEnhancedContentResponse(value);
+             }
+             yield yield __await(enhancedResponse);
          }
-         yield enhancedResponse;
-     }
+     });
  }
  /**
   * Reads a raw stream from the fetch response and join incomplete
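The rewritten `generateResponseSequence` is why the `tslib` import was added in the first hunk: an ES2017 target cannot emit `async function*` directly, so the generator is expressed through tslib's `__asyncGenerator` and `__await` helpers (`await x` becomes `yield __await(x)`, and `yield x` becomes `yield yield __await(x)`). A minimal sketch of the same rewrite on a toy generator, not taken from the package:

    import { __asyncGenerator, __await } from 'tslib';

    // ES2018+ source: a toy async generator.
    async function* countTo(n) {
        for (let i = 1; i <= n; i++) {
            yield i;
        }
    }

    // Roughly the ES2017 shape emitted by the compiler, mirroring the hunk above.
    function countToDownleveled(n) {
        return __asyncGenerator(this, arguments, function* countTo_1() {
            for (let i = 1; i <= n; i++) {
                yield yield __await(i); // inner __await wraps the value, outer yields it
            }
        });
    }

    // Both are consumed the same way by callers:
    // for await (const i of countToDownleveled(3)) { console.log(i); }

Both forms produce an object that implements the async-iterator protocol, so callers that `for await` over the result do not need to change.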
@@ -1580,7 +1589,7 @@ function getResponseStream(inputStream) {
  function aggregateResponses(responses) {
      const lastResponse = responses[responses.length - 1];
      const aggregatedResponse = {
-         promptFeedback: lastResponse?.promptFeedback
+         promptFeedback: lastResponse === null || lastResponse === void 0 ? void 0 : lastResponse.promptFeedback
      };
      for (const response of responses) {
          if (response.candidates) {
@@ -1937,7 +1946,7 @@ class ChatSession {
          this._history = [];
          this._sendPromise = Promise.resolve();
          this._apiSettings = apiSettings;
-         if (params?.history) {
+         if (params === null || params === void 0 ? void 0 : params.history) {
              validateChatHistory(params.history);
              this._history = params.history;
          }
@@ -1956,14 +1965,15 @@ class ChatSession {
       * {@link GenerateContentResult}
       */
      async sendMessage(request) {
+         var _a, _b, _c, _d, _e;
          await this._sendPromise;
          const newContent = formatNewContent(request);
          const generateContentRequest = {
-             safetySettings: this.params?.safetySettings,
-             generationConfig: this.params?.generationConfig,
-             tools: this.params?.tools,
-             toolConfig: this.params?.toolConfig,
-             systemInstruction: this.params?.systemInstruction,
+             safetySettings: (_a = this.params) === null || _a === void 0 ? void 0 : _a.safetySettings,
+             generationConfig: (_b = this.params) === null || _b === void 0 ? void 0 : _b.generationConfig,
+             tools: (_c = this.params) === null || _c === void 0 ? void 0 : _c.tools,
+             toolConfig: (_d = this.params) === null || _d === void 0 ? void 0 : _d.toolConfig,
+             systemInstruction: (_e = this.params) === null || _e === void 0 ? void 0 : _e.systemInstruction,
              contents: [...this._history, newContent]
          };
          let finalResult = {};
@@ -1971,13 +1981,14 @@ class ChatSession {
          this._sendPromise = this._sendPromise
              .then(() => generateContent(this._apiSettings, this.model, generateContentRequest, this.requestOptions))
              .then(result => {
+             var _a, _b;
              if (result.response.candidates &&
                  result.response.candidates.length > 0) {
                  this._history.push(newContent);
                  const responseContent = {
-                     parts: result.response.candidates?.[0].content.parts || [],
+                     parts: ((_a = result.response.candidates) === null || _a === void 0 ? void 0 : _a[0].content.parts) || [],
                      // Response seems to come back without a role set.
-                     role: result.response.candidates?.[0].content.role || 'model'
+                     role: ((_b = result.response.candidates) === null || _b === void 0 ? void 0 : _b[0].content.role) || 'model'
                  };
                  this._history.push(responseContent);
              }
@@ -1998,14 +2009,15 @@ class ChatSession {
       * and a response promise.
       */
      async sendMessageStream(request) {
+         var _a, _b, _c, _d, _e;
          await this._sendPromise;
          const newContent = formatNewContent(request);
          const generateContentRequest = {
-             safetySettings: this.params?.safetySettings,
-             generationConfig: this.params?.generationConfig,
-             tools: this.params?.tools,
-             toolConfig: this.params?.toolConfig,
-             systemInstruction: this.params?.systemInstruction,
+             safetySettings: (_a = this.params) === null || _a === void 0 ? void 0 : _a.safetySettings,
+             generationConfig: (_b = this.params) === null || _b === void 0 ? void 0 : _b.generationConfig,
+             tools: (_c = this.params) === null || _c === void 0 ? void 0 : _c.tools,
+             toolConfig: (_d = this.params) === null || _d === void 0 ? void 0 : _d.toolConfig,
+             systemInstruction: (_e = this.params) === null || _e === void 0 ? void 0 : _e.systemInstruction,
              contents: [...this._history, newContent]
          };
          const streamPromise = generateContentStream(this._apiSettings, this.model, generateContentRequest, this.requestOptions);
@@ -2021,7 +2033,7 @@ class ChatSession {
              .then(response => {
              if (response.candidates && response.candidates.length > 0) {
                  this._history.push(newContent);
-                 const responseContent = { ...response.candidates[0].content };
+                 const responseContent = Object.assign({}, response.candidates[0].content);
                  // Response seems to come back without a role set.
                  if (!responseContent.role) {
                      responseContent.role = 'model';
@@ -2114,14 +2126,7 @@ class GenerativeModel extends AIModel {
       */
      async generateContent(request) {
          const formattedParams = formatGenerateContentInput(request);
-         return generateContent(this._apiSettings, this.model, {
-             generationConfig: this.generationConfig,
-             safetySettings: this.safetySettings,
-             tools: this.tools,
-             toolConfig: this.toolConfig,
-             systemInstruction: this.systemInstruction,
-             ...formattedParams
-         }, this.requestOptions);
+         return generateContent(this._apiSettings, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction }, formattedParams), this.requestOptions);
      }
      /**
       * Makes a single streaming call to the model
@@ -2131,33 +2136,14 @@ class GenerativeModel extends AIModel {
       */
      async generateContentStream(request) {
          const formattedParams = formatGenerateContentInput(request);
-         return generateContentStream(this._apiSettings, this.model, {
-             generationConfig: this.generationConfig,
-             safetySettings: this.safetySettings,
-             tools: this.tools,
-             toolConfig: this.toolConfig,
-             systemInstruction: this.systemInstruction,
-             ...formattedParams
-         }, this.requestOptions);
+         return generateContentStream(this._apiSettings, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction }, formattedParams), this.requestOptions);
      }
      /**
       * Gets a new {@link ChatSession} instance which can be used for
       * multi-turn chats.
       */
      startChat(startChatParams) {
-         return new ChatSession(this._apiSettings, this.model, {
-             tools: this.tools,
-             toolConfig: this.toolConfig,
-             systemInstruction: this.systemInstruction,
-             generationConfig: this.generationConfig,
-             safetySettings: this.safetySettings,
-             /**
-              * Overrides params inherited from GenerativeModel with those explicitly set in the
-              * StartChatParams. For example, if startChatParams.generationConfig is set, it'll override
-              * this.generationConfig.
-              */
-             ...startChatParams
-         }, this.requestOptions);
+         return new ChatSession(this._apiSettings, this.model, Object.assign({ tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, generationConfig: this.generationConfig, safetySettings: this.safetySettings }, startChatParams), this.requestOptions);
      }
      /**
       * Counts the tokens in the provided request.
@@ -2243,10 +2229,7 @@ class ImagenModel extends AIModel {
       * @beta
       */
      async generateImages(prompt) {
-         const body = createPredictRequestBody(prompt, {
-             ...this.generationConfig,
-             ...this.safetySettings
-         });
+         const body = createPredictRequestBody(prompt, Object.assign(Object.assign({}, this.generationConfig), this.safetySettings));
          const response = await makeRequest(this.model, Task.PREDICT, this._apiSettings,
          /* stream */ false, JSON.stringify(body), this.requestOptions);
          return handlePredictResponse(response);
@@ -2271,11 +2254,7 @@ class ImagenModel extends AIModel {
       * If all images are filtered, the `images` array will be empty.
       */
      async generateImagesGCS(prompt, gcsURI) {
-         const body = createPredictRequestBody(prompt, {
-             gcsURI,
-             ...this.generationConfig,
-             ...this.safetySettings
-         });
+         const body = createPredictRequestBody(prompt, Object.assign(Object.assign({ gcsURI }, this.generationConfig), this.safetySettings));
          const response = await makeRequest(this.model, Task.PREDICT, this._apiSettings,
          /* stream */ false, JSON.stringify(body), this.requestOptions);
          return handlePredictResponse(response);
@@ -2366,10 +2345,7 @@ class Schema {
   */
  class IntegerSchema extends Schema {
      constructor(schemaParams) {
-         super({
-             type: SchemaType.INTEGER,
-             ...schemaParams
-         });
+         super(Object.assign({ type: SchemaType.INTEGER }, schemaParams));
      }
  }
  /**
@@ -2378,10 +2354,7 @@ class IntegerSchema extends Schema {
   */
  class NumberSchema extends Schema {
      constructor(schemaParams) {
-         super({
-             type: SchemaType.NUMBER,
-             ...schemaParams
-         });
+         super(Object.assign({ type: SchemaType.NUMBER }, schemaParams));
      }
  }
  /**
@@ -2390,10 +2363,7 @@ class NumberSchema extends Schema {
   */
  class BooleanSchema extends Schema {
      constructor(schemaParams) {
-         super({
-             type: SchemaType.BOOLEAN,
-             ...schemaParams
-         });
+         super(Object.assign({ type: SchemaType.BOOLEAN }, schemaParams));
      }
  }
  /**
@@ -2403,10 +2373,7 @@ class BooleanSchema extends Schema {
   */
  class StringSchema extends Schema {
      constructor(schemaParams, enumValues) {
-         super({
-             type: SchemaType.STRING,
-             ...schemaParams
-         });
+         super(Object.assign({ type: SchemaType.STRING }, schemaParams));
          this.enum = enumValues;
      }
      /**
@@ -2428,10 +2395,7 @@ class StringSchema extends Schema {
   */
  class ArraySchema extends Schema {
      constructor(schemaParams, items) {
-         super({
-             type: SchemaType.ARRAY,
-             ...schemaParams
-         });
+         super(Object.assign({ type: SchemaType.ARRAY }, schemaParams));
          this.items = items;
      }
      /**
@@ -2450,10 +2414,7 @@ class ArraySchema extends Schema {
   */
  class ObjectSchema extends Schema {
      constructor(schemaParams, properties, optionalProperties = []) {
-         super({
-             type: SchemaType.OBJECT,
-             ...schemaParams
-         });
+         super(Object.assign({ type: SchemaType.OBJECT }, schemaParams));
          this.properties = properties;
          this.optionalProperties = optionalProperties;
      }
@@ -2462,7 +2423,7 @@ class ObjectSchema extends Schema {
       */
      toJSON() {
          const obj = super.toJSON();
-         obj.properties = { ...this.properties };
+         obj.properties = Object.assign({}, this.properties);
          const required = [];
          if (this.optionalProperties) {
              for (const propertyKey of this.optionalProperties) {
@@ -2655,8 +2616,8 @@ function registerAI() {
          return new AIService(app, backend, auth, appCheckProvider);
      }, "PUBLIC" /* ComponentType.PUBLIC */).setMultipleInstances(true));
      registerVersion(name, version, 'node');
-     // BUILD_TARGET will be replaced by values like
-     registerVersion(name, version, '
+     // BUILD_TARGET will be replaced by values like esm2017, cjs2017, etc during the compilation
+     registerVersion(name, version, 'esm2017');
  }
  registerAI();