@google/gemini-cli-a2a-server 0.19.0-nightly.20251122.42c2e1b21 → 0.19.0-nightly.20251124.e177314a4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/a2a-server.mjs +64 -44
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +2 -2
package/dist/a2a-server.mjs
CHANGED
@@ -307689,8 +307689,8 @@ var Float64Vector = import_vector.default.Float64Vector;
 var PointerVector = import_vector.default.PointerVector;
 
 // packages/core/dist/src/generated/git-commit.js
-var GIT_COMMIT_INFO = "42c2e1b21";
-var CLI_VERSION = "0.19.0-nightly.20251122.42c2e1b21";
+var GIT_COMMIT_INFO = "e177314a4";
+var CLI_VERSION = "0.19.0-nightly.20251124.e177314a4";
 
 // packages/core/dist/src/ide/detect-ide.js
 var IDE_DEFINITIONS = {
@@ -310581,7 +310581,7 @@ async function createContentGenerator(config2, gcConfig, sessionId2) {
   if (gcConfig.fakeResponses) {
     return FakeContentGenerator.fromFile(gcConfig.fakeResponses);
   }
-  const version3 = "0.19.0-nightly.20251122.42c2e1b21";
+  const version3 = "0.19.0-nightly.20251124.e177314a4";
   const userAgent = `GeminiCLI/${version3} (${process.platform}; ${process.arch})`;
   const baseHeaders = {
     "User-Agent": userAgent
@@ -343134,49 +343134,32 @@ var BaseLlmClient = class {
     this.config = config2;
   }
   async generateJson(options2) {
-    const { modelConfigKey, contents,
+    const { schema, modelConfigKey, contents, systemInstruction, abortSignal, promptId, maxAttempts } = options2;
     const { model, generateContentConfig } = this.config.modelConfigService.getResolvedConfig(modelConfigKey);
-    const
-
-
-
-      responseJsonSchema: schema,
-      responseMimeType: "application/json"
-    };
-    try {
-      const apiCall = () => this.contentGenerator.generateContent({
-        model,
-        config: requestConfig,
-        contents
-      }, promptId);
-      const shouldRetryOnContent = (response) => {
-        const text = getResponseText(response)?.trim();
-        if (!text) {
-          return true;
-        }
-        try {
-          JSON.parse(this.cleanJsonResponse(text, model));
-          return false;
-        } catch (_e) {
-          return true;
-        }
-      };
-      const result = await retryWithBackoff(apiCall, {
-        shouldRetryOnContent,
-        maxAttempts: maxAttempts ?? DEFAULT_MAX_ATTEMPTS
-      });
-      return JSON.parse(this.cleanJsonResponse(getResponseText(result).trim(), model));
-    } catch (error) {
-      if (abortSignal.aborted) {
-        throw error;
+    const shouldRetryOnContent = (response) => {
+      const text = getResponseText(response)?.trim();
+      if (!text) {
+        return true;
       }
-
-
-
-
+      try {
+        JSON.parse(this.cleanJsonResponse(text, model));
+        return false;
+      } catch (_e) {
+        return true;
       }
-
-
+    };
+    const result = await this._generateWithRetry({
+      model,
+      contents,
+      config: {
+        ...generateContentConfig,
+        ...systemInstruction && { systemInstruction },
+        responseJsonSchema: schema,
+        responseMimeType: "application/json",
+        abortSignal
+      }
+    }, promptId, maxAttempts, shouldRetryOnContent, "generateJson");
+    return JSON.parse(this.cleanJsonResponse(getResponseText(result).trim(), model));
   }
   async generateEmbedding(texts) {
     if (!texts || texts.length === 0) {
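
Note on the hunk above: generateJson now builds its JSON-specific request config inline and hands retry and error reporting off to the shared _generateWithRetry helper added in the next hunk. A minimal caller-side sketch follows, assuming an existing BaseLlmClient instance named llmClient and placeholder option values; the option names (schema, modelConfigKey, contents, systemInstruction, abortSignal, promptId, maxAttempts) come straight from the destructuring shown above.

// Hypothetical usage sketch — llmClient and all values are illustrative, not part of this package.
const controller = new AbortController();
const result = await llmClient.generateJson({
  modelConfigKey: "classifier",                // assumed key resolvable by modelConfigService
  contents: [{ role: "user", parts: [{ text: "Reply with JSON: {\"ok\": true}" }] }],
  schema: { type: "object", properties: { ok: { type: "boolean" } } },
  systemInstruction: "Respond with JSON only.",
  abortSignal: controller.signal,
  promptId: "example-prompt-id",               // placeholder id
  maxAttempts: 2                               // optional; falls back to DEFAULT_MAX_ATTEMPTS
});
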
@@ -343210,6 +343193,43 @@ var BaseLlmClient = class {
     }
     return text;
   }
+  async generateContent(options2) {
+    const { modelConfigKey, contents, systemInstruction, abortSignal, promptId, maxAttempts } = options2;
+    const { model, generateContentConfig } = this.config.modelConfigService.getResolvedConfig(modelConfigKey);
+    const shouldRetryOnContent = (response) => {
+      const text = getResponseText(response)?.trim();
+      return !text;
+    };
+    return this._generateWithRetry({
+      model,
+      contents,
+      config: {
+        ...generateContentConfig,
+        ...systemInstruction && { systemInstruction },
+        abortSignal
+      }
+    }, promptId, maxAttempts, shouldRetryOnContent, "generateContent");
+  }
+  async _generateWithRetry(requestParams, promptId, maxAttempts, shouldRetryOnContent, errorContext) {
+    const abortSignal = requestParams.config?.abortSignal;
+    try {
+      const apiCall = () => this.contentGenerator.generateContent(requestParams, promptId);
+      return await retryWithBackoff(apiCall, {
+        shouldRetryOnContent,
+        maxAttempts: maxAttempts ?? DEFAULT_MAX_ATTEMPTS
+      });
+    } catch (error) {
+      if (abortSignal?.aborted) {
+        throw error;
+      }
+      if (error instanceof Error && error.message.includes("Retry attempts exhausted")) {
+        await reportError(error, `API returned invalid content after all retries.`, requestParams.contents, `${errorContext}-invalid-content`);
+      } else {
+        await reportError(error, `Error generating content via API.`, requestParams.contents, `${errorContext}-api`);
+      }
+      throw new Error(`Failed to generate content: ${getErrorMessage(error)}`);
+    }
+  }
 };
 
 // packages/core/dist/src/utils/llm-edit-fixer.js
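
Note on the hunk above: _generateWithRetry centralizes the retryWithBackoff call, abort handling, and reportError reporting for both generateJson and the new generateContent, whose retry predicate fires only on an empty response. A minimal sketch of the plain-text path follows, again assuming an illustrative llmClient instance; the option names come from the destructuring in the added method.

// Hypothetical usage sketch — llmClient and all values are illustrative, not part of this package.
const response = await llmClient.generateContent({
  modelConfigKey: "summarizer",                // assumed key resolvable by modelConfigService
  contents: [{ role: "user", parts: [{ text: "Summarize the change in one sentence." }] }],
  abortSignal: new AbortController().signal,
  promptId: "example-prompt-id"                // placeholder id
});
// getResponseText is the same helper the bundle uses above to read the model's text output.
const text = getResponseText(response)?.trim();
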
@@ -367718,7 +367738,7 @@ async function getClientMetadata() {
   clientMetadataPromise = (async () => ({
     ideName: "IDE_UNSPECIFIED",
     pluginType: "GEMINI",
-    ideVersion: "0.19.0-nightly.20251122.42c2e1b21",
+    ideVersion: "0.19.0-nightly.20251124.e177314a4",
     platform: getPlatform(),
     updateChannel: await getReleaseChannel(__dirname5)
   }))();