koishi-plugin-chatluna-google-gemini-adapter 1.1.0-alpha.4 → 1.1.0-alpha.5
- package/lib/index.cjs +0 -137
- package/lib/index.d.ts +1 -2
- package/lib/index.mjs +0 -137
- package/package.json +1 -1
package/lib/index.cjs
CHANGED
@@ -333,143 +333,6 @@ var GeminiRequester = class extends import_api.ModelRequester {
   static {
     __name(this, "GeminiRequester");
   }
-  async completion(params) {
-    try {
-      const response = await this._post(
-        `models/${params.model}`,
-        {
-          contents: await langchainMessageToGeminiMessage(
-            params.input,
-            params.model
-          ),
-          safetySettings: [
-            {
-              category: "HARM_CATEGORY_HARASSMENT",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_HATE_SPEECH",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_DANGEROUS_CONTENT",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_CIVIC_INTEGRITY",
-              threshold: "BLOCK_NONE"
-            }
-          ],
-          generationConfig: {
-            stopSequences: params.stop,
-            temperature: params.temperature,
-            maxOutputTokens: params.model.includes("vision") ? void 0 : params.maxTokens,
-            topP: params.topP
-            // thinkingConfig: { includeThoughts: true }
-          },
-          tools: params.tools != null || this._pluginConfig.googleSearch ? formatToolsToGeminiAITools(
-            params.tools ?? [],
-            this._pluginConfig,
-            params.model
-          ) : void 0
-        },
-        {
-          signal: params.signal
-        }
-      );
-      const rawResponse = await response.text();
-      let parsedResponse;
-      try {
-        parsedResponse = JSON.parse(rawResponse);
-      } catch (e) {
-        logger.error("error with parse json", rawResponse);
-        throw new import_error.ChatLunaError(import_error.ChatLunaErrorCode.API_REQUEST_FAILED, e);
-      }
-      let groundingContent = "";
-      let currentGroudingIndex = 0;
-      let reasoningContent = "";
-      let content = "";
-      const functionCall = {
-        name: "",
-        args: "",
-        arguments: ""
-      };
-      const part = parsedResponse.candidates[0].content;
-      const messagePart = partAsType(part.parts[0]);
-      const chatFunctionCallingPart = partAsType(
-        part.parts[0]
-      );
-      if (messagePart.text) {
-        if (messagePart.thought) {
-          reasoningContent = messagePart.text;
-        }
-        content = messagePart.text;
-      }
-      const deltaFunctionCall = chatFunctionCallingPart.functionCall;
-      if (deltaFunctionCall) {
-        let args = deltaFunctionCall.args?.input ?? deltaFunctionCall.args;
-        try {
-          let parsedArgs = JSON.parse(args);
-          if (typeof parsedArgs !== "string") {
-            args = parsedArgs;
-          }
-          parsedArgs = JSON.parse(args);
-          if (typeof parsedArgs !== "string") {
-            args = parsedArgs;
-          }
-        } catch (e) {
-        }
-        functionCall.args = JSON.stringify(args);
-        functionCall.name = deltaFunctionCall.name;
-        functionCall.arguments = deltaFunctionCall.args;
-      }
-      for (const source of parsedResponse.candidates[0].groundingMetadata?.groundingChunks ?? []) {
-        groundingContent += `[^${currentGroudingIndex++}]: [${source.web.title}](${source.web.uri})
-`;
-      }
-      try {
-        const messageChunk = new import_messages2.AIMessageChunk(content);
-        messageChunk.additional_kwargs = {
-          function_call: functionCall.name.length > 0 ? {
-            name: functionCall.name,
-            arguments: functionCall.args,
-            args: functionCall.arguments
-          } : void 0
-          // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        };
-        messageChunk.content = content;
-        if (groundingContent.length > 0) {
-          logger.debug(`grounding content: ${groundingContent}`);
-          if (this._pluginConfig.groundingContentDisplay) {
-            messageChunk.content += `
-
-${groundingContent}`;
-          }
-        }
-        if (reasoningContent.length > 0) {
-          logger.debug(`reasoning content: ${reasoningContent}`);
-        }
-        const generationChunk = new import_outputs.ChatGenerationChunk({
-          message: messageChunk,
-          text: messageChunk.content
-        });
-        return generationChunk;
-      } catch (e) {
-        logger.error("error with", parsedResponse);
-        throw new import_error.ChatLunaError(import_error.ChatLunaErrorCode.API_REQUEST_FAILED, e);
-      }
-    } catch (e) {
-      if (e instanceof import_error.ChatLunaError) {
-        throw e;
-      } else {
-        throw new import_error.ChatLunaError(import_error.ChatLunaErrorCode.API_REQUEST_FAILED, e);
-      }
-    }
-  }
   async *completionStream(params) {
     try {
       const response = await this._post(
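The removed non-streaming completion method assembled a Gemini request payload (contents, permissive safetySettings, a generationConfig derived from the request params, optional tools) and, judging from the context lines above, the retained completionStream path posts a request of the same shape. A minimal illustrative sketch of that payload; the interface names GeminiRequestBody and SafetySetting are hypothetical and only summarize the fields visible in the removed code:

// Illustrative sketch only: type names are hypothetical, fields mirror the removed code above.
interface SafetySetting {
    category: string
    threshold: string // every category above used "BLOCK_NONE"
}

interface GeminiRequestBody {
    // from langchainMessageToGeminiMessage(params.input, params.model)
    contents: unknown[]
    // five harm categories, all thresholds set to BLOCK_NONE
    safetySettings: SafetySetting[]
    generationConfig: {
        stopSequences?: string[]
        temperature?: number
        // omitted for models whose name includes "vision"
        maxOutputTokens?: number
        topP?: number
    }
    // formatToolsToGeminiAITools(...) when tools are passed or googleSearch is enabled
    tools?: unknown[]
}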
package/lib/index.d.ts
CHANGED
@@ -2,7 +2,7 @@ import { ChatLunaPlugin } from 'koishi-plugin-chatluna/services/chat';
 import { Context, Logger, Schema } from 'koishi';
 import { AIMessageChunk, BaseMessage, ChatMessageChunk, HumanMessageChunk, MessageType, SystemMessageChunk } from '@langchain/core/messages';
 import { StructuredTool } from '@langchain/core/tools';
-import {
+import { ChatGenerationChunk } from '@langchain/core/outputs';
 import { EmbeddingsRequester, EmbeddingsRequestParams, ModelRequester, ModelRequestParams } from 'koishi-plugin-chatluna/llm-core/platform/api';
 import { ClientConfig } from 'koishi-plugin-chatluna/llm-core/platform/config';
 import { PlatformModelAndEmbeddingsClient } from 'koishi-plugin-chatluna/llm-core/platform/client';
@@ -113,7 +113,6 @@ export class GeminiRequester extends ModelRequester implements EmbeddingsRequest
     private _plugin;
     private _pluginConfig;
     constructor(_config: ClientConfig, _plugin: ChatLunaPlugin, _pluginConfig: Config);
-    completion(params: ModelRequestParams): Promise<ChatGeneration>;
    completionStream(params: ModelRequestParams): AsyncGenerator<ChatGenerationChunk>;
    embeddings(params: EmbeddingsRequestParams): Promise<number[] | number[][]>;
    getModels(): Promise<string[]>;
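With completion() dropped from the public declarations, a caller that previously awaited a single result would now drain the async generator returned by completionStream(). A minimal sketch, assuming GeminiRequester is importable from the package's root entry and that an already constructed instance and a ModelRequestParams value are at hand; the collectText helper is hypothetical:

import { ModelRequestParams } from 'koishi-plugin-chatluna/llm-core/platform/api'
// Assumed entry point; the class is declared in lib/index.d.ts.
import { GeminiRequester } from 'koishi-plugin-chatluna-google-gemini-adapter'

// Hypothetical helper: consume the stream and concatenate the chunk text.
async function collectText(
    requester: GeminiRequester,
    params: ModelRequestParams
): Promise<string> {
    let text = ''
    for await (const chunk of requester.completionStream(params)) {
        text += chunk.text // each chunk is a ChatGenerationChunk
    }
    return text
}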
package/lib/index.mjs
CHANGED
@@ -327,143 +327,6 @@ var GeminiRequester = class extends ModelRequester {
   static {
     __name(this, "GeminiRequester");
   }
-  async completion(params) {
-    try {
-      const response = await this._post(
-        `models/${params.model}`,
-        {
-          contents: await langchainMessageToGeminiMessage(
-            params.input,
-            params.model
-          ),
-          safetySettings: [
-            {
-              category: "HARM_CATEGORY_HARASSMENT",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_HATE_SPEECH",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_DANGEROUS_CONTENT",
-              threshold: "BLOCK_NONE"
-            },
-            {
-              category: "HARM_CATEGORY_CIVIC_INTEGRITY",
-              threshold: "BLOCK_NONE"
-            }
-          ],
-          generationConfig: {
-            stopSequences: params.stop,
-            temperature: params.temperature,
-            maxOutputTokens: params.model.includes("vision") ? void 0 : params.maxTokens,
-            topP: params.topP
-            // thinkingConfig: { includeThoughts: true }
-          },
-          tools: params.tools != null || this._pluginConfig.googleSearch ? formatToolsToGeminiAITools(
-            params.tools ?? [],
-            this._pluginConfig,
-            params.model
-          ) : void 0
-        },
-        {
-          signal: params.signal
-        }
-      );
-      const rawResponse = await response.text();
-      let parsedResponse;
-      try {
-        parsedResponse = JSON.parse(rawResponse);
-      } catch (e) {
-        logger.error("error with parse json", rawResponse);
-        throw new ChatLunaError(ChatLunaErrorCode.API_REQUEST_FAILED, e);
-      }
-      let groundingContent = "";
-      let currentGroudingIndex = 0;
-      let reasoningContent = "";
-      let content = "";
-      const functionCall = {
-        name: "",
-        args: "",
-        arguments: ""
-      };
-      const part = parsedResponse.candidates[0].content;
-      const messagePart = partAsType(part.parts[0]);
-      const chatFunctionCallingPart = partAsType(
-        part.parts[0]
-      );
-      if (messagePart.text) {
-        if (messagePart.thought) {
-          reasoningContent = messagePart.text;
-        }
-        content = messagePart.text;
-      }
-      const deltaFunctionCall = chatFunctionCallingPart.functionCall;
-      if (deltaFunctionCall) {
-        let args = deltaFunctionCall.args?.input ?? deltaFunctionCall.args;
-        try {
-          let parsedArgs = JSON.parse(args);
-          if (typeof parsedArgs !== "string") {
-            args = parsedArgs;
-          }
-          parsedArgs = JSON.parse(args);
-          if (typeof parsedArgs !== "string") {
-            args = parsedArgs;
-          }
-        } catch (e) {
-        }
-        functionCall.args = JSON.stringify(args);
-        functionCall.name = deltaFunctionCall.name;
-        functionCall.arguments = deltaFunctionCall.args;
-      }
-      for (const source of parsedResponse.candidates[0].groundingMetadata?.groundingChunks ?? []) {
-        groundingContent += `[^${currentGroudingIndex++}]: [${source.web.title}](${source.web.uri})
-`;
-      }
-      try {
-        const messageChunk = new AIMessageChunk2(content);
-        messageChunk.additional_kwargs = {
-          function_call: functionCall.name.length > 0 ? {
-            name: functionCall.name,
-            arguments: functionCall.args,
-            args: functionCall.arguments
-          } : void 0
-          // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        };
-        messageChunk.content = content;
-        if (groundingContent.length > 0) {
-          logger.debug(`grounding content: ${groundingContent}`);
-          if (this._pluginConfig.groundingContentDisplay) {
-            messageChunk.content += `
-
-${groundingContent}`;
-          }
-        }
-        if (reasoningContent.length > 0) {
-          logger.debug(`reasoning content: ${reasoningContent}`);
-        }
-        const generationChunk = new ChatGenerationChunk({
-          message: messageChunk,
-          text: messageChunk.content
-        });
-        return generationChunk;
-      } catch (e) {
-        logger.error("error with", parsedResponse);
-        throw new ChatLunaError(ChatLunaErrorCode.API_REQUEST_FAILED, e);
-      }
-    } catch (e) {
-      if (e instanceof ChatLunaError) {
-        throw e;
-      } else {
-        throw new ChatLunaError(ChatLunaErrorCode.API_REQUEST_FAILED, e);
-      }
-    }
-  }
   async *completionStream(params) {
     try {
       const response = await this._post(
package/package.json
CHANGED