koishi-plugin-chatluna-google-gemini-adapter 1.0.0-beta.2 → 1.0.0-beta.20

This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
package/lib/index.d.ts CHANGED
@@ -1,12 +1,111 @@
- import { ChatLunaPlugin } from 'koishi-plugin-chatluna/lib/services/chat';
- import { Context, Logger, Schema } from 'koishi';
- export declare let logger: Logger;
- export declare function apply(ctx: Context, config: Config): void;
- export interface Config extends ChatLunaPlugin.Config {
-     apiKeys: [string, string][];
-     maxTokens: number;
-     temperature: number;
- }
- export declare const Config: Schema<Config>;
- export declare const inject: string[];
- export declare const name = "chatluna-gemini-adapter";
+ import { ChatLunaPlugin } from 'koishi-plugin-chatluna/services/chat';
+ import { Context, Logger, Schema } from 'koishi';
+ import { AIMessageChunk, BaseMessage, ChatMessageChunk, HumanMessageChunk, MessageType, SystemMessageChunk } from '@langchain/core/messages';
+ import { StructuredTool } from '@langchain/core/tools';
+ import { ChatGenerationChunk } from '@langchain/core/outputs';
+ import { EmbeddingsRequester, EmbeddingsRequestParams, ModelRequester, ModelRequestParams } from 'koishi-plugin-chatluna/llm-core/platform/api';
+ import { ClientConfig } from 'koishi-plugin-chatluna/llm-core/platform/config';
+ import { PlatformModelAndEmbeddingsClient } from 'koishi-plugin-chatluna/llm-core/platform/client';
+ import { ChatHubBaseEmbeddings, ChatLunaChatModel } from 'koishi-plugin-chatluna/llm-core/platform/model';
+ import { ModelInfo } from 'koishi-plugin-chatluna/llm-core/platform/types';
+ export let logger: Logger;
+ export function apply(ctx: Context, config: Config): void;
+ export interface Config extends ChatLunaPlugin.Config {
+     apiKeys: [string, string][];
+     maxTokens: number;
+     temperature: number;
+ }
+ export const Config: Schema<Config>;
+ export const inject: string[];
+ export const name = "chatluna-google-gemini-adapter";
+ export interface ChatCompletionResponseMessage {
+     role: string;
+     parts?: ChatPart[];
+ }
+ export type ChatPart = ChatMessagePart | ChatUploadDataPart | ChatFunctionCallingPart | ChatFunctionResponsePart;
+ export type ChatMessagePart = {
+     text: string;
+ };
+ export type ChatUploadDataPart = {
+     inline_data: {
+         mime_type: string;
+         data?: string;
+     };
+ };
+ export type ChatFunctionCallingPart = {
+     functionCall: {
+         name: string;
+         args?: any;
+     };
+ };
+ export type ChatFunctionResponsePart = {
+     functionResponse: {
+         name: string;
+         response: any;
+     };
+ };
+ export interface ChatResponse {
+     candidates: {
+         content: ChatCompletionResponseMessage;
+         finishReason: string;
+         index: number;
+         safetyRatings: {
+             category: string;
+             probability: string;
+         }[];
+     }[];
+     promptFeedback: {
+         safetyRatings: {
+             category: string;
+             probability: string;
+         }[];
+     };
+ }
+ export interface ChatCompletionFunction {
+     name: string;
+     description?: string;
+     parameters?: {
+         [key: string]: any;
+     };
+ }
+ export interface ChatCompletionMessageFunctionCall {
+     name: string;
+     args?: any;
+ }
+ export interface CreateEmbeddingResponse {
+     embeddings: {
+         values: number[];
+     }[];
+ }
+ export type ChatCompletionResponseMessageRoleEnum = 'system' | 'model' | 'user' | 'function';
+ export function langchainMessageToGeminiMessage(messages: BaseMessage[], model?: string): Promise<ChatCompletionResponseMessage[]>;
+ export function partAsType<T extends ChatPart>(part: ChatPart): T;
+ export function formatToolsToGeminiAITools(tools: StructuredTool[]): ChatCompletionFunction[];
+ export function formatToolToGeminiAITool(tool: StructuredTool): ChatCompletionFunction;
+ export function messageTypeToGeminiRole(type: MessageType): ChatCompletionResponseMessageRoleEnum;
+ export function convertDeltaToMessageChunk(delta: Record<string, any>, defaultRole?: ChatCompletionResponseMessageRoleEnum): HumanMessageChunk | AIMessageChunk | SystemMessageChunk | ChatMessageChunk;
+ export class GeminiRequester extends ModelRequester implements EmbeddingsRequester {
+     private _config;
+     private _plugin;
+     constructor(_config: ClientConfig, _plugin: ChatLunaPlugin);
+     completionStream(params: ModelRequestParams): AsyncGenerator<ChatGenerationChunk>;
+     embeddings(params: EmbeddingsRequestParams): Promise<number[] | number[][]>;
+     getModels(): Promise<string[]>;
+     private _post;
+     private _get;
+     private _concatUrl;
+     private _buildHeaders;
+     init(): Promise<void>;
+     dispose(): Promise<void>;
+ }
+ export class GeminiClient extends PlatformModelAndEmbeddingsClient {
+     private _config;
+     platform: string;
+     private _requester;
+     private _models;
+     constructor(ctx: Context, _config: Config, clientConfig: ClientConfig, plugin: ChatLunaPlugin);
+     init(): Promise<void>;
+     refreshModels(): Promise<ModelInfo[]>;
+     getModels(): Promise<ModelInfo[]>;
+     protected _createModel(model: string): ChatLunaChatModel | ChatHubBaseEmbeddings;
+ }
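
For orientation, below is a minimal sketch of how the newly exported conversion helpers could be exercised, based purely on the type signatures in the new index.d.ts. The import subpath ('/lib'), the model id, and the standalone-call style are assumptions; inside the plugin these helpers are driven through ChatLuna's platform registry via apply(), not called directly.

// Sketch only: relies on the declarations above; '/lib' subpath and
// 'gemini-pro' model id are assumptions, not confirmed package API.
import { HumanMessage } from '@langchain/core/messages';
import {
    langchainMessageToGeminiMessage,
    messageTypeToGeminiRole,
    partAsType,
    ChatMessagePart
} from 'koishi-plugin-chatluna-google-gemini-adapter/lib';

async function sketch() {
    // BaseMessage[] -> Gemini-style { role, parts } messages.
    const geminiMessages = await langchainMessageToGeminiMessage(
        [new HumanMessage('Hello, Gemini!')],
        'gemini-pro' // assumed model id
    );

    // parts is optional, so guard before narrowing the ChatPart union
    // to the plain-text variant.
    const firstPart = geminiMessages[0].parts?.[0];
    if (firstPart) {
        const textPart = partAsType<ChatMessagePart>(firstPart);
        console.log(textPart.text);
    }

    // LangChain MessageType -> Gemini role ('system' | 'model' | 'user' | 'function').
    console.log(messageTypeToGeminiRole('human'));
}

void sketch();

Beyond these helpers, the notable structural change is that the new version exposes a GeminiRequester (streaming completions plus embeddings behind one requester) and a GeminiClient extending PlatformModelAndEmbeddingsClient, so a single client can serve both chat models and embedding models.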