@pga-ai/adapters-llm-google 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +42 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +98 -0
- package/dist/index.js.map +1 -0
- package/package.json +43 -0
package/README.md
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
# @pga-ai/adapters-llm-google
|
|
2
|
+
|
|
3
|
+
> Google Gemini adapter for GSEP (Genomic Self-Evolving Prompts)
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install @pga-ai/core @pga-ai/adapters-llm-google
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
```typescript
|
|
14
|
+
import { PGA } from '@pga-ai/core';
|
|
15
|
+
import { GeminiAdapter } from '@pga-ai/adapters-llm-google';
|
|
16
|
+
|
|
17
|
+
const pga = new PGA({
|
|
18
|
+
llm: new GeminiAdapter({
|
|
19
|
+
apiKey: process.env.GOOGLE_API_KEY!,
|
|
20
|
+
model: 'gemini-2.0-flash',
|
|
21
|
+
}),
|
|
22
|
+
});
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
## Configuration
|
|
26
|
+
|
|
27
|
+
| Option | Type | Default | Description |
|
|
28
|
+
|--------|------|---------|-------------|
|
|
29
|
+
| `apiKey` | `string` | required | Google AI API key |
|
|
30
|
+
| `model` | `string` | `'gemini-2.0-flash'` | Gemini model to use |
|
|
31
|
+
| `temperature` | `number` | `0.7` | Temperature (0-2) |
|
|
32
|
+
| `maxRetries` | `number` | `2` | Max retries on failure |
|
|
33
|
+
|
|
34
|
+
## Supported Models
|
|
35
|
+
|
|
36
|
+
- `gemini-2.0-flash` (recommended — fast, capable)
|
|
37
|
+
- `gemini-1.5-pro` (most capable)
|
|
38
|
+
- `gemini-1.5-flash` (fastest)
|
|
39
|
+
|
|
40
|
+
## License
|
|
41
|
+
|
|
42
|
+
MIT
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import type { LLMAdapter, Message, ChatOptions, ChatResponse, ChatChunk } from '@pga-ai/core';
/**
 * Configuration for {@link GeminiAdapter}.
 */
export interface GeminiAdapterConfig {
    /** Google AI API key (required). */
    apiKey: string;
    /** Gemini model id. Default: 'gemini-2.0-flash'. */
    model?: string;
    /** Sampling temperature. Default: 0.7. */
    temperature?: number;
    /** Max retries on failure. Default: 2. NOTE(review): confirm the implementation honors this option. */
    maxRetries?: number;
}
/**
 * Google Gemini adapter implementing the @pga-ai/core LLMAdapter contract
 * on top of the official `@google/generative-ai` SDK.
 */
export declare class GeminiAdapter implements LLMAdapter {
    /** Provider identifier reported to @pga-ai/core. */
    readonly name = "google";
    /** Resolved model id (config.model, or the default when omitted). */
    readonly model: string;
    private client;
    private config;
    constructor(config: GeminiAdapterConfig);
    /** Sends the conversation to Gemini and resolves with the full response. */
    chat(messages: Message[], options?: ChatOptions): Promise<ChatResponse>;
    /** Streams the response as incremental chunks; the final chunk has done: true. */
    stream(messages: Message[], options?: ChatOptions): AsyncIterableIterator<ChatChunk>;
    /** Heuristic USD cost estimate (~4 characters per token). */
    estimateCost(messages: Message[]): Promise<number>;
}
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAsBA,OAAO,KAAK,EACR,UAAU,EACV,OAAO,EACP,WAAW,EACX,YAAY,EACZ,SAAS,EACZ,MAAM,cAAc,CAAC;AAEtB,MAAM,WAAW,mBAAmB;IAIhC,MAAM,EAAE,MAAM,CAAC;IAMf,KAAK,CAAC,EAAE,MAAM,CAAC;IAMf,WAAW,CAAC,EAAE,MAAM,CAAC;IAMrB,UAAU,CAAC,EAAE,MAAM,CAAC;CACvB;AAED,qBAAa,aAAc,YAAW,UAAU;IAC5C,QAAQ,CAAC,IAAI,YAAY;IACzB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IAEvB,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,MAAM,CAAgC;gBAElC,MAAM,EAAE,mBAAmB;IAYjC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;IA8CtE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,qBAAqB,CAAC,SAAS,CAAC;IA0CrF,YAAY,CAAC,QAAQ,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,MAAM,CAAC;CAY3D"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import { GoogleGenerativeAI } from '@google/generative-ai';
|
|
2
|
+
export class GeminiAdapter {
|
|
3
|
+
name = 'google';
|
|
4
|
+
model;
|
|
5
|
+
client;
|
|
6
|
+
config;
|
|
7
|
+
constructor(config) {
|
|
8
|
+
this.config = {
|
|
9
|
+
apiKey: config.apiKey,
|
|
10
|
+
model: config.model || 'gemini-2.0-flash',
|
|
11
|
+
temperature: config.temperature ?? 0.7,
|
|
12
|
+
maxRetries: config.maxRetries ?? 2,
|
|
13
|
+
};
|
|
14
|
+
this.model = this.config.model;
|
|
15
|
+
this.client = new GoogleGenerativeAI(this.config.apiKey);
|
|
16
|
+
}
|
|
17
|
+
async chat(messages, options) {
|
|
18
|
+
const systemMessages = messages.filter(m => m.role === 'system');
|
|
19
|
+
const chatMessages = messages.filter(m => m.role !== 'system');
|
|
20
|
+
const systemPrompt = systemMessages.map(m => m.content).join('\n\n');
|
|
21
|
+
const model = this.client.getGenerativeModel({
|
|
22
|
+
model: this.model,
|
|
23
|
+
systemInstruction: systemPrompt || undefined,
|
|
24
|
+
});
|
|
25
|
+
const geminiMessages = chatMessages.map(m => ({
|
|
26
|
+
role: m.role === 'assistant' ? 'model' : 'user',
|
|
27
|
+
parts: [{ text: m.content }],
|
|
28
|
+
}));
|
|
29
|
+
try {
|
|
30
|
+
const chat = model.startChat({
|
|
31
|
+
history: geminiMessages.slice(0, -1),
|
|
32
|
+
generationConfig: {
|
|
33
|
+
maxOutputTokens: options?.maxTokens ?? 4096,
|
|
34
|
+
temperature: options?.temperature ?? this.config.temperature,
|
|
35
|
+
},
|
|
36
|
+
});
|
|
37
|
+
const lastMessage = geminiMessages[geminiMessages.length - 1];
|
|
38
|
+
const result = await chat.sendMessage(lastMessage?.parts[0]?.text ?? '');
|
|
39
|
+
const response = result.response;
|
|
40
|
+
return {
|
|
41
|
+
content: response.text(),
|
|
42
|
+
usage: {
|
|
43
|
+
inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
|
|
44
|
+
outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
|
|
45
|
+
},
|
|
46
|
+
metadata: {
|
|
47
|
+
model: this.model,
|
|
48
|
+
finishReason: response.candidates?.[0]?.finishReason ?? 'STOP',
|
|
49
|
+
},
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
catch (error) {
|
|
53
|
+
throw new Error(`Gemini API error: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
async *stream(messages, options) {
|
|
57
|
+
const systemMessages = messages.filter(m => m.role === 'system');
|
|
58
|
+
const chatMessages = messages.filter(m => m.role !== 'system');
|
|
59
|
+
const systemPrompt = systemMessages.map(m => m.content).join('\n\n');
|
|
60
|
+
const model = this.client.getGenerativeModel({
|
|
61
|
+
model: this.model,
|
|
62
|
+
systemInstruction: systemPrompt || undefined,
|
|
63
|
+
});
|
|
64
|
+
const geminiMessages = chatMessages.map(m => ({
|
|
65
|
+
role: m.role === 'assistant' ? 'model' : 'user',
|
|
66
|
+
parts: [{ text: m.content }],
|
|
67
|
+
}));
|
|
68
|
+
try {
|
|
69
|
+
const chat = model.startChat({
|
|
70
|
+
history: geminiMessages.slice(0, -1),
|
|
71
|
+
generationConfig: {
|
|
72
|
+
maxOutputTokens: options?.maxTokens ?? 4096,
|
|
73
|
+
temperature: options?.temperature ?? this.config.temperature,
|
|
74
|
+
},
|
|
75
|
+
});
|
|
76
|
+
const lastMessage = geminiMessages[geminiMessages.length - 1];
|
|
77
|
+
const result = await chat.sendMessageStream(lastMessage?.parts[0]?.text ?? '');
|
|
78
|
+
for await (const chunk of result.stream) {
|
|
79
|
+
const text = chunk.text();
|
|
80
|
+
if (text) {
|
|
81
|
+
yield { delta: text, done: false };
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
yield { delta: '', done: true };
|
|
85
|
+
}
|
|
86
|
+
catch (error) {
|
|
87
|
+
throw new Error(`Gemini streaming error: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
async estimateCost(messages) {
|
|
91
|
+
const totalChars = messages.reduce((sum, m) => sum + m.content.length, 0);
|
|
92
|
+
const estimatedTokens = Math.ceil(totalChars / 4);
|
|
93
|
+
const inputCost = (estimatedTokens * 0.7 * 0.10) / 1_000_000;
|
|
94
|
+
const outputCost = (estimatedTokens * 0.3 * 0.40) / 1_000_000;
|
|
95
|
+
return inputCost + outputCost;
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAqBA,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAkC3D,MAAM,OAAO,aAAa;IACb,IAAI,GAAG,QAAQ,CAAC;IAChB,KAAK,CAAS;IAEf,MAAM,CAAqB;IAC3B,MAAM,CAAgC;IAE9C,YAAY,MAA2B;QACnC,IAAI,CAAC,MAAM,GAAG;YACV,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,KAAK,EAAE,MAAM,CAAC,KAAK,IAAI,kBAAkB;YACzC,WAAW,EAAE,MAAM,CAAC,WAAW,IAAI,GAAG;YACtC,UAAU,EAAE,MAAM,CAAC,UAAU,IAAI,CAAC;SACrC,CAAC;QAEF,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;QAC/B,IAAI,CAAC,MAAM,GAAG,IAAI,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAC7D,CAAC;IAED,KAAK,CAAC,IAAI,CAAC,QAAmB,EAAE,OAAqB;QACjD,MAAM,cAAc,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;QACjE,MAAM,YAAY,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;QAC/D,MAAM,YAAY,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAErE,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC;YACzC,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,iBAAiB,EAAE,YAAY,IAAI,SAAS;SAC/C,CAAC,CAAC;QAEH,MAAM,cAAc,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YAC1C,IAAI,EAAE,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,OAAgB,CAAC,CAAC,CAAC,MAAe;YACjE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;SAC/B,CAAC,CAAC,CAAC;QAEJ,IAAI,CAAC;YACD,MAAM,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC;gBACzB,OAAO,EAAE,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACpC,gBAAgB,EAAE;oBACd,eAAe,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI;oBAC3C,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,IAAI,CAAC,MAAM,CAAC,WAAW;iBAC/D;aACJ,CAAC,CAAC;YAEH,MAAM,WAAW,GAAG,cAAc,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YAC9D,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;YACzE,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;YAEjC,OAAO;gBACH,OAAO,EAAE,QAAQ,CAAC,IAAI,EAAE;gBACxB,KAAK,EAAE;oBACH,WAAW,EAAE,QAAQ,CAAC,aAAa,EAAE,gBAAgB,IAAI,CAAC;oBAC1D,YAAY,EAAE,QAAQ,CAAC,aAAa,EAAE,oBAAoB,IAAI,CAAC;iBAClE;gBACD,QAAQ,EAAE;oBACN,KAAK,EAAE,IAAI,CAAC,KAAK;o
BACjB,YAAY,EAAE,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,YAAY,IAAI,MAAM;iBACjE;aACJ,CAAC;QACN,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,IAAI,KAAK,CACX,qBAAqB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,EAAE,CAClF,CAAC;QACN,CAAC;IACL,CAAC;IAED,KAAK,CAAC,CAAC,MAAM,CAAC,QAAmB,EAAE,OAAqB;QACpD,MAAM,cAAc,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;QACjE,MAAM,YAAY,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;QAC/D,MAAM,YAAY,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAErE,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC;YACzC,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,iBAAiB,EAAE,YAAY,IAAI,SAAS;SAC/C,CAAC,CAAC;QAEH,MAAM,cAAc,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YAC1C,IAAI,EAAE,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,OAAgB,CAAC,CAAC,CAAC,MAAe;YACjE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;SAC/B,CAAC,CAAC,CAAC;QAEJ,IAAI,CAAC;YACD,MAAM,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC;gBACzB,OAAO,EAAE,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACpC,gBAAgB,EAAE;oBACd,eAAe,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI;oBAC3C,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,IAAI,CAAC,MAAM,CAAC,WAAW;iBAC/D;aACJ,CAAC,CAAC;YAEH,MAAM,WAAW,GAAG,cAAc,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YAC9D,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;YAE/E,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;gBACtC,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;gBAC1B,IAAI,IAAI,EAAE,CAAC;oBACP,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;gBACvC,CAAC;YACL,CAAC;YAED,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;QACpC,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,MAAM,IAAI,KAAK,CACX,2BAA2B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,EAAE,CACxF,CAAC;QACN,CAAC;IACL,CAAC;IAED,KAAK,CAAC,YAAY,CAAC,QAAmB;QAClC,MAAM,UAAU,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC
,CAAC,CAAC;QAC1E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC;QAKlD,MAAM,SAAS,GAAG,CAAC,eAAe,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG,SAAS,CAAC;QAC7D,MAAM,UAAU,GAAG,CAAC,eAAe,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG,SAAS,CAAC;QAE9D,OAAO,SAAS,GAAG,UAAU,CAAC;IAClC,CAAC;CACJ"}
|
package/package.json
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@pga-ai/adapters-llm-google",
|
|
3
|
+
"version": "0.8.0",
|
|
4
|
+
"description": "Google Gemini adapter for GSEP",
|
|
5
|
+
"author": "Luis Alfredo Velasquez Duran <contact@gsepcore.com>",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"homepage": "https://gsepcore.com",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "https://github.com/LuisvelMarketer/pga-platform",
|
|
11
|
+
"directory": "packages/adapters-llm/google"
|
|
12
|
+
},
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/LuisvelMarketer/pga-platform/issues"
|
|
15
|
+
},
|
|
16
|
+
"type": "module",
|
|
17
|
+
"main": "./dist/index.js",
|
|
18
|
+
"types": "./dist/index.d.ts",
|
|
19
|
+
"exports": {
|
|
20
|
+
".": {
|
|
21
|
+
"import": "./dist/index.js",
|
|
22
|
+
"types": "./dist/index.d.ts"
|
|
23
|
+
}
|
|
24
|
+
},
|
|
25
|
+
"files": [
|
|
26
|
+
"dist"
|
|
27
|
+
],
|
|
28
|
+
"scripts": {
|
|
29
|
+
"build": "tsc --build",
|
|
30
|
+
"clean": "rm -rf dist",
|
|
31
|
+
"dev": "tsc --watch"
|
|
32
|
+
},
|
|
33
|
+
"dependencies": {
|
|
34
|
+
"@google/generative-ai": "^0.21.0",
|
|
35
|
+
"@pga-ai/core": "*"
|
|
36
|
+
},
|
|
37
|
+
"devDependencies": {
|
|
38
|
+
"typescript": "^5.6.0"
|
|
39
|
+
},
|
|
40
|
+
"publishConfig": {
|
|
41
|
+
"access": "public"
|
|
42
|
+
}
|
|
43
|
+
}
|