@pga-ai/adapters-llm-ollama 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +70 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +118 -0
- package/dist/index.js.map +1 -0
- package/package.json +42 -0
package/README.md
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# @pga-ai/adapters-llm-ollama
|
|
2
|
+
|
|
3
|
+
> Ollama adapter for GSEP — use local LLMs with self-evolving prompts
|
|
4
|
+
|
|
5
|
+
No API keys needed. Run any open-source model locally.
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install @pga-ai/core @pga-ai/adapters-llm-ollama
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Prerequisites
|
|
14
|
+
|
|
15
|
+
Install and run [Ollama](https://ollama.com):
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
# Install Ollama, then:
|
|
19
|
+
ollama pull llama3
|
|
20
|
+
ollama serve
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
## Usage
|
|
24
|
+
|
|
25
|
+
```typescript
|
|
26
|
+
import { PGA } from '@pga-ai/core';
|
|
27
|
+
import { OllamaAdapter } from '@pga-ai/adapters-llm-ollama';
|
|
28
|
+
|
|
29
|
+
const pga = new PGA({
|
|
30
|
+
llm: new OllamaAdapter({
|
|
31
|
+
model: 'llama3',
|
|
32
|
+
}),
|
|
33
|
+
});
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
## Configuration
|
|
37
|
+
|
|
38
|
+
| Option | Type | Default | Description |
|
|
39
|
+
|--------|------|---------|-------------|
|
|
40
|
+
| `model` | `string` | required | Ollama model name |
|
|
41
|
+
| `baseURL` | `string` | `'http://localhost:11434'` | Ollama server URL |
|
|
42
|
+
| `temperature` | `number` | `0.7` | Temperature (0-2) |
|
|
43
|
+
| `timeout` | `number` | `120000` | Request timeout (ms) |
|
|
44
|
+
|
|
45
|
+
## Supported Models
|
|
46
|
+
|
|
47
|
+
Any model available in Ollama:
|
|
48
|
+
|
|
49
|
+
- `llama3` / `llama3.1` / `llama3.2`
|
|
50
|
+
- `mistral` / `mixtral`
|
|
51
|
+
- `deepseek-r1`
|
|
52
|
+
- `phi3` / `phi4`
|
|
53
|
+
- `qwen2` / `qwen2.5`
|
|
54
|
+
- `gemma2`
|
|
55
|
+
- And [hundreds more](https://ollama.com/library)
|
|
56
|
+
|
|
57
|
+
## Remote Ollama
|
|
58
|
+
|
|
59
|
+
Connect to Ollama running on a remote server:
|
|
60
|
+
|
|
61
|
+
```typescript
|
|
62
|
+
const llm = new OllamaAdapter({
|
|
63
|
+
model: 'llama3',
|
|
64
|
+
baseURL: 'http://your-gpu-server:11434',
|
|
65
|
+
});
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
## License
|
|
69
|
+
|
|
70
|
+
MIT
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { LLMAdapter, Message, ChatOptions, ChatResponse, ChatChunk } from '@pga-ai/core';
|
|
2
|
+
/**
 * Configuration options for {@link OllamaAdapter}.
 */
export interface OllamaAdapterConfig {
    /** Base URL of the Ollama server. Defaults to 'http://localhost:11434'. */
    baseURL?: string;
    /** Name of the Ollama model to use (e.g. 'llama3'). Required. */
    model: string;
    /** Sampling temperature forwarded to Ollama. Defaults to 0.7. */
    temperature?: number;
    /** Per-request timeout in milliseconds. Defaults to 120000 (2 minutes). */
    timeout?: number;
}
|
|
8
|
+
/**
 * LLMAdapter implementation backed by a local or remote Ollama server,
 * reached over its HTTP API (POST {baseURL}/api/chat).
 */
export declare class OllamaAdapter implements LLMAdapter {
    /** Adapter identifier; always "ollama". */
    readonly name = "ollama";
    /** Model name this adapter was constructed with. */
    readonly model: string;
    private baseURL;
    private config;
    constructor(config: OllamaAdapterConfig);
    /**
     * Send a non-streaming chat request and return the full response,
     * including the token counts reported by Ollama.
     */
    chat(messages: Message[], options?: ChatOptions): Promise<ChatResponse>;
    /**
     * Send a streaming chat request; yields incremental content chunks
     * followed by a final chunk with `done: true`.
     */
    stream(messages: Message[], options?: ChatOptions): AsyncIterableIterator<ChatChunk>;
}
|
|
17
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAuBA,OAAO,KAAK,EACR,UAAU,EACV,OAAO,EACP,WAAW,EACX,YAAY,EACZ,SAAS,EACZ,MAAM,cAAc,CAAC;AAEtB,MAAM,WAAW,mBAAmB;IAKhC,OAAO,CAAC,EAAE,MAAM,CAAC;IAMjB,KAAK,EAAE,MAAM,CAAC;IAMd,WAAW,CAAC,EAAE,MAAM,CAAC;IAMrB,OAAO,CAAC,EAAE,MAAM,CAAC;CACpB;AAUD,qBAAa,aAAc,YAAW,UAAU;IAC5C,QAAQ,CAAC,IAAI,YAAY;IACzB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IAEvB,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,MAAM,CAAgC;gBAElC,MAAM,EAAE,mBAAmB;IAYjC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;IAmDtE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,qBAAqB,CAAC,SAAS,CAAC;CAqE9F"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
export class OllamaAdapter {
|
|
2
|
+
name = 'ollama';
|
|
3
|
+
model;
|
|
4
|
+
baseURL;
|
|
5
|
+
config;
|
|
6
|
+
constructor(config) {
|
|
7
|
+
this.config = {
|
|
8
|
+
baseURL: config.baseURL || 'http://localhost:11434',
|
|
9
|
+
model: config.model,
|
|
10
|
+
temperature: config.temperature ?? 0.7,
|
|
11
|
+
timeout: config.timeout ?? 120000,
|
|
12
|
+
};
|
|
13
|
+
this.model = this.config.model;
|
|
14
|
+
this.baseURL = this.config.baseURL;
|
|
15
|
+
}
|
|
16
|
+
async chat(messages, options) {
|
|
17
|
+
const ollamaMessages = messages.map(m => ({
|
|
18
|
+
role: m.role,
|
|
19
|
+
content: m.content,
|
|
20
|
+
}));
|
|
21
|
+
try {
|
|
22
|
+
const response = await fetch(`${this.baseURL}/api/chat`, {
|
|
23
|
+
method: 'POST',
|
|
24
|
+
headers: { 'Content-Type': 'application/json' },
|
|
25
|
+
body: JSON.stringify({
|
|
26
|
+
model: this.model,
|
|
27
|
+
messages: ollamaMessages,
|
|
28
|
+
stream: false,
|
|
29
|
+
options: {
|
|
30
|
+
temperature: options?.temperature ?? this.config.temperature,
|
|
31
|
+
num_predict: options?.maxTokens ?? 4096,
|
|
32
|
+
},
|
|
33
|
+
}),
|
|
34
|
+
signal: AbortSignal.timeout(this.config.timeout),
|
|
35
|
+
});
|
|
36
|
+
if (!response.ok) {
|
|
37
|
+
throw new Error(`HTTP ${response.status}: ${await response.text()}`);
|
|
38
|
+
}
|
|
39
|
+
const data = await response.json();
|
|
40
|
+
return {
|
|
41
|
+
content: data.message.content,
|
|
42
|
+
usage: {
|
|
43
|
+
inputTokens: data.prompt_eval_count ?? 0,
|
|
44
|
+
outputTokens: data.eval_count ?? 0,
|
|
45
|
+
},
|
|
46
|
+
metadata: {
|
|
47
|
+
model: this.model,
|
|
48
|
+
totalDuration: data.total_duration,
|
|
49
|
+
},
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
catch (error) {
|
|
53
|
+
if (error instanceof TypeError && error.message.includes('fetch')) {
|
|
54
|
+
throw new Error(`Ollama connection failed at ${this.baseURL}. Is Ollama running? (ollama serve)`);
|
|
55
|
+
}
|
|
56
|
+
throw new Error(`Ollama API error: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
async *stream(messages, options) {
|
|
60
|
+
const ollamaMessages = messages.map(m => ({
|
|
61
|
+
role: m.role,
|
|
62
|
+
content: m.content,
|
|
63
|
+
}));
|
|
64
|
+
try {
|
|
65
|
+
const response = await fetch(`${this.baseURL}/api/chat`, {
|
|
66
|
+
method: 'POST',
|
|
67
|
+
headers: { 'Content-Type': 'application/json' },
|
|
68
|
+
body: JSON.stringify({
|
|
69
|
+
model: this.model,
|
|
70
|
+
messages: ollamaMessages,
|
|
71
|
+
stream: true,
|
|
72
|
+
options: {
|
|
73
|
+
temperature: options?.temperature ?? this.config.temperature,
|
|
74
|
+
num_predict: options?.maxTokens ?? 4096,
|
|
75
|
+
},
|
|
76
|
+
}),
|
|
77
|
+
signal: AbortSignal.timeout(this.config.timeout),
|
|
78
|
+
});
|
|
79
|
+
if (!response.ok) {
|
|
80
|
+
throw new Error(`HTTP ${response.status}: ${await response.text()}`);
|
|
81
|
+
}
|
|
82
|
+
if (!response.body) {
|
|
83
|
+
throw new Error('No response body for streaming');
|
|
84
|
+
}
|
|
85
|
+
const reader = response.body.getReader();
|
|
86
|
+
const decoder = new TextDecoder();
|
|
87
|
+
let buffer = '';
|
|
88
|
+
while (true) {
|
|
89
|
+
const { done, value } = await reader.read();
|
|
90
|
+
if (done)
|
|
91
|
+
break;
|
|
92
|
+
buffer += decoder.decode(value, { stream: true });
|
|
93
|
+
const lines = buffer.split('\n');
|
|
94
|
+
buffer = lines.pop() ?? '';
|
|
95
|
+
for (const line of lines) {
|
|
96
|
+
if (!line.trim())
|
|
97
|
+
continue;
|
|
98
|
+
const chunk = JSON.parse(line);
|
|
99
|
+
if (chunk.done) {
|
|
100
|
+
yield { delta: '', done: true };
|
|
101
|
+
return;
|
|
102
|
+
}
|
|
103
|
+
if (chunk.message.content) {
|
|
104
|
+
yield { delta: chunk.message.content, done: false };
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
yield { delta: '', done: true };
|
|
109
|
+
}
|
|
110
|
+
catch (error) {
|
|
111
|
+
if (error instanceof TypeError && error.message.includes('fetch')) {
|
|
112
|
+
throw new Error(`Ollama connection failed at ${this.baseURL}. Is Ollama running? (ollama serve)`);
|
|
113
|
+
}
|
|
114
|
+
throw new Error(`Ollama streaming error: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAiEA,MAAM,OAAO,aAAa;IACb,IAAI,GAAG,QAAQ,CAAC;IAChB,KAAK,CAAS;IAEf,OAAO,CAAS;IAChB,MAAM,CAAgC;IAE9C,YAAY,MAA2B;QACnC,IAAI,CAAC,MAAM,GAAG;YACV,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,wBAAwB;YACnD,KAAK,EAAE,MAAM,CAAC,KAAK;YACnB,WAAW,EAAE,MAAM,CAAC,WAAW,IAAI,GAAG;YACtC,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,MAAM;SACpC,CAAC;QAEF,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC;IACvC,CAAC;IAED,KAAK,CAAC,IAAI,CAAC,QAAmB,EAAE,OAAqB;QACjD,MAAM,cAAc,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YACtC,IAAI,EAAE,CAAC,CAAC,IAAI;YACZ,OAAO,EAAE,CAAC,CAAC,OAAO;SACrB,CAAC,CAAC,CAAC;QAEJ,IAAI,CAAC;YACD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,OAAO,WAAW,EAAE;gBACrD,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;gBAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;oBACjB,KAAK,EAAE,IAAI,CAAC,KAAK;oBACjB,QAAQ,EAAE,cAAc;oBACxB,MAAM,EAAE,KAAK;oBACb,OAAO,EAAE;wBACL,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,IAAI,CAAC,MAAM,CAAC,WAAW;wBAC5D,WAAW,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI;qBAC1C;iBACJ,CAAC;gBACF,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC;aACnD,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,QAAQ,QAAQ,CAAC,MAAM,KAAK,MAAM,QAAQ,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;YACzE,CAAC;YAED,MAAM,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAwB,CAAC;YAEzD,OAAO;gBACH,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,OAAO;gBAC7B,KAAK,EAAE;oBACH,WAAW,EAAE,IAAI,CAAC,iBAAiB,IAAI,CAAC;oBACxC,YAAY,EAAE,IAAI,CAAC,UAAU,IAAI,CAAC;iBACrC;gBACD,QAAQ,EAAE;oBACN,KAAK,EAAE,IAAI,CAAC,KAAK;oBACjB,aAAa,EAAE,IAAI,CAAC,cAAc;iBACrC;aACJ,CAAC;QACN,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAI,KAAK,YAAY,SAAS,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;gBAChE,MAAM,IAAI,KAAK,CACX,+BAA+B,IAAI,CAAC,OAAO,qCAAqC,CACnF,CAAC;YACN,CAAC;YACD,MAAM,IAAI,KAAK,CACX,qBAAqB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,EAAE,CAClF,CAAC;QACN,CAAC;IACL,CAAC;IAED,KAAK,CAAC,CAAC,MAAM,CAAC,QAA
mB,EAAE,OAAqB;QACpD,MAAM,cAAc,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YACtC,IAAI,EAAE,CAAC,CAAC,IAAI;YACZ,OAAO,EAAE,CAAC,CAAC,OAAO;SACrB,CAAC,CAAC,CAAC;QAEJ,IAAI,CAAC;YACD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,OAAO,WAAW,EAAE;gBACrD,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;gBAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;oBACjB,KAAK,EAAE,IAAI,CAAC,KAAK;oBACjB,QAAQ,EAAE,cAAc;oBACxB,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE;wBACL,WAAW,EAAE,OAAO,EAAE,WAAW,IAAI,IAAI,CAAC,MAAM,CAAC,WAAW;wBAC5D,WAAW,EAAE,OAAO,EAAE,SAAS,IAAI,IAAI;qBAC1C;iBACJ,CAAC;gBACF,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC;aACnD,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,QAAQ,QAAQ,CAAC,MAAM,KAAK,MAAM,QAAQ,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;YACzE,CAAC;YAED,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;gBACjB,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;YACtD,CAAC;YAED,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC;YACzC,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClC,IAAI,MAAM,GAAG,EAAE,CAAC;YAEhB,OAAO,IAAI,EAAE,CAAC;gBACV,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;gBAC5C,IAAI,IAAI;oBAAE,MAAM;gBAEhB,MAAM,IAAI,OAAO,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBACjC,MAAM,GAAG,KAAK,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC;gBAE3B,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;oBACvB,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE;wBAAE,SAAS;oBAC3B,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAuB,CAAC;oBAErD,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;wBACb,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;wBAChC,OAAO;oBACX,CAAC;oBAED,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,EAAE,CAAC;wBACxB,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;oBACxD,CAAC;gBACL,CAAC;YACL,CAAC;YAED,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;QACpC,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAI,KAAK,YAAY,SAAS,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;gBAChE,MAAM,IAAI,KAAK,CACX,+BAA+B,IAAI,CAAC,OAAO,qCAAqC,CACnF,CAAC;YACN,CAA
C;YACD,MAAM,IAAI,KAAK,CACX,2BAA2B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,EAAE,CACxF,CAAC;QACN,CAAC;IACL,CAAC;CACJ"}
|
package/package.json
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@pga-ai/adapters-llm-ollama",
|
|
3
|
+
"version": "0.8.0",
|
|
4
|
+
"description": "Ollama adapter for GSEP — use local LLMs with GSEP",
|
|
5
|
+
"author": "Luis Alfredo Velasquez Duran <contact@gsepcore.com>",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"homepage": "https://gsepcore.com",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "https://github.com/LuisvelMarketer/pga-platform",
|
|
11
|
+
"directory": "packages/adapters-llm/ollama"
|
|
12
|
+
},
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/LuisvelMarketer/pga-platform/issues"
|
|
15
|
+
},
|
|
16
|
+
"type": "module",
|
|
17
|
+
"main": "./dist/index.js",
|
|
18
|
+
"types": "./dist/index.d.ts",
|
|
19
|
+
"exports": {
    ".": {
        "types": "./dist/index.d.ts",
        "import": "./dist/index.js"
    }
},
|
|
25
|
+
"files": [
|
|
26
|
+
"dist"
|
|
27
|
+
],
|
|
28
|
+
"scripts": {
|
|
29
|
+
"build": "tsc --build",
|
|
30
|
+
"clean": "rm -rf dist",
|
|
31
|
+
"dev": "tsc --watch"
|
|
32
|
+
},
|
|
33
|
+
"dependencies": {
|
|
34
|
+
"@pga-ai/core": "*"
|
|
35
|
+
},
|
|
36
|
+
"devDependencies": {
|
|
37
|
+
"typescript": "^5.6.0"
|
|
38
|
+
},
|
|
39
|
+
"publishConfig": {
|
|
40
|
+
"access": "public"
|
|
41
|
+
}
|
|
42
|
+
}
|