learngraph 0.2.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +82 -1
- package/dist/cjs/llm/adapters/anthropic.js +124 -0
- package/dist/cjs/llm/adapters/anthropic.js.map +1 -0
- package/dist/cjs/llm/adapters/base.js +100 -0
- package/dist/cjs/llm/adapters/base.js.map +1 -0
- package/dist/cjs/llm/adapters/gemini.js +156 -0
- package/dist/cjs/llm/adapters/gemini.js.map +1 -0
- package/dist/cjs/llm/adapters/index.js +33 -0
- package/dist/cjs/llm/adapters/index.js.map +1 -0
- package/dist/cjs/llm/adapters/mediapipe.js +290 -0
- package/dist/cjs/llm/adapters/mediapipe.js.map +1 -0
- package/dist/cjs/llm/adapters/ollama.js +149 -0
- package/dist/cjs/llm/adapters/ollama.js.map +1 -0
- package/dist/cjs/llm/adapters/openai.js +126 -0
- package/dist/cjs/llm/adapters/openai.js.map +1 -0
- package/dist/cjs/llm/adapters/openrouter.js +190 -0
- package/dist/cjs/llm/adapters/openrouter.js.map +1 -0
- package/dist/cjs/llm/index.js +42 -5
- package/dist/cjs/llm/index.js.map +1 -1
- package/dist/cjs/llm/orchestrator.js +219 -0
- package/dist/cjs/llm/orchestrator.js.map +1 -0
- package/dist/cjs/llm/prompts.js +367 -0
- package/dist/cjs/llm/prompts.js.map +1 -0
- package/dist/cjs/types/llm.js +8 -0
- package/dist/cjs/types/llm.js.map +1 -0
- package/dist/esm/llm/adapters/anthropic.js +119 -0
- package/dist/esm/llm/adapters/anthropic.js.map +1 -0
- package/dist/esm/llm/adapters/base.js +95 -0
- package/dist/esm/llm/adapters/base.js.map +1 -0
- package/dist/esm/llm/adapters/gemini.js +151 -0
- package/dist/esm/llm/adapters/gemini.js.map +1 -0
- package/dist/esm/llm/adapters/index.js +13 -0
- package/dist/esm/llm/adapters/index.js.map +1 -0
- package/dist/esm/llm/adapters/mediapipe.js +252 -0
- package/dist/esm/llm/adapters/mediapipe.js.map +1 -0
- package/dist/esm/llm/adapters/ollama.js +144 -0
- package/dist/esm/llm/adapters/ollama.js.map +1 -0
- package/dist/esm/llm/adapters/openai.js +121 -0
- package/dist/esm/llm/adapters/openai.js.map +1 -0
- package/dist/esm/llm/adapters/openrouter.js +185 -0
- package/dist/esm/llm/adapters/openrouter.js.map +1 -0
- package/dist/esm/llm/index.js +12 -6
- package/dist/esm/llm/index.js.map +1 -1
- package/dist/esm/llm/orchestrator.js +214 -0
- package/dist/esm/llm/orchestrator.js.map +1 -0
- package/dist/esm/llm/prompts.js +360 -0
- package/dist/esm/llm/prompts.js.map +1 -0
- package/dist/esm/types/llm.js +7 -0
- package/dist/esm/types/llm.js.map +1 -0
- package/dist/types/llm/adapters/anthropic.d.ts +21 -0
- package/dist/types/llm/adapters/anthropic.d.ts.map +1 -0
- package/dist/types/llm/adapters/base.d.ts +46 -0
- package/dist/types/llm/adapters/base.d.ts.map +1 -0
- package/dist/types/llm/adapters/gemini.d.ts +30 -0
- package/dist/types/llm/adapters/gemini.d.ts.map +1 -0
- package/dist/types/llm/adapters/index.d.ts +14 -0
- package/dist/types/llm/adapters/index.d.ts.map +1 -0
- package/dist/types/llm/adapters/mediapipe.d.ts +113 -0
- package/dist/types/llm/adapters/mediapipe.d.ts.map +1 -0
- package/dist/types/llm/adapters/ollama.d.ts +30 -0
- package/dist/types/llm/adapters/ollama.d.ts.map +1 -0
- package/dist/types/llm/adapters/openai.d.ts +22 -0
- package/dist/types/llm/adapters/openai.d.ts.map +1 -0
- package/dist/types/llm/adapters/openrouter.d.ts +58 -0
- package/dist/types/llm/adapters/openrouter.d.ts.map +1 -0
- package/dist/types/llm/index.d.ts +5 -0
- package/dist/types/llm/index.d.ts.map +1 -1
- package/dist/types/llm/orchestrator.d.ts +35 -0
- package/dist/types/llm/orchestrator.d.ts.map +1 -0
- package/dist/types/llm/prompts.d.ts +269 -0
- package/dist/types/llm/prompts.d.ts.map +1 -0
- package/dist/types/types/index.d.ts +1 -0
- package/dist/types/types/index.d.ts.map +1 -1
- package/dist/types/types/llm.d.ts +337 -0
- package/dist/types/types/llm.d.ts.map +1 -0
- package/package.json +6 -2
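
The headline change in 0.4.0 is a new LLM integration layer: provider adapters (OpenAI, Anthropic, Gemini, Ollama, OpenRouter, MediaPipe), an orchestrator, prompt templates, and supporting types. As a rough sketch of how these pieces appear to fit together, based only on the exports and JSDoc examples visible in the hunks below (the `learngraph/llm` import path and the `createOrchestrator` call are assumptions taken from those examples):

```typescript
// Illustrative wiring only; names follow the JSDoc examples in the hunks below.
import {
  createGeminiAdapter,
  createOllamaAdapter,
  createOrchestrator, // signature assumed from the MediaPipe JSDoc example
} from 'learngraph/llm';

// Prefer a configured cloud adapter, otherwise fall back to a local Ollama model.
const gemini = createGeminiAdapter('gemini-2.0-flash-exp');
const adapter = gemini.isConfigured() ? gemini : createOllamaAdapter('llama3.2');

const orchestrator = createOrchestrator(adapter);
```
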
package/dist/esm/llm/adapters/gemini.js
@@ -0,0 +1,151 @@
+/**
+ * Google Gemini LLM adapter
+ *
+ * @packageDocumentation
+ */
+import { BaseLLMAdapter, LLMError } from './base.js';
+/**
+ * Adapter for Google Gemini models
+ *
+ * Supports:
+ * - gemini-2.0-flash-exp
+ * - gemini-1.5-pro
+ * - gemini-1.5-flash
+ * - gemini-1.0-pro
+ */
+export class GeminiAdapter extends BaseLLMAdapter {
+    baseUrl;
+    constructor(config) {
+        super(config);
+        // Use Google AI Studio API by default
+        this.baseUrl =
+            config.baseUrl ?? 'https://generativelanguage.googleapis.com/v1beta';
+    }
+    get provider() {
+        return 'gemini';
+    }
+    async complete(request) {
+        if (!this.isConfigured()) {
+            throw new LLMError('Gemini adapter is not configured. Set GOOGLE_API_KEY or GEMINI_API_KEY environment variable.', 'NOT_CONFIGURED', this.provider);
+        }
+        const geminiRequest = this.buildRequest(request);
+        try {
+            const response = await this.executeRequest(geminiRequest);
+            return this.parseResponse(response);
+        }
+        catch (error) {
+            if (error instanceof LLMError) {
+                throw error;
+            }
+            throw new LLMError(`Gemini API request failed: ${error instanceof Error ? error.message : 'Unknown error'}`, 'API_ERROR', this.provider, error instanceof Error ? error : undefined);
+        }
+    }
+    buildRequest(request) {
+        const geminiRequest = {
+            contents: [],
+            generationConfig: {
+                maxOutputTokens: request.maxTokens ?? this.config.maxTokens ?? 4096,
+                temperature: request.temperature ?? this.config.temperature ?? 0.3,
+            },
+        };
+        // Handle JSON response format
+        if (request.responseFormat === 'json') {
+            geminiRequest.generationConfig.responseMimeType = 'application/json';
+        }
+        // Convert messages to Gemini format
+        for (const message of request.messages) {
+            if (message.role === 'system') {
+                // Gemini uses systemInstruction for system messages
+                geminiRequest.systemInstruction = {
+                    parts: [{ text: message.content }],
+                };
+            }
+            else {
+                geminiRequest.contents.push({
+                    role: message.role === 'assistant' ? 'model' : 'user',
+                    parts: [{ text: message.content }],
+                });
+            }
+        }
+        return geminiRequest;
+    }
+    async executeRequest(request) {
+        const url = `${this.baseUrl}/models/${this.config.model}:generateContent?key=${this.config.apiKey}`;
+        const response = await fetch(url, {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+            },
+            body: JSON.stringify(request),
+            signal: AbortSignal.timeout(this.config.timeout ?? 60000),
+        });
+        if (!response.ok) {
+            const errorBody = await response.text();
+            let errorMessage = `HTTP ${response.status}`;
+            try {
+                const errorJson = JSON.parse(errorBody);
+                errorMessage = errorJson.error?.message ?? errorMessage;
+            }
+            catch {
+                errorMessage = errorBody || errorMessage;
+            }
+            if (response.status === 429) {
+                throw new LLMError(`Rate limit exceeded: ${errorMessage}`, 'RATE_LIMIT', this.provider);
+            }
+            throw new LLMError(`Gemini API error: ${errorMessage}`, 'API_ERROR', this.provider);
+        }
+        return response.json();
+    }
+    parseResponse(response) {
+        if (!response.candidates || response.candidates.length === 0) {
+            throw new LLMError('No candidates in Gemini response', 'INVALID_RESPONSE', this.provider);
+        }
+        const candidate = response.candidates[0];
+        if (!candidate) {
+            throw new LLMError('No candidate in Gemini response', 'INVALID_RESPONSE', this.provider);
+        }
+        const content = candidate.content.parts.map((p) => p.text).join('');
+        // Try to parse JSON if present
+        let json;
+        try {
+            json = this.parseJSON(content);
+        }
+        catch {
+            // Not JSON, that's fine
+        }
+        // Map Gemini finish reasons to our standard format
+        const finishReason = candidate.finishReason === 'STOP' ? 'stop' :
+            candidate.finishReason === 'MAX_TOKENS' ? 'length' :
+                candidate.finishReason === 'SAFETY' ? 'content_filter' : 'stop';
+        return {
+            content,
+            json,
+            usage: {
+                promptTokens: response.usageMetadata?.promptTokenCount ?? 0,
+                completionTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
+                totalTokens: response.usageMetadata?.totalTokenCount ?? 0,
+            },
+            model: this.config.model,
+            finishReason,
+        };
+    }
+}
+/**
+ * Create a Gemini adapter from environment variables
+ */
+export function createGeminiAdapter(model = 'gemini-2.0-flash-exp', overrides) {
+    // Support both GOOGLE_API_KEY and GEMINI_API_KEY
+    const apiKey = typeof process !== 'undefined'
+        ? process.env.GOOGLE_API_KEY ?? process.env.GEMINI_API_KEY
+        : undefined;
+    const config = {
+        provider: 'gemini',
+        model,
+        ...overrides,
+    };
+    if (apiKey) {
+        config.apiKey = apiKey;
+    }
+    return new GeminiAdapter(config);
+}
+//# sourceMappingURL=gemini.js.map
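
A minimal usage sketch for the Gemini adapter added above. The request and response shapes follow the compiled code in this hunk; the `learngraph/llm` import path is an assumption carried over from the package's JSDoc examples.

```typescript
import { createGeminiAdapter } from 'learngraph/llm';

// Reads GOOGLE_API_KEY or GEMINI_API_KEY from the environment.
const gemini = createGeminiAdapter('gemini-1.5-flash');

const result = await gemini.complete({
  messages: [
    { role: 'system', content: 'You are a concise tutor.' },
    { role: 'user', content: 'List three prerequisites for calculus.' },
  ],
  responseFormat: 'json', // mapped to generationConfig.responseMimeType = 'application/json'
  maxTokens: 512,
  temperature: 0.2,
});

console.log(result.json ?? result.content);
console.log(result.usage.totalTokens, result.finishReason);
```
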
package/dist/esm/llm/adapters/index.js
@@ -0,0 +1,13 @@
+/**
+ * LLM adapter exports
+ *
+ * @packageDocumentation
+ */
+export { BaseLLMAdapter, LLMError, DEFAULT_CONFIG } from './base.js';
+export { OpenAIAdapter, createOpenAIAdapter } from './openai.js';
+export { AnthropicAdapter, createAnthropicAdapter } from './anthropic.js';
+export { OllamaAdapter, createOllamaAdapter } from './ollama.js';
+export { GeminiAdapter, createGeminiAdapter } from './gemini.js';
+export { OpenRouterAdapter, createOpenRouterAdapter, OPENROUTER_MODELS } from './openrouter.js';
+export { MediaPipeAdapter, createMediaPipeAdapter, MEDIAPIPE_MODELS } from './mediapipe.js';
+//# sourceMappingURL=index.js.map
package/dist/esm/llm/adapters/mediapipe.js
@@ -0,0 +1,252 @@
+/**
+ * MediaPipe LLM Inference adapter for local Gemma models
+ *
+ * This adapter integrates with Google's MediaPipe LLM Inference Task API
+ * to run Gemma models locally using WebGPU acceleration.
+ *
+ * Supported models:
+ * - Gemma 2B / 7B
+ * - Gemma 2 2B
+ * - Gemma 3n (2B, 4B parameters)
+ *
+ * Requirements:
+ * - WebGPU-capable browser or Node.js with WebGPU support
+ * - @mediapipe/tasks-genai package
+ * - Downloaded model weights (.bin file)
+ *
+ * @packageDocumentation
+ */
+import { BaseLLMAdapter, LLMError } from './base.js';
+/**
+ * Gemma model variants for MediaPipe
+ */
+export const MEDIAPIPE_MODELS = {
+    'gemma-2b': 'gemma-2b-it-gpu-int4.bin',
+    'gemma-7b': 'gemma-7b-it-gpu-int8.bin',
+    'gemma2-2b': 'gemma2-2b-it-gpu-int4.bin',
+    'gemma3n-e2b': 'gemma-3n-E2B-it-int4.task',
+    'gemma3n-e4b': 'gemma-3n-E4B-it-int4.task',
+};
+/**
+ * Adapter for MediaPipe LLM Inference API
+ *
+ * Runs Gemma models locally using WebGPU acceleration.
+ * The adapter lazily loads the model on first use.
+ *
+ * @example
+ * ```typescript
+ * import { MediaPipeAdapter } from 'learngraph/llm';
+ *
+ * const adapter = new MediaPipeAdapter({
+ *   provider: 'mediapipe',
+ *   model: 'gemma3n-e2b',
+ *   modelPath: '/models/gemma-3n-E2B-it-int4.task',
+ * });
+ *
+ * // Use with orchestrator
+ * const orchestrator = createOrchestrator(adapter);
+ * ```
+ */
+export class MediaPipeAdapter extends BaseLLMAdapter {
+    inference = null;
+    modelPath;
+    randomSeed;
+    topK;
+    loraRanks;
+    initPromise = null;
+    constructor(config) {
+        super(config);
+        this.modelPath = config.modelPath;
+        this.topK = config.topK ?? 40;
+        if (config.randomSeed !== undefined) {
+            this.randomSeed = config.randomSeed;
+        }
+        if (config.loraRanks !== undefined) {
+            this.loraRanks = config.loraRanks;
+        }
+    }
+    get provider() {
+        return 'mediapipe';
+    }
+    /**
+     * Check if MediaPipe is configured (model path exists)
+     */
+    isConfigured() {
+        return !!this.modelPath;
+    }
+    /**
+     * Initialize the MediaPipe LLM Inference engine
+     * Called automatically on first request
+     */
+    async initialize() {
+        if (this.inference) {
+            return;
+        }
+        // Prevent multiple simultaneous initializations
+        if (this.initPromise) {
+            return this.initPromise;
+        }
+        this.initPromise = this.doInitialize();
+        await this.initPromise;
+    }
+    async doInitialize() {
+        try {
+            // Dynamic import of MediaPipe
+            // Users must install @mediapipe/tasks-genai separately
+            const mediapipe = await this.loadMediaPipe();
+            const maxTokens = this.config.maxTokens ?? 1024;
+            const temperature = this.config.temperature ?? 0.3;
+            const options = {
+                baseOptions: {
+                    modelAssetPath: this.modelPath,
+                },
+                maxTokens,
+                temperature,
+                topK: this.topK,
+            };
+            if (this.randomSeed !== undefined) {
+                options.randomSeed = this.randomSeed;
+            }
+            if (this.loraRanks !== undefined) {
+                options.loraRanks = this.loraRanks;
+            }
+            this.inference = await mediapipe.LlmInference.createFromOptions(options);
+        }
+        catch (error) {
+            this.initPromise = null;
+            throw new LLMError(`Failed to initialize MediaPipe: ${error instanceof Error ? error.message : 'Unknown error'}. ` +
+                'Make sure @mediapipe/tasks-genai is installed and the model file exists.', 'NOT_CONFIGURED', this.provider, error instanceof Error ? error : undefined);
+        }
+    }
+    /**
+     * Load MediaPipe library dynamically
+     */
+    async loadMediaPipe() {
+        try {
+            // Try to import the MediaPipe package dynamically
+            // @ts-expect-error - Dynamic import of optional dependency
+            const module = await import('@mediapipe/tasks-genai');
+            return module;
+        }
+        catch {
+            throw new Error('MediaPipe tasks-genai not found. Install it with: npm install @mediapipe/tasks-genai');
+        }
+    }
+    async complete(request) {
+        if (!this.isConfigured()) {
+            throw new LLMError('MediaPipe adapter is not configured. Provide modelPath in config.', 'NOT_CONFIGURED', this.provider);
+        }
+        // Initialize on first use
+        await this.initialize();
+        if (!this.inference) {
+            throw new LLMError('MediaPipe inference engine not initialized', 'NOT_CONFIGURED', this.provider);
+        }
+        // Build prompt from messages
+        const prompt = this.buildPrompt(request);
+        try {
+            const response = await this.inference.generateResponse(prompt);
+            return this.parseGeneratedResponse(response, prompt);
+        }
+        catch (error) {
+            throw new LLMError(`MediaPipe generation failed: ${error instanceof Error ? error.message : 'Unknown error'}`, 'API_ERROR', this.provider, error instanceof Error ? error : undefined);
+        }
+    }
+    /**
+     * Build a prompt string from chat messages
+     * Uses Gemma's instruction format
+     */
+    buildPrompt(request) {
+        const parts = [];
+        for (const message of request.messages) {
+            switch (message.role) {
+                case 'system':
+                    // Gemma uses <start_of_turn> format
+                    parts.push(`<start_of_turn>user\n${message.content}<end_of_turn>`);
+                    break;
+                case 'user':
+                    parts.push(`<start_of_turn>user\n${message.content}<end_of_turn>`);
+                    break;
+                case 'assistant':
+                    parts.push(`<start_of_turn>model\n${message.content}<end_of_turn>`);
+                    break;
+            }
+        }
+        // Add model turn prefix
+        parts.push('<start_of_turn>model\n');
+        // Add JSON format instruction if requested
+        if (request.responseFormat === 'json') {
+            return parts.join('\n') + '\nRespond with valid JSON only:\n';
+        }
+        return parts.join('\n');
+    }
+    /**
+     * Parse the generated response
+     */
+    parseGeneratedResponse(response, prompt) {
+        // Estimate tokens (rough approximation: ~4 chars per token)
+        const promptTokens = Math.ceil(prompt.length / 4);
+        const completionTokens = Math.ceil(response.length / 4);
+        // Try to parse JSON if present
+        let json;
+        try {
+            json = this.parseJSON(response);
+        }
+        catch {
+            // Not JSON, that's fine
+        }
+        return {
+            content: response,
+            json,
+            usage: {
+                promptTokens,
+                completionTokens,
+                totalTokens: promptTokens + completionTokens,
+            },
+            model: this.config.model,
+            finishReason: 'stop', // MediaPipe doesn't provide finish reason, assume stop
+        };
+    }
+    /**
+     * Close the inference engine and release resources
+     */
+    close() {
+        if (this.inference) {
+            this.inference.close();
+            this.inference = null;
+            this.initPromise = null;
+        }
+    }
+}
+/**
+ * Create a MediaPipe adapter for local Gemma models
+ *
+ * @param modelPath - Path to the model file (.bin or .task)
+ * @param model - Model identifier (for reference)
+ * @param overrides - Additional configuration options
+ *
+ * @example
+ * ```typescript
+ * // Using a Gemma 3n model
+ * const adapter = createMediaPipeAdapter(
+ *   '/models/gemma-3n-E2B-it-int4.task',
+ *   'gemma3n-e2b'
+ * );
+ *
+ * // With custom options
+ * const adapter = createMediaPipeAdapter(
+ *   '/models/gemma2-2b-it-gpu-int4.bin',
+ *   'gemma2-2b',
+ *   { maxTokens: 2048, temperature: 0.5 }
+ * );
+ * ```
+ */
+export function createMediaPipeAdapter(modelPath, model = 'gemma3n', overrides) {
+    const config = {
+        provider: 'mediapipe',
+        model,
+        modelPath,
+        ...overrides,
+    };
+    return new MediaPipeAdapter(config);
+}
+//# sourceMappingURL=mediapipe.js.map
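
The Gemma prompt format used by `buildPrompt()` above is worth calling out: every message is wrapped in `<start_of_turn>` / `<end_of_turn>` markers (system messages are sent as user turns), and a trailing model turn is appended so generation continues from there. For a system + user exchange the assembled prompt is roughly:

```typescript
// Derived from buildPrompt() in the hunk above; the message text is just an example.
const prompt = [
  '<start_of_turn>user\nYou are a concise tutor.<end_of_turn>',
  '<start_of_turn>user\nSummarise the water cycle.<end_of_turn>',
  '<start_of_turn>model\n',
].join('\n');
// With responseFormat: 'json', '\nRespond with valid JSON only:\n' is appended to this string.
```
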
package/dist/esm/llm/adapters/ollama.js
@@ -0,0 +1,144 @@
+/**
+ * Ollama adapter for local LLM integration
+ *
+ * @packageDocumentation
+ */
+import { BaseLLMAdapter, LLMError, DEFAULT_CONFIG } from './base.js';
+/**
+ * Ollama adapter for local models
+ */
+export class OllamaAdapter extends BaseLLMAdapter {
+    baseUrl;
+    constructor(config) {
+        super(config);
+        this.baseUrl = config.baseUrl ?? 'http://localhost:11434';
+    }
+    get provider() {
+        return 'ollama';
+    }
+    isConfigured() {
+        return !!this.config.model;
+    }
+    async complete(request) {
+        if (!this.isConfigured()) {
+            throw new LLMError('Ollama adapter not configured. Set model in config.', 'NOT_CONFIGURED', this.provider);
+        }
+        return this.withRetry(async () => {
+            const body = {
+                model: this.config.model,
+                messages: request.messages.map((m) => ({
+                    role: m.role,
+                    content: m.content,
+                })),
+                stream: false,
+                options: {
+                    num_predict: request.maxTokens ?? this.config.maxTokens ?? DEFAULT_CONFIG.maxTokens,
+                    temperature: request.temperature ?? this.config.temperature ?? DEFAULT_CONFIG.temperature,
+                },
+            };
+            // Add JSON format if requested
+            if (request.responseFormat === 'json') {
+                body.format = 'json';
+            }
+            const controller = new AbortController();
+            // Ollama can be slow, use longer timeout
+            const timeout = setTimeout(() => controller.abort(), this.config.timeout ?? DEFAULT_CONFIG.timeout * 2);
+            try {
+                const response = await fetch(`${this.baseUrl}/api/chat`, {
+                    method: 'POST',
+                    headers: {
+                        'Content-Type': 'application/json',
+                    },
+                    body: JSON.stringify(body),
+                    signal: controller.signal,
+                });
+                clearTimeout(timeout);
+                if (!response.ok) {
+                    const errorText = await response.text().catch(() => '');
+                    throw new LLMError(`Ollama API error: HTTP ${response.status} - ${errorText}`, 'API_ERROR', this.provider);
+                }
+                const data = (await response.json());
+                const content = data.message?.content ?? '';
+                let json;
+                if (request.responseFormat === 'json') {
+                    json = this.parseJSON(content);
+                }
+                // Estimate tokens (Ollama provides eval_count)
+                const promptTokens = data.prompt_eval_count ?? Math.ceil(request.messages.reduce((acc, m) => acc + m.content.length, 0) / 4);
+                const completionTokens = data.eval_count ?? Math.ceil(content.length / 4);
+                return {
+                    content,
+                    json,
+                    usage: {
+                        promptTokens,
+                        completionTokens,
+                        totalTokens: promptTokens + completionTokens,
+                    },
+                    model: data.model,
+                    finishReason: data.done ? 'stop' : 'error',
+                };
+            }
+            catch (error) {
+                clearTimeout(timeout);
+                if (error instanceof LLMError) {
+                    throw error;
+                }
+                if (error instanceof Error) {
+                    if (error.name === 'AbortError') {
+                        throw new LLMError('Request timeout', 'TIMEOUT', this.provider, error);
+                    }
+                    // Check for connection refused
+                    if (error.message.includes('ECONNREFUSED') || error.message.includes('fetch failed')) {
+                        throw new LLMError(`Cannot connect to Ollama at ${this.baseUrl}. Is Ollama running?`, 'NETWORK_ERROR', this.provider, error);
+                    }
+                    throw new LLMError(`Network error: ${error.message}`, 'NETWORK_ERROR', this.provider, error);
+                }
+                throw new LLMError('Unknown error', 'API_ERROR', this.provider);
+            }
+        });
+    }
+    /**
+     * Check if Ollama is available
+     */
+    async isAvailable() {
+        try {
+            const response = await fetch(`${this.baseUrl}/api/tags`, {
+                method: 'GET',
+            });
+            return response.ok;
+        }
+        catch {
+            return false;
+        }
+    }
+    /**
+     * List available models
+     */
+    async listModels() {
+        try {
+            const response = await fetch(`${this.baseUrl}/api/tags`, {
+                method: 'GET',
+            });
+            if (!response.ok) {
+                return [];
+            }
+            const data = (await response.json());
+            return data.models?.map((m) => m.name) ?? [];
+        }
+        catch {
+            return [];
+        }
+    }
+}
+/**
+ * Create an Ollama adapter
+ */
+export function createOllamaAdapter(model = 'llama3.2', overrides) {
+    return new OllamaAdapter({
+        provider: 'ollama',
+        model,
+        baseUrl: 'http://localhost:11434',
+        ...overrides,
+    });
+}
+//# sourceMappingURL=ollama.js.map
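
Since the Ollama adapter above talks to a local server, its extra `isAvailable()` and `listModels()` helpers are useful as a preflight check. A short sketch, again assuming the factory is re-exported from `learngraph/llm`:

```typescript
import { createOllamaAdapter } from 'learngraph/llm';

const ollama = createOllamaAdapter('llama3.2');

// isAvailable() probes GET http://localhost:11434/api/tags.
if (await ollama.isAvailable()) {
  console.log('Installed models:', await ollama.listModels());

  const reply = await ollama.complete({
    messages: [{ role: 'user', content: 'Name one graph traversal algorithm.' }],
    responseFormat: 'json', // forwarded to Ollama as body.format = 'json'
  });
  console.log(reply.json ?? reply.content);
} else {
  console.warn('Ollama is not running on http://localhost:11434');
}
```
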