@marktoflow/integrations 2.0.0-alpha.12 → 2.0.0-alpha.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +26 -0
- package/dist/adapters/ollama-types.d.ts +746 -0
- package/dist/adapters/ollama-types.d.ts.map +1 -0
- package/dist/adapters/ollama-types.js +65 -0
- package/dist/adapters/ollama-types.js.map +1 -0
- package/dist/adapters/ollama.d.ts +160 -0
- package/dist/adapters/ollama.d.ts.map +1 -1
- package/dist/adapters/ollama.js +425 -3
- package/dist/adapters/ollama.js.map +1 -1
- package/dist/services/airtable.d.ts +144 -0
- package/dist/services/airtable.d.ts.map +1 -1
- package/dist/services/airtable.js +8 -22
- package/dist/services/airtable.js.map +1 -1
- package/dist/services/base-client.d.ts +102 -0
- package/dist/services/base-client.d.ts.map +1 -0
- package/dist/services/base-client.js +219 -0
- package/dist/services/base-client.js.map +1 -0
- package/dist/services/confluence.d.ts +202 -0
- package/dist/services/confluence.d.ts.map +1 -1
- package/dist/services/confluence.js +11 -26
- package/dist/services/confluence.js.map +1 -1
- package/dist/services/discord.d.ts +210 -0
- package/dist/services/discord.d.ts.map +1 -1
- package/dist/services/discord.js +7 -23
- package/dist/services/discord.js.map +1 -1
- package/dist/services/notion.d.ts +205 -0
- package/dist/services/notion.d.ts.map +1 -1
- package/dist/services/notion.js +7 -15
- package/dist/services/notion.js.map +1 -1
- package/dist/services/supabase.d.ts +200 -0
- package/dist/services/supabase.d.ts.map +1 -1
- package/dist/services/supabase.js +25 -20
- package/dist/services/supabase.js.map +1 -1
- package/dist/services/teams.d.ts.map +1 -1
- package/dist/services/teams.js +35 -5
- package/dist/services/teams.js.map +1 -1
- package/dist/services/telegram.d.ts +250 -0
- package/dist/services/telegram.d.ts.map +1 -1
- package/dist/services/telegram.js +31 -32
- package/dist/services/telegram.js.map +1 -1
- package/package.json +2 -2

package/dist/adapters/ollama-types.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"ollama-types.d.ts","sourceRoot":"","sources":["../../src/adapters/ollama-types.ts"],"names":[],"mappings":"…"}

package/dist/adapters/ollama-types.js
ADDED
@@ -0,0 +1,65 @@
+/**
+ * Type definitions for Ollama SDK integration with marktoflow
+ *
+ * These types enable integration with Ollama for local LLM workflows
+ * with generation, chat, embeddings, and model management capabilities.
+ */
+import { z } from 'zod';
+// ============================================================================
+// Zod Schemas for Runtime Validation
+// ============================================================================
+export const OllamaModelOptionsSchema = z.object({
+    num_predict: z.number().optional(),
+    temperature: z.number().min(0).max(2).optional(),
+    top_k: z.number().optional(),
+    top_p: z.number().min(0).max(1).optional(),
+    repeat_penalty: z.number().optional(),
+    repeat_last_n: z.number().optional(),
+    seed: z.number().optional(),
+    stop: z.array(z.string()).optional(),
+    num_ctx: z.number().optional(),
+    num_gpu: z.number().optional(),
+    main_gpu: z.number().optional(),
+    use_mmap: z.boolean().optional(),
+    use_mlock: z.boolean().optional(),
+    num_thread: z.number().optional(),
+});
+export const OllamaClientConfigSchema = z.object({
+    host: z.string().optional(),
+    model: z.string().optional(),
+    timeout: z.number().optional(),
+});
+export const OllamaChatMessageSchema = z.object({
+    role: z.enum(['system', 'user', 'assistant', 'tool']),
+    content: z.string(),
+    images: z.array(z.string()).optional(),
+});
+export const OllamaGenerateOptionsSchema = z.object({
+    model: z.string().optional(),
+    prompt: z.string(),
+    system: z.string().optional(),
+    template: z.string().optional(),
+    context: z.array(z.number()).optional(),
+    stream: z.boolean().optional(),
+    raw: z.boolean().optional(),
+    format: z.string().optional(),
+    images: z.array(z.string()).optional(),
+    options: OllamaModelOptionsSchema.optional(),
+    keep_alive: z.union([z.string(), z.number()]).optional(),
+});
+export const OllamaChatOptionsSchema = z.object({
+    model: z.string().optional(),
+    messages: z.array(OllamaChatMessageSchema),
+    stream: z.boolean().optional(),
+    format: z.string().optional(),
+    options: OllamaModelOptionsSchema.optional(),
+    keep_alive: z.union([z.string(), z.number()]).optional(),
+});
+export const OllamaEmbeddingsOptionsSchema = z.object({
+    model: z.string().optional(),
+    input: z.union([z.string(), z.array(z.string())]),
+    truncate: z.boolean().optional(),
+    options: OllamaModelOptionsSchema.optional(),
+    keep_alive: z.union([z.string(), z.number()]).optional(),
+});
+//# sourceMappingURL=ollama-types.js.map
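
These schemas give the adapter runtime validation to match its static types. A rough sketch of how a caller might use them (the import path and sample values are illustrative, not part of the package):

// Validate untrusted workflow input before handing it to the Ollama adapter.
// Hypothetical relative import; resolve it however your build locates the
// compiled adapters of @marktoflow/integrations.
import { OllamaGenerateOptionsSchema } from './ollama-types.js';

const parsed = OllamaGenerateOptionsSchema.safeParse({
    prompt: 'Summarize the release notes in one paragraph.',
    options: { temperature: 0.2, num_predict: 256 },
});

if (parsed.success) {
    // parsed.data is typed and safe to pass to OllamaClient.generateFull()
    console.log(parsed.data);
} else {
    // zod pinpoints the offending field, e.g. a temperature above 2
    console.error(parsed.error.issues);
}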

package/dist/adapters/ollama-types.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"ollama-types.js","sourceRoot":"","sources":["../../src/adapters/ollama-types.ts"],"names":[],"mappings":"…"}

package/dist/adapters/ollama.d.ts
ADDED
@@ -0,0 +1,160 @@
+/**
+ * Ollama SDK Adapter for marktoflow
+ *
+ * This adapter provides integration with Ollama for local LLM workflows,
+ * enabling AI-powered automation with generation, chat, embeddings,
+ * streaming, and model management capabilities.
+ */
+import { Ollama } from 'ollama';
+import { SDKInitializer } from '@marktoflow/core';
+import type { OllamaClientConfig, OllamaGenerateOptions, OllamaChatOptions, OllamaEmbeddingsOptions, OllamaModelInfo, OllamaRunningModel, OllamaPullProgress, OllamaResult, OllamaStreamCallback } from './ollama-types.js';
+/**
+ * Client for interacting with Ollama for local LLM inference
+ *
+ * Provides multiple interfaces for different use cases:
+ * - generate(): Simple text generation
+ * - generateStream(): Streaming text generation
+ * - chat(): Conversational interface
+ * - chatStream(): Streaming chat
+ * - embed(): Generate embeddings for RAG
+ * - Model management: list, pull, show, delete
+ */
+export declare class OllamaClient {
+    private ollama;
+    private defaultModel;
+    constructor(config?: OllamaClientConfig);
+    /**
+     * Generate text from a prompt (simple interface)
+     */
+    generate(prompt: string, model?: string): Promise<string>;
+    /**
+     * Generate with full options
+     */
+    generateFull(options: OllamaGenerateOptions): Promise<OllamaResult>;
+    /**
+     * Stream text generation
+     */
+    generateStream(options: OllamaGenerateOptions): AsyncGenerator<string>;
+    /**
+     * Generate with streaming callback
+     */
+    generateWithCallback(options: OllamaGenerateOptions, callback: OllamaStreamCallback): Promise<OllamaResult>;
+    /**
+     * Chat completion (simple interface)
+     */
+    chat(options: OllamaChatOptions): Promise<OllamaResult>;
+    /**
+     * Stream chat completion
+     */
+    chatStream(options: OllamaChatOptions): AsyncGenerator<string>;
+    /**
+     * Chat with streaming callback
+     */
+    chatWithCallback(options: OllamaChatOptions, callback: OllamaStreamCallback): Promise<OllamaResult>;
+    /**
+     * Generate embeddings for RAG applications
+     */
+    embeddings(options: OllamaEmbeddingsOptions): Promise<number[][]>;
+    /**
+     * Generate embedding for a single text (convenience method)
+     */
+    embed(text: string, model?: string): Promise<number[]>;
+    /**
+     * Generate embeddings for multiple texts
+     */
+    embedBatch(texts: string[], model?: string): Promise<number[][]>;
+    /**
+     * List available models
+     */
+    listModels(): Promise<OllamaModelInfo[]>;
+    /**
+     * List running models
+     */
+    listRunning(): Promise<OllamaRunningModel[]>;
+    /**
+     * Pull a model from the registry
+     */
+    pullModel(name: string, onProgress?: (progress: OllamaPullProgress) => void): Promise<void>;
+    /**
+     * Pull a model without streaming (waits for completion)
+     */
+    pullModelSync(name: string): Promise<void>;
+    /**
+     * Show model information
+     */
+    showModel(name: string): Promise<unknown>;
+    /**
+     * Delete a model
+     */
+    deleteModel(name: string): Promise<void>;
+    /**
+     * Copy a model
+     */
+    copyModel(source: string, destination: string): Promise<void>;
+    /**
+     * Create a model from a base model
+     */
+    createModel(name: string, fromModel: string, onProgress?: (progress: {
+        status: string;
+    }) => void): Promise<void>;
+    /**
+     * Check if Ollama is available
+     */
+    isAvailable(): Promise<boolean>;
+    /**
+     * Check if a specific model is available
+     */
+    hasModel(name: string): Promise<boolean>;
+    /**
+     * Ensure a model is available, pulling if necessary
+     */
+    ensureModel(name: string): Promise<void>;
+    /**
+     * Get the default model
+     */
+    getDefaultModel(): string;
+    /**
+     * Set the default model
+     */
+    setDefaultModel(model: string): void;
+    /**
+     * OpenAI-compatible chat interface for workflow compatibility
+     */
+    chatCompletions: {
+        create: (inputs: {
+            model?: string;
+            messages: Array<{
+                role: string;
+                content: string;
+            }>;
+            stream?: boolean;
+        }) => Promise<{
+            choices: Array<{
+                message: {
+                    role: string;
+                    content: string;
+                };
+            }>;
+        }>;
+    };
+    /**
+     * OpenAI-compatible embeddings interface
+     */
+    embeddingsCreate: {
+        create: (inputs: {
+            model?: string;
+            input: string | string[];
+        }) => Promise<{
+            data: Array<{
+                embedding: number[];
+                index: number;
+            }>;
+        }>;
+    };
+}
+/**
+ * SDK Initializer for Ollama
+ */
+export declare const OllamaInitializer: SDKInitializer;
+export { Ollama };
+//# sourceMappingURL=ollama.d.ts.map
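
Taken together, these declarations are the adapter's public surface. A minimal usage sketch, assuming a local Ollama server on the default port, an already-pulled llama3.2 model, and that the package re-exports OllamaClient from its root (adjust the import if it does not; prompts and model name are placeholders):

import { OllamaClient } from '@marktoflow/integrations';

const client = new OllamaClient({ model: 'llama3.2' });

// One-shot text generation
const answer = await client.generate('Explain retrieval-augmented generation in two sentences.');

// Conversational call; the result carries content, model, done, and token usage
const reply = await client.chat({
    messages: [
        { role: 'system', content: 'You are a terse release-notes assistant.' },
        { role: 'user', content: 'What does the Ollama adapter add?' },
    ],
});

// Single-text embedding for RAG-style retrieval
const vector = await client.embed('marktoflow integrates Ollama');
console.log(answer, reply.content, reply.usage, vector.length);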

package/dist/adapters/ollama.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"ollama.d.ts","sourceRoot":"","sources":["../../src/adapters/ollama.ts"],"names":[],"mappings":"
+{"version":3,"file":"ollama.d.ts","sourceRoot":"","sources":["../../src/adapters/ollama.ts"],"names":[],"mappings":"…"}
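
The same class declares model lifecycle helpers (listModels, pullModel, ensureModel, isAvailable). A startup-check sketch under the same assumptions (the progress arithmetic is illustrative; total and completed are optional fields on the pull progress):

import { OllamaClient } from '@marktoflow/integrations'; // assumed root re-export

const client = new OllamaClient();

// Fail fast when the local server is unreachable
if (!(await client.isAvailable())) {
    throw new Error('Ollama is not reachable on http://127.0.0.1:11434');
}

// Pull the model if it is missing, logging download progress
if (!(await client.hasModel('llama3.2'))) {
    await client.pullModel('llama3.2', (p) => {
        if (p.total && p.completed) {
            console.log(`${p.status}: ${Math.round((p.completed / p.total) * 100)}%`);
        }
    });
}

// Equivalent without progress reporting:
// await client.ensureModel('llama3.2');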
package/dist/adapters/ollama.js
CHANGED
@@ -1,9 +1,431 @@
+/**
+ * Ollama SDK Adapter for marktoflow
+ *
+ * This adapter provides integration with Ollama for local LLM workflows,
+ * enabling AI-powered automation with generation, chat, embeddings,
+ * streaming, and model management capabilities.
+ */
 import { Ollama } from 'ollama';
+// ============================================================================
+// Ollama Client
+// ============================================================================
+/**
+ * Client for interacting with Ollama for local LLM inference
+ *
+ * Provides multiple interfaces for different use cases:
+ * - generate(): Simple text generation
+ * - generateStream(): Streaming text generation
+ * - chat(): Conversational interface
+ * - chatStream(): Streaming chat
+ * - embed(): Generate embeddings for RAG
+ * - Model management: list, pull, show, delete
+ */
+export class OllamaClient {
+    ollama;
+    defaultModel;
+    constructor(config = {}) {
+        const host = config.host || 'http://127.0.0.1:11434';
+        this.ollama = new Ollama({ host });
+        this.defaultModel = config.model || 'llama3.2';
+    }
+    // ============================================================================
+    // Simple Interface
+    // ============================================================================
+    /**
+     * Generate text from a prompt (simple interface)
+     */
+    async generate(prompt, model) {
+        const response = await this.ollama.generate({
+            model: model || this.defaultModel,
+            prompt,
+            stream: false,
+        });
+        return response.response;
+    }
+    /**
+     * Generate with full options
+     */
+    async generateFull(options) {
+        const response = await this.ollama.generate({
+            model: options.model || this.defaultModel,
+            prompt: options.prompt,
+            system: options.system,
+            template: options.template,
+            context: options.context,
+            stream: false,
+            raw: options.raw,
+            format: options.format,
+            images: options.images,
+            options: options.options,
+            keep_alive: options.keep_alive,
+        });
+        return {
+            content: response.response,
+            model: response.model,
+            done: response.done,
+            raw: response,
+            duration: response.total_duration ? response.total_duration / 1_000_000 : undefined,
+            usage: {
+                prompt_tokens: response.prompt_eval_count,
+                completion_tokens: response.eval_count,
+                total_tokens: (response.prompt_eval_count || 0) + (response.eval_count || 0),
+            },
+        };
+    }
+    /**
+     * Stream text generation
+     */
+    async *generateStream(options) {
+        const stream = await this.ollama.generate({
+            model: options.model || this.defaultModel,
+            prompt: options.prompt,
+            system: options.system,
+            template: options.template,
+            context: options.context,
+            stream: true,
+            raw: options.raw,
+            format: options.format,
+            images: options.images,
+            options: options.options,
+            keep_alive: options.keep_alive,
+        });
+        for await (const chunk of stream) {
+            yield chunk.response;
+        }
+    }
+    /**
+     * Generate with streaming callback
+     */
+    async generateWithCallback(options, callback) {
+        const stream = await this.ollama.generate({
+            model: options.model || this.defaultModel,
+            prompt: options.prompt,
+            system: options.system,
+            template: options.template,
+            context: options.context,
+            stream: true,
+            raw: options.raw,
+            format: options.format,
+            images: options.images,
+            options: options.options,
+            keep_alive: options.keep_alive,
+        });
+        let fullResponse = '';
+        let lastChunk = null;
+        for await (const chunk of stream) {
+            fullResponse += chunk.response;
+            await Promise.resolve(callback(chunk.response, chunk.done));
+            if (chunk.done) {
+                lastChunk = chunk;
+            }
+        }
+        return {
+            content: fullResponse,
+            model: lastChunk?.model || options.model || this.defaultModel,
+            done: true,
+            raw: lastChunk || undefined,
+            duration: lastChunk?.total_duration ? lastChunk.total_duration / 1_000_000 : undefined,
+            usage: lastChunk ? {
+                prompt_tokens: lastChunk.prompt_eval_count,
+                completion_tokens: lastChunk.eval_count,
+                total_tokens: (lastChunk.prompt_eval_count || 0) + (lastChunk.eval_count || 0),
+            } : undefined,
+        };
+    }
+    // ============================================================================
+    // Chat Interface
+    // ============================================================================
+    /**
+     * Chat completion (simple interface)
+     */
+    async chat(options) {
+        const response = await this.ollama.chat({
+            model: options.model || this.defaultModel,
+            messages: options.messages,
+            stream: false,
+            format: options.format,
+            options: options.options,
+            keep_alive: options.keep_alive,
+            tools: options.tools,
+        });
+        return {
+            content: response.message.content,
+            model: response.model,
+            done: response.done,
+            raw: response,
+            duration: response.total_duration ? response.total_duration / 1_000_000 : undefined,
+            usage: {
+                prompt_tokens: response.prompt_eval_count,
+                completion_tokens: response.eval_count,
+                total_tokens: (response.prompt_eval_count || 0) + (response.eval_count || 0),
+            },
+        };
+    }
+    /**
+     * Stream chat completion
+     */
+    async *chatStream(options) {
+        const stream = await this.ollama.chat({
+            model: options.model || this.defaultModel,
+            messages: options.messages,
+            stream: true,
+            format: options.format,
+            options: options.options,
+            keep_alive: options.keep_alive,
+            tools: options.tools,
+        });
+        for await (const chunk of stream) {
+            yield chunk.message.content;
+        }
+    }
+    /**
+     * Chat with streaming callback
+     */
+    async chatWithCallback(options, callback) {
+        const stream = await this.ollama.chat({
+            model: options.model || this.defaultModel,
+            messages: options.messages,
+            stream: true,
+            format: options.format,
+            options: options.options,
+            keep_alive: options.keep_alive,
+            tools: options.tools,
+        });
+        let fullResponse = '';
+        let lastChunk = null;
+        for await (const chunk of stream) {
+            fullResponse += chunk.message.content;
+            await Promise.resolve(callback(chunk.message.content, chunk.done));
+            if (chunk.done) {
+                lastChunk = chunk;
+            }
+        }
+        return {
+            content: fullResponse,
+            model: lastChunk?.model || options.model || this.defaultModel,
+            done: true,
+            raw: lastChunk || undefined,
+            duration: lastChunk?.total_duration ? lastChunk.total_duration / 1_000_000 : undefined,
+            usage: lastChunk ? {
+                prompt_tokens: lastChunk.prompt_eval_count,
+                completion_tokens: lastChunk.eval_count,
+                total_tokens: (lastChunk.prompt_eval_count || 0) + (lastChunk.eval_count || 0),
+            } : undefined,
+        };
+    }
+    // ============================================================================
+    // Embeddings Interface
+    // ============================================================================
+    /**
+     * Generate embeddings for RAG applications
+     */
+    async embeddings(options) {
+        const response = await this.ollama.embed({
+            model: options.model || this.defaultModel,
+            input: options.input,
+            truncate: options.truncate,
+            options: options.options,
+            keep_alive: options.keep_alive,
+        });
+        return response.embeddings;
+    }
+    /**
+     * Generate embedding for a single text (convenience method)
+     */
+    async embed(text, model) {
+        const result = await this.embeddings({
+            model: model || this.defaultModel,
+            input: text,
+        });
+        return result[0];
+    }
+    /**
+     * Generate embeddings for multiple texts
+     */
+    async embedBatch(texts, model) {
+        return this.embeddings({
+            model: model || this.defaultModel,
+            input: texts,
+        });
+    }
+    // ============================================================================
+    // Model Management
+    // ============================================================================
+    /**
+     * List available models
+     */
+    async listModels() {
+        const response = await this.ollama.list();
+        return response.models;
+    }
+    /**
+     * List running models
+     */
+    async listRunning() {
+        const response = await this.ollama.ps();
+        return response.models;
+    }
+    /**
+     * Pull a model from the registry
+     */
+    async pullModel(name, onProgress) {
+        const stream = await this.ollama.pull({ model: name, stream: true });
+        for await (const progress of stream) {
+            if (onProgress) {
+                onProgress(progress);
+            }
+        }
+    }
+    /**
+     * Pull a model without streaming (waits for completion)
+     */
+    async pullModelSync(name) {
+        await this.ollama.pull({ model: name, stream: false });
+    }
+    /**
+     * Show model information
+     */
+    async showModel(name) {
+        return this.ollama.show({ model: name });
+    }
+    /**
+     * Delete a model
+     */
+    async deleteModel(name) {
+        await this.ollama.delete({ model: name });
+    }
+    /**
+     * Copy a model
+     */
+    async copyModel(source, destination) {
+        await this.ollama.copy({ source, destination });
+    }
+    /**
+     * Create a model from a base model
+     */
+    async createModel(name, fromModel, onProgress) {
+        const stream = await this.ollama.create({ model: name, from: fromModel, stream: true });
+        for await (const progress of stream) {
+            if (onProgress) {
+                onProgress(progress);
+            }
+        }
+    }
+    // ============================================================================
+    // Utility Methods
+    // ============================================================================
+    /**
+     * Check if Ollama is available
+     */
+    async isAvailable() {
+        try {
+            await this.ollama.list();
+            return true;
+        }
+        catch {
+            return false;
+        }
+    }
+    /**
+     * Check if a specific model is available
+     */
+    async hasModel(name) {
+        try {
+            const models = await this.listModels();
+            return models.some(m => m.name === name || m.name.startsWith(`${name}:`));
+        }
+        catch {
+            return false;
+        }
+    }
+    /**
+     * Ensure a model is available, pulling if necessary
+     */
+    async ensureModel(name) {
+        if (!(await this.hasModel(name))) {
+            await this.pullModelSync(name);
+        }
+    }
+    /**
+     * Get the default model
+     */
+    getDefaultModel() {
+        return this.defaultModel;
+    }
+    /**
+     * Set the default model
+     */
+    setDefaultModel(model) {
+        this.defaultModel = model;
+    }
+    // ============================================================================
+    // OpenAI-Compatible Interface
+    // ============================================================================
+    /**
+     * OpenAI-compatible chat interface for workflow compatibility
+     */
+    chatCompletions = {
+        create: async (inputs) => {
+            const messages = inputs.messages.map(m => ({
+                role: m.role,
+                content: m.content,
+            }));
+            const result = await this.chat({
+                model: inputs.model || this.defaultModel,
+                messages,
+            });
+            return {
+                choices: [
+                    {
+                        message: {
+                            role: 'assistant',
+                            content: result.content,
+                        },
+                    },
+                ],
+            };
+        },
+    };
+    /**
+     * OpenAI-compatible embeddings interface
+     */
+    embeddingsCreate = {
+        create: async (inputs) => {
+            const embeddings = await this.embeddings({
+                model: inputs.model || this.defaultModel,
+                input: inputs.input,
+            });
+            return {
+                data: embeddings.map((embedding, index) => ({
+                    embedding,
+                    index,
+                })),
+            };
+        },
+    };
+}
+// ============================================================================
+// SDK Initializer for marktoflow Registry
+// ============================================================================
+/**
+ * SDK Initializer for Ollama
+ */
 export const OllamaInitializer = {
     async initialize(_module, config) {
-const
-
-
+        const options = config.options || {};
+        const auth = config.auth || {};
+        const host = options['host'] ||
+            auth['host'] ||
+            'http://127.0.0.1:11434';
+        const model = options['model'] || 'llama3.2';
+        return new OllamaClient({
+            host,
+            model,
+        });
     },
 };
+// ============================================================================
+// Re-export SDK class for convenience
+// ============================================================================
+export { Ollama };
 //# sourceMappingURL=ollama.js.map
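
To round out the diff, a sketch of the streaming and OpenAI-compatible paths the new implementation adds (same assumptions as above; prompts are placeholders):

import { OllamaClient } from '@marktoflow/integrations'; // assumed root re-export

const client = new OllamaClient({ model: 'llama3.2' });

// Token-by-token streaming via the async generator
for await (const token of client.generateStream({ prompt: 'Write a haiku about package diffs.' })) {
    process.stdout.write(token);
}

// Callback-style streaming that also returns the aggregated result
const result = await client.chatWithCallback(
    { messages: [{ role: 'user', content: 'Stream this reply.' }] },
    (chunk, done) => {
        if (!done) {
            process.stdout.write(chunk);
        }
    },
);
console.log('\ntotal tokens:', result.usage?.total_tokens);

// OpenAI-shaped call, for workflows that already expect chat.completions
const completion = await client.chatCompletions.create({
    messages: [{ role: 'user', content: 'Say hello.' }],
});
console.log(completion.choices[0].message.content);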