@sisu-ai/adapter-ollama 2.0.0 → 3.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +79 -29
  2. package/package.json +11 -3
package/dist/index.js CHANGED
@@ -3,33 +3,77 @@ export function ollamaAdapter(opts) {
3
3
  const envBase = firstConfigValue(['OLLAMA_BASE_URL', 'BASE_URL']);
4
4
  const baseUrl = (opts.baseUrl ?? envBase ?? 'http://localhost:11434').replace(/\/$/, '');
5
5
  const modelName = `ollama:${opts.model}`;
6
- return {
7
- name: modelName,
8
- capabilities: { functionCall: true, streaming: false },
9
- async generate(messages, genOpts) {
10
- // Map messages to Ollama format; include assistant tool_calls and tool messages
11
- const mapped = messages.map((m) => {
12
- const base = { role: m.role };
13
- if (m.role === 'assistant' && Array.isArray(m.tool_calls)) {
14
- base.tool_calls = m.tool_calls.map((tc) => ({ id: tc.id, type: 'function', function: { name: tc.name, arguments: (tc.arguments ?? {}) } }));
15
- base.content = m.content ? String(m.content) : null;
16
- }
17
- else if (m.role === 'tool') {
18
- base.content = String(m.content ?? '');
19
- if (m.tool_call_id)
20
- base.tool_call_id = m.tool_call_id;
21
- if (m.name && !m.tool_call_id)
22
- base.name = m.name;
6
+ function generate(messages, genOpts) {
7
+ // Map messages to Ollama format; include assistant tool_calls and tool messages
8
+ const mapped = messages.map((m) => {
9
+ const base = { role: m.role };
10
+ if (m.role === 'assistant' && Array.isArray(m.tool_calls)) {
11
+ base.tool_calls = m.tool_calls.map((tc) => ({ id: tc.id, type: 'function', function: { name: tc.name, arguments: (tc.arguments ?? {}) } }));
12
+ base.content = m.content ? String(m.content) : null;
13
+ }
14
+ else if (m.role === 'tool') {
15
+ base.content = String(m.content ?? '');
16
+ if (m.tool_call_id)
17
+ base.tool_call_id = m.tool_call_id;
18
+ if (m.name && !m.tool_call_id)
19
+ base.name = m.name;
20
+ }
21
+ else {
22
+ base.content = String(m.content ?? '');
23
+ }
24
+ return base;
25
+ });
26
+ const toolsParam = (genOpts?.tools ?? []).map(toOllamaTool);
27
+ const baseBody = { model: opts.model, messages: mapped };
28
+ if (toolsParam.length)
29
+ baseBody.tools = toolsParam;
30
+ if (genOpts?.stream === true) {
31
+ return (async function* () {
32
+ const res = await fetch(`${baseUrl}/api/chat`, {
33
+ method: 'POST',
34
+ headers: {
35
+ 'Content-Type': 'application/json',
36
+ Accept: 'application/json',
37
+ ...(opts.headers ?? {}),
38
+ },
39
+ body: JSON.stringify({ ...baseBody, stream: true }),
40
+ });
41
+ if (!res.ok || !res.body) {
42
+ const err = await res.text();
43
+ throw new Error(`Ollama API error: ${res.status} ${res.statusText} — ${String(err).slice(0, 500)}`);
23
44
  }
24
- else {
25
- base.content = String(m.content ?? '');
45
+ const decoder = new TextDecoder();
46
+ let buf = '';
47
+ let full = '';
48
+ for await (const chunk of res.body) {
49
+ const piece = typeof chunk === 'string' ? chunk : decoder.decode(chunk);
50
+ buf += piece;
51
+ const lines = buf.split('\n');
52
+ buf = lines.pop() ?? '';
53
+ for (const line of lines) {
54
+ if (!line.trim())
55
+ continue;
56
+ try {
57
+ const j = JSON.parse(line);
58
+ if (j.done) {
59
+ yield { type: 'assistant_message', message: { role: 'assistant', content: full } };
60
+ return;
61
+ }
62
+ const token = j.message?.content;
63
+ if (typeof token === 'string' && token) {
64
+ full += token;
65
+ yield { type: 'token', token };
66
+ }
67
+ }
68
+ catch (e) {
69
+ console.error('[DEBUG_LLM] stream_parse_error', { error: e });
70
+ }
71
+ }
26
72
  }
27
- return base;
28
- });
29
- const toolsParam = (genOpts?.tools ?? []).map(toOllamaTool);
30
- const body = { model: opts.model, messages: mapped, stream: false };
31
- if (toolsParam.length)
32
- body.tools = toolsParam;
73
+ })();
74
+ }
75
+ // Non-stream path
76
+ return (async () => {
33
77
  const res = await fetch(`${baseUrl}/api/chat`, {
34
78
  method: 'POST',
35
79
  headers: {
@@ -37,7 +81,7 @@ export function ollamaAdapter(opts) {
37
81
  Accept: 'application/json',
38
82
  ...(opts.headers ?? {}),
39
83
  },
40
- body: JSON.stringify(body),
84
+ body: JSON.stringify({ ...baseBody, stream: false }),
41
85
  });
42
86
  const raw = await res.text();
43
87
  if (!res.ok) {
@@ -46,11 +90,12 @@ export function ollamaAdapter(opts) {
46
90
  const j = JSON.parse(raw);
47
91
  details = j.error ?? j.message ?? raw;
48
92
  }
49
- catch { }
93
+ catch (e) {
94
+ console.error('[DEBUG_LLM] request_error', { error: e });
95
+ }
50
96
  throw new Error(`Ollama API error: ${res.status} ${res.statusText} — ${String(details).slice(0, 500)}`);
51
97
  }
52
98
  const data = raw ? JSON.parse(raw) : {};
53
- // /api/chat response example (non-stream): { message: { role:'assistant', content:'...', tool_calls?: [...] }, done: true }
54
99
  const choice = data?.message ?? {};
55
100
  const content = choice?.content ?? '';
56
101
  const tcs = Array.isArray(choice?.tool_calls)
@@ -60,7 +105,12 @@ export function ollamaAdapter(opts) {
60
105
  if (tcs)
61
106
  out.tool_calls = tcs;
62
107
  return { message: out };
63
- },
108
+ })();
109
+ }
110
+ return {
111
+ name: modelName,
112
+ capabilities: { functionCall: true, streaming: true },
113
+ generate: generate,
64
114
  };
65
115
  }
66
116
  function toOllamaTool(tool) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sisu-ai/adapter-ollama",
3
- "version": "2.0.0",
3
+ "version": "3.0.1",
4
4
  "type": "module",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -23,6 +23,14 @@
23
23
  "build": "tsc -b"
24
24
  },
25
25
  "peerDependencies": {
26
- "@sisu-ai/core": "0.3.0"
27
- }
26
+ "@sisu-ai/core": "1.0.1"
27
+ },
28
+ "keywords": [
29
+ "sisu",
30
+ "ai",
31
+ "ai-agent",
32
+ "agentic",
33
+ "adapter",
34
+ "ollama"
35
+ ]
28
36
  }