@openvole/paw-ollama 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +188 -0
- package/dist/index.js.map +1 -0
- package/package.json +50 -0
- package/vole-paw.json +16 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 OpenVole
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
// src/index.ts
|
|
2
|
+
import { definePaw } from "@openvole/paw-sdk";
|
|
3
|
+
|
|
4
|
+
// src/ollama.ts
|
|
5
|
+
import { Ollama } from "ollama";
|
|
6
|
+
var OllamaClient = class {
  /** Underlying Ollama SDK client instance. */
  client;
  /** Model tag used for every chat request (e.g. "qwen3:latest"). */
  model;

  constructor(host = "http://localhost:11434", model = "qwen3:latest") {
    this.client = new Ollama({ host });
    this.model = model;
  }

  /** Return the configured model tag. */
  getModel() {
    return this.model;
  }

  /**
   * Build the system prompt from active skills and available tools.
   * Skills are listed as one-line summaries; full instructions are loaded
   * on demand via the skill_read tool.
   */
  buildSystemPrompt(activeSkills, availableTools) {
    const lines = ["You are an AI agent powered by OpenVole."];
    if (activeSkills.length > 0) {
      lines.push(
        "",
        "## Available Skills",
        "The following skills are available. Use the skill_read tool to load full instructions when a skill is relevant to the current task.",
        ...activeSkills.map((skill) => `- **${skill.name}**: ${skill.description}`)
      );
    }
    if (availableTools.length > 0) {
      lines.push(
        "",
        "## Available Tools",
        "You have access to the following tools. Use function calling to invoke them when needed.",
        ...availableTools.map((tool) => `- **${tool.name}** (from ${tool.pawName}): ${tool.description}`)
      );
    }
    return lines.join("\n");
  }

  /**
   * Convert AgentMessage[] to Ollama Message[], prefixed with the system
   * prompt. Messages with a role other than the four handled here are
   * dropped silently.
   */
  convertMessages(systemPrompt, messages) {
    const converted = [{ role: "system", content: systemPrompt }];
    for (const msg of messages) {
      if (msg.role === "user") {
        converted.push({ role: "user", content: msg.content });
      } else if (msg.role === "brain") {
        converted.push({ role: "assistant", content: msg.content });
      } else if (msg.role === "tool_result") {
        converted.push({ role: "tool", content: msg.content });
      } else if (msg.role === "error") {
        converted.push({ role: "tool", content: `Error: ${msg.content}` });
      }
    }
    return converted;
  }

  /**
   * Convert ToolSummary[] to Ollama Tool[] for function calling.
   * NOTE(review): no parameter schema is available on the summaries here,
   * so every tool is advertised with an empty object schema.
   */
  convertTools(tools) {
    return tools.map(({ name, description }) => ({
      type: "function",
      function: {
        name,
        description,
        parameters: {
          type: "object",
          properties: {}
        }
      }
    }));
  }

  /**
   * Send a non-streaming chat request to Ollama and return the raw response.
   */
  async chat(systemPrompt, messages, tools) {
    const chatTools = this.convertTools(tools);
    return this.client.chat({
      model: this.model,
      messages: this.convertMessages(systemPrompt, messages),
      // Omit the tools field entirely when there is nothing to advertise.
      tools: chatTools.length > 0 ? chatTools : void 0,
      stream: false
    });
  }

  /**
   * Extract PlannedAction[] from the tool_calls of an Ollama response.
   * Returns [] when the model made no tool calls.
   */
  parseToolCalls(response) {
    const calls = response.message.tool_calls;
    if (!calls || calls.length === 0) {
      return [];
    }
    return calls.map(({ function: fn }) => ({
      tool: fn.name,
      params: fn.arguments
    }));
  }
};
|
|
111
|
+
|
|
112
|
+
// src/paw.ts
var client;

/**
 * Lazily create and memoize the shared OllamaClient.
 * Host and model come from the OLLAMA_HOST / OLLAMA_MODEL environment
 * variables, falling back to the local defaults.
 */
function getClient() {
  if (client === void 0) {
    client = new OllamaClient(
      process.env.OLLAMA_HOST || "http://localhost:11434",
      process.env.OLLAMA_MODEL || "qwen3:latest"
    );
  }
  return client;
}
|
|
122
|
+
var paw = {
  name: "@openvole/paw-ollama",
  version: "0.1.0",
  description: "Brain Paw powered by Ollama for local LLM inference",
  brain: true,
  /**
   * Plan the next step: ask the local Ollama model, then translate its reply
   * into either planned tool actions or a final text response. Errors never
   * propagate — they are turned into a done response with a message.
   */
  async think(context) {
    const ollamaClient = getClient();
    const start = Date.now();
    try {
      const systemPrompt = ollamaClient.buildSystemPrompt(
        context.activeSkills,
        context.availableTools
      );
      const response = await ollamaClient.chat(
        systemPrompt,
        context.messages,
        context.availableTools
      );
      const elapsed = Date.now() - start;
      console.log(
        `[paw-ollama] think completed in ${elapsed}ms (model: ${ollamaClient.getModel()})`
      );
      const actions = ollamaClient.parseToolCalls(response);
      if (actions.length > 0) {
        // The model asked for tool calls; run them one after another.
        return { actions, execution: "sequential" };
      }
      // No tool calls: treat the model's text (possibly empty) as the answer.
      return {
        actions: [],
        response: response.message.content || "",
        done: true
      };
    } catch (error) {
      const elapsed = Date.now() - start;
      const message = error instanceof Error ? error.message : String(error);
      console.error(
        `[paw-ollama] think failed after ${elapsed}ms: ${message}`
      );
      // Connection-style failures usually mean the Ollama daemon is not up.
      const connectionHints = ["ECONNREFUSED", "fetch failed", "ENOTFOUND"];
      const isConnectionError = connectionHints.some((hint) => message.includes(hint));
      return {
        actions: [],
        response: isConnectionError ? "Ollama is not running or unreachable. Please start Ollama and try again." : `Error communicating with Ollama: ${message}`,
        done: true
      };
    }
  },
  /** Log the resolved model and host once the paw is loaded. */
  async onLoad() {
    const ollamaClient = getClient();
    console.log(
      `[paw-ollama] loaded \u2014 model: ${ollamaClient.getModel()}, host: ${process.env.OLLAMA_HOST || "http://localhost:11434"}`
    );
  },
  /** Drop the memoized client so a reload picks up fresh env config. */
  async onUnload() {
    client = void 0;
    console.log("[paw-ollama] unloaded");
  }
};
|
|
182
|
+
|
|
183
|
+
// src/index.ts
// Wrap the paw definition with the SDK helper and expose it as the
// package's default export.
const index_default = definePaw(paw);
export { index_default as default };
|
|
188
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/ollama.ts","../src/paw.ts"],"sourcesContent":["import { definePaw } from '@openvole/paw-sdk'\nimport { paw } from './paw.js'\n\nexport default definePaw(paw)\n","import { Ollama, type ChatResponse, type Message, type Tool } from 'ollama'\nimport type {\n\tAgentMessage,\n\tActiveSkill,\n\tPlannedAction,\n\tToolSummary,\n} from '@openvole/paw-sdk'\n\nexport class OllamaClient {\n\tprivate client: Ollama\n\tprivate model: string\n\n\tconstructor(\n\t\thost: string = 'http://localhost:11434',\n\t\tmodel: string = 'qwen3:latest',\n\t) {\n\t\tthis.client = new Ollama({ host })\n\t\tthis.model = model\n\t}\n\n\tgetModel(): string {\n\t\treturn this.model\n\t}\n\n\t/**\n\t * Build the system prompt from active skills and available tools.\n\t */\n\tbuildSystemPrompt(\n\t\tactiveSkills: ActiveSkill[],\n\t\tavailableTools: ToolSummary[],\n\t): string {\n\t\tconst parts: string[] = ['You are an AI agent powered by OpenVole.']\n\n\t\tif (activeSkills.length > 0) {\n\t\t\tparts.push('')\n\t\t\tparts.push('## Available Skills')\n\t\t\tparts.push(\n\t\t\t\t'The following skills are available. Use the skill_read tool to load full instructions when a skill is relevant to the current task.',\n\t\t\t)\n\t\t\tfor (const skill of activeSkills) {\n\t\t\t\tparts.push(`- **${skill.name}**: ${skill.description}`)\n\t\t\t}\n\t\t}\n\n\t\tif (availableTools.length > 0) {\n\t\t\tparts.push('')\n\t\t\tparts.push('## Available Tools')\n\t\t\tparts.push(\n\t\t\t\t'You have access to the following tools. 
Use function calling to invoke them when needed.',\n\t\t\t)\n\t\t\tfor (const tool of availableTools) {\n\t\t\t\tparts.push(`- **${tool.name}** (from ${tool.pawName}): ${tool.description}`)\n\t\t\t}\n\t\t}\n\n\t\treturn parts.join('\\n')\n\t}\n\n\t/**\n\t * Convert AgentMessage[] to Ollama Message[].\n\t */\n\tconvertMessages(\n\t\tsystemPrompt: string,\n\t\tmessages: AgentMessage[],\n\t): Message[] {\n\t\tconst result: Message[] = [{ role: 'system', content: systemPrompt }]\n\n\t\tfor (const msg of messages) {\n\t\t\tswitch (msg.role) {\n\t\t\t\tcase 'user':\n\t\t\t\t\tresult.push({ role: 'user', content: msg.content })\n\t\t\t\t\tbreak\n\t\t\t\tcase 'brain':\n\t\t\t\t\tresult.push({ role: 'assistant', content: msg.content })\n\t\t\t\t\tbreak\n\t\t\t\tcase 'tool_result':\n\t\t\t\t\tresult.push({ role: 'tool', content: msg.content })\n\t\t\t\t\tbreak\n\t\t\t\tcase 'error':\n\t\t\t\t\tresult.push({\n\t\t\t\t\t\trole: 'tool',\n\t\t\t\t\t\tcontent: `Error: ${msg.content}`,\n\t\t\t\t\t})\n\t\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Convert ToolSummary[] to Ollama Tool[] for function calling.\n\t */\n\tconvertTools(tools: ToolSummary[]): Tool[] {\n\t\treturn tools.map((tool) => ({\n\t\t\ttype: 'function',\n\t\t\tfunction: {\n\t\t\t\tname: tool.name,\n\t\t\t\tdescription: tool.description,\n\t\t\t\tparameters: {\n\t\t\t\t\ttype: 'object',\n\t\t\t\t\tproperties: {},\n\t\t\t\t},\n\t\t\t},\n\t\t}))\n\t}\n\n\t/**\n\t * Send a chat request to Ollama and return the raw response.\n\t */\n\tasync chat(\n\t\tsystemPrompt: string,\n\t\tmessages: AgentMessage[],\n\t\ttools: ToolSummary[],\n\t): Promise<ChatResponse> {\n\t\tconst ollamaMessages = this.convertMessages(systemPrompt, messages)\n\t\tconst ollamaTools = this.convertTools(tools)\n\n\t\treturn this.client.chat({\n\t\t\tmodel: this.model,\n\t\t\tmessages: ollamaMessages,\n\t\t\ttools: ollamaTools.length > 0 ? 
ollamaTools : undefined,\n\t\t\tstream: false,\n\t\t})\n\t}\n\n\t/**\n\t * Extract PlannedAction[] from Ollama tool_calls.\n\t */\n\tparseToolCalls(response: ChatResponse): PlannedAction[] {\n\t\tif (!response.message.tool_calls || response.message.tool_calls.length === 0) {\n\t\t\treturn []\n\t\t}\n\n\t\treturn response.message.tool_calls.map((call) => ({\n\t\t\ttool: call.function.name,\n\t\t\tparams: call.function.arguments,\n\t\t}))\n\t}\n}\n","import type { PawDefinition, AgentContext, AgentPlan } from '@openvole/paw-sdk'\nimport { OllamaClient } from './ollama.js'\n\nlet client: OllamaClient | undefined\n\nfunction getClient(): OllamaClient {\n\tif (!client) {\n\t\tconst host = process.env.OLLAMA_HOST || 'http://localhost:11434'\n\t\tconst model = process.env.OLLAMA_MODEL || 'qwen3:latest'\n\t\tclient = new OllamaClient(host, model)\n\t}\n\treturn client\n}\n\nexport const paw: PawDefinition = {\n\tname: '@openvole/paw-ollama',\n\tversion: '0.1.0',\n\tdescription: 'Brain Paw powered by Ollama for local LLM inference',\n\tbrain: true,\n\n\tasync think(context: AgentContext): Promise<AgentPlan> {\n\t\tconst ollamaClient = getClient()\n\t\tconst start = Date.now()\n\n\t\ttry {\n\t\t\tconst systemPrompt = ollamaClient.buildSystemPrompt(\n\t\t\t\tcontext.activeSkills,\n\t\t\t\tcontext.availableTools,\n\t\t\t)\n\n\t\t\tconst response = await ollamaClient.chat(\n\t\t\t\tsystemPrompt,\n\t\t\t\tcontext.messages,\n\t\t\t\tcontext.availableTools,\n\t\t\t)\n\n\t\t\tconst durationMs = Date.now() - start\n\t\t\tconsole.log(\n\t\t\t\t`[paw-ollama] think completed in ${durationMs}ms (model: ${ollamaClient.getModel()})`,\n\t\t\t)\n\n\t\t\tconst actions = ollamaClient.parseToolCalls(response)\n\n\t\t\tif (actions.length > 0) {\n\t\t\t\treturn {\n\t\t\t\t\tactions,\n\t\t\t\t\texecution: 'sequential',\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tconst text = response.message.content || ''\n\n\t\t\t// If the model produced a text response, return it as done\n\t\t\treturn {\n\t\t\t\tactions: 
[],\n\t\t\t\tresponse: text,\n\t\t\t\tdone: true,\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tconst durationMs = Date.now() - start\n\t\t\tconst message =\n\t\t\t\terror instanceof Error ? error.message : String(error)\n\t\t\tconsole.error(\n\t\t\t\t`[paw-ollama] think failed after ${durationMs}ms: ${message}`,\n\t\t\t)\n\n\t\t\t// Check for connection errors (Ollama not running)\n\t\t\tconst isConnectionError =\n\t\t\t\tmessage.includes('ECONNREFUSED') ||\n\t\t\t\tmessage.includes('fetch failed') ||\n\t\t\t\tmessage.includes('ENOTFOUND')\n\n\t\t\treturn {\n\t\t\t\tactions: [],\n\t\t\t\tresponse: isConnectionError\n\t\t\t\t\t? 'Ollama is not running or unreachable. Please start Ollama and try again.'\n\t\t\t\t\t: `Error communicating with Ollama: ${message}`,\n\t\t\t\tdone: true,\n\t\t\t}\n\t\t}\n\t},\n\n\tasync onLoad() {\n\t\tconst ollamaClient = getClient()\n\t\tconsole.log(\n\t\t\t`[paw-ollama] loaded — model: ${ollamaClient.getModel()}, host: ${process.env.OLLAMA_HOST || 'http://localhost:11434'}`,\n\t\t)\n\t},\n\n\tasync onUnload() {\n\t\tclient = undefined\n\t\tconsole.log('[paw-ollama] 
unloaded')\n\t},\n}\n"],"mappings":";AAAA,SAAS,iBAAiB;;;ACA1B,SAAS,cAA0D;AAQ5D,IAAM,eAAN,MAAmB;AAAA,EACjB;AAAA,EACA;AAAA,EAER,YACC,OAAe,0BACf,QAAgB,gBACf;AACD,SAAK,SAAS,IAAI,OAAO,EAAE,KAAK,CAAC;AACjC,SAAK,QAAQ;AAAA,EACd;AAAA,EAEA,WAAmB;AAClB,WAAO,KAAK;AAAA,EACb;AAAA;AAAA;AAAA;AAAA,EAKA,kBACC,cACA,gBACS;AACT,UAAM,QAAkB,CAAC,0CAA0C;AAEnE,QAAI,aAAa,SAAS,GAAG;AAC5B,YAAM,KAAK,EAAE;AACb,YAAM,KAAK,qBAAqB;AAChC,YAAM;AAAA,QACL;AAAA,MACD;AACA,iBAAW,SAAS,cAAc;AACjC,cAAM,KAAK,OAAO,MAAM,IAAI,OAAO,MAAM,WAAW,EAAE;AAAA,MACvD;AAAA,IACD;AAEA,QAAI,eAAe,SAAS,GAAG;AAC9B,YAAM,KAAK,EAAE;AACb,YAAM,KAAK,oBAAoB;AAC/B,YAAM;AAAA,QACL;AAAA,MACD;AACA,iBAAW,QAAQ,gBAAgB;AAClC,cAAM,KAAK,OAAO,KAAK,IAAI,YAAY,KAAK,OAAO,MAAM,KAAK,WAAW,EAAE;AAAA,MAC5E;AAAA,IACD;AAEA,WAAO,MAAM,KAAK,IAAI;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKA,gBACC,cACA,UACY;AACZ,UAAM,SAAoB,CAAC,EAAE,MAAM,UAAU,SAAS,aAAa,CAAC;AAEpE,eAAW,OAAO,UAAU;AAC3B,cAAQ,IAAI,MAAM;AAAA,QACjB,KAAK;AACJ,iBAAO,KAAK,EAAE,MAAM,QAAQ,SAAS,IAAI,QAAQ,CAAC;AAClD;AAAA,QACD,KAAK;AACJ,iBAAO,KAAK,EAAE,MAAM,aAAa,SAAS,IAAI,QAAQ,CAAC;AACvD;AAAA,QACD,KAAK;AACJ,iBAAO,KAAK,EAAE,MAAM,QAAQ,SAAS,IAAI,QAAQ,CAAC;AAClD;AAAA,QACD,KAAK;AACJ,iBAAO,KAAK;AAAA,YACX,MAAM;AAAA,YACN,SAAS,UAAU,IAAI,OAAO;AAAA,UAC/B,CAAC;AACD;AAAA,MACF;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,OAA8B;AAC1C,WAAO,MAAM,IAAI,CAAC,UAAU;AAAA,MAC3B,MAAM;AAAA,MACN,UAAU;AAAA,QACT,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,YAAY;AAAA,UACX,MAAM;AAAA,UACN,YAAY,CAAC;AAAA,QACd;AAAA,MACD;AAAA,IACD,EAAE;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KACL,cACA,UACA,OACwB;AACxB,UAAM,iBAAiB,KAAK,gBAAgB,cAAc,QAAQ;AAClE,UAAM,cAAc,KAAK,aAAa,KAAK;AAE3C,WAAO,KAAK,OAAO,KAAK;AAAA,MACvB,OAAO,KAAK;AAAA,MACZ,UAAU;AAAA,MACV,OAAO,YAAY,SAAS,IAAI,cAAc;AAAA,MAC9C,QAAQ;AAAA,IACT,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,UAAyC;AACvD,QAAI,CAAC,SAAS,QAAQ,cAAc,SAAS,QAAQ,WAAW,WAAW,GAAG;AAC7E,aAAO,CAAC;AAAA,IACT;AAEA,WAAO,SAAS,QAAQ,WAAW,IAAI,CAAC,UAAU;AAAA,MACjD,MAAM,KAAK,SAAS;AAAA,MACpB,QAAQ,KAAK,SAAS;AAAA,IACvB,EAAE;AAAA,EACH;AACD;;;ACxIA
,IAAI;AAEJ,SAAS,YAA0B;AAClC,MAAI,CAAC,QAAQ;AACZ,UAAM,OAAO,QAAQ,IAAI,eAAe;AACxC,UAAM,QAAQ,QAAQ,IAAI,gBAAgB;AAC1C,aAAS,IAAI,aAAa,MAAM,KAAK;AAAA,EACtC;AACA,SAAO;AACR;AAEO,IAAM,MAAqB;AAAA,EACjC,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aAAa;AAAA,EACb,OAAO;AAAA,EAEP,MAAM,MAAM,SAA2C;AACtD,UAAM,eAAe,UAAU;AAC/B,UAAM,QAAQ,KAAK,IAAI;AAEvB,QAAI;AACH,YAAM,eAAe,aAAa;AAAA,QACjC,QAAQ;AAAA,QACR,QAAQ;AAAA,MACT;AAEA,YAAM,WAAW,MAAM,aAAa;AAAA,QACnC;AAAA,QACA,QAAQ;AAAA,QACR,QAAQ;AAAA,MACT;AAEA,YAAM,aAAa,KAAK,IAAI,IAAI;AAChC,cAAQ;AAAA,QACP,mCAAmC,UAAU,cAAc,aAAa,SAAS,CAAC;AAAA,MACnF;AAEA,YAAM,UAAU,aAAa,eAAe,QAAQ;AAEpD,UAAI,QAAQ,SAAS,GAAG;AACvB,eAAO;AAAA,UACN;AAAA,UACA,WAAW;AAAA,QACZ;AAAA,MACD;AAEA,YAAM,OAAO,SAAS,QAAQ,WAAW;AAGzC,aAAO;AAAA,QACN,SAAS,CAAC;AAAA,QACV,UAAU;AAAA,QACV,MAAM;AAAA,MACP;AAAA,IACD,SAAS,OAAO;AACf,YAAM,aAAa,KAAK,IAAI,IAAI;AAChC,YAAM,UACL,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACtD,cAAQ;AAAA,QACP,mCAAmC,UAAU,OAAO,OAAO;AAAA,MAC5D;AAGA,YAAM,oBACL,QAAQ,SAAS,cAAc,KAC/B,QAAQ,SAAS,cAAc,KAC/B,QAAQ,SAAS,WAAW;AAE7B,aAAO;AAAA,QACN,SAAS,CAAC;AAAA,QACV,UAAU,oBACP,6EACA,oCAAoC,OAAO;AAAA,QAC9C,MAAM;AAAA,MACP;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,SAAS;AACd,UAAM,eAAe,UAAU;AAC/B,YAAQ;AAAA,MACP,qCAAgC,aAAa,SAAS,CAAC,WAAW,QAAQ,IAAI,eAAe,wBAAwB;AAAA,IACtH;AAAA,EACD;AAAA,EAEA,MAAM,WAAW;AAChB,aAAS;AACT,YAAQ,IAAI,uBAAuB;AAAA,EACpC;AACD;;;AF1FA,IAAO,gBAAQ,UAAU,GAAG;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@openvole/paw-ollama",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Brain Paw powered by Ollama for local LLM inference",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"import": "./dist/index.js",
|
|
11
|
+
"types": "./dist/index.d.ts"
|
|
12
|
+
}
|
|
13
|
+
},
|
|
14
|
+
"dependencies": {
|
|
15
|
+
"ollama": "^0.5.0"
|
|
16
|
+
},
|
|
17
|
+
"devDependencies": {
|
|
18
|
+
"@types/node": "^22.0.0",
|
|
19
|
+
"tsup": "^8.3.0",
|
|
20
|
+
"typescript": "^5.6.0",
|
|
21
|
+
"@openvole/paw-sdk": "^0.1.0"
|
|
22
|
+
},
|
|
23
|
+
"engines": {
|
|
24
|
+
"node": ">=20.0.0"
|
|
25
|
+
},
|
|
26
|
+
"files": [
|
|
27
|
+
"dist",
|
|
28
|
+
"vole-paw.json"
|
|
29
|
+
],
|
|
30
|
+
"license": "MIT",
|
|
31
|
+
"repository": {
|
|
32
|
+
"type": "git",
|
|
33
|
+
"url": "https://github.com/openvole/pawhub",
|
|
34
|
+
"directory": "paws/paw-ollama"
|
|
35
|
+
},
|
|
36
|
+
"keywords": [
|
|
37
|
+
"openvole",
|
|
38
|
+
"paw",
|
|
39
|
+
"ollama",
|
|
40
|
+
"brain",
|
|
41
|
+
"llm"
|
|
42
|
+
],
|
|
43
|
+
"peerDependencies": {
|
|
44
|
+
"@openvole/paw-sdk": "^0.1.0"
|
|
45
|
+
},
|
|
46
|
+
"scripts": {
|
|
47
|
+
"build": "tsup",
|
|
48
|
+
"typecheck": "tsc --noEmit"
|
|
49
|
+
}
|
|
50
|
+
}
|
package/vole-paw.json
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@openvole/paw-ollama",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Brain Paw powered by Ollama for local LLM inference",
|
|
5
|
+
"entry": "./dist/index.js",
|
|
6
|
+
"brain": true,
|
|
7
|
+
"inProcess": false,
|
|
8
|
+
"transport": "ipc",
|
|
9
|
+
"tools": [],
|
|
10
|
+
"permissions": {
|
|
11
|
+
"network": ["127.0.0.1"],
|
|
12
|
+
"listen": [],
|
|
13
|
+
"filesystem": [],
|
|
14
|
+
"env": ["OLLAMA_MODEL", "OLLAMA_HOST"]
|
|
15
|
+
}
|
|
16
|
+
}
|