@pentatonic-ai/ai-agent-sdk 0.5.1 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@pentatonic-ai/ai-agent-sdk",
|
|
3
|
-
"version": "0.5.1",
|
|
3
|
+
"version": "0.5.2",
|
|
4
4
|
"description": "TES SDK — LLM observability and lifecycle tracking via Pentatonic Thing Event System. Track token usage, tool calls, and conversations. Manage things through event-sourced lifecycle stages with AI enrichment and vector search.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.cjs",
|
|
@@ -168,6 +168,11 @@ describe("named exports", () => {
|
|
|
168
168
|
// --- AI client ---
|
|
169
169
|
|
|
170
170
|
describe("createAIClient", () => {
|
|
171
|
+
const realFetch = globalThis.fetch;
|
|
172
|
+
afterEach(() => {
|
|
173
|
+
globalThis.fetch = realFetch;
|
|
174
|
+
});
|
|
175
|
+
|
|
171
176
|
it("returns an object with embed() and chat()", () => {
|
|
172
177
|
const client = createAIClient({
|
|
173
178
|
url: "http://localhost:11434/v1",
|
|
@@ -185,6 +190,79 @@ describe("createAIClient", () => {
|
|
|
185
190
|
});
|
|
186
191
|
expect(client).toBeDefined();
|
|
187
192
|
});
|
|
193
|
+
|
|
194
|
+
it("hits /embeddings by default (OpenAI spec)", async () => {
|
|
195
|
+
let hitUrl;
|
|
196
|
+
globalThis.fetch = async (url) => {
|
|
197
|
+
hitUrl = url;
|
|
198
|
+
return { ok: true, json: async () => ({ data: [{ embedding: [0.1, 0.2] }] }) };
|
|
199
|
+
};
|
|
200
|
+
const client = createAIClient({
|
|
201
|
+
url: "http://localhost:11434/v1",
|
|
202
|
+
model: "test",
|
|
203
|
+
});
|
|
204
|
+
await client.embed("hello");
|
|
205
|
+
expect(hitUrl).toBe("http://localhost:11434/v1/embeddings");
|
|
206
|
+
});
|
|
207
|
+
|
|
208
|
+
it("uses embeddingPath override (e.g. Pentatonic AI Gateway)", async () => {
|
|
209
|
+
let hitUrl;
|
|
210
|
+
globalThis.fetch = async (url) => {
|
|
211
|
+
hitUrl = url;
|
|
212
|
+
return { ok: true, json: async () => ({ data: [{ embedding: [0.1] }] }) };
|
|
213
|
+
};
|
|
214
|
+
const client = createAIClient({
|
|
215
|
+
url: "https://lambda-gateway.pentatonic.com/v1",
|
|
216
|
+
model: "NV-Embed-v2",
|
|
217
|
+
embeddingPath: "embed",
|
|
218
|
+
});
|
|
219
|
+
await client.embed("hello");
|
|
220
|
+
expect(hitUrl).toBe("https://lambda-gateway.pentatonic.com/v1/embed");
|
|
221
|
+
});
|
|
222
|
+
|
|
223
|
+
it("normalises leading slashes and trailing base-url slashes", async () => {
|
|
224
|
+
let hitUrl;
|
|
225
|
+
globalThis.fetch = async (url) => {
|
|
226
|
+
hitUrl = url;
|
|
227
|
+
return { ok: true, json: async () => ({ data: [{ embedding: [0.1] }] }) };
|
|
228
|
+
};
|
|
229
|
+
const client = createAIClient({
|
|
230
|
+
url: "https://gateway.test/v1/",
|
|
231
|
+
model: "m",
|
|
232
|
+
embeddingPath: "/embed",
|
|
233
|
+
});
|
|
234
|
+
await client.embed("hi");
|
|
235
|
+
expect(hitUrl).toBe("https://gateway.test/v1/embed");
|
|
236
|
+
});
|
|
237
|
+
|
|
238
|
+
it("chatPath override applies to chat() too", async () => {
|
|
239
|
+
let hitUrl;
|
|
240
|
+
globalThis.fetch = async (url) => {
|
|
241
|
+
hitUrl = url;
|
|
242
|
+
return { ok: true, json: async () => ({ choices: [{ message: { content: "hi" } }] }) };
|
|
243
|
+
};
|
|
244
|
+
const client = createAIClient({
|
|
245
|
+
url: "https://gateway.test/v1",
|
|
246
|
+
model: "m",
|
|
247
|
+
chatPath: "chat",
|
|
248
|
+
});
|
|
249
|
+
await client.chat([{ role: "user", content: "q" }]);
|
|
250
|
+
expect(hitUrl).toBe("https://gateway.test/v1/chat");
|
|
251
|
+
});
|
|
252
|
+
|
|
253
|
+
it("chat defaults to /chat/completions", async () => {
|
|
254
|
+
let hitUrl;
|
|
255
|
+
globalThis.fetch = async (url) => {
|
|
256
|
+
hitUrl = url;
|
|
257
|
+
return { ok: true, json: async () => ({ choices: [{ message: { content: "hi" } }] }) };
|
|
258
|
+
};
|
|
259
|
+
const client = createAIClient({
|
|
260
|
+
url: "http://localhost:11434/v1",
|
|
261
|
+
model: "m",
|
|
262
|
+
});
|
|
263
|
+
await client.chat([{ role: "user", content: "q" }]);
|
|
264
|
+
expect(hitUrl).toBe("http://localhost:11434/v1/chat/completions");
|
|
265
|
+
});
|
|
188
266
|
});
|
|
189
267
|
|
|
190
268
|
// --- Search options contract ---
|
|
@@ -8,10 +8,16 @@
|
|
|
8
8
|
/**
|
|
9
9
|
* Create an AI client from config.
|
|
10
10
|
*
|
|
11
|
+
* Defaults to OpenAI-standard paths (`/embeddings`, `/chat/completions`).
|
|
12
|
+
* Override with `embeddingPath` / `chatPath` for gateways that use
|
|
13
|
+
* different routes — e.g. Pentatonic AI Gateway exposes `/embed`.
|
|
14
|
+
*
|
|
11
15
|
* @param {object} config
|
|
12
16
|
* @param {string} config.url - Base URL (e.g. "http://ollama:11434/v1")
|
|
13
17
|
* @param {string} config.model - Model name
|
|
14
18
|
* @param {string} [config.apiKey] - Optional API key
|
|
19
|
+
* @param {string} [config.embeddingPath="embeddings"] - Path appended to url
|
|
20
|
+
* @param {string} [config.chatPath="chat/completions"] - Path appended to url
|
|
15
21
|
* @param {number} [config.dimensions] - Expected embedding dimensions
|
|
16
22
|
* @returns {object} Client with embed() and chat() methods
|
|
17
23
|
*/
|
|
@@ -22,6 +28,23 @@ export function createAIClient(config) {
|
|
|
22
28
|
headers["X-API-Key"] = config.apiKey;
|
|
23
29
|
}
|
|
24
30
|
|
|
31
|
+
// Strip leading slashes so callers can use "embed" or "/embed"
|
|
32
|
+
// interchangeably. Base url may or may not have a trailing slash.
|
|
33
|
+
// Plain loops (not regex) to avoid polynomial-regex scanner flags.
|
|
34
|
+
const stripLeading = (s) => {
|
|
35
|
+
let i = 0;
|
|
36
|
+
while (i < s.length && s[i] === "/") i++;
|
|
37
|
+
return i === 0 ? s : s.slice(i);
|
|
38
|
+
};
|
|
39
|
+
const stripTrailing = (s) => {
|
|
40
|
+
let i = s.length;
|
|
41
|
+
while (i > 0 && s[i - 1] === "/") i--;
|
|
42
|
+
return i === s.length ? s : s.slice(0, i);
|
|
43
|
+
};
|
|
44
|
+
const embeddingPath = stripLeading(config.embeddingPath || "embeddings");
|
|
45
|
+
const chatPath = stripLeading(config.chatPath || "chat/completions");
|
|
46
|
+
const baseUrl = stripTrailing(config.url);
|
|
47
|
+
|
|
25
48
|
return {
|
|
26
49
|
/**
|
|
27
50
|
* Generate an embedding vector for text.
|
|
@@ -32,7 +55,7 @@ export function createAIClient(config) {
|
|
|
32
55
|
*/
|
|
33
56
|
async embed(text, inputType = "passage") {
|
|
34
57
|
try {
|
|
35
|
-
const res = await fetch(`${config.url}/embeddings`, {
|
|
58
|
+
const res = await fetch(`${baseUrl}/${embeddingPath}`, {
|
|
36
59
|
method: "POST",
|
|
37
60
|
headers,
|
|
38
61
|
body: JSON.stringify({
|
|
@@ -70,7 +93,7 @@ export function createAIClient(config) {
|
|
|
70
93
|
*/
|
|
71
94
|
async chat(messages, opts = {}) {
|
|
72
95
|
try {
|
|
73
|
-
const res = await fetch(`${config.url}/chat/completions`, {
|
|
96
|
+
const res = await fetch(`${baseUrl}/${chatPath}`, {
|
|
74
97
|
method: "POST",
|
|
75
98
|
headers,
|
|
76
99
|
body: JSON.stringify({
|
|
@@ -13,6 +13,9 @@
|
|
|
13
13
|
* LLM_URL — OpenAI-compatible chat endpoint (required)
|
|
14
14
|
* LLM_MODEL — Chat model name for HyDE (required)
|
|
15
15
|
* API_KEY — API key for embedding/LLM endpoints (optional)
|
|
16
|
+
* EMBEDDING_PATH — Path appended to EMBEDDING_URL (default: "embeddings").
|
|
17
|
+
* Set to "embed" for the Pentatonic AI Gateway.
|
|
18
|
+
* CHAT_PATH — Path appended to LLM_URL (default: "chat/completions")
|
|
16
19
|
* CLIENT_ID — Client ID for memory scoping (default: "default")
|
|
17
20
|
* PORT — HTTP port for SSE transport (default: 3333)
|
|
18
21
|
*/
|
|
@@ -46,11 +49,13 @@ function createMemory() {
|
|
|
46
49
|
url: process.env.EMBEDDING_URL,
|
|
47
50
|
model: process.env.EMBEDDING_MODEL,
|
|
48
51
|
apiKey: process.env.API_KEY,
|
|
52
|
+
embeddingPath: process.env.EMBEDDING_PATH,
|
|
49
53
|
},
|
|
50
54
|
llm: {
|
|
51
55
|
url: process.env.LLM_URL,
|
|
52
56
|
model: process.env.LLM_MODEL,
|
|
53
57
|
apiKey: process.env.API_KEY,
|
|
58
|
+
chatPath: process.env.CHAT_PATH,
|
|
54
59
|
},
|
|
55
60
|
logger: (msg) => process.stderr.write(`[memory] ${msg}\n`),
|
|
56
61
|
});
|
|
@@ -342,7 +347,7 @@ async function main() {
|
|
|
342
347
|
const health = {
|
|
343
348
|
status: "ok",
|
|
344
349
|
client: CLIENT_ID,
|
|
345
|
-
version: "0.5.1",
|
|
350
|
+
version: "0.5.2",
|
|
346
351
|
search: "text",
|
|
347
352
|
db: false,
|
|
348
353
|
ollama: false,
|