@promptev/client 0.0.2 → 0.1.0
This diff compares the contents of two publicly released versions of the package as published to its public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- package/dist/cjs/index.cjs +273 -122
- package/dist/esm/index.d.ts +75 -16
- package/dist/esm/index.js +265 -85
- package/package.json +6 -9
- package/readme.md +387 -62
- package/dist/cjs/index.d.ts +0 -30
package/dist/cjs/index.cjs
CHANGED
@@ -1,142 +1,293 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
+// ── Types ───────────────────────────────────────────────
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.PromptevClient = void 0;
-
+exports.PromptevClient = exports.NetworkError = exports.ServerError = exports.RateLimitError = exports.NotFoundError = exports.AuthenticationError = exports.ValidationError = exports.PromptevError = void 0;
+// ── Errors ──────────────────────────────────────────────
+class PromptevError extends Error {
+    constructor(message, statusCode, responseText) {
+        super(message);
+        this.name = "PromptevError";
+        this.statusCode = statusCode;
+        this.responseText = responseText;
+    }
+}
+exports.PromptevError = PromptevError;
+class ValidationError extends PromptevError {
+    constructor(message, statusCode, responseText) {
+        super(message, statusCode, responseText);
+        this.name = "ValidationError";
+    }
+}
+exports.ValidationError = ValidationError;
+class AuthenticationError extends PromptevError {
+    constructor(message, statusCode, responseText) {
+        super(message, statusCode, responseText);
+        this.name = "AuthenticationError";
+    }
+}
+exports.AuthenticationError = AuthenticationError;
+class NotFoundError extends PromptevError {
+    constructor(message, statusCode, responseText) {
+        super(message, statusCode, responseText);
+        this.name = "NotFoundError";
+    }
+}
+exports.NotFoundError = NotFoundError;
+class RateLimitError extends PromptevError {
+    constructor(message, statusCode, responseText) {
+        super(message, statusCode, responseText);
+        this.name = "RateLimitError";
+    }
+}
+exports.RateLimitError = RateLimitError;
+class ServerError extends PromptevError {
+    constructor(message, statusCode, responseText) {
+        super(message, statusCode, responseText);
+        this.name = "ServerError";
+    }
+}
+exports.ServerError = ServerError;
+class NetworkError extends PromptevError {
+    constructor(message, statusCode, responseText) {
+        super(message, statusCode, responseText);
+        this.name = "NetworkError";
+    }
+}
+exports.NetworkError = NetworkError;
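
The new error hierarchy above lets callers branch on failure type instead of string-matching messages. A minimal consumption sketch, assuming the package's exports resolve to these builds; the project key, prompt key, and query are placeholders, not values from the package:

    import PromptevClient, { NotFoundError, RateLimitError, PromptevError } from "@promptev/client";

    const client = new PromptevClient({ projectKey: "pk_example" }); // placeholder key

    try {
        // runPrompt is defined further down in this diff
        const output = await client.runPrompt("welcome-message", "Hello!");
        console.log(output);
    } catch (err) {
        if (err instanceof NotFoundError) {
            console.error("Unknown project or prompt key (404)");
        } else if (err instanceof RateLimitError) {
            console.error("Rate limited (429), retry later");
        } else if (err instanceof PromptevError) {
            console.error(`API error ${err.statusCode}: ${err.message}`);
        } else {
            throw err; // not a Promptev failure
        }
    }
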
+// ── Client ──────────────────────────────────────────────
+const RETRYABLE_STATUS = new Set([502, 503, 504]);
+const DEFAULT_MAX_RETRIES = 2;
+const DEFAULT_BACKOFF = 500; // ms
 class PromptevClient {
     constructor(config) {
-        this.refreshInterval = null;
-        this.baseUrl = config.baseUrl || "https://api.promptev.ai";
         this.projectKey = config.projectKey;
-        this.
-        this.
-
-
-
-
-
-
-
-
+        this.baseUrl = (config.baseUrl ?? "https://api.promptev.ai").replace(/\/+$/, "");
+        this.headers = {
+            "Content-Type": "application/json",
+            ...(config.headers ?? {}),
+        };
+        this.timeout = config.timeout ?? 30000;
+        this.maxRetries = config.maxRetries ?? DEFAULT_MAX_RETRIES;
+    }
+    // ── Error handling ──────────────────────────────────
+    static raiseForStatus(status, text) {
+        if (status < 400)
+            return;
+        let detail;
+        try {
+            const body = JSON.parse(text);
+            detail =
+                typeof body.detail === "string" ? body.detail : text || "Unknown error";
+        }
+        catch {
+            detail = text || "Unknown error";
+        }
+        if (status === 400)
+            throw new ValidationError(detail, status, text);
+        if (status === 401)
+            throw new AuthenticationError(detail, status, text);
+        if (status === 403)
+            throw new AuthenticationError(detail, status, text);
+        if (status === 404)
+            throw new NotFoundError(detail, status, text);
+        if (status === 429)
+            throw new RateLimitError(detail, status, text);
+        if (status >= 500)
+            throw new ServerError(detail, status, text);
+        throw new PromptevError(detail, status, text);
+    }
+    // ── Retry helper ────────────────────────────────────
+    async fetchWithRetry(url, init) {
+        let lastError = null;
+        for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
+            try {
+                const controller = new AbortController();
+                const timeoutId = setTimeout(() => controller.abort(), this.timeout);
+                const resp = await fetch(url, {
+                    ...init,
+                    signal: controller.signal,
                 });
+                clearTimeout(timeoutId);
+                if (!RETRYABLE_STATUS.has(resp.status) || attempt === this.maxRetries) {
+                    return resp;
+                }
+                lastError = new ServerError(`Server error (${resp.status})`, resp.status);
             }
-
-
+            catch (err) {
+                if (err.name === "AbortError") {
+                    lastError = new NetworkError("Request timed out");
+                }
+                else {
+                    lastError = new NetworkError(`Network error: ${err.message}`);
+                }
+                if (attempt === this.maxRetries)
+                    throw lastError;
             }
-
-
-    }
-    async ensureReady() {
-        await this.isReady;
-    }
-    makeCacheKey(promptKey, variables) {
-        const sorted = Object.keys(variables)
-            .sort()
-            .map(k => `${k}=${variables[k]}`)
-            .join("&");
-        return `${promptKey}:${btoa(sorted)}`;
-    }
-    async fetchPromptFromServer(promptKey) {
-        const url = `/api/sdk/v1/prompt/client/${this.projectKey}/${promptKey}`;
-        const response = await this.client.get(url);
-        const rawPrompt = response.data.prompt;
-        const rawVars = Array.isArray(response.data.variables)
-            ? response.data.variables
-            : typeof response.data.variables === "string"
-                ? response.data.variables.split(",").map(v => v.trim())
-                : [];
-        return { prompt: rawPrompt, variables: rawVars };
-    }
-    async formatPrompt(template, requiredVars, values) {
-        if (!requiredVars.length)
-            return template;
-        const missing = requiredVars.filter(v => !(v in values));
-        if (missing.length)
-            throw new Error(`Missing variables: ${missing.join(", ")}`);
-        let formatted = template;
-        for (const [key, val] of Object.entries(values)) {
-            const pattern = new RegExp(`{{\\s*${key.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}\\s*}}`, "g");
-            formatted = formatted.replace(pattern, val);
+            const delay = DEFAULT_BACKOFF * Math.pow(2, attempt);
+            await new Promise((resolve) => setTimeout(resolve, delay));
         }
-
+        throw lastError;
     }
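
Timeouts and retries are now per-client configuration: each attempt is aborted after the configured timeout, and 502/503/504 responses are retried with exponential backoff (500 ms, 1 s, 2 s, ...) up to maxRetries. A configuration sketch with illustrative values only:

    import PromptevClient from "@promptev/client";

    const client = new PromptevClient({
        projectKey: "pk_example",                 // required; placeholder value
        baseUrl: "https://api.promptev.ai",       // optional; trailing slashes are stripped
        headers: { "X-Example-Header": "docs" },  // merged over the default Content-Type: application/json
        timeout: 10_000,                          // ms per attempt (default 30000)
        maxRetries: 3,                            // default 2; only 502/503/504 trigger a retry
    });
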
-
-
-
-
-
-
-
-        const
-
-
-
+    // ── SSE parser ──────────────────────────────────────
+    async *parseSSE(response) {
+        const reader = response.body.getReader();
+        const decoder = new TextDecoder();
+        let buffer = "";
+        try {
+            while (true) {
+                const { done, value } = await reader.read();
+                if (done)
+                    break;
+                buffer += decoder.decode(value, { stream: true });
+                const lines = buffer.split("\n");
+                buffer = lines.pop() || "";
+                for (const line of lines) {
+                    if (!line.startsWith("data: "))
+                        continue;
+                    try {
+                        const data = JSON.parse(line.slice(6));
+                        const event = {
+                            type: data.type || "unknown",
+                            output: data.output || "",
+                            raw: data,
+                        };
+                        yield event;
+                        if (event.type === "done" || event.type === "error")
+                            return;
+                    }
+                    catch {
+                        continue;
+                    }
                 }
-                catch { }
             }
-        }
+        }
+        finally {
+            reader.releaseLock();
+        }
     }
-
-
-
-
-
-
+    // ── Prompt API ──────────────────────────────────────
+    /**
+     * Compile and execute a prompt.
+     *
+     * If the prompt has a model configured, it compiles the template,
+     * sends it to the LLM, and returns the AI response. If no model
+     * is configured, it returns the compiled template string.
+     */
+    async runPrompt(promptKey, query, variables) {
+        const url = `${this.baseUrl}/api/sdk/v1/prompt/client/${this.projectKey}/${promptKey}`;
+        const merged = { query, ...(variables ?? {}) };
+        const resp = await this.fetchWithRetry(url, {
+            method: "POST",
+            headers: this.headers,
+            body: JSON.stringify({ variables: merged }),
+        });
+        const text = await resp.text();
+        PromptevClient.raiseForStatus(resp.status, text);
+        const data = JSON.parse(text);
+        if (typeof data.prompt !== "string") {
+            throw new PromptevError("Unexpected response: missing 'prompt' field");
+        }
+        return data.prompt;
     }
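
Per the JSDoc above, runPrompt returns the LLM response when the prompt has a model configured and the compiled template otherwise; the query argument is merged into the variables payload. A call sketch in which the prompt key and variable names are placeholders:

    import PromptevClient from "@promptev/client";

    const client = new PromptevClient({ projectKey: "pk_example" });

    const answer = await client.runPrompt(
        "support-reply",                             // placeholder prompt key
        "My order arrived damaged, what can I do?",  // query
        { customer_name: "Ada", tone: "friendly" }   // optional template variables
    );
    console.log(answer); // AI response, or the compiled template if the prompt has no model
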
-
-
-
-
-
-
-
+    /**
+     * Compile and execute a prompt with streaming.
+     *
+     * Use this when the prompt has tools attached or you want real-time
+     * output. Returns the same event types as agent streaming.
+     */
+    async *streamPrompt(promptKey, query, variables) {
+        const url = `${this.baseUrl}/api/sdk/v1/prompt/client/${this.projectKey}/${promptKey}?stream=true`;
+        const merged = { query, ...(variables ?? {}) };
+        const controller = new AbortController();
+        const timeoutId = setTimeout(() => controller.abort(), this.timeout);
+        let resp;
+        try {
+            resp = await fetch(url, {
+                method: "POST",
+                headers: this.headers,
+                body: JSON.stringify({ variables: merged }),
+                signal: controller.signal,
+            });
+        }
+        catch (err) {
+            clearTimeout(timeoutId);
+            if (err.name === "AbortError")
+                throw new NetworkError("Request timed out");
+            throw new NetworkError(`Network error: ${err.message}`);
+        }
+        clearTimeout(timeoutId);
+        if (resp.status >= 400) {
+            const text = await resp.text();
+            PromptevClient.raiseForStatus(resp.status, text);
+        }
+        const contentType = resp.headers.get("content-type") || "";
+        // Non-streaming fallback: prompt has no model, backend returns JSON
+        if (!contentType.includes("text/event-stream")) {
+            const data = await resp.json();
+            yield {
+                type: "done",
+                output: data.prompt || "",
+                raw: data,
+            };
+            return;
+        }
+        yield* this.parseSSE(resp);
+    }
+    // ── Agent API ───────────────────────────────────────
+    /**
+     * Start a new agent chat session.
+     */
+    async startAgent(chatbotId, options) {
+        const url = `${this.baseUrl}/api/sdk/v1/agent/${this.projectKey}/${chatbotId}/start`;
+        const payload = {};
+        if (options?.visitor != null)
+            payload.visitor = options.visitor;
+        if (options?.platform && options.platform !== "sdk")
+            payload.platform = options.platform;
+        const resp = await this.fetchWithRetry(url, {
+            method: "POST",
+            headers: this.headers,
+            body: JSON.stringify(payload),
+        });
+        const text = await resp.text();
+        PromptevClient.raiseForStatus(resp.status, text);
+        const data = JSON.parse(text);
+        return {
+            sessionToken: data.session_token,
+            chatbotId: data.chatbot_id,
+            name: data.name,
+            memoryEnabled: data.memory_enabled ?? false,
+            messages: data.messages ?? [],
+        };
     }
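
streamPrompt yields AgentEvent objects parsed from the SSE stream, with a single synthetic "done" event when the backend falls back to plain JSON because the prompt has no model. A consumption sketch; the prompt key and query are placeholders:

    import PromptevClient from "@promptev/client";

    const client = new PromptevClient({ projectKey: "pk_example" });

    for await (const event of client.streamPrompt("support-reply", "Summarize my open tickets")) {
        if (event.type === "error") {
            console.error("Stream failed:", event.raw);
            break;
        }
        process.stdout.write(event.output); // event.raw carries the untouched SSE payload
        if (event.type === "done") break;   // the generator also ends itself after "done" / "error"
    }
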
-
-
-
-
-
-
+    /**
+     * Stream an agent response as SSE events.
+     */
+    async *streamAgent(chatbotId, options) {
+        const url = `${this.baseUrl}/api/sdk/v1/agent/${this.projectKey}/${chatbotId}/stream?session_token=${encodeURIComponent(options.sessionToken)}`;
+        const controller = new AbortController();
+        const timeoutId = setTimeout(() => controller.abort(), this.timeout);
+        let resp;
+        try {
+            resp = await fetch(url, {
+                method: "POST",
+                headers: this.headers,
+                body: JSON.stringify({ query: options.query }),
+                signal: controller.signal,
+            });
+        }
+        catch (err) {
+            clearTimeout(timeoutId);
+            if (err.name === "AbortError")
+                throw new NetworkError("Request timed out");
+            throw new NetworkError(`Network error: ${err.message}`);
         }
-
-
-
+        clearTimeout(timeoutId);
+        if (resp.status >= 400) {
+            const text = await resp.text();
+            PromptevClient.raiseForStatus(resp.status, text);
         }
+        yield* this.parseSSE(resp);
     }
 }
 exports.PromptevClient = PromptevClient;
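
Taken together, the agent API added above is a two-step flow: startAgent returns a session token, and streamAgent streams the reply to each query against that session. A round-trip sketch; the chatbot id, visitor, and query are placeholders:

    import PromptevClient from "@promptev/client";

    const client = new PromptevClient({ projectKey: "pk_example" });

    const session = await client.startAgent("chatbot_123", { visitor: "visitor-42" });
    console.log(`Connected to ${session.name} (memory: ${session.memoryEnabled})`);

    for await (const event of client.streamAgent("chatbot_123", {
        sessionToken: session.sessionToken,
        query: "What plans do you offer?",
    })) {
        if (event.type === "error") break;
        process.stdout.write(event.output);
    }
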
package/dist/esm/index.d.ts
CHANGED
@@ -1,24 +1,83 @@
 export interface PromptevClientConfig {
-    baseUrl?: string;
     projectKey: string;
-
+    baseUrl?: string;
+    headers?: Record<string, string>;
+    timeout?: number;
+    maxRetries?: number;
+}
+export interface AgentSession {
+    sessionToken: string;
+    chatbotId: string;
+    name: string;
+    memoryEnabled: boolean;
+    messages: Record<string, any>[];
+}
+export interface AgentEvent {
+    type: string;
+    output: string;
+    raw: Record<string, any>;
+}
+export declare class PromptevError extends Error {
+    statusCode?: number;
+    responseText?: string;
+    constructor(message: string, statusCode?: number, responseText?: string);
+}
+export declare class ValidationError extends PromptevError {
+    constructor(message: string, statusCode?: number, responseText?: string);
+}
+export declare class AuthenticationError extends PromptevError {
+    constructor(message: string, statusCode?: number, responseText?: string);
+}
+export declare class NotFoundError extends PromptevError {
+    constructor(message: string, statusCode?: number, responseText?: string);
+}
+export declare class RateLimitError extends PromptevError {
+    constructor(message: string, statusCode?: number, responseText?: string);
+}
+export declare class ServerError extends PromptevError {
+    constructor(message: string, statusCode?: number, responseText?: string);
+}
+export declare class NetworkError extends PromptevError {
+    constructor(message: string, statusCode?: number, responseText?: string);
 }
 export declare class PromptevClient {
-    private client;
-    private baseUrl;
     private projectKey;
-    private
-    private
-    private
-    private
+    private baseUrl;
+    private headers;
+    private timeout;
+    private maxRetries;
     constructor(config: PromptevClientConfig);
-    private
-    private
-    private
-
-
-
-
-
+    private static raiseForStatus;
+    private fetchWithRetry;
+    private parseSSE;
+    /**
+     * Compile and execute a prompt.
+     *
+     * If the prompt has a model configured, it compiles the template,
+     * sends it to the LLM, and returns the AI response. If no model
+     * is configured, it returns the compiled template string.
+     */
+    runPrompt(promptKey: string, query: string, variables?: Record<string, string>): Promise<string>;
+    /**
+     * Compile and execute a prompt with streaming.
+     *
+     * Use this when the prompt has tools attached or you want real-time
+     * output. Returns the same event types as agent streaming.
+     */
+    streamPrompt(promptKey: string, query: string, variables?: Record<string, string>): AsyncGenerator<AgentEvent>;
+    /**
+     * Start a new agent chat session.
+     */
+    startAgent(chatbotId: string, options?: {
+        visitor?: string;
+        platform?: string;
+    }): Promise<AgentSession>;
+    /**
+     * Stream an agent response as SSE events.
+     */
+    streamAgent(chatbotId: string, options: {
+        sessionToken: string;
+        query: string;
+    }): AsyncGenerator<AgentEvent>;
 }
 export default PromptevClient;
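
Since AgentEvent and AgentSession are exported from these declarations, TypeScript consumers can type their own helpers against them, assuming the package's types entry resolves to this file. A small sketch in which formatEvent and describeSession are hypothetical helper names:

    import type { AgentEvent, AgentSession } from "@promptev/client";

    // Only "done" and "error" are special-cased by the SDK's SSE parser;
    // any other type string is passed through from the backend unchanged.
    function formatEvent(event: AgentEvent): string {
        return event.type === "error" ? `[error] ${event.output}` : event.output;
    }

    function describeSession(session: AgentSession): string {
        return `${session.name} (memory: ${session.memoryEnabled}, history: ${session.messages.length})`;
    }
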