primeorbit 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +260 -0
- package/build/index.cjs +697 -0
- package/build/index.d.cts +277 -0
- package/build/index.d.ts +277 -0
- package/build/index.js +649 -0
- package/package.json +136 -0
package/build/index.js
ADDED
@@ -0,0 +1,649 @@
// src/api_utils.ts
async function recordData(endpoint, payload, headers = { "Content-Type": "application/json" }) {
  const res = await fetch(endpoint, {
    method: "POST",
    headers,
    body: JSON.stringify(payload)
  });
  const text = await res.text();
  let responseBody;
  try {
    responseBody = text ? JSON.parse(text) : null;
  } catch {
    responseBody = text;
  }
  if (!res.ok) {
    throw new Error(`HTTP ${res.status} ${res.statusText}: ${text}`);
  }
  return responseBody;
}

// src/routes.ts
var routes = {
  latencies: "/system-metrics/latencies",
  "star-ratings": "/user-metrics/star-ratings",
  "thumbs-feedbacks": "/user-metrics/thumbs-feedbacks",
  "raw-events": "/user-metrics/raw-events",
  "raw-events-batch": "/user-metrics/raw-events/batch"
};

// src/logger.ts
var LOG_LEVELS = {
  debug: 0,
  info: 1,
  warn: 2,
  error: 3,
  silent: 4
};
var currentLogLevel = "info";
function setLogLevel(level) {
  currentLogLevel = level;
}
function getLogLevel() {
  return currentLogLevel;
}
function shouldLog(level) {
  return LOG_LEVELS[level] >= LOG_LEVELS[currentLogLevel];
}
function formatMessage(level, message) {
  return `[PrimeOrbit ${level.toUpperCase()}] ${message}`;
}
var logger = {
  /**
   * Log a debug message. Only shown when log level is 'debug'.
   */
  debug(message, ...args) {
    if (shouldLog("debug")) {
      console.debug(formatMessage("debug", message), ...args);
    }
  },
  /**
   * Log an informational message.
   */
  info(message, ...args) {
    if (shouldLog("info")) {
      console.info(formatMessage("info", message), ...args);
    }
  },
  /**
   * Log a warning message.
   */
  warn(message, ...args) {
    if (shouldLog("warn")) {
      console.warn(formatMessage("warn", message), ...args);
    }
  },
  /**
   * Log an error message.
   */
  error(message, ...args) {
    if (shouldLog("error")) {
      console.error(formatMessage("error", message), ...args);
    }
  }
};

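A minimal usage sketch for the logging exports above, assuming the build is imported by its published name `primeorbit` (the import specifier is an assumption; adjust it to however the package is installed):

// Usage sketch; import name "primeorbit" is an assumption.
import { logger, setLogLevel, getLogLevel } from "primeorbit";

setLogLevel("debug");                     // also show debug output
logger.debug("queue state", { size: 3 }); // printed as "[PrimeOrbit DEBUG] queue state ..."
logger.info("client initialized");
console.log(getLogLevel());               // "debug"
setLogLevel("silent");                    // suppress all SDK logging
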
// src/system_metrics/measure_latency.ts
function recordLatency(startTime, agentId, actionId, actionFailed) {
  const endTime = performance.now();
  const latency = Math.floor(endTime - startTime);
  recordData(routes["latencies"], {
    agent_id: agentId,
    latency,
    action_id: actionId,
    action_failed: actionFailed
  }).catch((err) => {
    const message = err instanceof Error ? err.message : String(err);
    logger.error(`Failed to record latency: ${message}`);
  });
}
function isPromise(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  const obj = value;
  return "then" in obj && typeof obj["then"] === "function";
}
function wrapSyncToRecordLatency(fn, agentId, actionId) {
  const resolvedActionId = actionId ?? fn.name;
  return function(...args) {
    const start = performance.now();
    let failed = true;
    try {
      const result = fn.apply(this, args);
      failed = false;
      recordLatency(start, agentId, resolvedActionId, failed);
      return result;
    } catch (err) {
      recordLatency(start, agentId, resolvedActionId, failed);
      throw err;
    }
  };
}
function wrapAsyncToRecordLatency(fn, agentId, actionId) {
  const resolvedActionId = actionId ?? fn.name;
  return async function(...args) {
    const start = performance.now();
    let failed = true;
    try {
      const result = await fn.apply(this, args);
      failed = false;
      recordLatency(start, agentId, resolvedActionId, failed);
      return result;
    } catch (err) {
      recordLatency(start, agentId, resolvedActionId, failed);
      throw err;
    }
  };
}
function wrapToRecordLatency(fn, agentId, actionId) {
  const resolvedActionId = actionId ?? fn.name;
  return function(...args) {
    const start = performance.now();
    let failed = true;
    try {
      const result = fn.apply(this, args);
      if (isPromise(result)) {
        return result.then((res) => {
          failed = false;
          recordLatency(start, agentId, resolvedActionId, failed);
          return res;
        }).catch((err) => {
          recordLatency(start, agentId, resolvedActionId, failed);
          throw err;
        });
      }
      failed = false;
      recordLatency(start, agentId, resolvedActionId, failed);
      return result;
    } catch (err) {
      recordLatency(start, agentId, resolvedActionId, failed);
      throw err;
    }
  };
}

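The three wrappers above time a call with `performance.now()` and report the result through `recordData`; `wrapToRecordLatency` detects a returned promise at runtime, while the Sync/Async variants are for callers that already know the function's shape. A sketch of how they might be applied, with illustrative function and agent IDs:

import { wrapToRecordLatency, wrapAsyncToRecordLatency } from "primeorbit";

// Hypothetical handler; any sync or async function works.
async function answerQuestion(question) {
  return `echo: ${question}`;
}

// actionId falls back to fn.name ("answerQuestion") when omitted.
const timedAnswer = wrapAsyncToRecordLatency(answerQuestion, "agent-123");
await timedAnswer("hello");

// Generic wrapper: failures still record latency, then rethrow.
const timedGeneric = wrapToRecordLatency(answerQuestion, "agent-123", "qa.answer");
await timedGeneric("hi again");

Note that `recordLatency` passes the bare route path to `fetch`, so an environment that resolves relative URLs (for example, a browser page) appears to be assumed.
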
// src/config.ts
var baseApiUrl;
function setPrimeOrbitApiUrl(url) {
  baseApiUrl = url;
}
function getPrimeOrbitApiUrl() {
  if (!baseApiUrl) {
    throw new Error(
      `PrimeOrbit API URL is not set. Please call 'setPrimeOrbitApiUrl(<url>)' before using the SDK.`
    );
  }
  return baseApiUrl;
}

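`setPrimeOrbitApiUrl` and `getPrimeOrbitApiUrl` are exported, so the base URL can be configured up front; a one-line sketch follows. In this build none of the other functions read the value back, so treat this purely as an illustration of the exported API:

import { setPrimeOrbitApiUrl, getPrimeOrbitApiUrl } from "primeorbit";

setPrimeOrbitApiUrl("https://sdk-dev.primeorbit.ai"); // example value taken from the client default below
console.log(getPrimeOrbitApiUrl());                   // throws instead if no URL has been set
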
// src/user_metrics/record_star_rating.ts
async function record_star_rating(agentId, rating, taskName, userId = "") {
  try {
    const payload = {
      agent_id: agentId,
      rating,
      task_name: taskName
    };
    if (userId) {
      payload["user_id"] = userId;
    }
    await recordData(routes["star-ratings"], payload);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    logger.error(`recordData failed: ${message}`);
  }
}

// src/user_metrics/record_thumbs_feedback.ts
async function record_thumbs_feedback(agentId, isThumbsUp, taskName, userId = "") {
  try {
    const payload = {
      agent_id: agentId,
      is_thumbs_up: isThumbsUp,
      task_name: taskName
    };
    if (userId) {
      payload["user_id"] = userId;
    }
    await recordData(routes["thumbs-feedbacks"], payload);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    logger.error(`Failed to record thumbs feedback: ${message}`);
  }
}

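Both helpers build a snake_case payload, omit `user_id` when no userId is given, and swallow failures through `logger.error` rather than throwing. A usage sketch with illustrative IDs (like `recordLatency` above, they pass the bare route path to `recordData`):

import { record_star_rating, record_thumbs_feedback } from "primeorbit";

// Illustrative IDs and task name.
await record_star_rating("agent-123", 5, "summarize-report", "user-42");
await record_thumbs_feedback("agent-123", true, "summarize-report");
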
// src/event_queue.ts
var EventQueue = class {
  queue = [];
  batchSize;
  flushIntervalMs;
  maxConcurrentRequests;
  maxRetries;
  retryBaseDelayMs;
  maxQueueSize;
  onEventDropped;
  onBatchFailed;
  onBatchSuccess;
  flushTimer = null;
  activeBatches = [];
  isShuttingDown = false;
  isFlushing = false;
  endpoint;
  headers;
  sendBatch;
  constructor(endpoint, headers, options = {}) {
    this.endpoint = endpoint;
    this.headers = headers;
    this.batchSize = options.batchSize ?? 100;
    this.flushIntervalMs = options.flushIntervalMs ?? 5e3;
    this.maxConcurrentRequests = options.maxConcurrentRequests ?? 5;
    this.maxRetries = options.maxRetries ?? 3;
    this.retryBaseDelayMs = options.retryBaseDelayMs ?? 1e3;
    this.maxQueueSize = options.maxQueueSize ?? 1e5;
    this.onEventDropped = options.onEventDropped;
    this.onBatchFailed = options.onBatchFailed;
    this.onBatchSuccess = options.onBatchSuccess;
    this.sendBatch = this.createBatchSender();
    this.startFlushTimer();
  }
  /**
   * Add an event to the queue. Events are batched and sent automatically.
   */
  enqueue(payload) {
    if (this.isShuttingDown) {
      logger.warn("Cannot enqueue events after shutdown has started");
      return false;
    }
    if (this.queue.length >= this.maxQueueSize) {
      const droppedEvent = {
        payload,
        timestamp: Date.now(),
        retryCount: 0
      };
      this.onEventDropped?.(droppedEvent, "Queue is full");
      logger.warn(
        `Event dropped: queue is full (max ${this.maxQueueSize} events)`
      );
      return false;
    }
    const event = {
      payload,
      timestamp: Date.now(),
      retryCount: 0
    };
    this.queue.push(event);
    if (this.queue.length >= this.batchSize) {
      void this.flush();
    }
    return true;
  }
  /**
   * Flush all pending events. Call this before your application exits.
   */
  async flush() {
    if (this.isFlushing) {
      await Promise.all(this.activeBatches.map((b) => b.promise));
      return;
    }
    this.isFlushing = true;
    try {
      while (this.queue.length > 0) {
        while (this.activeBatches.length >= this.maxConcurrentRequests) {
          await Promise.race(this.activeBatches.map((b) => b.promise));
          this.cleanupCompletedBatches();
        }
        const batch = this.queue.splice(0, this.batchSize);
        if (batch.length > 0) {
          const batchPromise = this.processBatch(batch);
          const pendingBatch = {
            events: batch,
            promise: batchPromise
          };
          this.activeBatches.push(pendingBatch);
        }
      }
      await Promise.all(this.activeBatches.map((b) => b.promise));
      this.cleanupCompletedBatches();
    } finally {
      this.isFlushing = false;
    }
  }
  /**
   * Shutdown the queue gracefully. Flushes all pending events and stops the timer.
   */
  async shutdown() {
    this.isShuttingDown = true;
    this.stopFlushTimer();
    await this.flush();
  }
  /**
   * Get current queue statistics.
   */
  getStats() {
    return {
      queuedEvents: this.queue.length,
      activeBatches: this.activeBatches.length,
      isShuttingDown: this.isShuttingDown
    };
  }
  startFlushTimer() {
    if (this.flushTimer) return;
    this.flushTimer = setInterval(() => {
      if (this.queue.length > 0 && !this.isFlushing) {
        void this.flush();
      }
    }, this.flushIntervalMs);
    if (this.flushTimer.unref) {
      this.flushTimer.unref();
    }
  }
  stopFlushTimer() {
    if (this.flushTimer) {
      clearInterval(this.flushTimer);
      this.flushTimer = null;
    }
  }
  cleanupCompletedBatches() {
    this.activeBatches = [];
  }
  async processBatch(events) {
    let lastError = null;
    for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
      try {
        const response = await this.sendBatch(events);
        this.onBatchSuccess?.(events, response);
        return;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));
        if (attempt < this.maxRetries) {
          const delay = this.retryBaseDelayMs * Math.pow(2, attempt) + Math.random() * 1e3;
          await this.sleep(delay);
        }
      }
    }
    if (lastError) {
      this.onBatchFailed?.(events, lastError);
      logger.error(
        `Batch of ${events.length} events failed after ${this.maxRetries + 1} attempts: ${lastError.message}`
      );
    }
  }
  createBatchSender() {
    return async (events) => {
      const payloads = events.map((e) => e.payload);
      const response = await fetch(this.endpoint, {
        method: "POST",
        headers: this.headers,
        body: JSON.stringify({ events: payloads })
      });
      const text = await response.text();
      let responseBody;
      try {
        responseBody = text ? JSON.parse(text) : null;
      } catch {
        responseBody = text;
      }
      if (!response.ok) {
        throw new Error(`HTTP ${response.status} ${response.statusText}: ${text}`);
      }
      return responseBody;
    };
  }
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
};

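The queue flushes by size (`batchSize`) or time (`flushIntervalMs`), caps in-flight requests at `maxConcurrentRequests`, retries with exponential backoff plus jitter, and drops events once `maxQueueSize` is reached. A standalone sketch with placeholder endpoint and header values, showing the callbacks it supports:

import { EventQueue } from "primeorbit";

// Endpoint and Authorization values are placeholders for illustration.
const queue = new EventQueue(
  "https://sdk-dev.primeorbit.ai/user-metrics/raw-events/batch",
  { Authorization: "Bearer <api-key>", "Content-Type": "application/json" },
  {
    batchSize: 50,
    flushIntervalMs: 2000,
    maxRetries: 2,
    onBatchSuccess: (events) => console.log(`sent ${events.length} events`),
    onBatchFailed: (events, err) => console.error(`batch of ${events.length} failed: ${err.message}`),
    onEventDropped: (_event, reason) => console.warn(`dropped: ${reason}`)
  }
);

queue.enqueue({ event_type: "user_message", content: "hi" }); // returns false if dropped
console.log(queue.getStats());                                // { queuedEvents, activeBatches, isShuttingDown }
await queue.shutdown();                                       // flush remaining events and stop the timer
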
// src/user_metrics/record_user_event.ts
import * as dotenv from "dotenv";
dotenv.config();
var PrimeOrbitClient = class {
  apiKey;
  baseUrl;
  baseProperties = {};
  verbose = false;
  enableBatching = false;
  eventQueue = null;
  constructor(apiKeyOrOptions, endpoint) {
    let apiKey;
    let endpointUrl;
    let options = {};
    if (typeof apiKeyOrOptions === "object" && apiKeyOrOptions !== null) {
      options = apiKeyOrOptions;
      apiKey = options.apiKey;
      endpointUrl = options.endpoint;
      this.verbose = options.verbose ?? false;
      this.enableBatching = options.enableBatching ?? false;
    } else {
      apiKey = apiKeyOrOptions;
      endpointUrl = endpoint;
    }
    const envApiKey = process.env["PRIMEORBIT_API_KEY"];
    const envEndpoint = process.env["PRIMEORBIT_ENDPOINT"];
    if (apiKey) {
      this.apiKey = apiKey;
    } else if (envApiKey) {
      this.apiKey = envApiKey;
    } else {
      throw new Error(
        "API key not provided. Set the 'apiKey' parameter or the 'PRIMEORBIT_API_KEY' environment variable."
      );
    }
    if (endpointUrl) {
      this.baseUrl = endpointUrl;
    } else if (envEndpoint) {
      this.baseUrl = envEndpoint;
    } else {
      this.baseUrl = "https://sdk-dev.primeorbit.ai";
    }
    this.baseProperties = {};
    if (this.enableBatching) {
      this.initializeEventQueue(options.queueOptions);
    }
  }
  initializeEventQueue(queueOptions) {
    const batchEndpoint = `${this.baseUrl}${routes["raw-events-batch"]}`;
    this.eventQueue = new EventQueue(batchEndpoint, this._headers(), {
      batchSize: 100,
      flushIntervalMs: 5e3,
      maxConcurrentRequests: 5,
      maxRetries: 3,
      retryBaseDelayMs: 1e3,
      maxQueueSize: 1e5,
      ...queueOptions
    });
  }
  add_properties(props) {
    this.baseProperties = {
      ...this.baseProperties,
      ...props
    };
  }
  _headers() {
    return {
      Authorization: `Bearer ${this.apiKey}`,
      "Content-Type": "application/json"
    };
  }
  /**
   * Enable or disable event batching at runtime.
   * When enabled, events are queued and sent in batches.
   * When disabled, events are sent immediately (original behavior).
   *
   * @param enable - Whether to enable batching
   * @param queueOptions - Optional queue configuration (only used when enabling)
   */
  setBatching(enable, queueOptions) {
    if (enable && !this.eventQueue) {
      this.enableBatching = true;
      this.initializeEventQueue(queueOptions);
    } else if (!enable && this.eventQueue) {
      void this.eventQueue.flush().then(() => {
        this.enableBatching = false;
        this.eventQueue = null;
      });
    }
  }
  /**
   * Flush all queued events immediately.
   * Call this before your application exits to ensure all events are sent.
   * Only applicable when batching is enabled.
   */
  async flush() {
    if (this.eventQueue) {
      await this.eventQueue.flush();
    }
  }
  /**
   * Shutdown the client gracefully.
   * Flushes all pending events and releases resources.
   * Call this when your application is shutting down.
   */
  async shutdown() {
    if (this.eventQueue) {
      await this.eventQueue.shutdown();
      this.eventQueue = null;
    }
  }
  /**
   * Get queue statistics.
   * Only applicable when batching is enabled.
   */
  getQueueStats() {
    if (this.eventQueue) {
      return this.eventQueue.getStats();
    }
    return null;
  }
  /**
   * Records any agent interaction event such as user_message, agent_response, or user_feedback.
   *
   * @param eventType - Type of event. Can be:
   * - 'user_message' – when the user sends a message
   * - 'agent_response' – when the agent replies
   * - 'user_feedback' – when the user gives feedback
   *
   * @param params - Object containing event details:
   * - conversationId: string – Unique conversation identifier
   * - agentId: string – Agent ID
   * - userId: string – User ID
   * - productId?: string – Optional product ID
   * - content: string – Text content of the event (user message, agent reply, or feedback)
   * - sessionId?: string – Optional session ID
   * - messageId?: string – Optional message ID (for tracking within conversation)
   * - inputMode?: string – Optional input mode (e.g., 'text', 'voice')
   * - device?: string – Optional device name or model (e.g., 'iPhone 13')
   * - country?: string – Optional country of the user (ISO country code, e.g., 'US')
   * - platform?: string – Optional platform or OS (e.g., 'iOS', 'Android', 'Web')
   * - language?: string – Optional language of the user (e.g., 'en-US')
   * - Any additional properties will be captured in additional_properties
   *
   * @returns Promise that resolves when the event is queued (batching) or sent (non-batching).
   * When batching is enabled, returns true if queued successfully, false if dropped.
   */
  async record_raw_event(eventType, params) {
    const finalParams = {
      ...this.baseProperties,
      ...params
    };
    try {
      const {
        conversationId,
        messageId,
        agentId,
        userId,
        appId,
        content,
        sessionId,
        eventId,
        inputMode,
        device,
        country,
        platform,
        experimentId,
        ...extraProps
      } = finalParams;
      const requiredFields = ["conversationId", "userId", "content"];
      const missingRequired = requiredFields.filter(
        (field) => !(field in finalParams) || finalParams[field] === void 0
      );
      if (missingRequired.length > 0) {
        throw new Error(
          `Missing required fields: ${missingRequired.join(", ")}`
        );
      }
      const optionalFields = [
        "agentId",
        "sessionId",
        "eventId",
        "messageId",
        "experimentId",
        "inputMode",
        "device",
        "country",
        "platform",
        "appId",
        "model"
      ];
      const missingOptionals = optionalFields.filter(
        (field) => !(field in finalParams) || finalParams[field] === void 0
      );
      if (missingOptionals.length > 0) {
        if (this.verbose) {
          logger.warn(
            `Missing optional fields: ${missingOptionals.join(", ")}. Analytics may be less accurate or incomplete.`
          );
        }
      }
      const payload = {
        event_type: eventType,
        conversation_id: conversationId,
        session_id: sessionId,
        user_id: userId,
        agent_id: agentId,
        message_id: messageId,
        event_id: eventId,
        app_id: appId,
        content,
        input_mode: inputMode,
        device,
        country,
        platform,
        experiment_id: experimentId,
        timestamp: (/* @__PURE__ */ new Date()).toISOString(),
        additional_properties: Object.keys(extraProps).length > 0 ? extraProps : void 0
      };
      if (this.enableBatching && this.eventQueue) {
        const queued = this.eventQueue.enqueue(payload);
        return queued;
      } else {
        logger.debug("Recording event:", payload);
        await recordData(
          `${this.baseUrl}${routes["raw-events"]}`,
          payload,
          this._headers()
        );
        return void 0;
      }
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      logger.error(`recording_event failed: ${message}`);
      return void 0;
    }
  }
};
export {
  EventQueue,
  PrimeOrbitClient,
  getLogLevel,
  getPrimeOrbitApiUrl,
  logger,
  record_star_rating,
  record_thumbs_feedback,
  setLogLevel,
  setPrimeOrbitApiUrl,
  wrapAsyncToRecordLatency,
  wrapSyncToRecordLatency,
  wrapToRecordLatency
};
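Putting the client together: the constructor accepts either a bare API key plus endpoint or an options object, falls back to the `PRIMEORBIT_API_KEY` / `PRIMEORBIT_ENDPOINT` environment variables loaded via dotenv, and defaults the base URL to `https://sdk-dev.primeorbit.ai`. A usage sketch, again assuming the `primeorbit` import name and using illustrative IDs:

import { PrimeOrbitClient } from "primeorbit";

const client = new PrimeOrbitClient({
  apiKey: process.env.PRIMEORBIT_API_KEY, // or rely entirely on the env-var fallback
  enableBatching: true,
  verbose: true,
  queueOptions: { batchSize: 50, flushIntervalMs: 2000 }
});

// Properties merged into every subsequent event payload.
client.add_properties({ platform: "Web", language: "en-US" });

// conversationId, userId, and content are required; extra keys land in additional_properties.
await client.record_raw_event("user_message", {
  conversationId: "conv-1",
  userId: "user-42",
  agentId: "agent-123",
  content: "How do I reset my password?",
  channel: "support-widget" // captured under additional_properties
});

console.log(client.getQueueStats()); // null when batching is disabled
await client.shutdown();             // flush queued events before exit

With batching enabled, `record_raw_event` resolves to the boolean returned by `EventQueue.enqueue`; without it, the event is posted immediately to `${baseUrl}/user-metrics/raw-events` and the call resolves to undefined.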