lmnr 0.6.20__py3-none-any.whl → 0.6.21__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as they appear in the public registry. It is provided for informational purposes only.
- lmnr/opentelemetry_lib/decorators/__init__.py +188 -138
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/__init__.py +674 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/config.py +13 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_emitter.py +211 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/span_utils.py +256 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/streaming.py +295 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/utils.py +179 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/__init__.py +485 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/config.py +8 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_emitter.py +143 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/span_utils.py +229 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/utils.py +92 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/utils.py +3 -3
- lmnr/opentelemetry_lib/tracing/__init__.py +1 -1
- lmnr/opentelemetry_lib/tracing/_instrument_initializers.py +12 -7
- lmnr/opentelemetry_lib/tracing/processor.py +1 -1
- lmnr/opentelemetry_lib/utils/package_check.py +9 -0
- lmnr/sdk/browser/browser_use_otel.py +4 -2
- lmnr/sdk/browser/patchright_otel.py +0 -26
- lmnr/sdk/browser/playwright_otel.py +51 -78
- lmnr/sdk/browser/pw_utils.py +359 -114
- lmnr/sdk/decorators.py +39 -4
- lmnr/sdk/evaluations.py +23 -9
- lmnr/sdk/laminar.py +75 -48
- lmnr/version.py +1 -1
- {lmnr-0.6.20.dist-info → lmnr-0.6.21.dist-info}/METADATA +8 -7
- {lmnr-0.6.20.dist-info → lmnr-0.6.21.dist-info}/RECORD +33 -18
- {lmnr-0.6.20.dist-info → lmnr-0.6.21.dist-info}/WHEEL +1 -1
- {lmnr-0.6.20.dist-info → lmnr-0.6.21.dist-info}/entry_points.txt +0 -0
lmnr/sdk/browser/pw_utils.py
CHANGED
@@ -1,8 +1,5 @@
-import asyncio
 import logging
 import os
-import time
-import threading
 
 from opentelemetry import trace
 
@@ -40,25 +37,267 @@ with open(os.path.join(current_dir, "rrweb", "rrweb.umd.min.cjs"), "r") as f:
 
 INJECT_PLACEHOLDER = """
 () => {
-    const
+    const BATCH_TIMEOUT = 2000; // Send events after 2 seconds
 
-    window.lmnrRrwebEventsBatch =
-
-    //
-
+    window.lmnrRrwebEventsBatch = [];
+
+    // Create a Web Worker for heavy JSON processing with chunked processing
+    const createCompressionWorker = () => {
+        const workerCode = `
+            self.onmessage = async function(e) {
+                const { jsonString, buffer, id, useBuffer } = e.data;
+                try {
+                    let uint8Array;
+
+                    if (useBuffer && buffer) {
+                        // Use transferred ArrayBuffer (no copying needed!)
+                        uint8Array = new Uint8Array(buffer);
+                    } else {
+                        // Convert JSON string to bytes
+                        const textEncoder = new TextEncoder();
+                        uint8Array = textEncoder.encode(jsonString);
+                    }
+
+                    const compressionStream = new CompressionStream('gzip');
+                    const writer = compressionStream.writable.getWriter();
+                    const reader = compressionStream.readable.getReader();
+
+                    writer.write(uint8Array);
+                    writer.close();
+
+                    const chunks = [];
+                    let totalLength = 0;
+
+                    while (true) {
+                        const { done, value } = await reader.read();
+                        if (done) break;
+                        chunks.push(value);
+                        totalLength += value.length;
+                    }
+
+                    const compressedData = new Uint8Array(totalLength);
+                    let offset = 0;
+                    for (const chunk of chunks) {
+                        compressedData.set(chunk, offset);
+                        offset += chunk.length;
+                    }
+
+                    self.postMessage({ id, success: true, data: compressedData });
+                } catch (error) {
+                    self.postMessage({ id, success: false, error: error.message });
+                }
+            };
+        `;
+
+        const blob = new Blob([workerCode], { type: 'application/javascript' });
+        return new Worker(URL.createObjectURL(blob));
+    };
+
+    let compressionWorker = null;
+    let workerPromises = new Map();
+    let workerId = 0;
+
+    // Non-blocking JSON.stringify using chunked processing
+    function stringifyNonBlocking(obj, chunkSize = 10000) {
+        return new Promise((resolve, reject) => {
+            try {
+                // For very large objects, we need to be more careful
+                // Use requestIdleCallback if available, otherwise setTimeout
+                const scheduleWork = window.requestIdleCallback ||
+                    ((cb) => setTimeout(cb, 0));
+
+                let result = '';
+                let keys = [];
+                let keyIndex = 0;
+
+                // Pre-process to get all keys if it's an object
+                if (typeof obj === 'object' && obj !== null && !Array.isArray(obj)) {
+                    keys = Object.keys(obj);
+                }
+
+                function processChunk() {
+                    try {
+                        if (Array.isArray(obj) || typeof obj !== 'object' || obj === null) {
+                            // For arrays and primitives, just stringify directly
+                            result = JSON.stringify(obj);
+                            resolve(result);
+                            return;
+                        }
+
+                        // For objects, process in chunks
+                        const endIndex = Math.min(keyIndex + chunkSize, keys.length);
+
+                        if (keyIndex === 0) {
+                            result = '{';
+                        }
+
+                        for (let i = keyIndex; i < endIndex; i++) {
+                            const key = keys[i];
+                            const value = obj[key];
+
+                            if (i > 0) result += ',';
+                            result += JSON.stringify(key) + ':' + JSON.stringify(value);
+                        }
+
+                        keyIndex = endIndex;
+
+                        if (keyIndex >= keys.length) {
+                            result += '}';
+                            resolve(result);
+                        } else {
+                            // Schedule next chunk
+                            scheduleWork(processChunk);
+                        }
+                    } catch (error) {
+                        reject(error);
+                    }
+                }
+
+                processChunk();
+            } catch (error) {
+                reject(error);
+            }
+        });
+    }
+
+    // Fast compression for small objects (main thread)
+    async function compressSmallObject(data) {
         const jsonString = JSON.stringify(data);
-        const
-        const
-
-        const
-
+        const textEncoder = new TextEncoder();
+        const uint8Array = textEncoder.encode(jsonString);
+
+        const compressionStream = new CompressionStream('gzip');
+        const writer = compressionStream.writable.getWriter();
+        const reader = compressionStream.readable.getReader();
+
+        writer.write(uint8Array);
+        writer.close();
+
+        const chunks = [];
+        let totalLength = 0;
+
+        while (true) {
+            const { done, value } = await reader.read();
+            if (done) break;
+            chunks.push(value);
+            totalLength += value.length;
+        }
+
+        const compressedData = new Uint8Array(totalLength);
+        let offset = 0;
+        for (const chunk of chunks) {
+            compressedData.set(chunk, offset);
+            offset += chunk.length;
+        }
+
+        return compressedData;
     }
 
-
-
-
-
-
+    // Alternative: Use transferable objects for maximum efficiency
+    async function compressLargeObjectTransferable(data) {
+        try {
+            // Stringify on main thread but non-blocking
+            const jsonString = await stringifyNonBlocking(data);
+
+            // Convert to ArrayBuffer (transferable)
+            const encoder = new TextEncoder();
+            const uint8Array = encoder.encode(jsonString);
+            const buffer = uint8Array.buffer; // Use the original buffer for transfer
+
+            return new Promise((resolve, reject) => {
+                if (!compressionWorker) {
+                    compressionWorker = createCompressionWorker();
+                    compressionWorker.onmessage = (e) => {
+                        const { id, success, data: result, error } = e.data;
+                        const promise = workerPromises.get(id);
+                        if (promise) {
+                            workerPromises.delete(id);
+                            if (success) {
+                                promise.resolve(result);
+                            } else {
+                                promise.reject(new Error(error));
+                            }
+                        }
+                    };
+                }
+
+                const id = ++workerId;
+                workerPromises.set(id, { resolve, reject });
+
+                // Transfer the ArrayBuffer (no copying!)
+                compressionWorker.postMessage({
+                    buffer,
+                    id,
+                    useBuffer: true
+                }, [buffer]);
+            });
+        } catch (error) {
+            console.warn('Failed to process large object with transferable:', error);
+            return compressSmallObject(data);
+        }
+    }
+
+    // Worker-based compression for large objects
+    async function compressLargeObject(data, isLarge = true) {
+        try {
+            // Use transferable objects for better performance
+            return await compressLargeObjectTransferable(data);
+        } catch (error) {
+            console.warn('Transferable failed, falling back to string method:', error);
+            // Fallback to string method
+            const jsonString = await stringifyNonBlocking(data);
+
+            return new Promise((resolve, reject) => {
+                if (!compressionWorker) {
+                    compressionWorker = createCompressionWorker();
+                    compressionWorker.onmessage = (e) => {
+                        const { id, success, data: result, error } = e.data;
+                        const promise = workerPromises.get(id);
+                        if (promise) {
+                            workerPromises.delete(id);
+                            if (success) {
+                                promise.resolve(result);
+                            } else {
+                                promise.reject(new Error(error));
+                            }
+                        }
+                    };
+                }
+
+                const id = ++workerId;
+                workerPromises.set(id, { resolve, reject });
+                compressionWorker.postMessage({ jsonString, id });
+            });
+        }
+    }
+
+    function isLargeEvent(type) {
+        const LARGE_EVENT_TYPES = [
+            2, // FullSnapshot
+            3, // IncrementalSnapshot
+        ];
+
+        if (LARGE_EVENT_TYPES.includes(type)) {
+            return true;
+        }
+
+        return false;
+    }
+
+    async function sendBatchIfReady() {
+        if (window.lmnrRrwebEventsBatch.length > 0 && typeof window.lmnrSendEvents === 'function') {
+            const events = window.lmnrRrwebEventsBatch;
+            window.lmnrRrwebEventsBatch = [];
+
+            try {
+                await window.lmnrSendEvents(events);
+            } catch (error) {
+                console.error('Failed to send events:', error);
+            }
+        }
+    }
+
+    setInterval(sendBatchIfReady, BATCH_TIMEOUT);
 
     // Add heartbeat events
     setInterval(async () => {
@@ -66,17 +305,24 @@ INJECT_PLACEHOLDER = """
             title: document.title,
             url: document.URL,
         })
-
     }, 1000);
 
     window.lmnrRrweb.record({
-        async emit(event) {
-
-
-
-
-
-
+        async emit(event) {
+            try {
+                const isLarge = isLargeEvent(event.type);
+                const compressedResult = isLarge ?
+                    await compressLargeObject(event.data, true) :
+                    await compressSmallObject(event.data);
+
+                const eventToSend = {
+                    ...event,
+                    data: compressedResult,
+                };
+                window.lmnrRrwebEventsBatch.push(eventToSend);
+            } catch (error) {
+                console.warn('Failed to push event to batch', error);
+            }
         },
         recordCanvas: true,
         collectFonts: true,
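With this change the injected recorder no longer batches raw rrweb events: each event's `data` payload is JSON-stringified and gzip-compressed in the browser (on a Web Worker for large FullSnapshot/IncrementalSnapshot events) before being pushed to the batch. For intuition only, here is a minimal Python sketch of the equivalent compress/decompress round trip; `CompressionStream('gzip')` produces standard gzip, but this snippet is not code from the package and the sample payload is invented.

```python
import gzip
import json

# Illustrative only: mirror of what the injected JS does with
# CompressionStream('gzip') -- gzip the JSON-encoded event data.
event_data = {"source": 0, "texts": [], "attributes": []}  # made-up rrweb-like payload
compressed = gzip.compress(json.dumps(event_data).encode("utf-8"))

# A consumer of the batch can recover the original JSON payload.
restored = json.loads(gzip.decompress(compressed).decode("utf-8"))
assert restored == event_data
```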
@@ -108,16 +354,10 @@ async def send_events_async(
 
         await client._browser_events.send(session_id, trace_id, events)
     except Exception as e:
-        if
-
-
-
-        # silence the error if the page has been closed, not an issue
-        if (
-            "Page.evaluate: Target page, context or browser has been closed"
-            not in str(e)
-        ):
-            logger.warning(f"Could not send events: {e}")
+        if "Page.evaluate: Target page, context or browser has been closed" not in str(
+            e
+        ):
+            logger.debug(f"Could not send events: {e}")
 
 
 def send_events_sync(
@@ -141,23 +381,14 @@ def send_events_sync(
         client._browser_events.send(session_id, trace_id, events)
 
     except Exception as e:
-        if
-
-
-
-        # silence the error if the page has been closed, not an issue
-        if (
-            "Page.evaluate: Target page, context or browser has been closed"
-            not in str(e)
-        ):
-            logger.warning(f"Could not send events: {e}")
+        if "Page.evaluate: Target page, context or browser has been closed" not in str(
+            e
+        ):
+            logger.debug(f"Could not send events: {e}")
 
 
 def inject_session_recorder_sync(page: SyncPage):
     try:
-        page.wait_for_load_state("domcontentloaded")
-
-        # Wrap the evaluate call in a try-catch
         try:
             is_loaded = page.evaluate(
                 """() => typeof window.lmnrRrweb !== 'undefined'"""
@@ -194,9 +425,6 @@ def inject_session_recorder_sync(page: SyncPage):
 
 async def inject_session_recorder_async(page: Page):
     try:
-        await page.wait_for_load_state("domcontentloaded")
-
-        # Wrap the evaluate call in a try-catch
         try:
            is_loaded = await page.evaluate(
                 """() => typeof window.lmnrRrweb !== 'undefined'"""
@@ -232,27 +460,16 @@ async def inject_session_recorder_async(page: Page):
 
 
 @observe(name="playwright.page", ignore_input=True, ignore_output=True)
-def
+def start_recording_events_sync(page: SyncPage, session_id: str, client: LaminarClient):
     span = trace.get_current_span()
     trace_id = format(span.get_span_context().trace_id, "032x")
     span.set_attribute("lmnr.internal.has_browser_session", True)
-    original_bring_to_front = page.bring_to_front
-
-    def bring_to_front():
-        original_bring_to_front()
-        page.evaluate(
-            """() => {
-                if (window.lmnrRrweb) {
-                    try {
-                        window.lmnrRrweb.record.takeFullSnapshot();
-                    } catch (e) {
-                        console.error("Error taking full snapshot:", e);
-                    }
-                }
-            }"""
-        )
 
-
+    try:
+        if page.evaluate("""() => typeof window.lmnrSendEvents !== 'undefined'"""):
+            return
+    except Exception:
+        pass
 
     def on_load():
         try:
@@ -260,79 +477,107 @@ def handle_navigation_sync(page: SyncPage, session_id: str, client: LaminarClien
         except Exception as e:
             logger.error(f"Error in on_load handler: {e}")
 
-    def collection_loop():
-        while not page.is_closed():  # Stop when page closes
-            send_events_sync(page, session_id, trace_id, client)
-            time.sleep(2)
-
-    thread = threading.Thread(target=collection_loop, daemon=True)
-    thread.start()
-
     def on_close():
         try:
             send_events_sync(page, session_id, trace_id, client)
-            thread.join()
         except Exception:
             pass
 
     page.on("load", on_load)
     page.on("close", on_close)
+
     inject_session_recorder_sync(page)
 
+    # Expose function to browser so it can call us when events are ready
+    def send_events_from_browser(events):
+        try:
+            if events and len(events) > 0:
+                client._browser_events.send(session_id, trace_id, events)
+        except Exception as e:
+            logger.debug(f"Could not send events: {e}")
+
+    try:
+        page.expose_function("lmnrSendEvents", send_events_from_browser)
+    except Exception as e:
+        logger.debug(f"Could not expose function: {e}")
+
 
 @observe(name="playwright.page", ignore_input=True, ignore_output=True)
-async def
+async def start_recording_events_async(
     page: Page, session_id: str, client: AsyncLaminarClient
 ):
-
     span = trace.get_current_span()
     trace_id = format(span.get_span_context().trace_id, "032x")
     span.set_attribute("lmnr.internal.has_browser_session", True)
 
-
-
-
-
-
-
-
-            logger.error(f"Event collection stopped: {e}")
-
-    # Create and store task
-    task = asyncio.create_task(collection_loop())
+    try:
+        if await page.evaluate(
+            """() => typeof window.lmnrSendEvents !== 'undefined'"""
+        ):
+            return
+    except Exception:
+        pass
 
-    async def on_load():
+    async def on_load(p):
         try:
-            await inject_session_recorder_async(
+            await inject_session_recorder_async(p)
         except Exception as e:
             logger.error(f"Error in on_load handler: {e}")
 
-    async def on_close():
+    async def on_close(p):
         try:
-
-            await send_events_async(
+            # Send any remaining events before closing
+            await send_events_async(p, session_id, trace_id, client)
         except Exception:
             pass
 
-    page.on("load",
-    page.on("close",
+    page.on("load", on_load)
+    page.on("close", on_close)
 
-
+    await inject_session_recorder_async(page)
 
-    async def
-
+    async def send_events_from_browser(events):
+        try:
+            if events and len(events) > 0:
+                await client._browser_events.send(session_id, trace_id, events)
+        except Exception as e:
+            logger.debug(f"Could not send events: {e}")
 
-
-
-
-
-
-
-
-
+    try:
+        await page.expose_function("lmnrSendEvents", send_events_from_browser)
+    except Exception as e:
+        logger.debug(f"Could not expose function: {e}")
+
+
+def take_full_snapshot(page: Page):
+    return page.evaluate(
+        """() => {
+            if (window.lmnrRrweb) {
+                try {
+                    window.lmnrRrweb.record.takeFullSnapshot();
+                    return true;
+                } catch (e) {
+                    console.error("Error taking full snapshot:", e);
+                    return false;
                 }
-    }
-
-
-
-
+            }
+            return false;
+        }"""
+    )
+
+
+async def take_full_snapshot_async(page: Page):
+    return await page.evaluate(
+        """() => {
+            if (window.lmnrRrweb) {
+                try {
+                    window.lmnrRrweb.record.takeFullSnapshot();
+                    return true;
+                } catch (e) {
+                    console.error("Error taking full snapshot:", e);
+                    return false;
+                }
+            }
+            return false;
+        }"""
+    )
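Taken together, the pw_utils.py changes replace the old Python-side polling loop (a `threading.Thread` / `asyncio` task flushing events every 2 seconds) with a push model: the page batches compressed events and delivers them through an exposed `lmnrSendEvents` binding. Below is a minimal, self-contained sketch of that `expose_function` pattern using only the public Playwright API; the handler is an illustrative stand-in for the SDK's `client._browser_events.send(...)` call, and the page and payload are made up.

```python
# Minimal sketch of the browser -> Python push model used above.
# Assumes `pip install playwright` and `playwright install chromium`.
from playwright.sync_api import sync_playwright


def handle_events(events):
    # Illustrative stand-in for client._browser_events.send(session_id, trace_id, events)
    print(f"received batch of {len(events)} events")


with sync_playwright() as p:
    browser = p.chromium.launch()
    page = browser.new_page()
    # Expose a Python callback into the page as window.lmnrSendEvents
    page.expose_function("lmnrSendEvents", handle_events)
    page.goto("https://example.com")
    # The injected recorder batches events and calls the binding on a timer;
    # here we invoke it directly to show the round trip.
    page.evaluate("async () => await window.lmnrSendEvents([{type: 4, data: 'demo'}])")
    browser.close()
```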
lmnr/sdk/decorators.py
CHANGED
@@ -1,6 +1,6 @@
 from lmnr.opentelemetry_lib.decorators import (
-
-
+    observe_base,
+    async_observe_base,
     json_dumps,
 )
 from opentelemetry.trace import INVALID_SPAN, get_current_span
@@ -28,6 +28,8 @@ def observe(
     ignore_output: bool = False,
     span_type: Literal["DEFAULT", "LLM", "TOOL"] = "DEFAULT",
     ignore_inputs: list[str] | None = None,
+    input_formatter: Callable[P, str] | None = None,
+    output_formatter: Callable[[R], str] | None = None,
     metadata: dict[str, Any] | None = None,
     tags: list[str] | None = None,
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
@@ -53,6 +55,16 @@ def observe(
         def foo(a, b, `sensitive_data`), and you want to ignore the\
         `sensitive_data` argument, you can pass ["sensitive_data"] to\
         this argument. Defaults to None.
+        input_formatter (Callable[P, str] | None, optional): A custom function\
+            to format the input of the wrapped function. All function arguments\
+            are passed to this function. Must return a string. Ignored if\
+            `ignore_input` is True. Does not respect `ignore_inputs` argument.
+            Defaults to None.
+        output_formatter (Callable[[R], str] | None, optional): A custom function\
+            to format the output of the wrapped function. The output is passed\
+            to this function. Must return a string. Ignored if `ignore_output`
+            is True. Does not respect `ignore_inputs` argument.
+            Defaults to None.
         metadata (dict[str, Any] | None, optional): Metadata to associate with\
             the trace. Must be JSON serializable. Defaults to None.
         tags (list[str] | None, optional): Tags to associate with the trace.
@@ -91,22 +103,45 @@ def observe(
             logger.warning("Tags must be a list of strings. Tags will be ignored.")
         else:
             association_properties["tags"] = tags
+        if input_formatter is not None and ignore_input:
+            logger.warning(
+                f"observe, function {func.__name__}: Input formatter"
+                " is ignored because `ignore_input` is True. Specify only one of"
+                " `ignore_input` or `input_formatter`."
+            )
+        if input_formatter is not None and ignore_inputs is not None:
+            logger.warning(
+                f"observe, function {func.__name__}: Both input formatter and"
+                " `ignore_inputs` are specified. Input formatter"
+                " will pass all arguments to the formatter regardless of"
+                " `ignore_inputs`."
+            )
+        if output_formatter is not None and ignore_output:
+            logger.warning(
+                f"observe, function {func.__name__}: Output formatter"
+                " is ignored because `ignore_output` is True. Specify only one of"
+                " `ignore_output` or `output_formatter`."
+            )
         result = (
-
+            async_observe_base(
                 name=name,
                 ignore_input=ignore_input,
                 ignore_output=ignore_output,
                 span_type=span_type,
                 ignore_inputs=ignore_inputs,
+                input_formatter=input_formatter,
+                output_formatter=output_formatter,
                 association_properties=association_properties,
             )(func)
             if is_async(func)
-            else
+            else observe_base(
                 name=name,
                 ignore_input=ignore_input,
                 ignore_output=ignore_output,
                 span_type=span_type,
                 ignore_inputs=ignore_inputs,
+                input_formatter=input_formatter,
+                output_formatter=output_formatter,
                 association_properties=association_properties,
             )(func)
         )