lmnr 0.6.20__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. lmnr/__init__.py +0 -4
  2. lmnr/opentelemetry_lib/decorators/__init__.py +211 -151
  3. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/__init__.py +678 -0
  4. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/config.py +13 -0
  5. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_emitter.py +211 -0
  6. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_models.py +41 -0
  7. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/span_utils.py +256 -0
  8. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/streaming.py +295 -0
  9. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/utils.py +179 -0
  10. lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/version.py +1 -0
  11. lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/__init__.py +4 -0
  12. lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/__init__.py +488 -0
  13. lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/config.py +8 -0
  14. lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_emitter.py +143 -0
  15. lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_models.py +41 -0
  16. lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/span_utils.py +229 -0
  17. lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/utils.py +92 -0
  18. lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/version.py +1 -0
  19. lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/__init__.py +16 -16
  20. lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py +3 -0
  21. lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/completion_wrappers.py +3 -0
  22. lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/utils.py +3 -3
  23. lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +3 -0
  24. lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/responses_wrappers.py +7 -0
  25. lmnr/opentelemetry_lib/opentelemetry/instrumentation/threading/__init__.py +190 -0
  26. lmnr/opentelemetry_lib/tracing/__init__.py +90 -2
  27. lmnr/opentelemetry_lib/tracing/_instrument_initializers.py +12 -7
  28. lmnr/opentelemetry_lib/tracing/context.py +109 -0
  29. lmnr/opentelemetry_lib/tracing/processor.py +6 -7
  30. lmnr/opentelemetry_lib/tracing/tracer.py +29 -0
  31. lmnr/opentelemetry_lib/utils/package_check.py +9 -0
  32. lmnr/sdk/browser/browser_use_otel.py +9 -7
  33. lmnr/sdk/browser/patchright_otel.py +14 -26
  34. lmnr/sdk/browser/playwright_otel.py +72 -73
  35. lmnr/sdk/browser/pw_utils.py +436 -119
  36. lmnr/sdk/client/asynchronous/resources/browser_events.py +1 -0
  37. lmnr/sdk/decorators.py +39 -4
  38. lmnr/sdk/evaluations.py +23 -9
  39. lmnr/sdk/laminar.py +181 -209
  40. lmnr/sdk/types.py +0 -6
  41. lmnr/version.py +1 -1
  42. {lmnr-0.6.20.dist-info → lmnr-0.7.0.dist-info}/METADATA +10 -8
  43. {lmnr-0.6.20.dist-info → lmnr-0.7.0.dist-info}/RECORD +45 -29
  44. {lmnr-0.6.20.dist-info → lmnr-0.7.0.dist-info}/WHEEL +1 -1
  45. lmnr/opentelemetry_lib/tracing/context_properties.py +0 -65
  46. {lmnr-0.6.20.dist-info → lmnr-0.7.0.dist-info}/entry_points.txt +0 -0
@@ -1,8 +1,5 @@
- import asyncio
  import logging
  import os
- import time
- import threading

  from opentelemetry import trace

@@ -11,6 +8,7 @@ from lmnr.sdk.decorators import observe
  from lmnr.sdk.browser.utils import retry_sync, retry_async
  from lmnr.sdk.client.synchronous.sync_client import LaminarClient
  from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
+ from lmnr.opentelemetry_lib.tracing.context import get_current_context

  try:
  if is_package_installed("playwright"):
@@ -40,43 +38,360 @@ with open(os.path.join(current_dir, "rrweb", "rrweb.umd.min.cjs"), "r") as f:

  INJECT_PLACEHOLDER = """
  () => {
- const BATCH_SIZE = 1000; // Maximum events to store in memory
-
- window.lmnrRrwebEventsBatch = new Set();
+ const BATCH_TIMEOUT = 2000; // Send events after 2 seconds
+ const MAX_WORKER_PROMISES = 50; // Max concurrent worker promises
+ const HEARTBEAT_INTERVAL = 1000;

- // Utility function to compress individual event data
- async function compressEventData(data) {
+ window.lmnrRrwebEventsBatch = [];
+
+ // Create a Web Worker for heavy JSON processing with chunked processing
+ const createCompressionWorker = () => {
+ const workerCode = `
+ self.onmessage = async function(e) {
+ const { jsonString, buffer, id, useBuffer } = e.data;
+ try {
+ let uint8Array;
+
+ if (useBuffer && buffer) {
+ // Use transferred ArrayBuffer (no copying needed!)
+ uint8Array = new Uint8Array(buffer);
+ } else {
+ // Convert JSON string to bytes
+ const textEncoder = new TextEncoder();
+ uint8Array = textEncoder.encode(jsonString);
+ }
+
+ const compressionStream = new CompressionStream('gzip');
+ const writer = compressionStream.writable.getWriter();
+ const reader = compressionStream.readable.getReader();
+
+ writer.write(uint8Array);
+ writer.close();
+
+ const chunks = [];
+ let totalLength = 0;
+
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ chunks.push(value);
+ totalLength += value.length;
+ }
+
+ const compressedData = new Uint8Array(totalLength);
+ let offset = 0;
+ for (const chunk of chunks) {
+ compressedData.set(chunk, offset);
+ offset += chunk.length;
+ }
+
+ self.postMessage({ id, success: true, data: compressedData });
+ } catch (error) {
+ self.postMessage({ id, success: false, error: error.message });
+ }
+ };
+ `;
+
+ const blob = new Blob([workerCode], { type: 'application/javascript' });
+ return new Worker(URL.createObjectURL(blob));
+ };
+
+ let compressionWorker = null;
+ let workerPromises = new Map();
+ let workerId = 0;
+
+ // Cleanup function for worker
+ const cleanupWorker = () => {
+ if (compressionWorker) {
+ compressionWorker.terminate();
+ compressionWorker = null;
+ }
+ workerPromises.clear();
+ workerId = 0;
+ };
+
+ // Clean up stale promises to prevent memory leaks
+ const cleanupStalePromises = () => {
+ if (workerPromises.size > MAX_WORKER_PROMISES) {
+ const toDelete = [];
+ for (const [id, promise] of workerPromises) {
+ if (toDelete.length >= workerPromises.size - MAX_WORKER_PROMISES) break;
+ toDelete.push(id);
+ promise.reject(new Error('Promise cleaned up due to memory pressure'));
+ }
+ toDelete.forEach(id => workerPromises.delete(id));
+ }
+ };
+
+ // Non-blocking JSON.stringify using chunked processing
+ function stringifyNonBlocking(obj, chunkSize = 10000) {
+ return new Promise((resolve, reject) => {
+ try {
+ // For very large objects, we need to be more careful
+ // Use requestIdleCallback if available, otherwise setTimeout
+ const scheduleWork = window.requestIdleCallback ||
+ ((cb) => setTimeout(cb, 0));
+
+ let result = '';
+ let keys = [];
+ let keyIndex = 0;
+
+ // Pre-process to get all keys if it's an object
+ if (typeof obj === 'object' && obj !== null && !Array.isArray(obj)) {
+ keys = Object.keys(obj);
+ }
+
+ function processChunk() {
+ try {
+ if (Array.isArray(obj) || typeof obj !== 'object' || obj === null) {
+ // For arrays and primitives, just stringify directly
+ result = JSON.stringify(obj);
+ resolve(result);
+ return;
+ }
+
+ // For objects, process in chunks
+ const endIndex = Math.min(keyIndex + chunkSize, keys.length);
+
+ if (keyIndex === 0) {
+ result = '{';
+ }
+
+ for (let i = keyIndex; i < endIndex; i++) {
+ const key = keys[i];
+ const value = obj[key];
+
+ if (i > 0) result += ',';
+ result += JSON.stringify(key) + ':' + JSON.stringify(value);
+ }
+
+ keyIndex = endIndex;
+
+ if (keyIndex >= keys.length) {
+ result += '}';
+ resolve(result);
+ } else {
+ // Schedule next chunk
+ scheduleWork(processChunk);
+ }
+ } catch (error) {
+ reject(error);
+ }
+ }
+
+ processChunk();
+ } catch (error) {
+ reject(error);
+ }
+ });
+ }
+
+ // Fast compression for small objects (main thread)
+ async function compressSmallObject(data) {
  const jsonString = JSON.stringify(data);
- const blob = new Blob([jsonString], { type: 'application/json' });
- const compressedStream = blob.stream().pipeThrough(new CompressionStream('gzip'));
- const compressedResponse = new Response(compressedStream);
- const compressedData = await compressedResponse.arrayBuffer();
- return Array.from(new Uint8Array(compressedData));
+ const textEncoder = new TextEncoder();
+ const uint8Array = textEncoder.encode(jsonString);
+
+ const compressionStream = new CompressionStream('gzip');
+ const writer = compressionStream.writable.getWriter();
+ const reader = compressionStream.readable.getReader();
+
+ writer.write(uint8Array);
+ writer.close();
+
+ const chunks = [];
+ let totalLength = 0;
+
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ chunks.push(value);
+ totalLength += value.length;
+ }
+
+ const compressedData = new Uint8Array(totalLength);
+ let offset = 0;
+ for (const chunk of chunks) {
+ compressedData.set(chunk, offset);
+ offset += chunk.length;
+ }
+
+ return compressedData;
  }

- window.lmnrGetAndClearEvents = () => {
- const events = window.lmnrRrwebEventsBatch;
- window.lmnrRrwebEventsBatch = new Set();
- return Array.from(events);
- };
+ // Alternative: Use transferable objects for maximum efficiency
+ async function compressLargeObjectTransferable(data) {
+ try {
+ // Clean up stale promises first
+ cleanupStalePromises();
+
+ // Stringify on main thread but non-blocking
+ const jsonString = await stringifyNonBlocking(data);
+
+ // Convert to ArrayBuffer (transferable)
+ const encoder = new TextEncoder();
+ const uint8Array = encoder.encode(jsonString);
+ const buffer = uint8Array.buffer; // Use the original buffer for transfer
+
+ return new Promise((resolve, reject) => {
+ if (!compressionWorker) {
+ compressionWorker = createCompressionWorker();
+ compressionWorker.onmessage = (e) => {
+ const { id, success, data: result, error } = e.data;
+ const promise = workerPromises.get(id);
+ if (promise) {
+ workerPromises.delete(id);
+ if (success) {
+ promise.resolve(result);
+ } else {
+ promise.reject(new Error(error));
+ }
+ }
+ };
+
+ compressionWorker.onerror = (error) => {
+ console.error('Compression worker error:', error);
+ cleanupWorker();
+ };
+ }
+
+ const id = ++workerId;
+ workerPromises.set(id, { resolve, reject });
+
+ // Set timeout to prevent hanging promises
+ setTimeout(() => {
+ if (workerPromises.has(id)) {
+ workerPromises.delete(id);
+ reject(new Error('Compression timeout'));
+ }
+ }, 10000);
+
+ // Transfer the ArrayBuffer (no copying!)
+ compressionWorker.postMessage({
+ buffer,
+ id,
+ useBuffer: true
+ }, [buffer]);
+ });
+ } catch (error) {
+ console.warn('Failed to process large object with transferable:', error);
+ return compressSmallObject(data);
+ }
+ }
+
+ // Worker-based compression for large objects
+ async function compressLargeObject(data, isLarge = true) {
+ try {
+ // Use transferable objects for better performance
+ return await compressLargeObjectTransferable(data);
+ } catch (error) {
+ console.warn('Transferable failed, falling back to string method:', error);
+ // Fallback to string method
+ const jsonString = await stringifyNonBlocking(data);
+
+ return new Promise((resolve, reject) => {
+ if (!compressionWorker) {
+ compressionWorker = createCompressionWorker();
+ compressionWorker.onmessage = (e) => {
+ const { id, success, data: result, error } = e.data;
+ const promise = workerPromises.get(id);
+ if (promise) {
+ workerPromises.delete(id);
+ if (success) {
+ promise.resolve(result);
+ } else {
+ promise.reject(new Error(error));
+ }
+ }
+ };
+
+ compressionWorker.onerror = (error) => {
+ console.error('Compression worker error:', error);
+ cleanupWorker();
+ };
+ }
+
+ const id = ++workerId;
+ workerPromises.set(id, { resolve, reject });
+
+ // Set timeout to prevent hanging promises
+ setTimeout(() => {
+ if (workerPromises.has(id)) {
+ workerPromises.delete(id);
+ reject(new Error('Compression timeout'));
+ }
+ }, 10000);
+
+ compressionWorker.postMessage({ jsonString, id });
+ });
+ }
+ }
+
+
+ setInterval(cleanupWorker, 5000);
+
+ function isLargeEvent(type) {
+ const LARGE_EVENT_TYPES = [
+ 2, // FullSnapshot
+ 3, // IncrementalSnapshot
+ ];
+
+ if (LARGE_EVENT_TYPES.includes(type)) {
+ return true;
+ }
+
+ return false;
+ }
+
+ async function sendBatchIfReady() {
+ if (window.lmnrRrwebEventsBatch.length > 0 && typeof window.lmnrSendEvents === 'function') {
+ const events = window.lmnrRrwebEventsBatch;
+ window.lmnrRrwebEventsBatch = [];
+
+ try {
+ await window.lmnrSendEvents(events);
+ } catch (error) {
+ console.error('Failed to send events:', error);
+ }
+ }
+ }
+
+ setInterval(sendBatchIfReady, BATCH_TIMEOUT);

  // Add heartbeat events
- setInterval(async () => {
+ setInterval(() => {
  window.lmnrRrweb.record.addCustomEvent('heartbeat', {
  title: document.title,
  url: document.URL,
  })
-
- }, 1000);
-
+ }, HEARTBEAT_INTERVAL);
+
+ async function bufferToBase64(buffer) {
+ const base64url = await new Promise(r => {
+ const reader = new FileReader()
+ reader.onload = () => r(reader.result)
+ reader.readAsDataURL(new Blob([buffer]))
+ });
+ return base64url.slice(base64url.indexOf(',') + 1);
+ }
+
  window.lmnrRrweb.record({
- async emit(event) {
- // Compress the data field
- const compressedEvent = {
- ...event,
- data: await compressEventData(event.data)
- };
- window.lmnrRrwebEventsBatch.add(compressedEvent);
+ async emit(event) {
+ try {
+ const isLarge = isLargeEvent(event.type);
+ const compressedResult = isLarge ?
+ await compressLargeObject(event.data, true) :
+ await compressSmallObject(event.data);
+
+ const base64Data = await bufferToBase64(compressedResult);
+ const eventToSend = {
+ ...event,
+ data: base64Data,
+ };
+ window.lmnrRrwebEventsBatch.push(eventToSend);
+ } catch (error) {
+ console.warn('Failed to push event to batch', error);
+ }
  },
  recordCanvas: true,
  collectFonts: true,
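
Note: the new recorder script gzip-compresses each rrweb event's `data` field with `CompressionStream('gzip')`, base64-encodes the bytes via `bufferToBase64`, and batches events that are flushed to `window.lmnrSendEvents` every `BATCH_TIMEOUT` milliseconds. A minimal Python sketch of the inverse transform (not part of this diff, shown only to illustrate the wire format of a single event's `data`):

    import base64
    import gzip
    import json

    def decode_event_data(event: dict):
        # Reverse the browser-side transform: base64 -> gzip -> JSON.
        # Assumes `event` is one entry of the batch passed to window.lmnrSendEvents.
        raw = base64.b64decode(event["data"])
        return json.loads(gzip.decompress(raw))
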
@@ -108,16 +423,10 @@ async def send_events_async(

  await client._browser_events.send(session_id, trace_id, events)
  except Exception as e:
- if str(e).startswith("Page.evaluate: Execution context was destroyed"):
- await inject_session_recorder_async(page)
- await send_events_async(page, session_id, trace_id, client)
- else:
- # silence the error if the page has been closed, not an issue
- if (
- "Page.evaluate: Target page, context or browser has been closed"
- not in str(e)
- ):
- logger.warning(f"Could not send events: {e}")
+ if "Page.evaluate: Target page, context or browser has been closed" not in str(
+ e
+ ):
+ logger.debug(f"Could not send events: {e}")


  def send_events_sync(
@@ -141,23 +450,14 @@ def send_events_sync(
  client._browser_events.send(session_id, trace_id, events)

  except Exception as e:
- if str(e).startswith("Page.evaluate: Execution context was destroyed"):
- inject_session_recorder_sync(page)
- send_events_sync(page, session_id, trace_id, client)
- else:
- # silence the error if the page has been closed, not an issue
- if (
- "Page.evaluate: Target page, context or browser has been closed"
- not in str(e)
- ):
- logger.warning(f"Could not send events: {e}")
+ if "Page.evaluate: Target page, context or browser has been closed" not in str(
+ e
+ ):
+ logger.debug(f"Could not send events: {e}")


  def inject_session_recorder_sync(page: SyncPage):
  try:
- page.wait_for_load_state("domcontentloaded")
-
- # Wrap the evaluate call in a try-catch
  try:
  is_loaded = page.evaluate(
  """() => typeof window.lmnrRrweb !== 'undefined'"""
@@ -194,9 +494,6 @@ def inject_session_recorder_sync(page: SyncPage):

  async def inject_session_recorder_async(page: Page):
  try:
- await page.wait_for_load_state("domcontentloaded")
-
- # Wrap the evaluate call in a try-catch
  try:
  is_loaded = await page.evaluate(
  """() => typeof window.lmnrRrweb !== 'undefined'"""
@@ -232,27 +529,18 @@ async def inject_session_recorder_async(page: Page):


  @observe(name="playwright.page", ignore_input=True, ignore_output=True)
- def handle_navigation_sync(page: SyncPage, session_id: str, client: LaminarClient):
- span = trace.get_current_span()
+ def start_recording_events_sync(page: SyncPage, session_id: str, client: LaminarClient):
+
+ ctx = get_current_context()
+ span = trace.get_current_span(ctx)
  trace_id = format(span.get_span_context().trace_id, "032x")
  span.set_attribute("lmnr.internal.has_browser_session", True)
- original_bring_to_front = page.bring_to_front

- def bring_to_front():
- original_bring_to_front()
- page.evaluate(
- """() => {
- if (window.lmnrRrweb) {
- try {
- window.lmnrRrweb.record.takeFullSnapshot();
- } catch (e) {
- console.error("Error taking full snapshot:", e);
- }
- }
- }"""
- )
-
- page.bring_to_front = bring_to_front
+ try:
+ if page.evaluate("""() => typeof window.lmnrSendEvents !== 'undefined'"""):
+ return
+ except Exception:
+ pass

  def on_load():
  try:
@@ -260,79 +548,108 @@ def handle_navigation_sync(page: SyncPage, session_id: str, client: LaminarClien
  except Exception as e:
  logger.error(f"Error in on_load handler: {e}")

- def collection_loop():
- while not page.is_closed(): # Stop when page closes
- send_events_sync(page, session_id, trace_id, client)
- time.sleep(2)
-
- thread = threading.Thread(target=collection_loop, daemon=True)
- thread.start()
-
  def on_close():
  try:
  send_events_sync(page, session_id, trace_id, client)
- thread.join()
  except Exception:
  pass

  page.on("load", on_load)
  page.on("close", on_close)
+
  inject_session_recorder_sync(page)

+ # Expose function to browser so it can call us when events are ready
+ def send_events_from_browser(events):
+ try:
+ if events and len(events) > 0:
+ client._browser_events.send(session_id, trace_id, events)
+ except Exception as e:
+ logger.debug(f"Could not send events: {e}")
+
+ try:
+ page.expose_function("lmnrSendEvents", send_events_from_browser)
+ except Exception as e:
+ logger.debug(f"Could not expose function: {e}")
+

  @observe(name="playwright.page", ignore_input=True, ignore_output=True)
- async def handle_navigation_async(
+ async def start_recording_events_async(
  page: Page, session_id: str, client: AsyncLaminarClient
  ):
-
- span = trace.get_current_span()
+ ctx = get_current_context()
+ span = trace.get_current_span(ctx)
  trace_id = format(span.get_span_context().trace_id, "032x")
  span.set_attribute("lmnr.internal.has_browser_session", True)

- async def collection_loop():
- try:
- while not page.is_closed(): # Stop when page closes
- await send_events_async(page, session_id, trace_id, client)
- await asyncio.sleep(2)
- logger.info("Event collection stopped")
- except Exception as e:
- logger.error(f"Event collection stopped: {e}")
-
- # Create and store task
- task = asyncio.create_task(collection_loop())
+ try:
+ if await page.evaluate(
+ """() => typeof window.lmnrSendEvents !== 'undefined'"""
+ ):
+ return
+ except Exception:
+ pass

- async def on_load():
+ async def on_load(p):
  try:
- await inject_session_recorder_async(page)
+ await inject_session_recorder_async(p)
  except Exception as e:
  logger.error(f"Error in on_load handler: {e}")

- async def on_close():
+ async def on_close(p):
  try:
- task.cancel()
- await send_events_async(page, session_id, trace_id, client)
+ # Send any remaining events before closing
+ await send_events_async(p, session_id, trace_id, client)
  except Exception:
  pass

- page.on("load", lambda: asyncio.create_task(on_load()))
- page.on("close", lambda: asyncio.create_task(on_close()))
+ page.on("load", on_load)
+ page.on("close", on_close)

- original_bring_to_front = page.bring_to_front
+ await inject_session_recorder_async(page)

- async def bring_to_front():
- await original_bring_to_front()
+ async def send_events_from_browser(events):
+ try:
+ if events and len(events) > 0:
+ await client._browser_events.send(session_id, trace_id, events)
+ except Exception as e:
+ logger.debug(f"Could not send events: {e}")

- await page.evaluate(
- """() => {
- if (window.lmnrRrweb) {
- try {
- window.lmnrRrweb.record.takeFullSnapshot();
- } catch (e) {
- console.error("Error taking full snapshot:", e);
- }
+ try:
+ await page.expose_function("lmnrSendEvents", send_events_from_browser)
+ except Exception as e:
+ logger.debug(f"Could not expose function: {e}")
+
+
+ def take_full_snapshot(page: Page):
+ return page.evaluate(
+ """() => {
+ if (window.lmnrRrweb) {
+ try {
+ window.lmnrRrweb.record.takeFullSnapshot();
+ return true;
+ } catch (e) {
+ console.error("Error taking full snapshot:", e);
+ return false;
  }
- }"""
- )
-
- page.bring_to_front = bring_to_front
- await inject_session_recorder_async(page)
+ }
+ return false;
+ }"""
+ )
+
+
+ async def take_full_snapshot_async(page: Page):
+ return await page.evaluate(
+ """() => {
+ if (window.lmnrRrweb) {
+ try {
+ window.lmnrRrweb.record.takeFullSnapshot();
+ return true;
+ } catch (e) {
+ console.error("Error taking full snapshot:", e);
+ return false;
+ }
+ }
+ return false;
+ }"""
+ )
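
Note: the polling loops from 0.6.20 are replaced here by a push model: the page-side batcher calls `window.lmnrSendEvents(events)`, which Playwright routes to the Python callback registered with `page.expose_function`. A rough standalone sketch of that wiring (not SDK code; `handle_batch` is a hypothetical stand-in for `send_events_from_browser`):

    from playwright.sync_api import sync_playwright

    def handle_batch(events):
        # In the SDK this forwards to client._browser_events.send(session_id, trace_id, events).
        print(f"received {len(events)} rrweb events")

    with sync_playwright() as p:
        page = p.chromium.launch().new_page()
        # The injected recorder calls window.lmnrSendEvents(batch) every BATCH_TIMEOUT ms.
        page.expose_function("lmnrSendEvents", handle_batch)
        page.goto("https://example.com")
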
@@ -25,6 +25,7 @@ class AsyncBrowserEvents(BaseAsyncResource):
  "source": f"python@{PYTHON_VERSION}",
  "sdkVersion": __version__,
  }
+
  compressed_payload = gzip.compress(json.dumps(payload).encode("utf-8"))
  response = await self._client.post(
  url,