lmnr 0.7.2__py3-none-any.whl → 0.7.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lmnr/__init__.py CHANGED
@@ -3,6 +3,7 @@ from .sdk.client.asynchronous.async_client import AsyncLaminarClient
  from .sdk.datasets import EvaluationDataset, LaminarDataset
  from .sdk.evaluations import evaluate
  from .sdk.laminar import Laminar
+ from .sdk.types import SessionRecordingOptions, MaskInputOptions
  from .sdk.types import (
  AgentOutput,
  FinalOutputChunkContent,
@@ -38,4 +39,6 @@ __all__ = [
  "get_tracer",
  "evaluate",
  "observe",
+ "SessionRecordingOptions",
+ "MaskInputOptions",
  ]
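
The two new exports feed the session_recording_options parameter that Laminar.initialize gains later in this diff (see the lmnr/sdk/laminar.py and lmnr/sdk/types.py hunks below). A minimal usage sketch against 0.7.4; the flag values are purely illustrative, and password fields are always masked by the injected recorder regardless of these options:

    from lmnr import Laminar, SessionRecordingOptions, MaskInputOptions

    # Mask free-text and textarea inputs in recorded browser sessions.
    Laminar.initialize(
        project_api_key="...",
        session_recording_options=SessionRecordingOptions(
            mask_input_options=MaskInputOptions(
                textarea=True,
                text=True,
                number=False,
                select=False,
                email=False,
                tel=False,
            ),
        ),
    )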
@@ -6,6 +6,7 @@ from opentelemetry.sdk.resources import SERVICE_NAME

  from lmnr.opentelemetry_lib.tracing.instruments import Instruments
  from lmnr.opentelemetry_lib.tracing import TracerWrapper
+ from lmnr.sdk.types import SessionRecordingOptions

  MAX_MANUAL_SPAN_PAYLOAD_SIZE = 1024 * 1024 * 10 # 10MB

@@ -30,6 +31,7 @@ class TracerManager:
  timeout_seconds: int = 30,
  set_global_tracer_provider: bool = True,
  otel_logger_level: int = logging.ERROR,
+ session_recording_options: SessionRecordingOptions | None = None,
  ) -> None:
  enable_content_tracing = True

@@ -50,6 +52,7 @@ class TracerManager:
  timeout_seconds=timeout_seconds,
  set_global_tracer_provider=set_global_tracer_provider,
  otel_logger_level=otel_logger_level,
+ session_recording_options=session_recording_options,
  )

  @staticmethod
@@ -56,8 +56,8 @@ from opentelemetry.trace import SpanKind, Tracer
  from opentelemetry.trace.status import Status, StatusCode
  from wrapt import ObjectProxy

- from openai.types.chat import ChatCompletionMessageToolCall
  from openai.types.chat.chat_completion_message import FunctionCall
+ import pydantic

  SPAN_NAME = "openai.chat"
  PROMPT_FILTER_KEY = "prompt_filter_results"
@@ -995,7 +995,7 @@ async def _abuild_from_streaming_response(


  def _parse_tool_calls(
- tool_calls: Optional[List[Union[dict, ChatCompletionMessageToolCall]]],
+ tool_calls: Optional[List[Union[dict, pydantic.BaseModel]]],
  ) -> Union[List[ToolCall], None]:
  """
  Util to correctly parse the tool calls data from the OpenAI API to this module's
@@ -1012,7 +1012,7 @@ def _parse_tool_calls(
  # Handle dict or ChatCompletionMessageToolCall
  if isinstance(tool_call, dict):
  tool_call_data = copy.deepcopy(tool_call)
- elif isinstance(tool_call, ChatCompletionMessageToolCall):
+ elif _is_tool_call_model(tool_call):
  tool_call_data = tool_call.model_dump()
  elif isinstance(tool_call, FunctionCall):
  function_call = tool_call.model_dump()
@@ -1029,6 +1029,17 @@ def _parse_tool_calls(
  return result


+ def _is_tool_call_model(tool_call):
+ try:
+ from openai.types.chat.chat_completion_message_tool_call import (
+ ChatCompletionMessageFunctionToolCall,
+ )
+
+ return isinstance(tool_call, ChatCompletionMessageFunctionToolCall)
+ except Exception:
+ return False
+
+
  @singledispatch
  def _parse_choice_event(choice) -> ChoiceEvent:
  has_message = choice.message is not None
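
The practical effect of this hunk: the module no longer imports ChatCompletionMessageToolCall at import time, and pydantic tool-call models are detected lazily via _is_tool_call_model, which returns False on openai releases that do not define ChatCompletionMessageFunctionToolCall. An illustrative sketch of the two input shapes the updated signature advertises (field values are made up; the downstream ToolCall handling is unchanged and not shown in this diff):

    # dict input, e.g. a tool call replayed from a serialized response:
    # taken by the isinstance(tool_call, dict) branch and deep-copied.
    dict_tool_call = {
        "id": "call_1",
        "type": "function",
        "function": {"name": "get_weather", "arguments": "{\"city\": \"Paris\"}"},
    }
    _parse_tool_calls([dict_tool_call])

    # pydantic input: any BaseModel is accepted by the type hint, but only the
    # openai tool-call classes (the new function variant, or FunctionCall) are
    # converted via .model_dump(); other values fall through exactly as before.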
@@ -5,6 +5,7 @@ import threading
  from lmnr.opentelemetry_lib.tracing.processor import LaminarSpanProcessor
  from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
  from lmnr.sdk.client.synchronous.sync_client import LaminarClient
+ from lmnr.sdk.types import SessionRecordingOptions
  from lmnr.sdk.log import VerboseColorfulFormatter
  from lmnr.opentelemetry_lib.tracing.instruments import (
  Instruments,
@@ -38,6 +39,7 @@ MAX_EVENTS_OR_ATTRIBUTES_PER_SPAN = 5000
  class TracerWrapper(object):
  resource_attributes: dict = {}
  enable_content_tracing: bool = True
+ session_recording_options: SessionRecordingOptions = {}
  _lock = threading.Lock()
  _tracer_provider: TracerProvider | None = None
  _logger: logging.Logger
@@ -62,6 +64,7 @@ class TracerWrapper(object):
  timeout_seconds: int = 30,
  set_global_tracer_provider: bool = True,
  otel_logger_level: int = logging.ERROR,
+ session_recording_options: SessionRecordingOptions | None = None,
  ) -> "TracerWrapper":
  # Silence some opentelemetry warnings
  logging.getLogger("opentelemetry.trace").setLevel(otel_logger_level)
@@ -72,6 +75,9 @@ class TracerWrapper(object):
  cls._initialize_logger(cls)
  obj = super(TracerWrapper, cls).__new__(cls)

+ # Store session recording options
+ cls.session_recording_options = session_recording_options or {}
+
  obj._client = LaminarClient(
  base_url=base_http_url,
  project_api_key=project_api_key,
@@ -244,7 +250,12 @@ class TracerWrapper(object):
  return False
  return self._span_processor.force_flush()

- def get_tracer(self):
+ @classmethod
+ def get_session_recording_options(cls) -> SessionRecordingOptions:
+ """Get the session recording options set during initialization."""
+ return cls.session_recording_options
+
+ def get_tracer(self) -> trace.Tracer:
  if self._tracer_provider is None:
  return trace.get_tracer_provider().get_tracer(TRACER_NAME)
  return self._tracer_provider.get_tracer(TRACER_NAME)
@@ -14,6 +14,8 @@ SPAN_OUTPUT = "lmnr.span.output"
  SPAN_TYPE = "lmnr.span.type"
  SPAN_PATH = "lmnr.span.path"
  SPAN_IDS_PATH = "lmnr.span.ids_path"
+ PARENT_SPAN_PATH = "lmnr.span.parent_path"
+ PARENT_SPAN_IDS_PATH = "lmnr.span.parent_ids_path"
  SPAN_INSTRUMENTATION_SOURCE = "lmnr.span.instrumentation_source"
  SPAN_SDK_VERSION = "lmnr.span.sdk_version"
  SPAN_LANGUAGE_VERSION = "lmnr.span.language_version"
@@ -7,9 +7,11 @@ from opentelemetry.sdk.trace.export import (
  SimpleSpanProcessor,
  )
  from opentelemetry.sdk.trace import Span
- from opentelemetry.context import Context, get_value, get_current, set_value
+ from opentelemetry.context import Context, get_value

  from lmnr.opentelemetry_lib.tracing.attributes import (
+ PARENT_SPAN_IDS_PATH,
+ PARENT_SPAN_PATH,
  SPAN_IDS_PATH,
  SPAN_INSTRUMENTATION_SOURCE,
  SPAN_LANGUAGE_VERSION,
@@ -52,20 +54,18 @@ class LaminarSpanProcessor(SpanProcessor):
  )

  def on_start(self, span: Span, parent_context: Context | None = None):
- span_path_in_context = get_value("span_path", parent_context or get_current())
- parent_span_path = span_path_in_context or (
+ parent_span_path = list(span.attributes.get(PARENT_SPAN_PATH, tuple())) or (
  self.__span_id_to_path.get(span.parent.span_id) if span.parent else None
  )
- parent_span_ids_path = (
- self.__span_id_lists.get(span.parent.span_id, []) if span.parent else []
- )
+ parent_span_ids_path = list(
+ span.attributes.get(PARENT_SPAN_IDS_PATH, tuple())
+ ) or (self.__span_id_lists.get(span.parent.span_id, []) if span.parent else [])
  span_path = parent_span_path + [span.name] if parent_span_path else [span.name]
  span_ids_path = parent_span_ids_path + [
  str(uuid.UUID(int=span.get_span_context().span_id))
  ]
  span.set_attribute(SPAN_PATH, span_path)
  span.set_attribute(SPAN_IDS_PATH, span_ids_path)
- set_value("span_path", span_path, get_current())
  self.__span_id_to_path[span.get_span_context().span_id] = span_path
  self.__span_id_lists[span.get_span_context().span_id] = span_ids_path

@@ -1,5 +1,7 @@
+ import orjson
  import logging
  import os
+ import time

  from opentelemetry import trace

@@ -9,6 +11,8 @@ from lmnr.sdk.browser.utils import retry_sync, retry_async
  from lmnr.sdk.client.synchronous.sync_client import LaminarClient
  from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
  from lmnr.opentelemetry_lib.tracing.context import get_current_context
+ from lmnr.opentelemetry_lib.tracing import TracerWrapper
+ from lmnr.sdk.types import MaskInputOptions

  try:
  if is_package_installed("playwright"):
@@ -32,17 +36,24 @@ except ImportError as e:

  logger = logging.getLogger(__name__)

+ OLD_BUFFER_TIMEOUT = 60
+
  current_dir = os.path.dirname(os.path.abspath(__file__))
  with open(os.path.join(current_dir, "recorder", "record.umd.min.cjs"), "r") as f:
  RRWEB_CONTENT = f"() => {{ {f.read()} }}"

  INJECT_PLACEHOLDER = """
- () => {
+ (mask_input_options) => {
  const BATCH_TIMEOUT = 2000; // Send events after 2 seconds
  const MAX_WORKER_PROMISES = 50; // Max concurrent worker promises
  const HEARTBEAT_INTERVAL = 1000;
-
+ const CHUNK_SIZE = 256 * 1024; // 256KB chunks
+ const CHUNK_SEND_DELAY = 100; // 100ms delay between chunks
+
  window.lmnrRrwebEventsBatch = [];
+ window.lmnrChunkQueue = [];
+ window.lmnrChunkSequence = 0;
+ window.lmnrCurrentBatchId = null;

  // Create a Web Worker for heavy JSON processing with chunked processing
  const createCompressionWorker = () => {
@@ -99,6 +110,25 @@ INJECT_PLACEHOLDER = """
  let compressionWorker = null;
  let workerPromises = new Map();
  let workerId = 0;
+ let workerSupported = null; // null = unknown, true = supported, false = blocked by CSP
+
+ // Test if workers are supported (not blocked by CSP)
+ function testWorkerSupport() {
+ if (workerSupported !== null) {
+ return workerSupported;
+ }
+
+ try {
+ const testWorker = createCompressionWorker();
+ testWorker.terminate();
+ workerSupported = true;
+ return true;
+ } catch (error) {
+ console.warn('Web Workers blocked by CSP, will use main thread compression:', error);
+ workerSupported = false;
+ return false;
+ }
+ }

  // Cleanup function for worker
  const cleanupWorker = () => {
@@ -222,6 +252,11 @@ INJECT_PLACEHOLDER = """
  // Alternative: Use transferable objects for maximum efficiency
  async function compressLargeObjectTransferable(data) {
  try {
+ // Check if workers are supported first
+ if (!testWorkerSupport()) {
+ return compressSmallObject(data);
+ }
+
  // Clean up stale promises first
  cleanupStalePromises();

@@ -281,49 +316,60 @@ INJECT_PLACEHOLDER = """

  // Worker-based compression for large objects
  async function compressLargeObject(data, isLarge = true) {
+ // Check if workers are supported first - if not, use main thread compression
+ if (!testWorkerSupport()) {
+ return await compressSmallObject(data);
+ }
+
  try {
  // Use transferable objects for better performance
  return await compressLargeObjectTransferable(data);
  } catch (error) {
  console.warn('Transferable failed, falling back to string method:', error);
- // Fallback to string method
- const jsonString = await stringifyNonBlocking(data);
+ try {
+ // Fallback to string method with worker
+ const jsonString = await stringifyNonBlocking(data);
+
+ return new Promise((resolve, reject) => {
+ if (!compressionWorker) {
+ compressionWorker = createCompressionWorker();
+ compressionWorker.onmessage = (e) => {
+ const { id, success, data: result, error } = e.data;
+ const promise = workerPromises.get(id);
+ if (promise) {
+ workerPromises.delete(id);
+ if (success) {
+ promise.resolve(result);
+ } else {
+ promise.reject(new Error(error));
+ }
+ }
+ };
+
+ compressionWorker.onerror = (error) => {
+ console.error('Compression worker error:', error);
+ cleanupWorker();
+ };
+ }

- return new Promise((resolve, reject) => {
- if (!compressionWorker) {
- compressionWorker = createCompressionWorker();
- compressionWorker.onmessage = (e) => {
- const { id, success, data: result, error } = e.data;
- const promise = workerPromises.get(id);
- if (promise) {
+ const id = ++workerId;
+ workerPromises.set(id, { resolve, reject });
+
+ // Set timeout to prevent hanging promises
+ setTimeout(() => {
+ if (workerPromises.has(id)) {
  workerPromises.delete(id);
- if (success) {
- promise.resolve(result);
- } else {
- promise.reject(new Error(error));
- }
+ reject(new Error('Compression timeout'));
  }
- };
+ }, 10000);

- compressionWorker.onerror = (error) => {
- console.error('Compression worker error:', error);
- cleanupWorker();
- };
- }
-
- const id = ++workerId;
- workerPromises.set(id, { resolve, reject });
-
- // Set timeout to prevent hanging promises
- setTimeout(() => {
- if (workerPromises.has(id)) {
- workerPromises.delete(id);
- reject(new Error('Compression timeout'));
- }
- }, 10000);
-
- compressionWorker.postMessage({ jsonString, id });
- });
+ compressionWorker.postMessage({ jsonString, id });
+ });
+ } catch (workerError) {
+ console.warn('Worker creation failed, falling back to main thread compression:', workerError);
+ // Final fallback: compress on main thread (may block UI but will work)
+ return await compressSmallObject(data);
+ }
  }
  }

@@ -343,15 +389,82 @@ INJECT_PLACEHOLDER = """
  return false;
  }

+ // Create chunks from a string with metadata
+ function createChunks(str, batchId) {
+ const chunks = [];
+ const totalChunks = Math.ceil(str.length / CHUNK_SIZE);
+
+ for (let i = 0; i < str.length; i += CHUNK_SIZE) {
+ const chunk = str.slice(i, i + CHUNK_SIZE);
+ chunks.push({
+ batchId: batchId,
+ chunkIndex: chunks.length,
+ totalChunks: totalChunks,
+ data: chunk,
+ isFinal: chunks.length === totalChunks - 1
+ });
+ }
+
+ return chunks;
+ }
+
+ // Send chunks with flow control
+ async function sendChunks(chunks) {
+ if (typeof window.lmnrSendEvents !== 'function') {
+ return;
+ }
+
+ window.lmnrChunkQueue.push(...chunks);
+
+ // Process queue
+ while (window.lmnrChunkQueue.length > 0) {
+ const chunk = window.lmnrChunkQueue.shift();
+ try {
+ await window.lmnrSendEvents(chunk);
+ // Small delay between chunks to avoid overwhelming CDP
+ await new Promise(resolve => setTimeout(resolve, CHUNK_SEND_DELAY));
+ } catch (error) {
+ console.error('Failed to send chunk:', error);
+ // On error, clear failed chunk batch from queue
+ window.lmnrChunkQueue = window.lmnrChunkQueue.filter(c => c.batchId !== chunk.batchId);
+ break;
+ }
+ }
+ }
+
  async function sendBatchIfReady() {
  if (window.lmnrRrwebEventsBatch.length > 0 && typeof window.lmnrSendEvents === 'function') {
  const events = window.lmnrRrwebEventsBatch;
  window.lmnrRrwebEventsBatch = [];

  try {
- await window.lmnrSendEvents(events);
+ // Generate unique batch ID
+ const batchId = `${Date.now()}_${window.lmnrChunkSequence++}`;
+ window.lmnrCurrentBatchId = batchId;
+
+ // Stringify the entire batch
+ const batchString = JSON.stringify(events);
+
+ // Check size and chunk if necessary
+ if (batchString.length <= CHUNK_SIZE) {
+ // Small enough to send as single chunk
+ const chunk = {
+ batchId: batchId,
+ chunkIndex: 0,
+ totalChunks: 1,
+ data: batchString,
+ isFinal: true
+ };
+ await window.lmnrSendEvents(chunk);
+ } else {
+ // Need to chunk
+ const chunks = createChunks(batchString, batchId);
+ await sendChunks(chunks);
+ }
  } catch (error) {
  console.error('Failed to send events:', error);
+ // Clear batch to prevent memory buildup
+ window.lmnrRrwebEventsBatch = [];
  }
  }
  }
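
For reference, this is the payload contract the batching code above hands to window.lmnrSendEvents one chunk at a time; the Python callbacks added further down in this file buffer chunks by batchId and only decode once all totalChunks pieces have arrived. A minimal Python-side illustration with made-up values (keys as used by both sides in this diff):

    chunk = {
        "batchId": "1724671234567_0",      # Date.now() plus a running sequence number
        "chunkIndex": 0,                   # position of this piece within the batch
        "totalChunks": 1,                  # batches up to 256 KB arrive as a single chunk
        "data": "[ ...rrweb events... ]",  # slice of the JSON-stringified event batch
        "isFinal": True,
    }
    # send_events_from_browser(chunk) stores each slice, rejoins them in index
    # order, orjson-decodes the result, and drops incomplete batches after
    # OLD_BUFFER_TIMEOUT (60 seconds).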
@@ -387,7 +500,16 @@ INJECT_PLACEHOLDER = """
  },
  recordCanvas: true,
  collectFonts: true,
- recordCrossOriginIframes: true
+ recordCrossOriginIframes: true,
+ maskInputOptions: {
+ password: true,
+ textarea: mask_input_options.textarea || false,
+ text: mask_input_options.text || false,
+ number: mask_input_options.number || false,
+ select: mask_input_options.select || false,
+ email: mask_input_options.email || false,
+ tel: mask_input_options.tel || false,
+ }
  });

  function heartbeat() {
@@ -407,6 +529,30 @@ INJECT_PLACEHOLDER = """
  """


+ def get_mask_input_setting() -> MaskInputOptions:
+ """Get the mask_input setting from session recording configuration."""
+ try:
+ config = TracerWrapper.get_session_recording_options()
+ return config.get("mask_input_options", MaskInputOptions(
+ textarea=False,
+ text=False,
+ number=False,
+ select=False,
+ email=False,
+ tel=False,
+ ))
+ except (AttributeError, Exception):
+ # Fallback to default configuration if TracerWrapper is not initialized
+ return MaskInputOptions(
+ textarea=False,
+ text=False,
+ number=False,
+ select=False,
+ email=False,
+ tel=False,
+ )
+
+
  def inject_session_recorder_sync(page: SyncPage):
  try:
  try:
@@ -435,7 +581,7 @@ def inject_session_recorder_sync(page: SyncPage):
  return

  try:
- page.evaluate(INJECT_PLACEHOLDER)
+ page.evaluate(INJECT_PLACEHOLDER, get_mask_input_setting())
  except Exception as e:
  logger.debug(f"Failed to inject session recorder: {e}")

@@ -471,7 +617,7 @@ async def inject_session_recorder_async(page: Page):
  return

  try:
- await page.evaluate(INJECT_PLACEHOLDER)
+ await page.evaluate(INJECT_PLACEHOLDER, get_mask_input_setting())
  except Exception as e:
  logger.debug(f"Failed to inject session recorder placeholder: {e}")

@@ -486,11 +632,54 @@ def start_recording_events_sync(page: SyncPage, session_id: str, client: Laminar
  span = trace.get_current_span(ctx)
  trace_id = format(span.get_span_context().trace_id, "032x")
  span.set_attribute("lmnr.internal.has_browser_session", True)
-
- def send_events_from_browser(events):
+
+ # Buffer for reassembling chunks
+ chunk_buffers = {}
+
+ def send_events_from_browser(chunk):
  try:
- if events and len(events) > 0:
- client._browser_events.send(session_id, trace_id, events)
+ # Handle chunked data
+ batch_id = chunk['batchId']
+ chunk_index = chunk['chunkIndex']
+ total_chunks = chunk['totalChunks']
+ data = chunk['data']
+
+ # Initialize buffer for this batch if needed
+ if batch_id not in chunk_buffers:
+ chunk_buffers[batch_id] = {
+ 'chunks': {},
+ 'total': total_chunks,
+ 'timestamp': time.time()
+ }
+
+ # Store chunk
+ chunk_buffers[batch_id]['chunks'][chunk_index] = data
+
+ # Check if we have all chunks
+ if len(chunk_buffers[batch_id]['chunks']) == total_chunks:
+ # Reassemble the full message
+ full_data = ''.join(chunk_buffers[batch_id]['chunks'][i] for i in range(total_chunks))
+
+ # Parse the JSON
+ events = orjson.loads(full_data)
+
+ # Send to server
+ if events and len(events) > 0:
+ client._browser_events.send(session_id, trace_id, events)
+
+ # Clean up buffer
+ del chunk_buffers[batch_id]
+
+ # Clean up old incomplete buffers
+ current_time = time.time()
+ to_delete = []
+ for bid, buffer in chunk_buffers.items():
+ if current_time - buffer['timestamp'] > OLD_BUFFER_TIMEOUT:
+ to_delete.append(bid)
+ for bid in to_delete:
+ logger.debug(f"Cleaning up incomplete chunk buffer: {bid}")
+ del chunk_buffers[bid]
+
  except Exception as e:
  logger.debug(f"Could not send events: {e}")

@@ -519,10 +708,55 @@ async def start_recording_events_async(
  trace_id = format(span.get_span_context().trace_id, "032x")
  span.set_attribute("lmnr.internal.has_browser_session", True)

- async def send_events_from_browser(events):
+ # Buffer for reassembling chunks
+ chunk_buffers = {}
+
+ async def send_events_from_browser(chunk):
  try:
- if events and len(events) > 0:
- await client._browser_events.send(session_id, trace_id, events)
+ # Handle chunked data
+ batch_id = chunk['batchId']
+ chunk_index = chunk['chunkIndex']
+ total_chunks = chunk['totalChunks']
+ data = chunk['data']
+
+ # Initialize buffer for this batch if needed
+ if batch_id not in chunk_buffers:
+ chunk_buffers[batch_id] = {
+ 'chunks': {},
+ 'total': total_chunks,
+ 'timestamp': time.time()
+ }
+
+ # Store chunk
+ chunk_buffers[batch_id]['chunks'][chunk_index] = data
+
+ # Check if we have all chunks
+ if len(chunk_buffers[batch_id]['chunks']) == total_chunks:
+ # Reassemble the full message
+ full_data = ''
+ for i in range(total_chunks):
+ full_data += chunk_buffers[batch_id]['chunks'][i]
+
+ # Parse the JSON
+ events = orjson.loads(full_data)
+
+ # Send to server
+ if events and len(events) > 0:
+ await client._browser_events.send(session_id, trace_id, events)
+
+ # Clean up buffer
+ del chunk_buffers[batch_id]
+
+ # Clean up old incomplete buffers
+ current_time = time.time()
+ to_delete = []
+ for bid, buffer in chunk_buffers.items():
+ if current_time - buffer['timestamp'] > OLD_BUFFER_TIMEOUT:
+ to_delete.append(bid)
+ for bid in to_delete:
+ logger.debug(f"Cleaning up incomplete chunk buffer: {bid}")
+ del chunk_buffers[bid]
+
  except Exception as e:
  logger.debug(f"Could not send events: {e}")

lmnr/sdk/decorators.py CHANGED
@@ -102,7 +102,8 @@ def observe(
  ):
  logger.warning("Tags must be a list of strings. Tags will be ignored.")
  else:
- association_properties["tags"] = tags
+ # list(set(tags)) to deduplicate tags
+ association_properties["tags"] = list(set(tags))
  if input_formatter is not None and ignore_input:
  logger.warning(
  f"observe, function {func.__name__}: Input formatter"
lmnr/sdk/laminar.py CHANGED
@@ -13,6 +13,10 @@ from lmnr.opentelemetry_lib.tracing.instruments import Instruments
  from lmnr.opentelemetry_lib.tracing.tracer import get_tracer_with_context
  from lmnr.opentelemetry_lib.tracing.attributes import (
  ASSOCIATION_PROPERTIES,
+ PARENT_SPAN_IDS_PATH,
+ PARENT_SPAN_PATH,
+ SPAN_IDS_PATH,
+ SPAN_PATH,
  USER_ID,
  Attributes,
  SPAN_TYPE,
@@ -45,6 +49,7 @@ from .log import VerboseColorfulFormatter

  from .types import (
  LaminarSpanContext,
+ SessionRecordingOptions,
  TraceType,
  )

@@ -73,6 +78,7 @@ class Laminar:
  export_timeout_seconds: int | None = None,
  set_global_tracer_provider: bool = True,
  otel_logger_level: int = logging.ERROR,
+ session_recording_options: SessionRecordingOptions | None = None,
  ):
  """Initialize Laminar context across the application.
  This method must be called before using any other Laminar methods or
@@ -119,6 +125,10 @@ class Laminar:
  Defaults to True.
  otel_logger_level (int, optional): OpenTelemetry logger level. Defaults\
  to logging.ERROR.
+ session_recording_options (SessionRecordingOptions | None, optional): Options\
+ for browser session recording. Currently supports 'mask_input_options'\
+ (MaskInputOptions) to control which input fields are masked during recording.\
+ Defaults to None (uses default masking behavior).

  Raises:
  ValueError: If project API key is not set
@@ -179,6 +189,7 @@ class Laminar:
  timeout_seconds=export_timeout_seconds,
  set_global_tracer_provider=set_global_tracer_provider,
  otel_logger_level=otel_logger_level,
+ session_recording_options=session_recording_options,
  )

  @classmethod
@@ -309,9 +320,29 @@ class Laminar:

  with get_tracer_with_context() as (tracer, isolated_context):
  ctx = context or isolated_context
+ path = []
+ span_ids_path = []
  if parent_span_context is not None:
+ if isinstance(parent_span_context, (dict, str)):
+ try:
+ laminar_span_context = LaminarSpanContext.deserialize(
+ parent_span_context
+ )
+ path = laminar_span_context.span_path
+ span_ids_path = laminar_span_context.span_ids_path
+ except Exception:
+ cls.__logger.warning(
+ f"`start_as_current_span` Could not deserialize parent_span_context: {parent_span_context}. "
+ "Will use it as is."
+ )
+ laminar_span_context = parent_span_context
+ else:
+ laminar_span_context = parent_span_context
+ if isinstance(laminar_span_context, LaminarSpanContext):
+ path = laminar_span_context.span_path
+ span_ids_path = laminar_span_context.span_ids_path
  span_context = LaminarSpanContext.try_to_otel_span_context(
- parent_span_context, cls.__logger
+ laminar_span_context, cls.__logger
  )
  ctx = trace.set_span_in_context(
  trace.NonRecordingSpan(span_context), ctx
@@ -345,6 +376,8 @@ class Laminar:
  context=ctx,
  attributes={
  SPAN_TYPE: span_type,
+ PARENT_SPAN_PATH: path,
+ PARENT_SPAN_IDS_PATH: span_ids_path,
  **(label_props),
  **(tag_props),
  },
@@ -447,9 +480,29 @@ class Laminar:

  with get_tracer_with_context() as (tracer, isolated_context):
  ctx = context or isolated_context
+ path = []
+ span_ids_path = []
  if parent_span_context is not None:
+ if isinstance(parent_span_context, (dict, str)):
+ try:
+ laminar_span_context = LaminarSpanContext.deserialize(
+ parent_span_context
+ )
+ path = laminar_span_context.span_path
+ span_ids_path = laminar_span_context.span_ids_path
+ except Exception:
+ cls.__logger.warning(
+ f"`start_span` Could not deserialize parent_span_context: {parent_span_context}. "
+ "Will use it as is."
+ )
+ laminar_span_context = parent_span_context
+ else:
+ laminar_span_context = parent_span_context
+ if isinstance(laminar_span_context, LaminarSpanContext):
+ path = laminar_span_context.span_path
+ span_ids_path = laminar_span_context.span_ids_path
  span_context = LaminarSpanContext.try_to_otel_span_context(
- parent_span_context, cls.__logger
+ laminar_span_context, cls.__logger
  )
  ctx = trace.set_span_in_context(
  trace.NonRecordingSpan(span_context), ctx
@@ -484,6 +537,8 @@ class Laminar:
  context=ctx,
  attributes={
  SPAN_TYPE: span_type,
+ PARENT_SPAN_PATH: path,
+ PARENT_SPAN_IDS_PATH: span_ids_path,
  **(label_props),
  **(tag_props),
  },
@@ -655,6 +710,8 @@ class Laminar:
  trace_id=uuid.UUID(int=span.get_span_context().trace_id),
  span_id=uuid.UUID(int=span.get_span_context().span_id),
  is_remote=span.get_span_context().is_remote,
+ span_path=span.attributes.get(SPAN_PATH, []),
+ span_ids_path=span.attributes.get(SPAN_IDS_PATH, []),
  )

  @classmethod
@@ -741,7 +798,8 @@ class Laminar:
  "Tags must be a list of strings. Tags will be ignored."
  )
  return
- span.set_attribute(f"{ASSOCIATION_PROPERTIES}.tags", tags)
+ # list(set(tags)) to deduplicate tags
+ span.set_attribute(f"{ASSOCIATION_PROPERTIES}.tags", list(set(tags)))

  @classmethod
  def set_trace_session_id(cls, session_id: str | None = None):
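
Together with the LaminarSpanProcessor change above, these hunks let the span path survive process boundaries: a serialized LaminarSpanContext now carries span_path and span_ids_path, and spans started under it record PARENT_SPAN_PATH / PARENT_SPAN_IDS_PATH so the processor can rebuild the full path. A rough sketch of the handoff; the dict keys follow LaminarSpanContext.deserialize in this diff, the UUID values are placeholders, and the positional span name argument plus context-manager usage are assumptions:

    import uuid
    from lmnr import Laminar

    # Producer side would ship its real trace/span ids and path, e.g. over a queue.
    parent_ctx = {
        "trace_id": str(uuid.uuid4()),
        "span_id": str(uuid.uuid4()),
        "is_remote": True,
        "span_path": ["handle_request", "call_llm"],
        "span_ids_path": [str(uuid.uuid4()), str(uuid.uuid4())],
    }

    # Consumer side: the new span's path becomes handle_request > call_llm > downstream_work.
    with Laminar.start_as_current_span("downstream_work", parent_span_context=parent_ctx):
        ...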
lmnr/sdk/types.py CHANGED
@@ -9,7 +9,7 @@ import uuid

  from enum import Enum
  from opentelemetry.trace import SpanContext, TraceFlags
- from typing import Any, Awaitable, Callable, Literal, Optional
+ from typing import Any, Awaitable, Callable, Literal, Optional, TypedDict

  from .utils import serialize

@@ -169,6 +169,8 @@ class LaminarSpanContext(pydantic.BaseModel):
  trace_id: uuid.UUID
  span_id: uuid.UUID
  is_remote: bool = pydantic.Field(default=False)
+ span_path: list[str] = pydantic.Field(default=[])
+ span_ids_path: list[str] = pydantic.Field(default=[]) # stringified UUIDs

  def __str__(self) -> str:
  return self.model_dump_json()
@@ -199,7 +201,7 @@ class LaminarSpanContext(pydantic.BaseModel):
  "Please use `LaminarSpanContext` instead."
  )
  return span_context
- elif isinstance(span_context, dict) or isinstance(span_context, str):
+ elif isinstance(span_context, (dict, str)):
  try:
  laminar_span_context = cls.deserialize(span_context)
  return SpanContext(
@@ -221,6 +223,9 @@ class LaminarSpanContext(pydantic.BaseModel):
  "trace_id": data.get("trace_id") or data.get("traceId"),
  "span_id": data.get("span_id") or data.get("spanId"),
  "is_remote": data.get("is_remote") or data.get("isRemote", False),
+ "span_path": data.get("span_path") or data.get("spanPath", []),
+ "span_ids_path": data.get("span_ids_path")
+ or data.get("spanIdsPath", []),
  }
  return cls.model_validate(converted_data)
  elif isinstance(data, str):
@@ -346,3 +351,16 @@ class RunAgentResponseChunk(pydantic.RootModel):
  | ErrorChunkContent
  | TimeoutChunkContent
  )
+
+
+ class MaskInputOptions(TypedDict):
+ textarea: bool | None
+ text: bool | None
+ number: bool | None
+ select: bool | None
+ email: bool | None
+ tel: bool | None
+
+
+ class SessionRecordingOptions(TypedDict):
+ mask_input_options: MaskInputOptions | None
lmnr/version.py CHANGED
@@ -3,7 +3,7 @@ import httpx
  from packaging import version


- __version__ = "0.7.2"
+ __version__ = "0.7.4"
  PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"


@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lmnr
- Version: 0.7.2
+ Version: 0.7.4
  Summary: Python SDK for Laminar
  Author: lmnr.ai
  Author-email: lmnr.ai <founders@lmnr.ai>
@@ -1,7 +1,7 @@
- lmnr/__init__.py,sha256=47422a1fd58f5be3e7870ccb3ed7de4f1ac520d942e0c83cbcf903b0600a08f6,1278
+ lmnr/__init__.py,sha256=8be7b56ab62735fd54ca90a0642784c6153ed1d6e0f12734619ca0618dd9fbdb,1398
  lmnr/cli.py,sha256=b8780b51f37fe9e20db5495c41d3ad3837f6b48f408b09a58688d017850c0796,6047
  lmnr/opentelemetry_lib/.flake8,sha256=6c2c6e0e51b1dd8439e501ca3e21899277076a787da868d0254ba37056b79405,150
- lmnr/opentelemetry_lib/__init__.py,sha256=6962aca915d485586ed814b9e799ced898594ac2bc6d35329405705b26eab861,2160
+ lmnr/opentelemetry_lib/__init__.py,sha256=1350e8d12ea2f422751ab3a80d7d32d10c27ad8e4c2989407771dc6e544d9c65,2350
  lmnr/opentelemetry_lib/decorators/__init__.py,sha256=216536fb3ac8de18e6dfe4dfb2e571074c727466f97e6dcd609339c8458a345a,11511
  lmnr/opentelemetry_lib/litellm/__init__.py,sha256=8a3679381ca5660cf53e4b7571850906c6635264129149adebda8f3f7c248f68,15127
  lmnr/opentelemetry_lib/litellm/utils.py,sha256=da8cf0553f82dc7203109f117a4c7b4185e8baf34caad12d7823875515201a27,539
@@ -28,7 +28,7 @@ lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/__init__.py,sha25
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/utils.py,sha256=9dff6c2595e79edb38818668aed1220efc188d8a982594c04f4ceeb6e3ff47a6,1512
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/__init__.py,sha256=8b91dc16af927eee75b969c0980c606680b347a87f8533bc0f4a092e5ec6e5c9,2071
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/__init__.py,sha256=9d182c8cef5ee1e205dc4c2f7c8e49d8403ee9fee66072c5cfdd29a0d54f61a2,15149
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=6042b3bcf94f38c90bdebaa2c6c9ac1a4723d1801b8e7c20cf8cc3926cef83ad,38657
+ lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=4a1e682cd455661dde3b9442c7c16d055958e77ba5fff24c6fe08ab967c30da2,38882
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/completion_wrappers.py,sha256=3a45c07d9d0f37baf409a48e2a1b577f28041c623c41f59ada1c87b94285ae3b,9537
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/config.py,sha256=8016e4af0291a77484ce88d7d1ca06146b1229ae0e0a0f46e042faf75b456a8f,507
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py,sha256=324eeeaf8dd862f49c15bb7290d414e77ad51cdf532c2cfd74358783cdf654a5,9330
@@ -45,13 +45,13 @@ lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/version.py,sha256=4f
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/opentelemetry/__init__.py,sha256=1f86cdf738e2f68586b0a4569bb1e40edddd85c529f511ef49945ddb7b61fab5,2648
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/skyvern/__init__.py,sha256=764e4fe979fb08d7821419a3cc5c3ae89a6664b626ef928259f8f175c939eaea,6334
  lmnr/opentelemetry_lib/opentelemetry/instrumentation/threading/__init__.py,sha256=90aa8558467d7e469fe1a6c75372c113da403557715f03b522b2fab94b287c40,6320
- lmnr/opentelemetry_lib/tracing/__init__.py,sha256=a39e9a48f8a842ce7f7ec53364d793c1a303dcfd485aee7a72ade07d1b3635a2,9662
+ lmnr/opentelemetry_lib/tracing/__init__.py,sha256=b96aee7590af1853fffc4c3d8ce9127a67e1ce589f695a99aabe6b37d70b0e48,10203
  lmnr/opentelemetry_lib/tracing/_instrument_initializers.py,sha256=a15a46a0515462319195a96f7cdb695e72a1559c3212964f5883ab824031bf70,15125
- lmnr/opentelemetry_lib/tracing/attributes.py,sha256=32fa30565b977c2a92202dc2bf1ded583a81d02a6bf5ba52958f75a8be08cbbe,1497
+ lmnr/opentelemetry_lib/tracing/attributes.py,sha256=88afbd1ad56bd423167f27010d86169e061ebca1f00549961da763eca51055db,1591
  lmnr/opentelemetry_lib/tracing/context.py,sha256=83f842be0fc29a96647cbf005c39ea761b0fb5913c4102f965411f47906a6135,4103
  lmnr/opentelemetry_lib/tracing/exporter.py,sha256=6af8e61fd873e8f5db315d9b9f1edbf46b860ba7e50140f0bdcc6864c6d35a03,2082
  lmnr/opentelemetry_lib/tracing/instruments.py,sha256=e3c12315bda301416d1f3bc8d354ad16d4da211e2ecfa019265f4b565307c118,5655
- lmnr/opentelemetry_lib/tracing/processor.py,sha256=fd11e4d48eb5932f47898b8f70b8b5880f7ee7e58478f1ef20caff20e1f34252,3381
+ lmnr/opentelemetry_lib/tracing/processor.py,sha256=cbc70f138e70c878ef57b02a2c46ef48dd7f694a522623a82dff1623b73d1e1c,3353
  lmnr/opentelemetry_lib/tracing/tracer.py,sha256=33769a9a97385f5697eb0e0a6b1813a57ed956c7a8379d7ac2523e700e7dd528,1362
  lmnr/opentelemetry_lib/utils/__init__.py,sha256=a4d85fd06def4dde5c728734de2d4c5c36eb89c49a8aa09b8b50cb5a149e90af,604
  lmnr/opentelemetry_lib/utils/json_encoder.py,sha256=74ae9bfdac6bef42182fb56ff9bbb8c27b6f0c3bb29eda2ab0769d76a5fb3f9f,463
@@ -62,7 +62,7 @@ lmnr/sdk/browser/__init__.py,sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b
  lmnr/sdk/browser/browser_use_otel.py,sha256=37d26de1af37f76774af176cb226e0b04988fc3bf419a2fd899ad36e79562fad,5104
  lmnr/sdk/browser/patchright_otel.py,sha256=9d22ab1f28f1eddbcfd0032a14fe306bfe00bfc7f11128cb99836c4dd15fb7c8,4800
  lmnr/sdk/browser/playwright_otel.py,sha256=50c0a5a75155a3a7ff5db84790ffb409c9cbd0351eef212d83d923893730223b,9459
- lmnr/sdk/browser/pw_utils.py,sha256=c84079f28c268c531ef86ed923028639671bb7c34a364f12f796556faa747393,20096
+ lmnr/sdk/browser/pw_utils.py,sha256=e8b839d729bf4d80e07db0a8bb616b41da43219a019d2ce9596c1b6e9f848074,29061
  lmnr/sdk/browser/recorder/record.umd.min.cjs,sha256=f09c09052c2fc474efb0405e63d8d26ed2184b994513ce8aee04efdac8be155d,181235
  lmnr/sdk/browser/utils.py,sha256=4a668776d2938108d25fbcecd61c8e1710a4da3e56230d5fefca5964dd09e3c1,2371
  lmnr/sdk/client/asynchronous/async_client.py,sha256=e8feae007506cd2e4b08e72706f5f1bb4ea54492b4aa6b68ef184a129de8f466,4948
@@ -82,15 +82,15 @@ lmnr/sdk/client/synchronous/resources/evaluators.py,sha256=3cd6a17e7a9cc0441c2d2
  lmnr/sdk/client/synchronous/resources/tags.py,sha256=123deec43128662c21cb275b2df6a102372f875315b0bd36806555394c1d4b5b,2270
  lmnr/sdk/client/synchronous/sync_client.py,sha256=0bebe88e3aed689505e9ed3d32036f76df4c3496e4d659162bd41abedc026f16,5299
  lmnr/sdk/datasets.py,sha256=3fd851c5f97bf88eaa84b1451a053eaff23b4497cbb45eac2f9ea0e5f2886c00,1708
- lmnr/sdk/decorators.py,sha256=0c6b95b92ec8023f28cd15ddc47849888fa91f2534d575f626e3557f5f0a0c02,6451
+ lmnr/sdk/decorators.py,sha256=2ccf9ecd9616ad1d52301febd8af630288ba63db2d36302236f606a460fc08ca,6516
  lmnr/sdk/eval_control.py,sha256=291394ac385c653ae9b5167e871bebeb4fe8fc6b7ff2ed38e636f87015dcba86,184
  lmnr/sdk/evaluations.py,sha256=b41f7737b084dc5b64b2952659b729622e0918fd492bfcddde7177d1a1c690ae,22572
- lmnr/sdk/laminar.py,sha256=c38590ec5d65d5dedad37258f13f4f88f989e9ae10cbdb30bd1acdad5443e1d6,34427
+ lmnr/sdk/laminar.py,sha256=24d680407ce694f1a7ec0e9c0524eae3deb7d638ad5caff3a591ddf7963ad480,37533
  lmnr/sdk/log.py,sha256=9edfd83263f0d4845b1b2d1beeae2b4ed3f8628de941f371a893d72b79c348d4,2213
- lmnr/sdk/types.py,sha256=c4868d7d1df2fbd108fe5990900675bff2e595f6ff207afcf166ad4853f5eb0a,12670
+ lmnr/sdk/types.py,sha256=49358ddd00a1f1e75ffc8cbba62aaf49f4d3031eebf52de86005715a54a846b7,13244
  lmnr/sdk/utils.py,sha256=4beb884ae6fbbc7d8cf639b036b726ea6a2a658f0a6386faf5735a13d706a2d8,5039
- lmnr/version.py,sha256=66adfc190221595f22dad751cd9efbdcff5c15c74bfe6929880661e779e139a7,1321
- lmnr-0.7.2.dist-info/WHEEL,sha256=ab6157bc637547491fb4567cd7ddf26b04d63382916ca16c29a5c8e94c9c9ef7,79
- lmnr-0.7.2.dist-info/entry_points.txt,sha256=abdf3411b7dd2d7329a241f2da6669bab4e314a747a586ecdb9f888f3035003c,39
- lmnr-0.7.2.dist-info/METADATA,sha256=dc224f7405f3038e9abb2d6dc0ffc8b06c0824e76e887b95b23f71c8636124f5,14196
- lmnr-0.7.2.dist-info/RECORD,,
+ lmnr/version.py,sha256=d1c12ee42adc0a3de888632fcfe36693a8dd749bbed2ad12dd1ab2783608bcd2,1321
+ lmnr-0.7.4.dist-info/WHEEL,sha256=ab6157bc637547491fb4567cd7ddf26b04d63382916ca16c29a5c8e94c9c9ef7,79
+ lmnr-0.7.4.dist-info/entry_points.txt,sha256=abdf3411b7dd2d7329a241f2da6669bab4e314a747a586ecdb9f888f3035003c,39
+ lmnr-0.7.4.dist-info/METADATA,sha256=91677f6dc8a33790ec9e9e3bc6e1e90561d335359f3a73590c563aed8fb90163,14196
+ lmnr-0.7.4.dist-info/RECORD,,