lmnr 0.7.2__tar.gz → 0.7.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lmnr-0.7.2 → lmnr-0.7.3}/PKG-INFO +1 -1
- {lmnr-0.7.2 → lmnr-0.7.3}/pyproject.toml +2 -2
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/__init__.py +3 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/__init__.py +3 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py +12 -1
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/__init__.py +11 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/pw_utils.py +281 -47
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/decorators.py +2 -1
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/laminar.py +9 -1
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/types.py +13 -1
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/version.py +1 -1
- {lmnr-0.7.2 → lmnr-0.7.3}/README.md +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/cli.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/.flake8 +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/decorators/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/litellm/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/litellm/utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/config.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_emitter.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_models.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/span_utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/streaming.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/version.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/config.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/schema_utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/config.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_emitter.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_models.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/span_utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/version.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/completion_wrappers.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/config.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_emitter.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_models.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v0/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/responses_wrappers.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/version.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/opentelemetry/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/skyvern/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/threading/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/_instrument_initializers.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/attributes.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/context.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/exporter.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/instruments.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/processor.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/tracer.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/utils/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/utils/json_encoder.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/utils/package_check.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/py.typed +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/browser_use_otel.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/patchright_otel.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/playwright_otel.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/recorder/record.umd.min.cjs +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/utils.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/async_client.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/resources/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/resources/agent.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/resources/base.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/resources/browser_events.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/resources/evals.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/resources/evaluators.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/asynchronous/resources/tags.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/resources/__init__.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/resources/agent.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/resources/base.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/resources/browser_events.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/resources/evals.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/resources/evaluators.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/resources/tags.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/client/synchronous/sync_client.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/datasets.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/eval_control.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/evaluations.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/log.py +0 -0
- {lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/utils.py +0 -0

{lmnr-0.7.2 → lmnr-0.7.3}/pyproject.toml

@@ -6,7 +6,7 @@
 
 [project]
 name = "lmnr"
-version = "0.7.2"
+version = "0.7.3"
 description = "Python SDK for Laminar"
 authors = [
     { name = "lmnr.ai", email = "founders@lmnr.ai" }

@@ -124,7 +124,7 @@ dev = [
     "pytest-asyncio>=0.26.0",
     "playwright>=1.52.0",
     "vcrpy>=7.0.0",
-    "openai>=1.
+    "openai>=1.99.3",
    "pytest-recording>=0.13.4",
    "patchright>=1.52.3",
    "google-genai>=1.19.0",

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/__init__.py

@@ -3,6 +3,7 @@ from .sdk.client.asynchronous.async_client import AsyncLaminarClient
 from .sdk.datasets import EvaluationDataset, LaminarDataset
 from .sdk.evaluations import evaluate
 from .sdk.laminar import Laminar
+from .sdk.types import SessionRecordingOptions, MaskInputOptions
 from .sdk.types import (
     AgentOutput,
     FinalOutputChunkContent,

@@ -38,4 +39,6 @@ __all__ = [
     "get_tracer",
     "evaluate",
     "observe",
+    "SessionRecordingOptions",
+    "MaskInputOptions",
 ]
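
Both new names are re-exported from the package root alongside the existing public API. They are TypedDicts (defined in src/lmnr/sdk/types.py further down in this diff), so they are ordinary dicts at runtime. A minimal sketch, not taken from the package, of building the options object that Laminar.initialize accepts (see the src/lmnr/sdk/laminar.py hunks below):

```python
from lmnr import MaskInputOptions, SessionRecordingOptions

mask = MaskInputOptions(
    textarea=True,   # mask <textarea> contents in the session recording
    text=True,       # mask plain text inputs
    number=False,
    select=False,
    email=True,
    tel=False,
)
options: SessionRecordingOptions = {"mask_input_options": mask}

# TypedDicts are plain dicts at runtime.
assert isinstance(mask, dict) and options["mask_input_options"]["email"] is True
```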

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/__init__.py

@@ -6,6 +6,7 @@ from opentelemetry.sdk.resources import SERVICE_NAME
 
 from lmnr.opentelemetry_lib.tracing.instruments import Instruments
 from lmnr.opentelemetry_lib.tracing import TracerWrapper
+from lmnr.sdk.types import SessionRecordingOptions
 
 MAX_MANUAL_SPAN_PAYLOAD_SIZE = 1024 * 1024 * 10  # 10MB
 

@@ -30,6 +31,7 @@ class TracerManager:
        timeout_seconds: int = 30,
        set_global_tracer_provider: bool = True,
        otel_logger_level: int = logging.ERROR,
+        session_recording_options: SessionRecordingOptions | None = None,
    ) -> None:
        enable_content_tracing = True
 

@@ -50,6 +52,7 @@ class TracerManager:
            timeout_seconds=timeout_seconds,
            set_global_tracer_provider=set_global_tracer_provider,
            otel_logger_level=otel_logger_level,
+            session_recording_options=session_recording_options,
        )
 
    @staticmethod

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py

@@ -1012,7 +1012,7 @@ def _parse_tool_calls(
        # Handle dict or ChatCompletionMessageToolCall
        if isinstance(tool_call, dict):
            tool_call_data = copy.deepcopy(tool_call)
-        elif
+        elif _is_tool_call_model(tool_call):
            tool_call_data = tool_call.model_dump()
        elif isinstance(tool_call, FunctionCall):
            function_call = tool_call.model_dump()

@@ -1029,6 +1029,17 @@ def _parse_tool_calls(
    return result
 
 
+def _is_tool_call_model(tool_call):
+    try:
+        from openai.types.chat.chat_completion_message_tool_call import (
+            ChatCompletionMessageFunctionToolCall,
+        )
+
+        return isinstance(tool_call, ChatCompletionMessageFunctionToolCall)
+    except Exception:
+        return False
+
+
 @singledispatch
 def _parse_choice_event(choice) -> ChoiceEvent:
    has_message = choice.message is not None

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/opentelemetry_lib/tracing/__init__.py

@@ -5,6 +5,7 @@ import threading
 from lmnr.opentelemetry_lib.tracing.processor import LaminarSpanProcessor
 from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
 from lmnr.sdk.client.synchronous.sync_client import LaminarClient
+from lmnr.sdk.types import SessionRecordingOptions
 from lmnr.sdk.log import VerboseColorfulFormatter
 from lmnr.opentelemetry_lib.tracing.instruments import (
    Instruments,

@@ -38,6 +39,7 @@ MAX_EVENTS_OR_ATTRIBUTES_PER_SPAN = 5000
 class TracerWrapper(object):
    resource_attributes: dict = {}
    enable_content_tracing: bool = True
+    session_recording_options: SessionRecordingOptions = {}
    _lock = threading.Lock()
    _tracer_provider: TracerProvider | None = None
    _logger: logging.Logger

@@ -62,6 +64,7 @@ class TracerWrapper(object):
        timeout_seconds: int = 30,
        set_global_tracer_provider: bool = True,
        otel_logger_level: int = logging.ERROR,
+        session_recording_options: SessionRecordingOptions | None = None,
    ) -> "TracerWrapper":
        # Silence some opentelemetry warnings
        logging.getLogger("opentelemetry.trace").setLevel(otel_logger_level)

@@ -71,6 +74,9 @@ class TracerWrapper(object):
        if not hasattr(cls, "instance"):
            cls._initialize_logger(cls)
            obj = super(TracerWrapper, cls).__new__(cls)
+
+            # Store session recording options
+            cls.session_recording_options = session_recording_options or {}
 
            obj._client = LaminarClient(
                base_url=base_http_url,

@@ -243,6 +249,11 @@ class TracerWrapper(object):
            self._logger.warning("TracerWrapper not fully initialized, cannot flush")
            return False
        return self._span_processor.force_flush()
+
+    @classmethod
+    def get_session_recording_options(cls) -> SessionRecordingOptions:
+        """Get the session recording options set during initialization."""
+        return cls.session_recording_options
 
    def get_tracer(self):
        if self._tracer_provider is None:
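
The options are stored on the TracerWrapper class at construction time and exposed through the new get_session_recording_options classmethod, which is what the browser-recording code in pw_utils.py reads back later. A minimal sketch of that read path, assuming Laminar.initialize has already run (the print is illustrative, not SDK code):

```python
from lmnr.opentelemetry_lib.tracing import TracerWrapper

# Returns whatever session_recording_options was passed to Laminar.initialize,
# or {} if none was provided.
options = TracerWrapper.get_session_recording_options()
mask_options = options.get("mask_input_options")
if mask_options:
    print("masking text inputs:", mask_options.get("text", False))
```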

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/browser/pw_utils.py

@@ -1,5 +1,7 @@
+import orjson
 import logging
 import os
+import time
 
 from opentelemetry import trace
 

@@ -9,6 +11,8 @@ from lmnr.sdk.browser.utils import retry_sync, retry_async
 from lmnr.sdk.client.synchronous.sync_client import LaminarClient
 from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
 from lmnr.opentelemetry_lib.tracing.context import get_current_context
+from lmnr.opentelemetry_lib.tracing import TracerWrapper
+from lmnr.sdk.types import MaskInputOptions
 
 try:
    if is_package_installed("playwright"):

@@ -32,17 +36,24 @@ except ImportError as e:
 
 logger = logging.getLogger(__name__)
 
+OLD_BUFFER_TIMEOUT = 60
+
 current_dir = os.path.dirname(os.path.abspath(__file__))
 with open(os.path.join(current_dir, "recorder", "record.umd.min.cjs"), "r") as f:
    RRWEB_CONTENT = f"() => {{ {f.read()} }}"
 
 INJECT_PLACEHOLDER = """
-() => {
+(mask_input_options) => {
    const BATCH_TIMEOUT = 2000; // Send events after 2 seconds
    const MAX_WORKER_PROMISES = 50; // Max concurrent worker promises
    const HEARTBEAT_INTERVAL = 1000;
-
+    const CHUNK_SIZE = 256 * 1024; // 256KB chunks
+    const CHUNK_SEND_DELAY = 100; // 100ms delay between chunks
+
    window.lmnrRrwebEventsBatch = [];
+    window.lmnrChunkQueue = [];
+    window.lmnrChunkSequence = 0;
+    window.lmnrCurrentBatchId = null;
 
    // Create a Web Worker for heavy JSON processing with chunked processing
    const createCompressionWorker = () => {

@@ -99,6 +110,25 @@ INJECT_PLACEHOLDER = """
    let compressionWorker = null;
    let workerPromises = new Map();
    let workerId = 0;
+    let workerSupported = null; // null = unknown, true = supported, false = blocked by CSP
+
+    // Test if workers are supported (not blocked by CSP)
+    function testWorkerSupport() {
+        if (workerSupported !== null) {
+            return workerSupported;
+        }
+
+        try {
+            const testWorker = createCompressionWorker();
+            testWorker.terminate();
+            workerSupported = true;
+            return true;
+        } catch (error) {
+            console.warn('Web Workers blocked by CSP, will use main thread compression:', error);
+            workerSupported = false;
+            return false;
+        }
+    }
 
    // Cleanup function for worker
    const cleanupWorker = () => {

@@ -222,6 +252,11 @@ INJECT_PLACEHOLDER = """
    // Alternative: Use transferable objects for maximum efficiency
    async function compressLargeObjectTransferable(data) {
        try {
+            // Check if workers are supported first
+            if (!testWorkerSupport()) {
+                return compressSmallObject(data);
+            }
+
            // Clean up stale promises first
            cleanupStalePromises();
 

@@ -281,49 +316,60 @@ INJECT_PLACEHOLDER = """
 
    // Worker-based compression for large objects
    async function compressLargeObject(data, isLarge = true) {
+        // Check if workers are supported first - if not, use main thread compression
+        if (!testWorkerSupport()) {
+            return await compressSmallObject(data);
+        }
+
        try {
            // Use transferable objects for better performance
            return await compressLargeObjectTransferable(data);
        } catch (error) {
            console.warn('Transferable failed, falling back to string method:', error);
-
-
+            try {
+                // Fallback to string method with worker
+                const jsonString = await stringifyNonBlocking(data);
+
+                return new Promise((resolve, reject) => {
+                    if (!compressionWorker) {
+                        compressionWorker = createCompressionWorker();
+                        compressionWorker.onmessage = (e) => {
+                            const { id, success, data: result, error } = e.data;
+                            const promise = workerPromises.get(id);
+                            if (promise) {
+                                workerPromises.delete(id);
+                                if (success) {
+                                    promise.resolve(result);
+                                } else {
+                                    promise.reject(new Error(error));
+                                }
+                            }
+                        };
+
+                        compressionWorker.onerror = (error) => {
+                            console.error('Compression worker error:', error);
+                            cleanupWorker();
+                        };
+                    }
 
-
-
-
-
-
-
-                        if (promise) {
+                    const id = ++workerId;
+                    workerPromises.set(id, { resolve, reject });
+
+                    // Set timeout to prevent hanging promises
+                    setTimeout(() => {
+                        if (workerPromises.has(id)) {
                            workerPromises.delete(id);
-
-                            promise.resolve(result);
-                        } else {
-                            promise.reject(new Error(error));
-                        }
+                            reject(new Error('Compression timeout'));
                        }
-            };
+                    }, 10000);
 
-            compressionWorker.
-
-
-
-
-
-
-            workerPromises.set(id, { resolve, reject });
-
-            // Set timeout to prevent hanging promises
-            setTimeout(() => {
-                if (workerPromises.has(id)) {
-                    workerPromises.delete(id);
-                    reject(new Error('Compression timeout'));
-                }
-            }, 10000);
-
-            compressionWorker.postMessage({ jsonString, id });
-            });
+                    compressionWorker.postMessage({ jsonString, id });
+                });
+            } catch (workerError) {
+                console.warn('Worker creation failed, falling back to main thread compression:', workerError);
+                // Final fallback: compress on main thread (may block UI but will work)
+                return await compressSmallObject(data);
+            }
        }
    }
 

@@ -343,15 +389,82 @@ INJECT_PLACEHOLDER = """
        return false;
    }
 
+    // Create chunks from a string with metadata
+    function createChunks(str, batchId) {
+        const chunks = [];
+        const totalChunks = Math.ceil(str.length / CHUNK_SIZE);
+
+        for (let i = 0; i < str.length; i += CHUNK_SIZE) {
+            const chunk = str.slice(i, i + CHUNK_SIZE);
+            chunks.push({
+                batchId: batchId,
+                chunkIndex: chunks.length,
+                totalChunks: totalChunks,
+                data: chunk,
+                isFinal: chunks.length === totalChunks - 1
+            });
+        }
+
+        return chunks;
+    }
+
+    // Send chunks with flow control
+    async function sendChunks(chunks) {
+        if (typeof window.lmnrSendEvents !== 'function') {
+            return;
+        }
+
+        window.lmnrChunkQueue.push(...chunks);
+
+        // Process queue
+        while (window.lmnrChunkQueue.length > 0) {
+            const chunk = window.lmnrChunkQueue.shift();
+            try {
+                await window.lmnrSendEvents(chunk);
+                // Small delay between chunks to avoid overwhelming CDP
+                await new Promise(resolve => setTimeout(resolve, CHUNK_SEND_DELAY));
+            } catch (error) {
+                console.error('Failed to send chunk:', error);
+                // On error, clear failed chunk batch from queue
+                window.lmnrChunkQueue = window.lmnrChunkQueue.filter(c => c.batchId !== chunk.batchId);
+                break;
+            }
+        }
+    }
+
    async function sendBatchIfReady() {
        if (window.lmnrRrwebEventsBatch.length > 0 && typeof window.lmnrSendEvents === 'function') {
            const events = window.lmnrRrwebEventsBatch;
            window.lmnrRrwebEventsBatch = [];
 
            try {
-
+                // Generate unique batch ID
+                const batchId = `${Date.now()}_${window.lmnrChunkSequence++}`;
+                window.lmnrCurrentBatchId = batchId;
+
+                // Stringify the entire batch
+                const batchString = JSON.stringify(events);
+
+                // Check size and chunk if necessary
+                if (batchString.length <= CHUNK_SIZE) {
+                    // Small enough to send as single chunk
+                    const chunk = {
+                        batchId: batchId,
+                        chunkIndex: 0,
+                        totalChunks: 1,
+                        data: batchString,
+                        isFinal: true
+                    };
+                    await window.lmnrSendEvents(chunk);
+                } else {
+                    // Need to chunk
+                    const chunks = createChunks(batchString, batchId);
+                    await sendChunks(chunks);
+                }
            } catch (error) {
                console.error('Failed to send events:', error);
+                // Clear batch to prevent memory buildup
+                window.lmnrRrwebEventsBatch = [];
            }
        }
    }
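
Each batch of rrweb events is serialized once and then split into chunks of at most 256 KB, and every chunk carries enough metadata (batchId, chunkIndex, totalChunks, data, isFinal) for the Python side to reassemble it. A rough sketch of the same splitting arithmetic in Python, purely illustrative and not part of the SDK, with a made-up 600 KB payload:

```python
import math

CHUNK_SIZE = 256 * 1024  # mirrors the constant added to INJECT_PLACEHOLDER

def make_chunks(batch_string: str, batch_id: str) -> list[dict]:
    """Split a serialized batch into chunk payloads shaped like the in-page JS produces."""
    total = math.ceil(len(batch_string) / CHUNK_SIZE)
    return [
        {
            "batchId": batch_id,
            "chunkIndex": i,
            "totalChunks": total,
            "data": batch_string[i * CHUNK_SIZE : (i + 1) * CHUNK_SIZE],
            "isFinal": i == total - 1,
        }
        for i in range(total)
    ]

# A hypothetical 600 KB batch becomes 3 chunks: 256 KB + 256 KB + 88 KB.
chunks = make_chunks("x" * (600 * 1024), "1733000000000_0")
print([len(c["data"]) for c in chunks], chunks[-1]["isFinal"])
# [262144, 262144, 90112] True
```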

@@ -387,7 +500,16 @@ INJECT_PLACEHOLDER = """
        },
        recordCanvas: true,
        collectFonts: true,
-        recordCrossOriginIframes: true
+        recordCrossOriginIframes: true,
+        maskInputOptions: {
+            password: true,
+            textarea: mask_input_options.textarea || false,
+            text: mask_input_options.text || false,
+            number: mask_input_options.number || false,
+            select: mask_input_options.select || false,
+            email: mask_input_options.email || false,
+            tel: mask_input_options.tel || false,
+        }
    });
 
    function heartbeat() {

@@ -407,6 +529,30 @@ INJECT_PLACEHOLDER = """
 """
 
 
+def get_mask_input_setting() -> MaskInputOptions:
+    """Get the mask_input setting from session recording configuration."""
+    try:
+        config = TracerWrapper.get_session_recording_options()
+        return config.get("mask_input_options", MaskInputOptions(
+            textarea=False,
+            text=False,
+            number=False,
+            select=False,
+            email=False,
+            tel=False,
+        ))
+    except (AttributeError, Exception):
+        # Fallback to default configuration if TracerWrapper is not initialized
+        return MaskInputOptions(
+            textarea=False,
+            text=False,
+            number=False,
+            select=False,
+            email=False,
+            tel=False,
+        )
+
+
 def inject_session_recorder_sync(page: SyncPage):
    try:
        try:
try:
|
@@ -435,7 +581,7 @@ def inject_session_recorder_sync(page: SyncPage):
|
|
435
581
|
return
|
436
582
|
|
437
583
|
try:
|
438
|
-
page.evaluate(INJECT_PLACEHOLDER)
|
584
|
+
page.evaluate(INJECT_PLACEHOLDER, get_mask_input_setting())
|
439
585
|
except Exception as e:
|
440
586
|
logger.debug(f"Failed to inject session recorder: {e}")
|
441
587
|
|
@@ -471,7 +617,7 @@ async def inject_session_recorder_async(page: Page):
|
|
471
617
|
return
|
472
618
|
|
473
619
|
try:
|
474
|
-
await page.evaluate(INJECT_PLACEHOLDER)
|
620
|
+
await page.evaluate(INJECT_PLACEHOLDER, get_mask_input_setting())
|
475
621
|
except Exception as e:
|
476
622
|
logger.debug(f"Failed to inject session recorder placeholder: {e}")
|
477
623
|
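
Playwright's page.evaluate serializes its optional second argument and hands it to the page function, which is why INJECT_PLACEHOLDER now declares a (mask_input_options) parameter. A minimal, self-contained sketch of that mechanism (a standalone script, not lmnr code; the page and the arrow function are made up for illustration):

```python
from playwright.sync_api import sync_playwright

# The dict passed as the second argument to page.evaluate arrives in the browser
# as the page function's single parameter, just like get_mask_input_setting()'s
# return value arrives as mask_input_options in INJECT_PLACEHOLDER.
with sync_playwright() as p:
    browser = p.chromium.launch()
    page = browser.new_page()
    page.goto("about:blank")
    masked = page.evaluate(
        "(opts) => Object.keys(opts).filter((k) => opts[k])",
        {"text": True, "email": True, "number": False},
    )
    print(masked)  # ['text', 'email']
    browser.close()
```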
|
@@ -486,11 +632,54 @@ def start_recording_events_sync(page: SyncPage, session_id: str, client: Laminar
|
|
486
632
|
span = trace.get_current_span(ctx)
|
487
633
|
trace_id = format(span.get_span_context().trace_id, "032x")
|
488
634
|
span.set_attribute("lmnr.internal.has_browser_session", True)
|
489
|
-
|
490
|
-
|
635
|
+
|
636
|
+
# Buffer for reassembling chunks
|
637
|
+
chunk_buffers = {}
|
638
|
+
|
639
|
+
def send_events_from_browser(chunk):
|
491
640
|
try:
|
492
|
-
|
493
|
-
|
641
|
+
# Handle chunked data
|
642
|
+
batch_id = chunk['batchId']
|
643
|
+
chunk_index = chunk['chunkIndex']
|
644
|
+
total_chunks = chunk['totalChunks']
|
645
|
+
data = chunk['data']
|
646
|
+
|
647
|
+
# Initialize buffer for this batch if needed
|
648
|
+
if batch_id not in chunk_buffers:
|
649
|
+
chunk_buffers[batch_id] = {
|
650
|
+
'chunks': {},
|
651
|
+
'total': total_chunks,
|
652
|
+
'timestamp': time.time()
|
653
|
+
}
|
654
|
+
|
655
|
+
# Store chunk
|
656
|
+
chunk_buffers[batch_id]['chunks'][chunk_index] = data
|
657
|
+
|
658
|
+
# Check if we have all chunks
|
659
|
+
if len(chunk_buffers[batch_id]['chunks']) == total_chunks:
|
660
|
+
# Reassemble the full message
|
661
|
+
full_data = ''.join(chunk_buffers[batch_id]['chunks'][i] for i in range(total_chunks))
|
662
|
+
|
663
|
+
# Parse the JSON
|
664
|
+
events = orjson.loads(full_data)
|
665
|
+
|
666
|
+
# Send to server
|
667
|
+
if events and len(events) > 0:
|
668
|
+
client._browser_events.send(session_id, trace_id, events)
|
669
|
+
|
670
|
+
# Clean up buffer
|
671
|
+
del chunk_buffers[batch_id]
|
672
|
+
|
673
|
+
# Clean up old incomplete buffers
|
674
|
+
current_time = time.time()
|
675
|
+
to_delete = []
|
676
|
+
for bid, buffer in chunk_buffers.items():
|
677
|
+
if current_time - buffer['timestamp'] > OLD_BUFFER_TIMEOUT:
|
678
|
+
to_delete.append(bid)
|
679
|
+
for bid in to_delete:
|
680
|
+
logger.debug(f"Cleaning up incomplete chunk buffer: {bid}")
|
681
|
+
del chunk_buffers[bid]
|
682
|
+
|
494
683
|
except Exception as e:
|
495
684
|
logger.debug(f"Could not send events: {e}")
|
496
685
|
|
@@ -519,10 +708,55 @@ async def start_recording_events_async(
|
|
519
708
|
trace_id = format(span.get_span_context().trace_id, "032x")
|
520
709
|
span.set_attribute("lmnr.internal.has_browser_session", True)
|
521
710
|
|
522
|
-
|
711
|
+
# Buffer for reassembling chunks
|
712
|
+
chunk_buffers = {}
|
713
|
+
|
714
|
+
async def send_events_from_browser(chunk):
|
523
715
|
try:
|
524
|
-
|
525
|
-
|
716
|
+
# Handle chunked data
|
717
|
+
batch_id = chunk['batchId']
|
718
|
+
chunk_index = chunk['chunkIndex']
|
719
|
+
total_chunks = chunk['totalChunks']
|
720
|
+
data = chunk['data']
|
721
|
+
|
722
|
+
# Initialize buffer for this batch if needed
|
723
|
+
if batch_id not in chunk_buffers:
|
724
|
+
chunk_buffers[batch_id] = {
|
725
|
+
'chunks': {},
|
726
|
+
'total': total_chunks,
|
727
|
+
'timestamp': time.time()
|
728
|
+
}
|
729
|
+
|
730
|
+
# Store chunk
|
731
|
+
chunk_buffers[batch_id]['chunks'][chunk_index] = data
|
732
|
+
|
733
|
+
# Check if we have all chunks
|
734
|
+
if len(chunk_buffers[batch_id]['chunks']) == total_chunks:
|
735
|
+
# Reassemble the full message
|
736
|
+
full_data = ''
|
737
|
+
for i in range(total_chunks):
|
738
|
+
full_data += chunk_buffers[batch_id]['chunks'][i]
|
739
|
+
|
740
|
+
# Parse the JSON
|
741
|
+
events = orjson.loads(full_data)
|
742
|
+
|
743
|
+
# Send to server
|
744
|
+
if events and len(events) > 0:
|
745
|
+
await client._browser_events.send(session_id, trace_id, events)
|
746
|
+
|
747
|
+
# Clean up buffer
|
748
|
+
del chunk_buffers[batch_id]
|
749
|
+
|
750
|
+
# Clean up old incomplete buffers
|
751
|
+
current_time = time.time()
|
752
|
+
to_delete = []
|
753
|
+
for bid, buffer in chunk_buffers.items():
|
754
|
+
if current_time - buffer['timestamp'] > OLD_BUFFER_TIMEOUT:
|
755
|
+
to_delete.append(bid)
|
756
|
+
for bid in to_delete:
|
757
|
+
logger.debug(f"Cleaning up incomplete chunk buffer: {bid}")
|
758
|
+
del chunk_buffers[bid]
|
759
|
+
|
526
760
|
except Exception as e:
|
527
761
|
logger.debug(f"Could not send events: {e}")
|
528
762
|
|

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/decorators.py

@@ -102,7 +102,8 @@ def observe(
            ):
                logger.warning("Tags must be a list of strings. Tags will be ignored.")
            else:
-
+                # list(set(tags)) to deduplicate tags
+                association_properties["tags"] = list(set(tags))
        if input_formatter is not None and ignore_input:
            logger.warning(
                f"observe, function {func.__name__}: Input formatter"

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/laminar.py

@@ -45,6 +45,7 @@ from .log import VerboseColorfulFormatter
 
 from .types import (
    LaminarSpanContext,
+    SessionRecordingOptions,
    TraceType,
 )
 

@@ -73,6 +74,7 @@ class Laminar:
        export_timeout_seconds: int | None = None,
        set_global_tracer_provider: bool = True,
        otel_logger_level: int = logging.ERROR,
+        session_recording_options: SessionRecordingOptions | None = None,
    ):
        """Initialize Laminar context across the application.
        This method must be called before using any other Laminar methods or

@@ -119,6 +121,10 @@ class Laminar:
                Defaults to True.
            otel_logger_level (int, optional): OpenTelemetry logger level. Defaults\
                to logging.ERROR.
+            session_recording_options (SessionRecordingOptions | None, optional): Options\
+                for browser session recording. Currently supports 'mask_input'\
+                (bool) to control whether input fields are masked during recording.\
+                Defaults to None (uses default masking behavior).
 
        Raises:
            ValueError: If project API key is not set

@@ -179,6 +185,7 @@ class Laminar:
            timeout_seconds=export_timeout_seconds,
            set_global_tracer_provider=set_global_tracer_provider,
            otel_logger_level=otel_logger_level,
+            session_recording_options=session_recording_options,
        )
 
    @classmethod

@@ -741,7 +748,8 @@ class Laminar:
                "Tags must be a list of strings. Tags will be ignored."
            )
            return
-
+        # list(set(tags)) to deduplicate tags
+        span.set_attribute(f"{ASSOCIATION_PROPERTIES}.tags", list(set(tags)))
 
    @classmethod
    def set_trace_session_id(cls, session_id: str | None = None):

{lmnr-0.7.2 → lmnr-0.7.3}/src/lmnr/sdk/types.py

@@ -9,7 +9,7 @@ import uuid
 
 from enum import Enum
 from opentelemetry.trace import SpanContext, TraceFlags
-from typing import Any, Awaitable, Callable, Literal, Optional
+from typing import Any, Awaitable, Callable, Literal, Optional, TypedDict
 
 from .utils import serialize
 

@@ -346,3 +346,15 @@ class RunAgentResponseChunk(pydantic.RootModel):
        | ErrorChunkContent
        | TimeoutChunkContent
    )
+
+
+class MaskInputOptions(TypedDict):
+    textarea: bool | None
+    text: bool | None
+    number: bool | None
+    select: bool | None
+    email: bool | None
+    tel: bool | None
+
+class SessionRecordingOptions(TypedDict):
+    mask_input_options: MaskInputOptions | None