lmnr 0.4.53.dev0__py3-none-any.whl → 0.7.26__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
- lmnr/__init__.py +32 -11
- lmnr/cli/__init__.py +270 -0
- lmnr/cli/datasets.py +371 -0
- lmnr/cli/evals.py +111 -0
- lmnr/cli/rules.py +42 -0
- lmnr/opentelemetry_lib/__init__.py +70 -0
- lmnr/opentelemetry_lib/decorators/__init__.py +337 -0
- lmnr/opentelemetry_lib/litellm/__init__.py +685 -0
- lmnr/opentelemetry_lib/litellm/utils.py +100 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/__init__.py +849 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/config.py +13 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_emitter.py +211 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/span_utils.py +401 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/streaming.py +425 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/utils.py +332 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/anthropic/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/claude_agent/__init__.py +451 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/claude_agent/proxy.py +144 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_agent/__init__.py +100 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_computer/__init__.py +476 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/cua_computer/utils.py +12 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/__init__.py +599 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/config.py +9 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/schema_utils.py +26 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/google_genai/utils.py +330 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/__init__.py +488 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/config.py +8 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_emitter.py +143 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/span_utils.py +229 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/utils.py +92 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/groq/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/kernel/__init__.py +381 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/kernel/utils.py +36 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/__init__.py +121 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/langgraph/utils.py +60 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/__init__.py +61 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/__init__.py +472 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/chat_wrappers.py +1185 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/completion_wrappers.py +305 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/config.py +16 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +312 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_emitter.py +100 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/event_models.py +41 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +68 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/utils.py +197 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v0/__init__.py +176 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/__init__.py +368 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +325 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +135 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/v1/responses_wrappers.py +786 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openai/version.py +1 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/openhands_ai/__init__.py +388 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/opentelemetry/__init__.py +69 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/skyvern/__init__.py +191 -0
- lmnr/opentelemetry_lib/opentelemetry/instrumentation/threading/__init__.py +197 -0
- lmnr/opentelemetry_lib/tracing/__init__.py +263 -0
- lmnr/opentelemetry_lib/tracing/_instrument_initializers.py +516 -0
- lmnr/{openllmetry_sdk → opentelemetry_lib}/tracing/attributes.py +21 -8
- lmnr/opentelemetry_lib/tracing/context.py +200 -0
- lmnr/opentelemetry_lib/tracing/exporter.py +153 -0
- lmnr/opentelemetry_lib/tracing/instruments.py +140 -0
- lmnr/opentelemetry_lib/tracing/processor.py +193 -0
- lmnr/opentelemetry_lib/tracing/span.py +398 -0
- lmnr/opentelemetry_lib/tracing/tracer.py +57 -0
- lmnr/opentelemetry_lib/tracing/utils.py +62 -0
- lmnr/opentelemetry_lib/utils/package_check.py +18 -0
- lmnr/opentelemetry_lib/utils/wrappers.py +11 -0
- lmnr/sdk/browser/__init__.py +0 -0
- lmnr/sdk/browser/background_send_events.py +158 -0
- lmnr/sdk/browser/browser_use_cdp_otel.py +100 -0
- lmnr/sdk/browser/browser_use_otel.py +142 -0
- lmnr/sdk/browser/bubus_otel.py +71 -0
- lmnr/sdk/browser/cdp_utils.py +518 -0
- lmnr/sdk/browser/inject_script.js +514 -0
- lmnr/sdk/browser/patchright_otel.py +151 -0
- lmnr/sdk/browser/playwright_otel.py +322 -0
- lmnr/sdk/browser/pw_utils.py +363 -0
- lmnr/sdk/browser/recorder/record.umd.min.cjs +84 -0
- lmnr/sdk/browser/utils.py +70 -0
- lmnr/sdk/client/asynchronous/async_client.py +180 -0
- lmnr/sdk/client/asynchronous/resources/__init__.py +6 -0
- lmnr/sdk/client/asynchronous/resources/base.py +32 -0
- lmnr/sdk/client/asynchronous/resources/browser_events.py +41 -0
- lmnr/sdk/client/asynchronous/resources/datasets.py +131 -0
- lmnr/sdk/client/asynchronous/resources/evals.py +266 -0
- lmnr/sdk/client/asynchronous/resources/evaluators.py +85 -0
- lmnr/sdk/client/asynchronous/resources/tags.py +83 -0
- lmnr/sdk/client/synchronous/resources/__init__.py +6 -0
- lmnr/sdk/client/synchronous/resources/base.py +32 -0
- lmnr/sdk/client/synchronous/resources/browser_events.py +40 -0
- lmnr/sdk/client/synchronous/resources/datasets.py +131 -0
- lmnr/sdk/client/synchronous/resources/evals.py +263 -0
- lmnr/sdk/client/synchronous/resources/evaluators.py +85 -0
- lmnr/sdk/client/synchronous/resources/tags.py +83 -0
- lmnr/sdk/client/synchronous/sync_client.py +191 -0
- lmnr/sdk/datasets/__init__.py +94 -0
- lmnr/sdk/datasets/file_utils.py +91 -0
- lmnr/sdk/decorators.py +163 -26
- lmnr/sdk/eval_control.py +3 -2
- lmnr/sdk/evaluations.py +403 -191
- lmnr/sdk/laminar.py +1080 -549
- lmnr/sdk/log.py +7 -2
- lmnr/sdk/types.py +246 -134
- lmnr/sdk/utils.py +151 -7
- lmnr/version.py +46 -0
- {lmnr-0.4.53.dev0.dist-info → lmnr-0.7.26.dist-info}/METADATA +152 -106
- lmnr-0.7.26.dist-info/RECORD +116 -0
- lmnr-0.7.26.dist-info/WHEEL +4 -0
- lmnr-0.7.26.dist-info/entry_points.txt +3 -0
- lmnr/cli.py +0 -101
- lmnr/openllmetry_sdk/.python-version +0 -1
- lmnr/openllmetry_sdk/__init__.py +0 -72
- lmnr/openllmetry_sdk/config/__init__.py +0 -9
- lmnr/openllmetry_sdk/decorators/base.py +0 -185
- lmnr/openllmetry_sdk/instruments.py +0 -38
- lmnr/openllmetry_sdk/tracing/__init__.py +0 -1
- lmnr/openllmetry_sdk/tracing/content_allow_list.py +0 -24
- lmnr/openllmetry_sdk/tracing/context_manager.py +0 -13
- lmnr/openllmetry_sdk/tracing/tracing.py +0 -884
- lmnr/openllmetry_sdk/utils/in_memory_span_exporter.py +0 -61
- lmnr/openllmetry_sdk/utils/package_check.py +0 -7
- lmnr/openllmetry_sdk/version.py +0 -1
- lmnr/sdk/datasets.py +0 -55
- lmnr-0.4.53.dev0.dist-info/LICENSE +0 -75
- lmnr-0.4.53.dev0.dist-info/RECORD +0 -33
- lmnr-0.4.53.dev0.dist-info/WHEEL +0 -4
- lmnr-0.4.53.dev0.dist-info/entry_points.txt +0 -3
- /lmnr/{openllmetry_sdk → opentelemetry_lib}/.flake8 +0 -0
- /lmnr/{openllmetry_sdk → opentelemetry_lib}/utils/__init__.py +0 -0
- /lmnr/{openllmetry_sdk → opentelemetry_lib}/utils/json_encoder.py +0 -0
- /lmnr/{openllmetry_sdk/decorators/__init__.py → py.typed} +0 -0
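
The most sweeping change in this listing is structural: the internal openllmetry_sdk package is removed and replaced by the new opentelemetry_lib package (see the renamed tracing/attributes.py and the deleted openllmetry_sdk/* entries), and the single-file lmnr/cli.py gives way to an lmnr/cli/ package. As a minimal illustrative sketch of what the rename means for code that imported the old internal module path (the module names are taken from the rename entry in the list above; the public lmnr API remains the supported surface):

    # 0.4.x internal layout (removed in 0.7.x):
    # from lmnr.openllmetry_sdk.tracing import attributes

    # 0.7.x internal layout, per the rename entry in the file list above:
    from lmnr.opentelemetry_lib.tracing import attributes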

lmnr/sdk/browser/inject_script.js (new file)
@@ -0,0 +1,514 @@
+/**
+ * The session recording inject script function.
+ * This function runs in the browser context and sets up rrweb recording.
+ *
+ * @param {Object} maskInputOptions - Optional recording options for masking inputs
+ * @param {boolean} stringifyCallbackArgs - If true, stringify arguments when calling
+ *   lmnrSendEvents (for raw CDP bindings)
+ */
+(maskInputOptions, stringifyCallbackArgs) => {
+  const BATCH_TIMEOUT = 2000; // Send events after 2 seconds
+  const MAX_WORKER_PROMISES = 50; // Max concurrent worker promises
+  const HEARTBEAT_INTERVAL = 2000;
+  const CHUNK_SIZE = 256 * 1024; // 256KB chunks
+  const CHUNK_SEND_DELAY = 100; // 100ms delay between chunks
+
+  window.lmnrRrwebEventsBatch = [];
+  window.lmnrChunkQueue = [];
+  window.lmnrChunkSequence = 0;
+  window.lmnrCurrentBatchId = null;
+
+  // Define a wrapper function that handles stringification based on the parameter
+  const sendEvent = stringifyCallbackArgs
+    ? (chunk) => window.lmnrSendEvents(JSON.stringify(chunk))
+    : (chunk) => window.lmnrSendEvents(chunk);
+
+  // Create a Web Worker for heavy JSON processing with chunked processing
+  const createCompressionWorker = () => {
+    const workerCode = `
+      self.onmessage = async function(e) {
+        const { jsonString, buffer, id, useBuffer } = e.data;
+        try {
+          let uint8Array;
+
+          if (useBuffer && buffer) {
+            // Use transferred ArrayBuffer (no copying needed!)
+            uint8Array = new Uint8Array(buffer);
+          } else {
+            // Convert JSON string to bytes
+            const textEncoder = new TextEncoder();
+            uint8Array = textEncoder.encode(jsonString);
+          }
+
+          const compressionStream = new CompressionStream('gzip');
+          const writer = compressionStream.writable.getWriter();
+          const reader = compressionStream.readable.getReader();
+
+          writer.write(uint8Array);
+          writer.close();
+
+          const chunks = [];
+          let totalLength = 0;
+
+          while (true) {
+            const { done, value } = await reader.read();
+            if (done) break;
+            chunks.push(value);
+            totalLength += value.length;
+          }
+
+          const compressedData = new Uint8Array(totalLength);
+          let offset = 0;
+          for (const chunk of chunks) {
+            compressedData.set(chunk, offset);
+            offset += chunk.length;
+          }
+
+          self.postMessage({ id, success: true, data: compressedData });
+        } catch (error) {
+          self.postMessage({ id, success: false, error: error.message });
+        }
+      };
+    `;
+
+    const blob = new Blob([workerCode], { type: 'application/javascript' });
+    return new Worker(URL.createObjectURL(blob));
+  };
+
+  let compressionWorker = null;
+  let workerPromises = new Map();
+  let workerId = 0;
+  let workerSupported = null; // null = unknown, true = supported, false = blocked by CSP
+  let workerCreationInitiated = false;
+
+  // Test if workers are supported (not blocked by CSP)
+  function testWorkerSupport() {
+    if (workerSupported !== null) {
+      return workerSupported;
+    }
+
+    try {
+      const testWorker = createCompressionWorker();
+      testWorker.terminate();
+      workerSupported = true;
+      return true;
+    } catch (error) {
+      console.warn('Web Workers blocked by CSP, will use main thread compression:', error);
+      workerSupported = false;
+      return false;
+    }
+  }
+
+  // Cleanup function for worker
+  const cleanupWorker = () => {
+    if (compressionWorker) {
+      compressionWorker.terminate();
+      compressionWorker = null;
+    }
+    workerCreationInitiated = false;
+    workerPromises.clear();
+    workerId = 0;
+  };
+
+  // Clean up stale promises to prevent memory leaks
+  const cleanupStalePromises = () => {
+    if (workerPromises.size > MAX_WORKER_PROMISES) {
+      const toDelete = [];
+      for (const [id, promise] of workerPromises) {
+        if (toDelete.length >= workerPromises.size - MAX_WORKER_PROMISES) break;
+        toDelete.push(id);
+        promise.reject(new Error('Promise cleaned up due to memory pressure'));
+      }
+      toDelete.forEach(id => workerPromises.delete(id));
+    }
+  };
+
+  // Non-blocking JSON.stringify using chunked processing
+  function stringifyNonBlocking(obj, chunkSize = 10000) {
+    return new Promise((resolve, reject) => {
+      try {
+        // For very large objects, we need to be more careful
+        // Use requestIdleCallback if available, otherwise setTimeout
+        const scheduleWork = window.requestIdleCallback ||
+          ((cb) => setTimeout(cb, 0));
+
+        let result = '';
+        let keys = [];
+        let keyIndex = 0;
+
+        // Pre-process to get all keys if it's an object
+        if (typeof obj === 'object' && obj !== null && !Array.isArray(obj)) {
+          keys = Object.keys(obj);
+        }
+
+        function processChunk() {
+          try {
+            if (Array.isArray(obj) || typeof obj !== 'object' || obj === null) {
+              // For arrays and primitives, just stringify directly
+              result = JSON.stringify(obj);
+              resolve(result);
+              return;
+            }
+
+            // For objects, process in chunks
+            const endIndex = Math.min(keyIndex + chunkSize, keys.length);
+
+            if (keyIndex === 0) {
+              result = '{';
+            }
+
+            for (let i = keyIndex; i < endIndex; i++) {
+              const key = keys[i];
+              const value = obj[key];
+
+              if (i > 0) result += ',';
+              result += JSON.stringify(key) + ':' + JSON.stringify(value);
+            }
+
+            keyIndex = endIndex;
+
+            if (keyIndex >= keys.length) {
+              result += '}';
+              resolve(result);
+            } else {
+              // Schedule next chunk
+              scheduleWork(processChunk);
+            }
+          } catch (error) {
+            reject(error);
+          }
+        }
+
+        processChunk();
+      } catch (error) {
+        reject(error);
+      }
+    });
+  }
+
+  // Fast compression for small objects (main thread)
+  async function compressSmallObject(data) {
+    const jsonString = JSON.stringify(data);
+    const textEncoder = new TextEncoder();
+    const uint8Array = textEncoder.encode(jsonString);
+
+    const compressionStream = new CompressionStream('gzip');
+    const writer = compressionStream.writable.getWriter();
+    const reader = compressionStream.readable.getReader();
+
+    writer.write(uint8Array);
+    writer.close();
+
+    const chunks = [];
+    let totalLength = 0;
+
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      chunks.push(value);
+      totalLength += value.length;
+    }
+
+    const compressedData = new Uint8Array(totalLength);
+    let offset = 0;
+    for (const chunk of chunks) {
+      compressedData.set(chunk, offset);
+      offset += chunk.length;
+    }
+
+    return compressedData;
+  }
+
+  // Alternative: Use transferable objects for maximum efficiency
+  async function compressLargeObjectTransferable(data) {
+    try {
+      // Check if workers are supported first
+      if (!testWorkerSupport()) {
+        return compressSmallObject(data);
+      }
+
+      // Clean up stale promises first
+      cleanupStalePromises();
+
+      // Stringify on main thread but non-blocking
+      const jsonString = await stringifyNonBlocking(data);
+
+      // Convert to ArrayBuffer (transferable)
+      const encoder = new TextEncoder();
+      const uint8Array = encoder.encode(jsonString);
+      const buffer = uint8Array.buffer; // Use the original buffer for transfer
+
+      return new Promise((resolve, reject) => {
+        if (!compressionWorker && !workerCreationInitiated) {
+          workerCreationInitiated = true;
+          try {
+            compressionWorker = createCompressionWorker();
+            compressionWorker.onmessage = (e) => {
+              const { id, success, data: result, error } = e.data;
+              const promise = workerPromises.get(id);
+              if (promise) {
+                workerPromises.delete(id);
+                if (success) {
+                  promise.resolve(result);
+                } else {
+                  promise.reject(new Error(error));
+                }
+              }
+            };
+
+            compressionWorker.onerror = (error) => {
+              console.error('Compression worker error:', error);
+              cleanupWorker();
+              compressSmallObject(data).then(resolve, reject);
+            };
+          } catch (error) {
+            workerCreationInitiated = false;
+            throw error;
+          }
+        }
+
+        const id = ++workerId;
+        workerPromises.set(id, { resolve, reject });
+
+        // Set timeout to prevent hanging promises
+        setTimeout(() => {
+          if (workerPromises.has(id)) {
+            workerPromises.delete(id);
+            reject(new Error('Compression timeout'));
+          }
+        }, 10000);
+
+        // Transfer the ArrayBuffer (no copying!)
+        compressionWorker.postMessage({
+          buffer,
+          id,
+          useBuffer: true
+        }, [buffer]);
+      });
+    } catch (error) {
+      console.warn('Failed to process large object with transferable:', error);
+      return compressSmallObject(data);
+    }
+  }
+
+  // Worker-based compression for large objects
+  async function compressLargeObject(data) {
+    // Check if workers are supported first - if not, use main thread compression
+    if (!testWorkerSupport()) {
+      return await compressSmallObject(data);
+    }
+
+    try {
+      // Use transferable objects for better performance
+      return await compressLargeObjectTransferable(data);
+    } catch (error) {
+      console.warn('Transferable failed, falling back to string method:', error);
+      try {
+        // Fallback to string method with worker
+        const jsonString = await stringifyNonBlocking(data);
+
+        return new Promise((resolve, reject) => {
+          if (!compressionWorker && !workerCreationInitiated) {
+            workerCreationInitiated = true;
+            try {
+              compressionWorker = createCompressionWorker();
+              compressionWorker.onmessage = (e) => {
+                const { id, success, data: result, error } = e.data;
+                const promise = workerPromises.get(id);
+                if (promise) {
+                  workerPromises.delete(id);
+                  if (success) {
+                    promise.resolve(result);
+                  } else {
+                    promise.reject(new Error(error));
+                  }
+                }
+              };
+
+              compressionWorker.onerror = (error) => {
+                console.error('Compression worker error:', error);
+                cleanupWorker();
+              };
+            } catch (error) {
+              workerCreationInitiated = false;
+              throw error;
+            }
+          }
+
+          const id = ++workerId;
+          workerPromises.set(id, { resolve, reject });
+
+          // Set timeout to prevent hanging promises
+          setTimeout(() => {
+            if (workerPromises.has(id)) {
+              workerPromises.delete(id);
+              reject(new Error('Compression timeout'));
+            }
+          }, 10000);
+
+          compressionWorker.postMessage({ jsonString, id });
+        });
+      } catch (workerError) {
+        console.warn('Worker creation failed, falling back to main thread compression:', workerError);
+        // Final fallback: compress on main thread (may block UI but will work)
+        return await compressSmallObject(data);
+      }
+    }
+  }
+
+
+  setInterval(cleanupWorker, 5000);
+
+  function isLargeEvent(type) {
+    const LARGE_EVENT_TYPES = [
+      2, // FullSnapshot
+    ];
+
+    if (LARGE_EVENT_TYPES.includes(type)) {
+      return true;
+    }
+
+    return false;
+  }
+
+  // Create chunks from a string with metadata
+  function createChunks(str, batchId) {
+    const chunks = [];
+    const totalChunks = Math.ceil(str.length / CHUNK_SIZE);
+
+    for (let i = 0; i < str.length; i += CHUNK_SIZE) {
+      const chunk = str.slice(i, i + CHUNK_SIZE);
+      chunks.push({
+        batchId: batchId,
+        chunkIndex: chunks.length,
+        totalChunks: totalChunks,
+        data: chunk,
+        isFinal: chunks.length === totalChunks - 1
+      });
+    }
+
+    return chunks;
+  }
+
+  // Send chunks with flow control
+  async function sendChunks(chunks) {
+    if (typeof window.lmnrSendEvents !== 'function') {
+      return;
+    }
+
+    window.lmnrChunkQueue.push(...chunks);
+
+    // Process queue
+    while (window.lmnrChunkQueue.length > 0) {
+      const chunk = window.lmnrChunkQueue.shift();
+      try {
+        await sendEvent(chunk);
+        // Small delay between chunks to avoid overwhelming CDP
+        await new Promise(resolve => setTimeout(resolve, CHUNK_SEND_DELAY));
+      } catch (error) {
+        console.error('Failed to send chunk:', error);
+        // On error, clear failed chunk batch from queue
+        window.lmnrChunkQueue = window.lmnrChunkQueue.filter(c => c.batchId !== chunk.batchId);
+        break;
+      }
+    }
+  }
+
+  async function sendBatchIfReady() {
+    if (window.lmnrRrwebEventsBatch.length > 0 && typeof window.lmnrSendEvents === 'function') {
+      const events = window.lmnrRrwebEventsBatch;
+      window.lmnrRrwebEventsBatch = [];
+
+      try {
+        // Generate unique batch ID
+        const batchId = `${Date.now()}_${window.lmnrChunkSequence++}`;
+        window.lmnrCurrentBatchId = batchId;
+
+        // Stringify the entire batch
+        const batchString = JSON.stringify(events);
+
+        // Check size and chunk if necessary
+        if (batchString.length <= CHUNK_SIZE) {
+          // Small enough to send as single chunk
+          const chunk = {
+            batchId: batchId,
+            chunkIndex: 0,
+            totalChunks: 1,
+            data: batchString,
+            isFinal: true
+          };
+          await sendEvent(chunk);
+        } else {
+          // Need to chunk
+          const chunks = createChunks(batchString, batchId);
+          await sendChunks(chunks);
+        }
+      } catch (error) {
+        console.error('Failed to send events:', error);
+        // Clear batch to prevent memory buildup
+        window.lmnrRrwebEventsBatch = [];
+      }
+    }
+  }
+
+  async function bufferToBase64(buffer) {
+    const base64url = await new Promise(r => {
+      const reader = new FileReader()
+      reader.onload = () => r(reader.result)
+      reader.readAsDataURL(new Blob([buffer]))
+    });
+    return base64url.slice(base64url.indexOf(',') + 1);
+  }
+
+  if (!window.lmnrStartedRecordingEvents) {
+    setInterval(sendBatchIfReady, BATCH_TIMEOUT);
+
+    window.lmnrRrweb.record({
+      async emit(event) {
+        try {
+          const isLarge = isLargeEvent(event.type);
+          const compressedResult = isLarge ?
+            await compressLargeObject(event.data) :
+            await compressSmallObject(event.data);
+
+          const base64Data = await bufferToBase64(compressedResult);
+          const eventToSend = {
+            ...event,
+            data: base64Data,
+          };
+          window.lmnrRrwebEventsBatch.push(eventToSend);
+        } catch (error) {
+          console.warn('Failed to push event to batch', error);
+        }
+      },
+      recordCanvas: true,
+      collectFonts: true,
+      recordCrossOriginIframes: true,
+      maskInputOptions: {
+        password: true,
+        textarea: maskInputOptions.textarea || false,
+        text: maskInputOptions.text || false,
+        number: maskInputOptions.number || false,
+        select: maskInputOptions.select || false,
+        email: maskInputOptions.email || false,
+        tel: maskInputOptions.tel || false,
+      }
+    });
+
+    function heartbeat() {
+      // Add heartbeat events
+      setInterval(
+        () => {
+          window.lmnrRrweb.record.addCustomEvent('heartbeat', {
+            title: document.title,
+            url: document.URL,
+          })
+        },
+        HEARTBEAT_INTERVAL,
+      );
+    }
+
+    heartbeat();
+    window.lmnrStartedRecordingEvents = true;
+  }
+}
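
The script above defines the browser-side half of a chunked transport: each rrweb event's data is gzip-compressed via CompressionStream and base64-encoded, events are batched every BATCH_TIMEOUT, the batch is JSON-stringified, split into CHUNK_SIZE pieces carrying batchId / chunkIndex / totalChunks / isFinal metadata, and handed to the injected window.lmnrSendEvents binding. The Python sketch below illustrates how such a batch could be reassembled and decoded on the receiving side; it describes the wire format only and is not the SDK's actual receiver, which lives in lmnr/sdk/browser/ (for example cdp_utils.py and background_send_events.py) outside this hunk. Note that when stringifyCallbackArgs is true, the binding receives each chunk as a JSON string that must be parsed first.

    # A minimal sketch of decoding one batch, assuming the chunk format produced
    # by inject_script.js above. Illustrative only; not the SDK's receiver code.
    import base64
    import gzip
    import json


    def reassemble_batch(chunks: list[dict]) -> list[dict]:
        """Join the chunks of one batchId and decode each rrweb event's gzipped data."""
        ordered = sorted(chunks, key=lambda c: c["chunkIndex"])
        batch_json = "".join(c["data"] for c in ordered)
        events = json.loads(batch_json)
        for event in events:
            # event["data"] is base64(gzip(JSON)) as produced by bufferToBase64()
            raw = gzip.decompress(base64.b64decode(event["data"]))
            event["data"] = json.loads(raw)
        return events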

lmnr/sdk/browser/patchright_otel.py (new file)
@@ -0,0 +1,151 @@
+from lmnr.sdk.browser.playwright_otel import (
+    _wrap_bring_to_front_async,
+    _wrap_bring_to_front_sync,
+    _wrap_new_browser_sync,
+    _wrap_new_browser_async,
+    _wrap_new_context_sync,
+    _wrap_new_context_async,
+)
+from lmnr.sdk.client.asynchronous.async_client import AsyncLaminarClient
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.instrumentation.utils import unwrap
+from opentelemetry.trace import get_tracer
+from lmnr.version import __version__
+from typing import Collection
+from wrapt import wrap_function_wrapper
+
+_instruments = ("patchright >= 1.9.0",)
+
+WRAPPED_METHODS = [
+    {
+        "package": "patchright.sync_api",
+        "object": "BrowserType",
+        "method": "launch",
+        "wrapper": _wrap_new_browser_sync,
+    },
+    {
+        "package": "patchright.sync_api",
+        "object": "BrowserType",
+        "method": "connect",
+        "wrapper": _wrap_new_browser_sync,
+    },
+    {
+        "package": "patchright.sync_api",
+        "object": "BrowserType",
+        "method": "connect_over_cdp",
+        "wrapper": _wrap_new_browser_sync,
+    },
+    {
+        "package": "patchright.sync_api",
+        "object": "Browser",
+        "method": "new_context",
+        "wrapper": _wrap_new_context_sync,
+    },
+    {
+        "package": "patchright.sync_api",
+        "object": "BrowserType",
+        "method": "launch_persistent_context",
+        "wrapper": _wrap_new_context_sync,
+    },
+    {
+        "package": "patchright.sync_api",
+        "object": "Page",
+        "method": "bring_to_front",
+        "wrapper": _wrap_bring_to_front_sync,
+    },
+]
+
+WRAPPED_METHODS_ASYNC = [
+    {
+        "package": "patchright.async_api",
+        "object": "BrowserType",
+        "method": "launch",
+        "wrapper": _wrap_new_browser_async,
+    },
+    {
+        "package": "patchright.async_api",
+        "object": "BrowserType",
+        "method": "connect",
+        "wrapper": _wrap_new_browser_async,
+    },
+    {
+        "package": "patchright.async_api",
+        "object": "BrowserType",
+        "method": "connect_over_cdp",
+        "wrapper": _wrap_new_browser_async,
+    },
+    {
+        "package": "patchright.async_api",
+        "object": "Browser",
+        "method": "new_context",
+        "wrapper": _wrap_new_context_async,
+    },
+    {
+        "package": "patchright.async_api",
+        "object": "BrowserType",
+        "method": "launch_persistent_context",
+        "wrapper": _wrap_new_context_async,
+    },
+    {
+        "package": "patchright.async_api",
+        "object": "Page",
+        "method": "bring_to_front",
+        "wrapper": _wrap_bring_to_front_async,
+    },
+]
+
+
+class PatchrightInstrumentor(BaseInstrumentor):
+    def __init__(self, async_client: AsyncLaminarClient):
+        super().__init__()
+        self.async_client = async_client
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return _instruments
+
+    def _instrument(self, **kwargs):
+        tracer_provider = kwargs.get("tracer_provider")
+        tracer = get_tracer(__name__, __version__, tracer_provider)
+
+        # Both sync and async methods use async_client
+        # because we are using a background asyncio loop for async sends
+        for wrapped_method in WRAPPED_METHODS:
+            wrap_package = wrapped_method.get("package")
+            wrap_object = wrapped_method.get("object")
+            wrap_method = wrapped_method.get("method")
+            try:
+                wrap_function_wrapper(
+                    wrap_package,
+                    f"{wrap_object}.{wrap_method}",
+                    wrapped_method.get("wrapper")(
+                        tracer,
+                        self.async_client,
+                        wrapped_method,
+                    ),
+                )
+            except ModuleNotFoundError:
+                pass
+
+        for wrapped_method in WRAPPED_METHODS_ASYNC:
+            wrap_package = wrapped_method.get("package")
+            wrap_object = wrapped_method.get("object")
+            wrap_method = wrapped_method.get("method")
+            try:
+                wrap_function_wrapper(
+                    wrap_package,
+                    f"{wrap_object}.{wrap_method}",
+                    wrapped_method.get("wrapper")(
+                        tracer,
+                        self.async_client,
+                        wrapped_method,
+                    ),
+                )
+            except ModuleNotFoundError:
+                pass
+
+    def _uninstrument(self, **kwargs):
+        for wrapped_method in WRAPPED_METHODS + WRAPPED_METHODS_ASYNC:
+            wrap_package = wrapped_method.get("package")
+            wrap_object = wrapped_method.get("object")
+            wrap_method = wrapped_method.get("method")
+            unwrap(wrap_package, f"{wrap_object}.{wrap_method}")