@rivetkit/traces 2.1.0-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +203 -0
- package/dist/schemas/v1.ts +653 -0
- package/dist/tsup/chunk-2D7JND4Z.js +63 -0
- package/dist/tsup/chunk-2D7JND4Z.js.map +1 -0
- package/dist/tsup/chunk-7RQXHEKZ.js +541 -0
- package/dist/tsup/chunk-7RQXHEKZ.js.map +1 -0
- package/dist/tsup/chunk-DXS2HLRN.cjs +63 -0
- package/dist/tsup/chunk-DXS2HLRN.cjs.map +1 -0
- package/dist/tsup/chunk-QOSSO6CN.cjs +541 -0
- package/dist/tsup/chunk-QOSSO6CN.cjs.map +1 -0
- package/dist/tsup/chunk-UNGPFJ4C.js +417 -0
- package/dist/tsup/chunk-UNGPFJ4C.js.map +1 -0
- package/dist/tsup/chunk-ZTVH74GC.cjs +417 -0
- package/dist/tsup/chunk-ZTVH74GC.cjs.map +1 -0
- package/dist/tsup/encoding.cjs +20 -0
- package/dist/tsup/encoding.cjs.map +1 -0
- package/dist/tsup/encoding.d.cts +6 -0
- package/dist/tsup/encoding.d.ts +6 -0
- package/dist/tsup/encoding.js +20 -0
- package/dist/tsup/encoding.js.map +1 -0
- package/dist/tsup/index.browser.cjs +15 -0
- package/dist/tsup/index.browser.cjs.map +1 -0
- package/dist/tsup/index.browser.d.cts +7 -0
- package/dist/tsup/index.browser.d.ts +7 -0
- package/dist/tsup/index.browser.js +15 -0
- package/dist/tsup/index.browser.js.map +1 -0
- package/dist/tsup/index.cjs +921 -0
- package/dist/tsup/index.cjs.map +1 -0
- package/dist/tsup/index.d.cts +9 -0
- package/dist/tsup/index.d.ts +9 -0
- package/dist/tsup/index.js +921 -0
- package/dist/tsup/index.js.map +1 -0
- package/dist/tsup/noop-CcgjEgCu.d.cts +99 -0
- package/dist/tsup/noop-D-YAZiGa.d.ts +99 -0
- package/dist/tsup/otlp-Da4Yz0xC.d.cts +81 -0
- package/dist/tsup/otlp-Da4Yz0xC.d.ts +81 -0
- package/dist/tsup/otlp-entry.cjs +16 -0
- package/dist/tsup/otlp-entry.cjs.map +1 -0
- package/dist/tsup/otlp-entry.d.cts +10 -0
- package/dist/tsup/otlp-entry.d.ts +10 -0
- package/dist/tsup/otlp-entry.js +16 -0
- package/dist/tsup/otlp-entry.js.map +1 -0
- package/dist/tsup/v1-DovAIc7f.d.cts +118 -0
- package/dist/tsup/v1-DovAIc7f.d.ts +118 -0
- package/package.json +74 -0
- package/schemas/v1.bare +177 -0
- package/schemas/versioned.ts +99 -0
- package/src/encoding.ts +18 -0
- package/src/index.browser.ts +13 -0
- package/src/index.ts +31 -0
- package/src/noop.ts +81 -0
- package/src/otlp-entry.ts +18 -0
- package/src/otlp.ts +158 -0
- package/src/read-range.ts +502 -0
- package/src/traces.ts +1186 -0
- package/src/types.ts +94 -0
|
@@ -0,0 +1,921 @@
|
|
|
1
|
+
// CJS prelude emitted by the bundler: marks this module as ES-module interop.
// _nullishCoalesce(lhs, rhsFn) is the compiled form of `lhs ?? rhsFn()`:
// `!= null` matches both null and undefined, and rhsFn is only invoked when
// lhs is nullish, so the default stays lazily evaluated.
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }
|
|
2
|
+
|
|
3
|
+
var _chunkDXS2HLRNcjs = require('./chunk-DXS2HLRN.cjs');
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
var _chunkZTVH74GCcjs = require('./chunk-ZTVH74GC.cjs');
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
var _chunkQOSSO6CNcjs = require('./chunk-QOSSO6CN.cjs');
|
|
13
|
+
|
|
14
|
+
// src/traces.ts
|
|
15
|
+
var _async_hooks = require('async_hooks');
|
|
16
|
+
var _buffer = require('buffer');
|
|
17
|
+
var _crypto = require('crypto');
|
|
18
|
+
var _perf_hooks = require('perf_hooks');
|
|
19
|
+
var _cborx = require('cbor-x');
|
|
20
|
+
var _fdbtuple = require('fdb-tuple');
|
|
21
|
+
// Key-space prefixes for driver keys; DATA marks chunk-record keys.
var KEY_PREFIX = {
  DATA: 1
};
// Sentinel chunk id larger than any real one (2^32), used as an exclusive
// upper bound when scanning backwards for the latest chunk in a bucket.
var AFTER_MAX_CHUNK_ID = 4294967296;
// Default time-bucket width: one hour.
var DEFAULT_BUCKET_SIZE_SEC = 3600;
// Preferred chunk payload size (soft limit) vs. absolute record cap.
var DEFAULT_TARGET_CHUNK_BYTES = 512 * 1024;
var DEFAULT_MAX_CHUNK_BYTES = 1024 * 1024;
// Flush a chunk that has been open longer than this (5s).
var DEFAULT_MAX_CHUNK_AGE_MS = 5e3;
// Periodic span snapshot interval (5min) and byte-delta trigger (256KiB).
var DEFAULT_SNAPSHOT_INTERVAL_MS = 3e5;
var DEFAULT_SNAPSHOT_BYTES_THRESHOLD = 256 * 1024;
// Caps on read-query result size and concurrently tracked spans.
var DEFAULT_MAX_READ_LIMIT = 1e4;
var DEFAULT_MAX_ACTIVE_SPANS = 1e4;
// OTel-compatible id widths: 8-byte span ids, 16-byte trace ids.
var SPAN_ID_BYTES = 8;
var TRACE_ID_BYTES = 16;
// AsyncLocalStorage carrying the "current span" handle across async calls.
var spanContext = new (0, _async_hooks.AsyncLocalStorage)();
|
|
36
|
+
// Derive a stable Map/Set key (hex string) from a span id given either as a
// Uint8Array or an ArrayBuffer. hexFromBytes comes from a sibling chunk.
function spanKey(spanId) {
  return _chunkZTVH74GCcjs.hexFromBytes.call(void 0, normalizeBytes(spanId));
}
|
|
39
|
+
function toArrayBuffer(bytes) {
  // Return a freshly allocated ArrayBuffer containing a copy of `bytes`,
  // detached from the caller's (possibly pooled/shared) backing buffer.
  const snapshot = Uint8Array.from(bytes);
  return snapshot.buffer;
}
|
|
44
|
+
function toUint8Array(buffer) {
  // Wrap an ArrayBuffer in a Uint8Array view spanning the whole buffer.
  const view = new Uint8Array(buffer);
  return view;
}
|
|
47
|
+
function normalizeBytes(input) {
  // Accept either a Uint8Array (returned as-is) or an ArrayBuffer
  // (wrapped in a new Uint8Array view).
  if (input instanceof Uint8Array) {
    return input;
  }
  return new Uint8Array(input);
}
|
|
50
|
+
function createTraces(options) {
|
|
51
|
+
const driver = options.driver;
|
|
52
|
+
const bucketSizeSec = _nullishCoalesce(options.bucketSizeSec, () => ( DEFAULT_BUCKET_SIZE_SEC));
|
|
53
|
+
const maxChunkBytes = _nullishCoalesce(options.maxChunkBytes, () => ( DEFAULT_MAX_CHUNK_BYTES));
|
|
54
|
+
const targetChunkBytes = Math.min(
|
|
55
|
+
_nullishCoalesce(options.targetChunkBytes, () => ( DEFAULT_TARGET_CHUNK_BYTES)),
|
|
56
|
+
maxChunkBytes
|
|
57
|
+
);
|
|
58
|
+
const maxChunkAgeMs = _nullishCoalesce(options.maxChunkAgeMs, () => ( DEFAULT_MAX_CHUNK_AGE_MS));
|
|
59
|
+
const snapshotIntervalMs = _nullishCoalesce(options.snapshotIntervalMs, () => ( DEFAULT_SNAPSHOT_INTERVAL_MS));
|
|
60
|
+
const snapshotBytesThreshold = _nullishCoalesce(options.snapshotBytesThreshold, () => ( DEFAULT_SNAPSHOT_BYTES_THRESHOLD));
|
|
61
|
+
const maxActiveSpans = _nullishCoalesce(options.maxActiveSpans, () => ( DEFAULT_MAX_ACTIVE_SPANS));
|
|
62
|
+
const maxReadLimit = _nullishCoalesce(options.maxReadLimit, () => ( DEFAULT_MAX_READ_LIMIT));
|
|
63
|
+
const resource = options.resource;
|
|
64
|
+
const timeAnchor = {
|
|
65
|
+
unixMs: Date.now(),
|
|
66
|
+
monoMs: _perf_hooks.performance.now()
|
|
67
|
+
};
|
|
68
|
+
const activeSpans = /* @__PURE__ */ new Map();
|
|
69
|
+
const activeSpanRefs = /* @__PURE__ */ new Map();
|
|
70
|
+
const pendingChunks = [];
|
|
71
|
+
let writeChain = Promise.resolve();
|
|
72
|
+
const bucketChunkCounters = /* @__PURE__ */ new Map();
|
|
73
|
+
// Current wall-clock time in ms, derived from the monotonic clock relative to
// the captured anchor so time never jumps backwards if Date.now() is adjusted.
function nowUnixMs() {
  return timeAnchor.unixMs + (_perf_hooks.performance.now() - timeAnchor.monoMs);
}
// Same idea at nanosecond precision, returned as a BigInt. Whole milliseconds
// and the fractional remainder are converted separately to avoid float
// precision loss when scaling to ns.
function nowUnixNs(anchor) {
  const unixMs = anchor.unixMs + (_perf_hooks.performance.now() - anchor.monoMs);
  const wholeMs = Math.floor(unixMs);
  const fracMs = unixMs - wholeMs;
  return BigInt(wholeMs) * 1000000n + BigInt(Math.floor(fracMs * 1e6));
}
|
|
82
|
+
// Fresh mutable state for an in-progress chunk: a per-chunk string table
// (strings + reverse index), record list, running encoded size, and a
// monotonic creation time used for age-based flushing. Record timestamps are
// stored as offsets from baseUnixNs (the bucket start, in ns).
function createChunkState(bucketStartSec) {
  return {
    bucketStartSec,
    chunkId: nextChunkId(bucketStartSec),
    baseUnixNs: BigInt(bucketStartSec) * 1000000000n,
    strings: [],
    stringIds: /* @__PURE__ */ new Map(),
    records: [],
    sizeBytes: 0,
    createdAtMonoMs: _perf_hooks.performance.now()
  };
}
// Monotonically increasing chunk id per bucket, starting at 0. Note the
// counter map is in-memory only, so ids restart after a process restart.
function nextChunkId(bucketStartSec) {
  const current = _nullishCoalesce(bucketChunkCounters.get(bucketStartSec), () => ( 0));
  bucketChunkCounters.set(bucketStartSec, current + 1);
  return current;
}
// The single chunk currently accepting records, seeded from "now".
const currentChunk = createChunkState(
  computeBucketStartSec(nowUnixNs(timeAnchor), bucketSizeSec)
);
|
|
102
|
+
function computeBucketStartSec(absoluteUnixNs, bucketSize) {
  // Floor an absolute unix timestamp (ns, BigInt) to the start of its time
  // bucket and return that boundary in whole seconds as a Number.
  const bucketSizeBig = BigInt(bucketSize);
  const wholeSeconds = absoluteUnixNs / 1000000000n; // BigInt division truncates
  const alignedSeconds = (wholeSeconds / bucketSizeBig) * bucketSizeBig;
  return Number(alignedSeconds);
}
|
|
107
|
+
// Intern `value` into the current chunk's string table and return its index.
// Ids are therefore only meaningful within the chunk they were created in.
function internString(value) {
  const existing = currentChunk.stringIds.get(value);
  if (existing !== void 0) {
    return existing;
  }
  const id = currentChunk.strings.length;
  currentChunk.strings.push(value);
  currentChunk.stringIds.set(value, id);
  return id;
}
// Convert a caller-supplied attributes object into the wire list form:
// each entry carries an interned key id plus CBOR-encoded value bytes.
// Values rejected by sanitizeAttributeValue or that fail CBOR encoding are
// counted in `dropped` rather than thrown.
function encodeAttributes(attributes) {
  const list = [];
  let dropped = 0;
  if (!attributes) {
    return { attributes: list, dropped };
  }
  for (const [key, value] of Object.entries(attributes)) {
    const sanitized = sanitizeAttributeValue(value);
    if (sanitized === void 0) {
      dropped++;
      continue;
    }
    try {
      const encoded = _cborx.encode.call(void 0, sanitized);
      list.push({ key: internString(key), value: toArrayBuffer(encoded) });
    } catch (e) {
      // Deliberate best-effort: an unencodable value only bumps the counter.
      dropped++;
    }
  }
  return { attributes: list, dropped };
}
|
|
138
|
+
function sanitizeAttributeValue(value) {
  // Recursively reject values that cannot be serialized meaningfully:
  // undefined, functions, and symbols yield undefined; Maps become plain
  // objects (the whole Map is rejected if any key is not a string); arrays
  // are sanitized element-wise with rejected entries removed. Everything
  // else (primitives, plain objects, ...) passes through unchanged.
  const kind = typeof value;
  if (value === void 0 || kind === "function" || kind === "symbol") {
    return void 0;
  }
  if (value instanceof Map) {
    const plain = {};
    for (const [entryKey, entryValue] of value.entries()) {
      if (typeof entryKey !== "string") {
        // One non-string key invalidates the entire Map.
        return void 0;
      }
      const cleaned = sanitizeAttributeValue(entryValue);
      if (cleaned !== void 0) {
        plain[entryKey] = cleaned;
      }
    }
    return plain;
  }
  if (Array.isArray(value)) {
    const cleanedEntries = [];
    for (const element of value) {
      const sanitizedElement = sanitizeAttributeValue(element);
      if (sanitizedElement !== void 0) {
        cleanedEntries.push(sanitizedElement);
      }
    }
    return cleanedEntries;
  }
  return value;
}
|
|
163
|
+
// Convert caller-supplied span links into wire form. Ids are copied into
// detached ArrayBuffers; link attributes go through encodeAttributes with
// per-link drop counts. The top-level `dropped` counter is declared but never
// incremented here — links themselves are never dropped, only attributes.
function encodeLinks(links) {
  const result = [];
  let dropped = 0;
  if (!links) {
    return { links: result, dropped };
  }
  for (const link of links) {
    const { attributes, dropped: droppedAttributes } = encodeAttributes(
      link.attributes
    );
    result.push({
      traceId: toArrayBuffer(link.traceId),
      spanId: toArrayBuffer(link.spanId),
      traceState: _nullishCoalesce(link.traceState, () => ( null)),
      attributes,
      droppedAttributesCount: droppedAttributes
    });
  }
  return { links: result, dropped };
}
|
|
183
|
+
// Build the wire body for a SpanStart record: sanitize/encode attributes and
// links, intern the span name into the current chunk's string table, and
// default kind/flags to 0 and traceState to null.
function createSpanStartRecord(spanId, traceId, name, options2, parentSpanId) {
  const { attributes, dropped } = encodeAttributes(options2 == null ? void 0 : options2.attributes);
  const { links, dropped: droppedLinks } = encodeLinks(options2 == null ? void 0 : options2.links);
  return {
    traceId,
    spanId,
    parentSpanId,
    name: internString(name),
    kind: _nullishCoalesce((options2 == null ? void 0 : options2.kind), () => ( 0)),
    traceState: _nullishCoalesce((options2 == null ? void 0 : options2.traceState), () => ( null)),
    flags: _nullishCoalesce((options2 == null ? void 0 : options2.flags), () => ( 0)),
    attributes,
    droppedAttributesCount: dropped,
    links,
    droppedLinksCount: droppedLinks
  };
}
// Wire body for a SpanUpdate record: attribute deltas plus an optional status
// change. toBareStatus is defined elsewhere in this file — presumably it maps
// the public status shape to the BARE schema form (confirm against src/traces.ts).
function createSpanUpdateRecord(spanId, options2) {
  const { attributes, dropped } = encodeAttributes(options2.attributes);
  return {
    spanId,
    attributes,
    droppedAttributesCount: dropped,
    status: options2.status ? toBareStatus(options2.status) : null
  };
}
// Wire body for a SpanEvent record (a named, point-in-time event on a span).
function createSpanEventRecord(spanId, name, options2) {
  const { attributes, dropped } = encodeAttributes(options2 == null ? void 0 : options2.attributes);
  return {
    spanId,
    name: internString(name),
    attributes,
    droppedAttributesCount: dropped
  };
}
// Wire body for a SpanEnd record; carries only the optional final status.
function createSpanEndRecord(spanId, options2) {
  return {
    spanId,
    status: (options2 == null ? void 0 : options2.status) ? toBareStatus(options2.status) : null
  };
}
// Wire body for a SpanSnapshot record: a full re-encoding of the accumulated
// in-memory span state so readers can reconstruct a live span without
// replaying every earlier record. Drop counts accumulate (prior + newly
// dropped during this re-encode).
function createSpanSnapshotRecord(state) {
  const { attributes, dropped } = encodeAttributeMap(state.attributes);
  const { links, dropped: droppedLinks } = encodeLinkState(state.links);
  return {
    traceId: state.traceId,
    spanId: state.spanId,
    parentSpanId: state.parentSpanId,
    name: internString(state.name),
    kind: state.kind,
    startTimeUnixNs: state.startTimeUnixNs,
    traceState: state.traceState,
    flags: state.flags,
    attributes,
    droppedAttributesCount: state.droppedAttributesCount + dropped,
    links,
    droppedLinksCount: state.droppedLinksCount + droppedLinks,
    status: state.status
  };
}
|
|
243
|
+
// CBOR-encode an in-memory attribute Map into the wire list form (interned
// key id + encoded bytes). Values are re-sanitized defensively; anything
// rejected or unencodable bumps `dropped`.
function encodeAttributeMap(attributes) {
  const list = [];
  let dropped = 0;
  for (const [key, value] of attributes.entries()) {
    const sanitized = sanitizeAttributeValue(value);
    if (sanitized === void 0) {
      dropped++;
      continue;
    }
    try {
      const encoded = _cborx.encode.call(void 0, sanitized);
      list.push({ key: internString(key), value: toArrayBuffer(encoded) });
    } catch (e2) {
      // Best-effort: count, don't throw.
      dropped++;
    }
  }
  return { attributes: list, dropped };
}
// Sanitize a caller-supplied attributes object into a Map, silently skipping
// rejected values (no drop counting here — the wire-side encode path counts
// drops separately).
function buildAttributeMapFromInput(attributes) {
  const map = /* @__PURE__ */ new Map();
  if (!attributes) {
    return map;
  }
  for (const [key, value] of Object.entries(attributes)) {
    const sanitized = sanitizeAttributeValue(value);
    if (sanitized !== void 0) {
      map.set(key, sanitized);
    }
  }
  return map;
}
// Decode a wire attribute list back into a Map using the chunk string table.
// Unknown string ids fall back to ""; undecodable CBOR values are skipped.
function decodeAttributeList(attributes, strings) {
  const map = /* @__PURE__ */ new Map();
  for (const kv of attributes) {
    const key = _nullishCoalesce(strings[kv.key], () => ( ""));
    try {
      map.set(key, _cborx.decode.call(void 0, toUint8Array(kv.value)));
    } catch (e3) {
      continue;
    }
  }
  return map;
}
// Decode wire links into in-memory link state (attributes become Maps).
function decodeLinks(links, strings) {
  return links.map((link) => ({
    traceId: link.traceId,
    spanId: link.spanId,
    traceState: link.traceState,
    attributes: decodeAttributeList(link.attributes, strings),
    droppedAttributesCount: link.droppedAttributesCount
  }));
}
// Encode in-memory link state back to wire form (inverse of decodeLinks).
// `dropped` stays 0 — links are never dropped here, only their attributes.
function encodeLinkState(links) {
  const result = [];
  let dropped = 0;
  for (const link of links) {
    const { attributes, dropped: droppedAttributes } = encodeAttributeMap(
      link.attributes
    );
    result.push({
      traceId: link.traceId,
      spanId: link.spanId,
      traceState: link.traceState,
      attributes,
      droppedAttributesCount: droppedAttributes
    });
  }
  return { links: result, dropped };
}
|
|
312
|
+
// Append one record to the current chunk, rotating the chunk first when
// needed. `buildBody` is a thunk (not a value) because building the body
// interns strings into the *current* chunk's string table — if the chunk is
// rotated below, the body must be rebuilt against the fresh table.
// Returns the record's index in the chunk, its encoded size, and the body.
function appendRecord(buildBody, providedTimeUnixMs) {
  // Use the caller-supplied wall time (ms) when given, else the anchored clock.
  const absoluteUnixNs = providedTimeUnixMs !== void 0 ? BigInt(Math.floor(providedTimeUnixMs)) * 1000000n : nowUnixNs(timeAnchor);
  const recordBucketStart = computeBucketStartSec(
    absoluteUnixNs,
    bucketSizeSec
  );
  // Rotation 1: the record belongs to a different time bucket.
  if (recordBucketStart !== currentChunk.bucketStartSec) {
    flushChunk();
    resetChunkState(recordBucketStart);
  }
  // Rotation 2: the chunk has been open longer than maxChunkAgeMs.
  if (_perf_hooks.performance.now() - currentChunk.createdAtMonoMs >= maxChunkAgeMs) {
    flushChunk();
    resetChunkState(recordBucketStart);
  }
  let body = buildBody();
  const timeOffsetNs = absoluteUnixNs - currentChunk.baseUnixNs;
  let record = { timeOffsetNs, body };
  let encodedRecord = _chunkQOSSO6CNcjs.encodeRecord.call(void 0, record);
  if (encodedRecord.length > maxChunkBytes) {
    throw new Error("Record exceeds maxChunkBytes");
  }
  // Rotation 3: appending would push the chunk past the soft size target.
  // Rebuild the body so its interned string ids refer to the new chunk.
  if (currentChunk.sizeBytes + encodedRecord.length > targetChunkBytes) {
    flushChunk();
    resetChunkState(recordBucketStart);
    body = buildBody();
    record = { timeOffsetNs, body };
    encodedRecord = _chunkQOSSO6CNcjs.encodeRecord.call(void 0, record);
    if (encodedRecord.length > maxChunkBytes) {
      throw new Error("Record exceeds maxChunkBytes");
    }
  }
  currentChunk.records.push(record);
  currentChunk.sizeBytes += encodedRecord.length;
  const recordIndex = currentChunk.records.length - 1;
  return { recordIndex, encodedBytes: encodedRecord.length, body };
}
|
|
348
|
+
// Seal the current chunk and queue it for a serialized driver write.
// Returns true if anything was flushed; the caller is responsible for
// calling resetChunkState afterwards. The chunk also embeds the refs of all
// currently active spans so readers can find live-span start/snapshot records.
// buildChunkKey is defined elsewhere in this file (not in this view).
function flushChunk() {
  if (currentChunk.records.length === 0) {
    return false;
  }
  const chunk = {
    baseUnixNs: currentChunk.baseUnixNs,
    strings: currentChunk.strings,
    records: currentChunk.records,
    activeSpans: Array.from(activeSpanRefs.values())
  };
  // Serialize with an embedded schema version for forward compatibility.
  const bytes = _chunkQOSSO6CNcjs.CHUNK_VERSIONED.serializeWithEmbeddedVersion(
    chunk,
    _chunkQOSSO6CNcjs.CURRENT_VERSION
  );
  const key = buildChunkKey(currentChunk.bucketStartSec, currentChunk.chunkId);
  // Highest absolute record timestamp in the chunk (records are appended in
  // time order, so the last record carries the max offset).
  const maxRecordNs = chunk.records.length > 0 ? chunk.baseUnixNs + chunk.records[chunk.records.length - 1].timeOffsetNs : chunk.baseUnixNs;
  const pending = {
    key,
    bucketStartSec: currentChunk.bucketStartSec,
    chunkId: currentChunk.chunkId,
    chunk,
    bytes,
    maxRecordNs
  };
  // Keep the chunk readable from memory until the driver write completes.
  pendingChunks.push(pending);
  enqueueWrite(pending);
  return true;
}
|
|
376
|
+
// Queue a sealed chunk for a driver write. Writes are chained onto a single
// promise so the driver sees chunks strictly in flush order; the chunk is
// removed from the in-memory pending list only after it is durable, so
// concurrent reads keep seeing it until then.
function enqueueWrite(pending) {
  writeChain = writeChain.then(async () => {
    await driver.set(pending.key, pending.bytes);
    const index = pendingChunks.indexOf(pending);
    if (index !== -1) {
      pendingChunks.splice(index, 1);
    }
  }).catch((err) => {
    // Without this catch, a single failed driver.set poisons the chain
    // permanently: every later write's callback is skipped (it chains off a
    // rejected promise) and flush()'s `await writeChain` rethrows the same
    // stale error forever, plus Node may raise an unhandled rejection.
    // Swallowing here keeps the chain usable; the failed chunk stays in
    // pendingChunks so its records remain readable from memory.
    // NOTE(review): no logger is in scope — consider surfacing `err` via an
    // onError option instead of dropping it silently.
    void err;
  });
}
|
|
385
|
+
// Re-initialize the (shared, mutated in place) currentChunk for a new bucket:
// fresh chunk id, string table, record list, size, and age timer.
function resetChunkState(bucketStartSec) {
  currentChunk.bucketStartSec = bucketStartSec;
  currentChunk.chunkId = nextChunkId(bucketStartSec);
  currentChunk.baseUnixNs = BigInt(bucketStartSec) * 1000000000n;
  currentChunk.strings = [];
  currentChunk.stringIds = /* @__PURE__ */ new Map();
  currentChunk.records = [];
  currentChunk.sizeBytes = 0;
  currentChunk.createdAtMonoMs = _perf_hooks.performance.now();
}
|
|
395
|
+
// Cap the active-span table at maxActiveSpans by evicting spans until the
// table fits. Eviction order: deepest spans first, and among equal depth the
// most recently started first — i.e. leaf/newest spans are sacrificed before
// their ancestors.
function enforceMaxActiveSpans() {
  if (activeSpans.size <= maxActiveSpans) {
    return;
  }
  const candidates = Array.from(activeSpans.values()).sort((a, b) => {
    if (a.depth !== b.depth) {
      return b.depth - a.depth;
    }
    // startTimeUnixNs is a BigInt, so compare explicitly rather than subtract.
    if (a.startTimeUnixNs > b.startTimeUnixNs) {
      return -1;
    }
    if (a.startTimeUnixNs < b.startTimeUnixNs) {
      return 1;
    }
    return 0;
  });
  for (const span of candidates) {
    dropSpan(span.spanId);
    if (activeSpans.size <= maxActiveSpans) {
      break;
    }
  }
}
// Forget a span's in-memory state and its chunk refs. No SpanEnd record is
// written here — the span simply stops being tracked.
function dropSpan(spanId) {
  const key = spanKey(spanId);
  activeSpans.delete(key);
  activeSpanRefs.delete(key);
}
// Throw if the handle no longer refers to a tracked (un-ended, un-evicted) span.
function assertActive(handle) {
  if (!isActive(handle)) {
    throw new Error("Span handle is not active");
  }
}
// A handle is active while its span id is still in the activeSpans table.
function isActive(handle) {
  return activeSpans.has(spanKey(handle.spanId));
}
|
|
431
|
+
// Start a new span: write a SpanStart record, register the span's in-memory
// state and its chunk ref (for live-span reconstruction), and return a small
// handle { spanId, traceId, isActive }. The parent defaults to the ambient
// span from AsyncLocalStorage; a parented span inherits its trace id, a root
// span gets a fresh random one. computeSpanDepth is defined elsewhere in this
// file (not visible here).
function startSpan(name, options2) {
  const parent = _nullishCoalesce((options2 == null ? void 0 : options2.parent), () => ( getCurrentSpan()));
  if (parent) {
    assertActive(parent);
  }
  const spanIdBytes = _crypto.randomBytes.call(void 0, SPAN_ID_BYTES);
  const traceIdBytes = parent ? parent.traceId : _crypto.randomBytes.call(void 0, TRACE_ID_BYTES);
  // Detached ArrayBuffer copies go into records; the Uint8Arrays go to the caller.
  const spanId = toArrayBuffer(spanIdBytes);
  const traceId = toArrayBuffer(traceIdBytes);
  const parentSpanId = parent ? toArrayBuffer(parent.spanId) : null;
  const { recordIndex, encodedBytes, body } = appendRecord(() => ({
    tag: "SpanStart",
    val: createSpanStartRecord(
      spanId,
      traceId,
      name,
      options2,
      parentSpanId
    )
  }));
  const spanStart = body.val;
  const key = spanKey(spanId);
  // Durable pointer to this span's SpanStart record, embedded in flushed
  // chunks so readers can resolve spans that started before a read window.
  const startKey = {
    prefix: KEY_PREFIX.DATA,
    bucketStartSec: BigInt(currentChunk.bucketStartSec),
    chunkId: currentChunk.chunkId,
    recordIndex
  };
  activeSpanRefs.set(key, {
    spanId,
    startKey,
    latestSnapshotKey: null
  });
  const depth = computeSpanDepth(parentSpanId);
  // Full in-memory state mirrors what a SpanSnapshot would re-encode later.
  activeSpans.set(key, {
    spanId,
    traceId,
    parentSpanId,
    name,
    kind: _nullishCoalesce((options2 == null ? void 0 : options2.kind), () => ( 0)),
    traceState: _nullishCoalesce((options2 == null ? void 0 : options2.traceState), () => ( null)),
    flags: _nullishCoalesce((options2 == null ? void 0 : options2.flags), () => ( 0)),
    attributes: buildAttributeMapFromInput(options2 == null ? void 0 : options2.attributes),
    droppedAttributesCount: spanStart.droppedAttributesCount,
    links: decodeLinks(spanStart.links, currentChunk.strings),
    droppedLinksCount: spanStart.droppedLinksCount,
    status: null,
    // Derive the start time from the record actually written, not a new clock read.
    startTimeUnixNs: currentChunk.baseUnixNs + currentChunk.records[recordIndex].timeOffsetNs,
    depth,
    bytesSinceSnapshot: encodedBytes,
    lastSnapshotMonoMs: _perf_hooks.performance.now()
  });
  enforceMaxActiveSpans();
  return {
    spanId: spanIdBytes,
    traceId: traceIdBytes,
    isActive: () => activeSpans.has(key)
  };
}
|
|
490
|
+
// Apply attribute and/or status updates to an active span: writes a
// SpanUpdate record and mirrors the change into the in-memory span state,
// then lets maybeSnapshot decide whether a snapshot is due.
function updateSpan(handle, options2) {
  // Nothing to record if neither field is present.
  if (!options2.attributes && !options2.status) {
    return;
  }
  assertActive(handle);
  const { encodedBytes, body } = appendRecord(() => ({
    tag: "SpanUpdate",
    val: createSpanUpdateRecord(toArrayBuffer(handle.spanId), options2)
  }));
  const spanUpdate = body.val;
  const state = activeSpans.get(spanKey(handle.spanId));
  if (!state) {
    // Span may have been evicted by appendRecord side effects; record is
    // written but there is no state left to mirror into.
    return;
  }
  if (options2.attributes) {
    // Merge semantics: updated keys overwrite, others are kept.
    const updates = buildAttributeMapFromInput(options2.attributes);
    for (const [key, value] of updates.entries()) {
      state.attributes.set(key, value);
    }
  }
  state.droppedAttributesCount += spanUpdate.droppedAttributesCount;
  if (options2.status) {
    state.status = toBareStatus(options2.status);
  }
  state.bytesSinceSnapshot += encodedBytes;
  maybeSnapshot(handle.spanId, state);
}
// Convenience wrapper: attributes-only update.
function setAttributes(handle, attributes) {
  updateSpan(handle, { attributes });
}
// Convenience wrapper: status-only update.
function setStatus(handle, status) {
  updateSpan(handle, { status });
}
// Record a point-in-time event on an active span. An explicit timeUnixMs in
// options overrides the anchored clock (threaded through appendRecord).
function emitEvent(handle, name, options2) {
  assertActive(handle);
  const { encodedBytes } = appendRecord(
    () => ({
      tag: "SpanEvent",
      val: createSpanEventRecord(toArrayBuffer(handle.spanId), name, options2)
    }),
    options2 == null ? void 0 : options2.timeUnixMs
  );
  const state = activeSpans.get(spanKey(handle.spanId));
  if (state) {
    state.bytesSinceSnapshot += encodedBytes;
    maybeSnapshot(handle.spanId, state);
  }
}
// End a span: write the SpanEnd record (with optional final status) and stop
// tracking it in memory.
function endSpan(handle, options2) {
  assertActive(handle);
  appendRecord(() => ({
    tag: "SpanEnd",
    val: createSpanEndRecord(toArrayBuffer(handle.spanId), options2)
  }));
  dropSpan(handle.spanId);
}
|
|
546
|
+
// Write a SpanSnapshot record when EITHER the byte-delta threshold OR the
// time interval since the last snapshot has been reached (the guard returns
// early only when both are still below their limits). Also records the
// snapshot's chunk location on the span ref so readers can start from the
// latest snapshot instead of replaying from SpanStart.
function maybeSnapshot(spanId, state) {
  if (state.bytesSinceSnapshot < snapshotBytesThreshold && _perf_hooks.performance.now() - state.lastSnapshotMonoMs < snapshotIntervalMs) {
    return;
  }
  const { recordIndex } = appendRecord(() => ({
    tag: "SpanSnapshot",
    val: createSpanSnapshotRecord(state)
  }));
  const key = spanKey(spanId);
  const ref = activeSpanRefs.get(key);
  if (ref) {
    activeSpanRefs.set(key, {
      ...ref,
      latestSnapshotKey: {
        prefix: KEY_PREFIX.DATA,
        bucketStartSec: BigInt(currentChunk.bucketStartSec),
        chunkId: currentChunk.chunkId,
        recordIndex
      }
    });
  }
  state.bytesSinceSnapshot = 0;
  state.lastSnapshotMonoMs = _perf_hooks.performance.now();
}
// Force-flush the current chunk and wait for all queued driver writes to
// settle. Returns whether anything was flushed.
async function flush() {
  const didFlush = flushChunk();
  if (didFlush) {
    resetChunkState(currentChunk.bucketStartSec);
  }
  await writeChain;
  return didFlush;
}
// Run `fn` with `handle` installed as the ambient span (AsyncLocalStorage).
function withSpan(handle, fn) {
  return spanContext.run(handle, fn);
}
// The ambient span handle, or null if there is none or it has been
// ended/evicted since it was installed.
function getCurrentSpan() {
  const handle = _nullishCoalesce(spanContext.getStore(), () => ( null));
  if (!handle) {
    return null;
  }
  return isActive(handle) ? handle : null;
}
|
|
588
|
+
// Read all trace records in [startMs, endMs) in wire form. Sources, in order:
// (1) "base" records for spans that were already active before the window
// (resolved via the chunk immediately preceding the window), (2) chunks
// persisted by the driver, (3) chunks flushed but not yet durably written,
// and (4) the live current chunk. `clamped` signals the caller asked for more
// spans than maxReadLimit or the window contains more unique spans than the
// (clamped) limit. loadBaseRecord is defined elsewhere in this file.
async function readRangeWire(options2) {
  const startMs = Math.floor(options2.startMs);
  const endMs = Math.floor(options2.endMs);
  // Degenerate query: empty window or non-positive limit.
  if (options2.limit <= 0 || endMs <= startMs) {
    return {
      startTimeMs: BigInt(startMs),
      endTimeMs: BigInt(endMs),
      limit: 0,
      clamped: false,
      baseChunks: [],
      chunks: []
    };
  }
  const limitWasClamped = options2.limit > maxReadLimit;
  const limit = Math.min(options2.limit, maxReadLimit);
  const startNs = BigInt(startMs) * 1000000n;
  const endNs = BigInt(endMs) * 1000000n;
  // The last chunk fully before the window carries refs to spans that were
  // still open at that point; resolve each ref to a synthetic one-record
  // "base chunk" so readers can reconstruct those spans.
  const previousChunk = await findPreviousChunk(startNs, bucketSizeSec);
  const activeRefs = _nullishCoalesce((previousChunk == null ? void 0 : previousChunk.activeSpans), () => ( []));
  const baseChunks = [];
  for (const ref of activeRefs) {
    const baseRecord = await loadBaseRecord(ref);
    if (!baseRecord) {
      continue;
    }
    const baseUnixNs = baseRecord.absNs - baseRecord.record.timeOffsetNs;
    baseChunks.push({
      baseUnixNs,
      strings: baseRecord.strings,
      records: [baseRecord.record],
      activeSpans: []
    });
  }
  const chunks = [];
  const diskChunks = await listRangeChunks(startNs, endNs, bucketSizeSec);
  for (const chunk of diskChunks) {
    const filtered = filterChunkRecords(chunk.chunk, startNs, endNs);
    if (filtered) {
      chunks.push(filtered);
    }
  }
  // Chunks queued for write but not yet durable are only in memory.
  for (const pending of pendingChunks) {
    const filtered = filterChunkRecords(pending.chunk, startNs, endNs);
    if (filtered) {
      chunks.push(filtered);
    }
  }
  // Finally, records still accumulating in the live chunk.
  const currentFiltered = filterChunkRecords(
    currentChunkAsChunk(),
    startNs,
    endNs
  );
  if (currentFiltered) {
    chunks.push(currentFiltered);
  }
  const reachedSpanLimit = countUniqueSpanIds(chunks, limit);
  return {
    startTimeMs: BigInt(startMs),
    endTimeMs: BigInt(endMs),
    limit,
    clamped: limitWasClamped || reachedSpanLimit,
    baseChunks,
    chunks
  };
}
|
|
653
|
+
// Read a time range and convert the wire result to OTLP form, attaching the
// configured resource. Conversion lives in a sibling chunk.
async function readRange(options2) {
  const wire = await readRangeWire(options2);
  return _chunkZTVH74GCcjs.readRangeWireToOtlp.call(void 0, wire, resource);
}
|
|
657
|
+
function filterChunkRecords(chunk, startNs, endNs) {
  // Keep only the records whose absolute timestamp (base + offset) falls in
  // the half-open window [startNs, endNs). Returns a shallow chunk copy with
  // the surviving records, or null when nothing matches.
  const inWindow = (record) => {
    const absoluteNs = chunk.baseUnixNs + record.timeOffsetNs;
    return absoluteNs >= startNs && absoluteNs < endNs;
  };
  const kept = chunk.records.filter(inWindow);
  if (kept.length === 0) {
    return null;
  }
  const { baseUnixNs, strings, activeSpans } = chunk;
  return { baseUnixNs, strings, records: kept, activeSpans };
}
|
|
676
|
+
// Return true if the chunks reference MORE than `limit` unique span ids
// (i.e. the read result was clamped). Stops scanning as soon as the
// (limit+1)-th distinct span id is encountered.
function countUniqueSpanIds(chunks, limit) {
  if (limit <= 0) {
    return true;
  }
  const seen = /* @__PURE__ */ new Set();
  for (const chunk of chunks) {
    for (const record of chunk.records) {
      const key = spanKey(recordSpanId(record.body));
      if (seen.has(key)) {
        continue;
      }
      // A new id while already at the limit means the limit was exceeded.
      if (seen.size >= limit) {
        return true;
      }
      seen.add(key);
    }
  }
  return false;
}
|
|
695
|
+
function recordSpanId(body) {
  // Every record variant carries its span id at `val.spanId`; extract it
  // uniformly. Unknown tags yield undefined, matching the original switch
  // falling through without a default.
  if (
    body.tag === "SpanStart" ||
    body.tag === "SpanEvent" ||
    body.tag === "SpanUpdate" ||
    body.tag === "SpanEnd" ||
    body.tag === "SpanSnapshot"
  ) {
    return body.val.spanId;
  }
}
|
|
709
|
+
// View the live in-progress chunk in the same shape as a sealed chunk so the
// read path can treat it uniformly (arrays are shared, not copied).
function currentChunkAsChunk() {
  return {
    baseUnixNs: currentChunk.baseUnixNs,
    strings: currentChunk.strings,
    records: currentChunk.records,
    activeSpans: Array.from(activeSpanRefs.values())
  };
}
// List persisted chunks whose bucket overlaps [startNs, endNs]. The end key
// is the start of the bucket AFTER endNs's bucket, so the final bucket is
// included. Chunks that fail to deserialize are skipped silently.
// buildChunkKey and deserializeChunkSafe are defined elsewhere in this file.
async function listRangeChunks(startNs, endNs, bucketSize) {
  const startBucket = computeBucketStartSec(startNs, bucketSize);
  const endBucket = computeBucketStartSec(endNs, bucketSize);
  const startKey = buildChunkKey(startBucket, 0);
  const endKey = buildChunkKey(endBucket + bucketSize, 0);
  const entries = await driver.listRange(startKey, endKey);
  const output = [];
  for (const entry of entries) {
    const chunk = deserializeChunkSafe(entry.value);
    if (!chunk) {
      continue;
    }
    output.push({ key: entry.key, chunk });
  }
  return output;
}
|
|
733
|
+
async function findPreviousChunk(startNs, bucketSize) {
  // Walk the chunk keyspace backwards (both pending in-memory chunks and
  // persisted ones) until we find a chunk whose newest record is strictly
  // before startNs; returns null when no such chunk exists.
  let cursor = {
    bucketStartSec: computeBucketStartSec(startNs, bucketSize),
    chunkId: AFTER_MAX_CHUNK_ID
  };
  for (;;) {
    const fromPending = findLatestPendingBefore(cursor);
    const fromDisk = await findLatestDiskBefore(cursor);
    const latest = selectLatestCandidate(fromPending, fromDisk);
    if (!latest) {
      return null;
    }
    if (latest.maxRecordNs < startNs) {
      return latest.chunk;
    }
    // Candidate overlaps startNs: move the cursor below it and keep walking.
    cursor = {
      bucketStartSec: latest.bucketStartSec,
      chunkId: latest.chunkId
    };
  }
}
|
|
758
|
+
function findLatestPendingBefore(cursor) {
  // Linear scan of the in-memory pending list for the largest chunk key
  // strictly below `cursor` (the list is small and unordered).
  let latest = null;
  for (const candidate of pendingChunks) {
    const beforeCursor = compareChunkKey(candidate, cursor) < 0;
    if (beforeCursor && (latest === null || compareChunkKey(candidate, latest) > 0)) {
      latest = candidate;
    }
  }
  return latest;
}
|
|
770
|
+
async function findLatestDiskBefore(cursor) {
  // Scan the persisted chunk keyspace backwards for the newest chunk whose
  // key is strictly below `cursor`, skipping entries that fail to decode.
  // Returns a candidate { key, bucketStartSec, chunkId, chunk, bytes,
  // maxRecordNs } or null when nothing older exists.
  const startKey = buildChunkKey(0, 0);
  let endKey = buildChunkKey(cursor.bucketStartSec, cursor.chunkId);
  while (true) {
    // Page backwards in batches of 10 entries.
    const entries = await driver.listRange(startKey, endKey, {
      reverse: true,
      limit: 10
    });
    if (entries.length === 0) {
      return null;
    }
    for (const entry of entries) {
      const chunk = deserializeChunkSafe(entry.value);
      if (!chunk) {
        // Corrupted/undecodable entry: tighten the upper bound and re-query.
        // NOTE(review): forward progress relies on listRange treating endKey
        // as exclusive — confirm against the driver contract.
        endKey = entry.key;
        continue;
      }
      const { bucketStartSec, chunkId } = decodeChunkKey(entry.key);
      // Newest timestamp in the chunk: base time plus the last record's
      // offset (records appear to be stored in time order — offsets are
      // relative to baseUnixNs; empty chunks fall back to baseUnixNs).
      const maxRecordNs = chunk.records.length > 0 ? chunk.baseUnixNs + chunk.records[chunk.records.length - 1].timeOffsetNs : chunk.baseUnixNs;
      return {
        key: entry.key,
        bucketStartSec,
        chunkId,
        chunk,
        bytes: entry.value,
        maxRecordNs
      };
    }
  }
}
|
|
800
|
+
function selectLatestCandidate(pending, disk) {
  // Pick the candidate with the larger chunk key; ties go to the
  // pending/in-memory copy. When only one source produced a candidate,
  // return that one (or null/undefined if neither did).
  if (pending == null) {
    return disk;
  }
  if (disk == null || compareChunkKey(pending, disk) >= 0) {
    return pending;
  }
  return disk;
}
|
|
806
|
+
function compareChunkKey(a, b) {
  // Total order over chunk keys: bucket start first, chunk id as tiebreaker.
  const bucketDelta = a.bucketStartSec - b.bucketStartSec;
  return bucketDelta !== 0 ? bucketDelta : a.chunkId - b.chunkId;
}
|
|
812
|
+
function decodeChunkKey(key) {
  // Inverse of buildChunkKey: unpack the fdb-tuple key
  // [KEY_PREFIX.DATA, bucketStartSec, chunkId]; index 0 is the prefix tag
  // and is deliberately ignored.
  const tuple = _fdbtuple.unpack.call(void 0, _buffer.Buffer.from(key));
  return { bucketStartSec: tuple[1], chunkId: tuple[2] };
}
|
|
816
|
+
function buildChunkKey(bucketStartSec, chunkId) {
  // Tuple-encode (DATA prefix, bucket, chunk id); range scans elsewhere rely
  // on these keys sorting by bucket then chunk id (tuple encoding is
  // order-preserving).
  return _fdbtuple.pack.call(void 0, [KEY_PREFIX.DATA, bucketStartSec, chunkId]);
}
|
|
819
|
+
function deserializeChunkSafe(bytes) {
  // Best-effort chunk decode: returns null on any parse/version error so
  // callers can skip corrupted entries instead of failing an entire scan.
  try {
    return _chunkQOSSO6CNcjs.CHUNK_VERSIONED.deserializeWithEmbeddedVersion(bytes);
  } catch (e4) {
    return null;
  }
}
|
|
826
|
+
async function loadBaseRecord(ref) {
  // Resolve the base record for reconstructing a span: prefer the latest
  // snapshot key, falling back to the span's start key. Returns
  // { record, strings, absNs } or null when the chunk/record is missing.
  const key = ref.latestSnapshotKey != null ? ref.latestSnapshotKey : ref.startKey;
  const bucketStartSec = toNumber(key.bucketStartSec);
  // Fast path: the chunk is still buffered in memory.
  const memoryChunk = findChunkInMemory(bucketStartSec, key.chunkId);
  if (memoryChunk) {
    const memoryRecord = memoryChunk.records[key.recordIndex];
    if (!memoryRecord) {
      return null;
    }
    return {
      record: memoryRecord,
      strings: memoryChunk.strings,
      absNs: memoryChunk.baseUnixNs + memoryRecord.timeOffsetNs
    };
  }
  // Slow path: fetch and decode the persisted chunk.
  const bytes = await driver.get(buildChunkKey(bucketStartSec, key.chunkId));
  if (!bytes) {
    return null;
  }
  const diskChunk = deserializeChunkSafe(bytes);
  if (!diskChunk) {
    return null;
  }
  const diskRecord = diskChunk.records[key.recordIndex];
  if (!diskRecord) {
    return null;
  }
  return {
    record: diskRecord,
    strings: diskChunk.strings,
    absNs: diskChunk.baseUnixNs + diskRecord.timeOffsetNs
  };
}
|
|
854
|
+
function findChunkInMemory(bucketStartSec, chunkId) {
  // Look for the chunk among in-memory copies: the active write-head chunk
  // first, then the pending (not-yet-flushed) list. Null when not buffered.
  const isCurrent = currentChunk.bucketStartSec === bucketStartSec && currentChunk.chunkId === chunkId;
  if (isCurrent) {
    return currentChunkAsChunk();
  }
  const pendingMatch = pendingChunks.find(
    (candidate) => candidate.bucketStartSec === bucketStartSec && candidate.chunkId === chunkId
  );
  if (pendingMatch == null || pendingMatch.chunk == null) {
    return null;
  }
  return pendingMatch.chunk;
}
|
|
863
|
+
function toNumber(value) {
  // Coerce a (possibly BigInt/string) value to a plain number, refusing
  // anything that is not exactly representable as a safe integer.
  const converted = Number(value);
  if (Number.isSafeInteger(converted)) {
    return converted;
  }
  throw new Error("Value exceeds safe integer range");
}
|
|
870
|
+
function computeSpanDepth(parentSpanId) {
  // Depth is parent depth + 1; spans with no parent — or whose parent is no
  // longer tracked in activeSpans — are treated as roots (depth 0).
  if (!parentSpanId) {
    return 0;
  }
  const parentEntry = activeSpans.get(spanKey(parentSpanId));
  return parentEntry ? parentEntry.depth + 1 : 0;
}
|
|
880
|
+
function randomSpanId() {
  // Cryptographically random span id (SPAN_ID_BYTES bytes) as an ArrayBuffer.
  return toArrayBuffer(_crypto.randomBytes.call(void 0, SPAN_ID_BYTES));
}
|
|
883
|
+
function randomTraceId() {
  // Cryptographically random trace id (TRACE_ID_BYTES bytes) as an ArrayBuffer.
  return toArrayBuffer(_crypto.randomBytes.call(void 0, TRACE_ID_BYTES));
}
|
|
886
|
+
function toBareStatus(status) {
  // Normalize an OTel-style status into the BARE wire shape: the message is
  // always present and nullable (never undefined).
  const message = status.message == null ? null : status.message;
  return {
    code: toBareStatusCode(status.code),
    message
  };
}
|
|
892
|
+
function toBareStatusCode(code) {
  // Map an OTel status code string onto the BARE status enum; anything
  // unrecognized (and explicit "UNSET") normalizes to UNSET.
  if (code === "OK" /* OK */ || code === "ERROR" /* ERROR */) {
    return code;
  }
  return "UNSET" /* UNSET */;
}
|
|
903
|
+
return {
|
|
904
|
+
startSpan,
|
|
905
|
+
updateSpan,
|
|
906
|
+
setAttributes,
|
|
907
|
+
setStatus,
|
|
908
|
+
endSpan,
|
|
909
|
+
emitEvent,
|
|
910
|
+
withSpan,
|
|
911
|
+
getCurrentSpan,
|
|
912
|
+
flush,
|
|
913
|
+
readRange,
|
|
914
|
+
readRangeWire
|
|
915
|
+
};
|
|
916
|
+
}
|
|
917
|
+
|
|
918
|
+
|
|
919
|
+
|
|
920
|
+
// Public entry points: the real tracer factory plus the shared no-op stub.
exports.createNoopTraces = _chunkDXS2HLRNcjs.createNoopTraces; exports.createTraces = createTraces;
|
|
921
|
+
//# sourceMappingURL=index.cjs.map
|