@runtime-digital-twin/sdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +214 -0
- package/dist/constants.d.ts +11 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +13 -0
- package/dist/db-wrapper.d.ts +258 -0
- package/dist/db-wrapper.d.ts.map +1 -0
- package/dist/db-wrapper.js +636 -0
- package/dist/event-envelope.d.ts +35 -0
- package/dist/event-envelope.d.ts.map +1 -0
- package/dist/event-envelope.js +101 -0
- package/dist/fastify-plugin.d.ts +29 -0
- package/dist/fastify-plugin.d.ts.map +1 -0
- package/dist/fastify-plugin.js +243 -0
- package/dist/http-sentinels.d.ts +39 -0
- package/dist/http-sentinels.d.ts.map +1 -0
- package/dist/http-sentinels.js +169 -0
- package/dist/http-wrapper.d.ts +25 -0
- package/dist/http-wrapper.d.ts.map +1 -0
- package/dist/http-wrapper.js +477 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +93 -0
- package/dist/invariants.d.ts +58 -0
- package/dist/invariants.d.ts.map +1 -0
- package/dist/invariants.js +192 -0
- package/dist/multi-service-edge-builder.d.ts +80 -0
- package/dist/multi-service-edge-builder.d.ts.map +1 -0
- package/dist/multi-service-edge-builder.js +107 -0
- package/dist/outbound-matcher.d.ts +192 -0
- package/dist/outbound-matcher.d.ts.map +1 -0
- package/dist/outbound-matcher.js +457 -0
- package/dist/peer-service-resolver.d.ts +22 -0
- package/dist/peer-service-resolver.d.ts.map +1 -0
- package/dist/peer-service-resolver.js +85 -0
- package/dist/redaction.d.ts +111 -0
- package/dist/redaction.d.ts.map +1 -0
- package/dist/redaction.js +487 -0
- package/dist/replay-logger.d.ts +438 -0
- package/dist/replay-logger.d.ts.map +1 -0
- package/dist/replay-logger.js +434 -0
- package/dist/root-cause-analyzer.d.ts +45 -0
- package/dist/root-cause-analyzer.d.ts.map +1 -0
- package/dist/root-cause-analyzer.js +606 -0
- package/dist/shape-digest-utils.d.ts +45 -0
- package/dist/shape-digest-utils.d.ts.map +1 -0
- package/dist/shape-digest-utils.js +154 -0
- package/dist/trace-bundle-writer.d.ts +52 -0
- package/dist/trace-bundle-writer.d.ts.map +1 -0
- package/dist/trace-bundle-writer.js +267 -0
- package/dist/trace-loader.d.ts +69 -0
- package/dist/trace-loader.d.ts.map +1 -0
- package/dist/trace-loader.js +146 -0
- package/dist/trace-uploader.d.ts +25 -0
- package/dist/trace-uploader.d.ts.map +1 -0
- package/dist/trace-uploader.js +132 -0
- package/package.json +63 -0
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Shape Digest Utilities
|
|
4
|
+
*
|
|
5
|
+
* Utilities for computing shape digests with sampling and feature flags.
|
|
6
|
+
*/
|
|
7
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
8
|
+
exports.getShapeDigestMode = getShapeDigestMode;
|
|
9
|
+
exports.getShapeDigestSampleRate = getShapeDigestSampleRate;
|
|
10
|
+
exports.shouldComputeShapeDigest = shouldComputeShapeDigest;
|
|
11
|
+
exports.isJsonContentType = isJsonContentType;
|
|
12
|
+
exports.tryParseJson = tryParseJson;
|
|
13
|
+
exports.computeBodyShapeDigest = computeBodyShapeDigest;
|
|
14
|
+
exports.getContentType = getContentType;
|
|
15
|
+
exports.getBodySizeBytes = getBodySizeBytes;
|
|
16
|
+
const core_1 = require("@runtime-digital-twin/core");
|
|
17
|
+
/**
 * Resolve the shape-digest mode from the WRAITH_SHAPE_DIGEST_MODE
 * environment variable (case-insensitive). Unrecognized or missing
 * values fall back to the default "sampled" mode.
 */
function getShapeDigestMode() {
    const configured = process.env.WRAITH_SHAPE_DIGEST_MODE?.toLowerCase();
    switch (configured) {
        case "off":
        case "sampled":
        case "on":
            return configured;
        default:
            return "sampled"; // default when unset or invalid
    }
}
|
|
27
|
+
/**
 * Resolve the sampling rate from WRAITH_SHAPE_DIGEST_SAMPLE_RATE.
 * Non-numeric values or values outside [0, 1] fall back to the
 * default of 0.1 (10%).
 */
function getShapeDigestSampleRate() {
    const raw = process.env.WRAITH_SHAPE_DIGEST_SAMPLE_RATE || "0.1";
    const parsed = parseFloat(raw);
    const valid = !isNaN(parsed) && parsed >= 0 && parsed <= 1;
    return valid ? parsed : 0.1; // default 10%
}
|
|
37
|
+
/**
 * Decide whether a shape digest should be computed.
 *
 * - "off": never compute
 * - "on": always compute
 * - "sampled": always compute when an error occurred or the HTTP
 *   status is >= 400; otherwise sample randomly at the given rate.
 */
function shouldComputeShapeDigest(mode, sampleRate, statusCode, hasError) {
    if (mode === "off")
        return false;
    if (mode === "on")
        return true;
    // mode === "sampled": error responses are always captured
    const isErrorStatus = statusCode != null && statusCode >= 400;
    if (hasError || isErrorStatus)
        return true;
    // Probabilistic sampling for successful responses
    return Math.random() < sampleRate;
}
|
|
55
|
+
/**
 * Check whether a content-type header value indicates a JSON payload.
 * Recognizes plain JSON plus common JSON-based media types
 * (JSON:API, HAL, RFC 7807 problem details). Matching is
 * case-insensitive and substring-based, so parameters like
 * "; charset=utf-8" are tolerated.
 */
function isJsonContentType(contentType) {
    if (!contentType)
        return false;
    const jsonTypes = [
        "application/json",
        "application/vnd.api+json",
        "application/hal+json",
        "application/problem+json",
    ];
    const normalized = contentType.toLowerCase();
    return jsonTypes.some((t) => normalized.includes(t));
}
|
|
67
|
+
/**
 * Best-effort JSON parse of a request/response body.
 *
 * - Falsy bodies yield null.
 * - Non-Buffer objects are returned as-is (assumed already parsed).
 * - Buffers and strings are parsed as UTF-8 JSON.
 * - Anything unparseable (or of an unsupported type) yields null.
 */
function tryParseJson(body) {
    if (!body)
        return null;
    // Objects other than Buffers are assumed to be parsed JSON already
    if (typeof body === "object" && !Buffer.isBuffer(body)) {
        return body;
    }
    const text = Buffer.isBuffer(body)
        ? body.toString("utf8")
        : typeof body === "string"
            ? body
            : null;
    if (text === null)
        return null;
    try {
        return JSON.parse(text);
    }
    catch {
        return null;
    }
}
|
|
94
|
+
/**
 * Compute a shape digest for a body when the mode/sampling conditions
 * allow it. Returns null when sampling skips the request or the body
 * is not JSON (neither by content type nor by parseability).
 */
async function computeBodyShapeDigest(body, contentType, options) {
    const { mode, sampleRate, statusCode, hasError } = options;
    if (!shouldComputeShapeDigest(mode, sampleRate, statusCode, hasError)) {
        return null;
    }
    const parsed = tryParseJson(body);
    // Accept bodies that either declare a JSON content type or can be
    // parsed as JSON — this covers missing/incorrect content-type headers.
    if (parsed === null && !isJsonContentType(contentType)) {
        return null;
    }
    // Digest the parsed value when available, otherwise the raw body
    const input = parsed !== null ? parsed : body;
    return (0, core_1.computeShapeDigest)(input);
}
|
|
114
|
+
/**
 * Extract the content-type value from a headers container.
 *
 * Supports WHATWG Headers (and Headers-like mocks exposing a `.get()`
 * method) as well as plain header objects whose values are strings or
 * string arrays. Returns null when no content type can be found.
 */
function getContentType(headers) {
    if (!headers)
        return null;
    // Headers-like object (real WHATWG Headers or a mock exposing .get).
    // Guard the instanceof check: the Headers global does not exist in
    // older Node.js runtimes, and `headers instanceof Headers` would
    // throw a ReferenceError there.
    if ((typeof Headers !== "undefined" && headers instanceof Headers) ||
        (typeof headers.get === 'function')) {
        return headers.get("content-type");
    }
    // Plain object: accept both lowercase and canonical capitalization
    const contentType = headers["content-type"] || headers["Content-Type"];
    if (typeof contentType === "string") {
        return contentType;
    }
    // Repeated headers may arrive as an array; use the first value
    if (Array.isArray(contentType) && contentType.length > 0) {
        return contentType[0];
    }
    return null;
}
|
|
133
|
+
/**
 * Measure a body's size in bytes.
 *
 * Falsy bodies count as 0; Buffers report their byte length, strings
 * their UTF-8 byte length, and objects the byte length of their JSON
 * serialization. Returns undefined when the size cannot be determined
 * (unserializable object or unsupported value type).
 */
function getBodySizeBytes(body) {
    if (!body)
        return 0;
    if (Buffer.isBuffer(body))
        return body.length;
    if (typeof body === "string")
        return Buffer.byteLength(body, "utf8");
    if (typeof body !== "object")
        return undefined;
    try {
        // Size of the canonical JSON serialization
        return Buffer.byteLength(JSON.stringify(body), "utf8");
    }
    catch {
        // e.g. circular structures cannot be serialized
        return undefined;
    }
}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
/**
 * Options accepted by {@link createTrace}.
 */
export interface CreateTraceOptions {
    /** Name of the service producing the trace. */
    serviceName: string;
    /** Capture mode; the implementation defaults to "record" when omitted. */
    mode?: "record" | "replay";
    /** Service version recorded in meta.json; defaults to "1.0.0". */
    serviceVersion?: string;
    /** Deployment environment recorded in meta.json; defaults to "production". */
    environment?: string;
    /** Root directory under which the trace bundle directory is created. */
    traceDir?: string;
    /** Pre-assigned trace ID (e.g. from cross-service propagation); generated when omitted. */
    traceId?: string;
}
/**
 * Handle for an in-progress trace bundle, returned by {@link createTrace}.
 */
export interface TraceBundle {
    /** Unique identifier of this trace (directory name on disk). */
    traceId: string;
    /** Service name this bundle was created for. */
    serviceName: string;
    /** Append one event to the trace's events.jsonl log. */
    writeEvent: (event: Record<string, any>) => Promise<void>;
    /** Store content as a deduplicated blob; resolves with a "sha256:<hex>" reference. */
    writeBlob: (content: string | object) => Promise<string>;
    /** Finalize the trace, optionally recording a summary of the inbound request. */
    complete: (inboundRequest?: {
        method: string;
        path: string;
        headers: Record<string, string>;
        bodyHash?: string;
    }) => Promise<void>;
}
import { CURRENT_TRACE_FORMAT_VERSION } from "./constants";
export { CURRENT_TRACE_FORMAT_VERSION };
/**
 * Generate a span ID
 */
declare function generateSpanId(): string;
/**
 * Create a new trace bundle
 */
export declare function createTrace(options: CreateTraceOptions): Promise<TraceBundle>;
/**
 * Helper to process body content and store as blob
 * Applies redaction to sensitive fields before storing
 * NOTE: Always stores bodies as blobs so they can be retrieved during replay
 */
export declare function processBody(body: string | Buffer | object | null | undefined, writeBlobFn: (content: string | object) => Promise<string>, options?: {
    skipRedaction?: boolean;
}): Promise<{
    bodyHash: string | null;
    bodyBlob: string | null;
}>;
/**
 * Filter headers based on allowlist and apply redaction
 */
export declare function filterHeaders(headers: Record<string, string | string[] | undefined>, allowlist?: string[], options?: {
    skipRedaction?: boolean;
}): Record<string, string>;
/**
 * Generate span ID (exported for use in middleware)
 */
export { generateSpanId };
//# sourceMappingURL=trace-bundle-writer.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"trace-bundle-writer.d.ts","sourceRoot":"","sources":["../src/trace-bundle-writer.ts"],"names":[],"mappings":"AAYA,MAAM,WAAW,kBAAkB;IACjC,WAAW,EAAE,MAAM,CAAC;IACpB,IAAI,CAAC,EAAE,QAAQ,GAAG,QAAQ,CAAC;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC1D,SAAS,EAAE,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;IACzD,QAAQ,EAAE,CAAC,cAAc,CAAC,EAAE;QAC1B,MAAM,EAAE,MAAM,CAAC;QACf,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;CACrB;AAED,OAAO,EAAE,4BAA4B,EAAE,MAAM,aAAa,CAAC;AAM3D,OAAO,EAAE,4BAA4B,EAAE,CAAC;AAWxC;;GAEG;AACH,iBAAS,cAAc,IAAI,MAAM,CAEhC;AAgBD;;GAEG;AACH,wBAAsB,WAAW,CAC/B,OAAO,EAAE,kBAAkB,GAC1B,OAAO,CAAC,WAAW,CAAC,CAkLtB;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CACzB,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,IAAI,GAAG,SAAS,EACjD,WAAW,EAAE,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,EAC1D,OAAO,CAAC,EAAE;IAAE,aAAa,CAAC,EAAE,OAAO,CAAA;CAAE,GACpC,OAAO,CAAC;IAAE,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IAAC,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAA;CAAE,CAAC,CAiC7D;AAEH;;GAEG;AACH,wBAAgB,aAAa,CAC3B,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS,CAAC,EACtD,SAAS,CAAC,EAAE,MAAM,EAAE,EACpB,OAAO,CAAC,EAAE;IAAE,aAAa,CAAC,EAAE,OAAO,CAAA;CAAE,GACpC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAyCxB;AAED;;GAEG;AACH,OAAO,EAAE,cAAc,EAAE,CAAC"}
|
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.CURRENT_TRACE_FORMAT_VERSION = void 0;
|
|
4
|
+
exports.createTrace = createTrace;
|
|
5
|
+
exports.processBody = processBody;
|
|
6
|
+
exports.filterHeaders = filterHeaders;
|
|
7
|
+
exports.generateSpanId = generateSpanId;
|
|
8
|
+
const crypto_1 = require("crypto");
|
|
9
|
+
const promises_1 = require("fs/promises");
|
|
10
|
+
const path_1 = require("path");
|
|
11
|
+
const fs_1 = require("fs");
|
|
12
|
+
const redaction_1 = require("./redaction");
|
|
13
|
+
const event_envelope_1 = require("./event-envelope");
|
|
14
|
+
const constants_1 = require("./constants");
|
|
15
|
+
Object.defineProperty(exports, "CURRENT_TRACE_FORMAT_VERSION", { enumerable: true, get: function () { return constants_1.CURRENT_TRACE_FORMAT_VERSION; } });
|
|
16
|
+
const BLOB_THRESHOLD = 1024; // 1KB
|
|
17
|
+
const DEFAULT_TRACE_DIR = "./traces";
|
|
18
|
+
/**
 * Generate a trace ID of the form: trace-<epoch-millis>-<random>,
 * where the random suffix is up to 8 base-36 characters.
 */
function generateTraceId() {
    const suffix = Math.random().toString(36).substring(2, 10);
    return ["trace", Date.now(), suffix].join("-");
}
|
|
26
|
+
/**
 * Generate a span ID of the form: span-<random base-36 suffix>.
 */
function generateSpanId() {
    const suffix = Math.random().toString(36).substring(2, 15);
    return "span-" + suffix;
}
|
|
32
|
+
/**
 * Compute the SHA-256 digest of content, hex-encoded.
 */
function computeHash(content) {
    const hasher = (0, crypto_1.createHash)("sha256");
    hasher.update(content);
    return hasher.digest("hex");
}
|
|
38
|
+
/**
 * Prefix a hex digest with its algorithm name: "sha256:<hex>".
 */
function formatHash(hex) {
    return ["sha256", hex].join(":");
}
|
|
44
|
+
/**
 * Create a new trace bundle on disk.
 *
 * Builds the directory layout <traceDir>/<traceId>/{blobs/, events.jsonl,
 * meta.json}, writes the initial metadata, and returns a TraceBundle whose
 * closures append events, store content-addressed blobs, and finalize the
 * trace. Setup failures (mkdir / initial writeFile) throw; later event
 * writes are fail-open (logged, never thrown) so tracing cannot crash the
 * host service.
 *
 * @param options - serviceName plus optional mode, serviceVersion,
 *   environment, traceDir, and a pre-assigned traceId.
 * @returns A TraceBundle exposing writeEvent, writeBlob, and complete.
 * @throws If the trace directory or the initial meta.json/events.jsonl
 *   cannot be created.
 */
async function createTrace(options) {
    const { serviceName, mode = "record", serviceVersion = "1.0.0", environment = "production", traceDir = DEFAULT_TRACE_DIR, traceId: providedTraceId, } = options;
    // Use provided traceId (from cross-service propagation) or generate new one
    const traceId = providedTraceId || generateTraceId();
    // On-disk layout: <traceDir>/<traceId>/{blobs/, events.jsonl, meta.json}
    const tracePath = (0, path_1.join)(traceDir, traceId);
    const blobsPath = (0, path_1.join)(tracePath, "blobs");
    const eventsPath = (0, path_1.join)(tracePath, "events.jsonl");
    const metaPath = (0, path_1.join)(tracePath, "meta.json");
    // Create directory structure
    try {
        await (0, promises_1.mkdir)(blobsPath, { recursive: true });
    }
    catch (error) {
        // Fail-open: log error but don't throw
        console.error(`[SDK] Failed to create trace directory: ${error}`);
        throw error; // Re-throw for critical errors
    }
    // Write initial meta.json
    const startedAt = new Date().toISOString();
    const meta = {
        trace_format_version: constants_1.CURRENT_TRACE_FORMAT_VERSION,
        traceId,
        startedAt,
        completedAt: null,
        serviceName,
        serviceVersion,
        environment,
        mode,
        inboundRequest: null,
    };
    try {
        await (0, promises_1.writeFile)(metaPath, JSON.stringify(meta, null, 2));
        // Create empty events.jsonl file eagerly
        await (0, promises_1.writeFile)(eventsPath, "", "utf8");
    }
    catch (error) {
        console.error(`[SDK] Failed to write meta.json: ${error}`);
        throw error;
    }
    // Set once by complete(); blocks further event writes (blob writes are
    // still permitted — see writeBlob below).
    let completed = false;
    // Append one event (wrapped in the standard envelope) to events.jsonl.
    // Fail-open: any I/O error is logged and swallowed.
    const writeEvent = async (event) => {
        if (completed) {
            console.warn(`[SDK] Attempted to write event to completed trace: ${traceId}`);
            return;
        }
        try {
            // Extract required fields from event
            const eventType = event.type;
            const eventTimestamp = event.timestamp ?? Date.now();
            const eventSpanId = event.spanId;
            const eventParentSpanId = event.parentSpanId ?? null;
            // Remove base envelope fields from event to avoid duplication
            // (they will be added by createBaseEvent)
            const { type: _, timestamp: __, traceId: ___, spanId: ____, parentSpanId: _____, serviceName: ______, ...eventFields } = event;
            // Create base envelope and merge with event-specific fields
            // Base envelope fields take precedence to ensure consistency
            const envelopeEvent = (0, event_envelope_1.createBaseEvent)(eventType, eventTimestamp, traceId, // From bundle
            eventSpanId, eventParentSpanId, serviceName, // From bundle
            eventFields // Event-specific fields only
            );
            const line = JSON.stringify(envelopeEvent) + "\n";
            // Ensure directory exists before writing (in case it was deleted)
            try {
                await (0, promises_1.mkdir)((0, path_1.dirname)(eventsPath), { recursive: true });
            }
            catch (dirError) {
                // Directory might already exist, ignore
            }
            // Ensure file exists (create if it doesn't)
            if (!(0, fs_1.existsSync)(eventsPath)) {
                await (0, promises_1.writeFile)(eventsPath, "", "utf8");
            }
            await (0, promises_1.appendFile)(eventsPath, line, "utf8");
            if (process.env.DEBUG_TRACE_WRITER) {
                console.log(`[TRACE_WRITER] Wrote event: ${eventType} to ${eventsPath}`);
            }
        }
        catch (error) {
            // Fail-open: log error but don't throw
            console.error(`[SDK] Failed to write event: ${error}`);
            if (process.env.DEBUG_TRACE_WRITER) {
                console.error(`[TRACE_WRITER] Error details:`, error);
                console.error(`[TRACE_WRITER] Event path: ${eventsPath}`);
                console.error(`[TRACE_WRITER] Event type: ${event.type}`);
            }
        }
    };
    // Persist content to blobs/<sha256>.json, deduplicated by content hash.
    // Unlike writeEvent, failures here ARE re-thrown to the caller.
    const writeBlob = async (content) => {
        // Allow blob writes even after trace completion (for async operations)
        // This is safe because blobs are deduplicated by hash
        if (completed && process.env.DEBUG_TRACE_WRITER) {
            console.warn(`[SDK] Writing blob to completed trace: ${traceId}`);
        }
        try {
            // Ensure blobs directory exists (even if trace is completed)
            try {
                await (0, promises_1.mkdir)(blobsPath, { recursive: true });
            }
            catch (error) {
                // Directory might already exist, ignore
            }
            const contentStr = typeof content === "string" ? content : JSON.stringify(content);
            const hash = computeHash(contentStr);
            const hashFormatted = formatHash(hash);
            const blobPath = (0, path_1.join)(blobsPath, `${hash}.json`);
            // Only write if blob doesn't exist (deduplication)
            if (!(0, fs_1.existsSync)(blobPath)) {
                await (0, promises_1.writeFile)(blobPath, contentStr, "utf8");
            }
            return hashFormatted;
        }
        catch (error) {
            console.error(`[SDK] Failed to write blob: ${error}`);
            throw error;
        }
    };
    // Finalize the trace: stamp completedAt and the inbound request summary
    // into meta.json. Idempotent — subsequent calls are no-ops. Failures are
    // logged, not thrown.
    const complete = async (inboundRequest) => {
        if (completed) {
            return;
        }
        completed = true;
        try {
            const completedAt = new Date().toISOString();
            const finalMeta = {
                ...meta,
                completedAt,
                inboundRequest: inboundRequest || null,
            };
            await (0, promises_1.writeFile)(metaPath, JSON.stringify(finalMeta, null, 2));
        }
        catch (error) {
            console.error(`[SDK] Failed to complete trace: ${error}`);
        }
    };
    return {
        traceId,
        serviceName,
        writeEvent,
        writeBlob,
        complete,
    };
}
|
|
189
|
+
/**
 * Serialize a body, apply redaction (unless skipped), and persist it
 * as a blob so it can be retrieved during replay.
 *
 * Falsy bodies resolve to { bodyHash: null, bodyBlob: null }. Buffers
 * are decoded as UTF-8, objects are JSON-serialized, and everything
 * else is stringified. The returned hash is computed over the
 * (possibly redacted) serialized string.
 */
function processBody(body, writeBlobFn, options) {
    if (!body) {
        return Promise.resolve({ bodyHash: null, bodyBlob: null });
    }
    // Normalize the body to a string
    let serialized;
    if (Buffer.isBuffer(body)) {
        serialized = body.toString("utf8");
    }
    else {
        serialized = typeof body === "object" ? JSON.stringify(body) : String(body);
    }
    // Redact sensitive fields unless the caller explicitly opted out
    if (!options?.skipRedaction) {
        const redacted = (0, redaction_1.redactBody)(serialized);
        if (typeof redacted === "string") {
            serialized = redacted;
        }
        else if (redacted != null) {
            serialized = JSON.stringify(redacted);
        }
    }
    const hashFormatted = formatHash(computeHash(serialized));
    // Always store the body as a blob so replay can retrieve it,
    // regardless of its size.
    return writeBlobFn(serialized).then((blobRef) => ({
        bodyHash: hashFormatted,
        bodyBlob: blobRef,
    }));
}
|
|
227
|
+
/**
 * Filter headers down to an allowlist and apply redaction.
 *
 * When no allowlist is given, a default set of common, safe-to-capture
 * headers is used. Matching is case-insensitive; array values are
 * collapsed into a ", "-joined string. Redaction is applied unless
 * options.skipRedaction is set.
 */
function filterHeaders(headers, allowlist, options) {
    if (!allowlist || allowlist.length === 0) {
        // Recurse with the default allowlist of common headers
        return filterHeaders(headers, [
            "content-type",
            "content-length",
            "user-agent",
            "accept",
            "accept-encoding",
            "authorization",
            "x-api-key",
            "x-request-id",
            "x-correlation-id",
        ], options);
    }
    const allowed = new Set(allowlist.map((h) => h.toLowerCase()));
    const filtered = {};
    for (const [name, value] of Object.entries(headers)) {
        if (value === undefined || !allowed.has(name.toLowerCase())) {
            continue;
        }
        // Multi-valued headers become a single comma-separated string
        filtered[name] = Array.isArray(value) ? value.join(", ") : String(value);
    }
    if (options?.skipRedaction) {
        return filtered;
    }
    // Redact sensitive values before returning
    const redacted = (0, redaction_1.redactHeaders)(filtered);
    const result = {};
    for (const [name, value] of Object.entries(redacted)) {
        if (value !== undefined) {
            result[name] = Array.isArray(value) ? value.join(", ") : String(value);
        }
    }
    return result;
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
/**
 * Structured failure object for trace loading errors
 */
export interface TraceLoadFailure {
    /** Category of failure encountered while loading the trace. */
    type: 'schema_drift' | 'missing_file' | 'parse_error' | 'validation_error';
    /** Where the failure occurred (e.g. a file path or field). */
    location: string;
    /** What the loader expected to find. */
    expected: string;
    /** What was actually found. */
    actual: string;
    /** Human-readable suggestion for resolving the failure. */
    hint: string;
}
/**
 * Result of loading trace metadata (the parsed meta.json of a trace bundle).
 */
export interface TraceMetadata {
    /** On-disk trace format version; compared against CURRENT_TRACE_FORMAT_VERSION. */
    trace_format_version: number;
    traceId: string;
    /** ISO-8601 timestamp of trace creation. */
    startedAt: string;
    /** ISO-8601 completion timestamp, or null if the trace was never completed. */
    completedAt: string | null;
    serviceName: string;
    serviceVersion: string;
    environment: string;
    mode: 'record' | 'replay';
    /** Summary of the inbound request, or null if not recorded. */
    inboundRequest: {
        method: string;
        path: string;
        headers: Record<string, string>;
        bodyHash?: string;
    } | null;
}
/**
 * Warning event emitted when trace_format_version is missing
 */
export interface TraceVersionWarning {
    type: 'warning';
    code: 'missing_trace_format_version';
    message: string;
    /** Directory of the trace that triggered the warning. */
    traceDir: string;
    /** Version the loader assumed in place of the missing field. */
    assumedVersion: number;
}
/**
 * Result of loading trace metadata with potential warnings
 */
export interface TraceLoadResult {
    meta: TraceMetadata;
    warnings: TraceVersionWarning[];
}
/**
 * Load trace metadata with version guard.
 *
 * - If trace_format_version is missing: assumes version 1 and emits a warning
 * - If trace_format_version > CURRENT_TRACE_FORMAT_VERSION: throws TraceLoadFailure
 *
 * @param traceDir - Path to the trace directory
 * @returns TraceLoadResult with metadata and any warnings
 * @throws TraceLoadFailure if trace cannot be loaded or version is unsupported
 */
export declare function loadTraceMeta(traceDir: string): Promise<TraceLoadResult>;
/**
 * Validate that a trace directory contains required files.
 *
 * @param traceDir - Path to the trace directory
 * @throws TraceLoadFailure if required files are missing
 */
export declare function validateTraceStructure(traceDir: string): Promise<void>;
/**
 * Helper to format TraceLoadFailure for CLI output
 */
export declare function formatTraceLoadFailure(failure: TraceLoadFailure): string;
//# sourceMappingURL=trace-loader.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"trace-loader.d.ts","sourceRoot":"","sources":["../src/trace-loader.ts"],"names":[],"mappings":"AAKA;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,cAAc,GAAG,cAAc,GAAG,aAAa,GAAG,kBAAkB,CAAC;IAC3E,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,oBAAoB,EAAE,MAAM,CAAC;IAC7B,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,WAAW,EAAE,MAAM,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,WAAW,EAAE,MAAM,CAAC;IACpB,IAAI,EAAE,QAAQ,GAAG,QAAQ,CAAC;IAC1B,cAAc,EAAE;QACd,MAAM,EAAE,MAAM,CAAC;QACf,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,GAAG,IAAI,CAAC;CACV;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,IAAI,EAAE,SAAS,CAAC;IAChB,IAAI,EAAE,8BAA8B,CAAC;IACrC,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,EAAE,MAAM,CAAC;IACjB,cAAc,EAAE,MAAM,CAAC;CACxB;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,aAAa,CAAC;IACpB,QAAQ,EAAE,mBAAmB,EAAE,CAAC;CACjC;AAcD;;;;;;;;;GASG;AACH,wBAAsB,aAAa,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC,CAwF9E;AAED;;;;;GAKG;AACH,wBAAsB,sBAAsB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAa5E;AAED;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,OAAO,EAAE,gBAAgB,GAAG,MAAM,CAQxE"}
|