@showrun/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/__tests__/dsl-validation.test.d.ts +2 -0
- package/dist/__tests__/dsl-validation.test.d.ts.map +1 -0
- package/dist/__tests__/dsl-validation.test.js +203 -0
- package/dist/__tests__/pack-versioning.test.d.ts +2 -0
- package/dist/__tests__/pack-versioning.test.d.ts.map +1 -0
- package/dist/__tests__/pack-versioning.test.js +165 -0
- package/dist/__tests__/validator.test.d.ts +2 -0
- package/dist/__tests__/validator.test.d.ts.map +1 -0
- package/dist/__tests__/validator.test.js +149 -0
- package/dist/authResilience.d.ts +146 -0
- package/dist/authResilience.d.ts.map +1 -0
- package/dist/authResilience.js +378 -0
- package/dist/browserLauncher.d.ts +74 -0
- package/dist/browserLauncher.d.ts.map +1 -0
- package/dist/browserLauncher.js +159 -0
- package/dist/browserPersistence.d.ts +49 -0
- package/dist/browserPersistence.d.ts.map +1 -0
- package/dist/browserPersistence.js +143 -0
- package/dist/context.d.ts +10 -0
- package/dist/context.d.ts.map +1 -0
- package/dist/context.js +30 -0
- package/dist/dsl/builders.d.ts +340 -0
- package/dist/dsl/builders.d.ts.map +1 -0
- package/dist/dsl/builders.js +416 -0
- package/dist/dsl/conditions.d.ts +33 -0
- package/dist/dsl/conditions.d.ts.map +1 -0
- package/dist/dsl/conditions.js +169 -0
- package/dist/dsl/interpreter.d.ts +24 -0
- package/dist/dsl/interpreter.d.ts.map +1 -0
- package/dist/dsl/interpreter.js +491 -0
- package/dist/dsl/stepHandlers.d.ts +32 -0
- package/dist/dsl/stepHandlers.d.ts.map +1 -0
- package/dist/dsl/stepHandlers.js +787 -0
- package/dist/dsl/target.d.ts +28 -0
- package/dist/dsl/target.d.ts.map +1 -0
- package/dist/dsl/target.js +110 -0
- package/dist/dsl/templating.d.ts +21 -0
- package/dist/dsl/templating.d.ts.map +1 -0
- package/dist/dsl/templating.js +73 -0
- package/dist/dsl/types.d.ts +695 -0
- package/dist/dsl/types.d.ts.map +1 -0
- package/dist/dsl/types.js +7 -0
- package/dist/dsl/validation.d.ts +15 -0
- package/dist/dsl/validation.d.ts.map +1 -0
- package/dist/dsl/validation.js +974 -0
- package/dist/index.d.ts +20 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +20 -0
- package/dist/jsonPackValidator.d.ts +11 -0
- package/dist/jsonPackValidator.d.ts.map +1 -0
- package/dist/jsonPackValidator.js +61 -0
- package/dist/loader.d.ts +35 -0
- package/dist/loader.d.ts.map +1 -0
- package/dist/loader.js +107 -0
- package/dist/networkCapture.d.ts +107 -0
- package/dist/networkCapture.d.ts.map +1 -0
- package/dist/networkCapture.js +390 -0
- package/dist/packUtils.d.ts +36 -0
- package/dist/packUtils.d.ts.map +1 -0
- package/dist/packUtils.js +97 -0
- package/dist/packVersioning.d.ts +25 -0
- package/dist/packVersioning.d.ts.map +1 -0
- package/dist/packVersioning.js +137 -0
- package/dist/runner.d.ts +62 -0
- package/dist/runner.d.ts.map +1 -0
- package/dist/runner.js +170 -0
- package/dist/types.d.ts +336 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +1 -0
- package/dist/validator.d.ts +20 -0
- package/dist/validator.d.ts.map +1 -0
- package/dist/validator.js +68 -0
- package/package.json +49 -0
|
@@ -0,0 +1,390 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Network capture for Playwright page sessions.
|
|
3
|
+
* Hooks request/response, maintains a rolling buffer with size limits.
|
|
4
|
+
* Full request headers kept in-memory for replay only; redacted everywhere else.
|
|
5
|
+
*/
|
|
6
|
+
import { gunzipSync } from 'zlib';
|
|
7
|
+
// Header names whose values must never leave the in-memory store unredacted.
const SENSITIVE_HEADER_NAMES = new Set([
    'authorization',
    'cookie',
    'set-cookie',
    'x-api-key',
    'proxy-authorization',
]);
const POST_DATA_TRUNCATE = 64 * 1024; // 64KB
const RESPONSE_BODY_MAX_STORE_BYTES = 5 * 1024 * 1024; // 5MB - full body stored when under this (for replay, extract, responseContains)
const NETWORK_BUFFER_MAX_ENTRIES = 300;
const NETWORK_BUFFER_MAX_BYTES = 50 * 1024 * 1024; // 50MB - rolling buffer cap so we can keep more large responses
/**
 * Return a copy of `headers` with sensitive values masked as '[REDACTED]'.
 * Key casing is preserved; matching is case-insensitive.
 */
function redactHeaders(headers) {
    return Object.fromEntries(Object.entries(headers).map(([name, value]) => [
        name,
        SENSITIVE_HEADER_NAMES.has(name.toLowerCase()) ? '[REDACTED]' : value,
    ]));
}
|
|
26
|
+
/**
 * True when a Content-Type denotes a textual or JSON payload that is worth
 * storing as UTF-8 text (application/json, any +json suffix type, or text/*).
 */
function isTextOrJsonContentType(ct) {
    if (!ct) {
        return false;
    }
    const normalized = ct.toLowerCase();
    return ['application/json', '+json', 'text/'].some((marker) => normalized.includes(marker));
}
|
|
34
|
+
// First two bytes of every gzip stream (RFC 1952 magic number).
const GZIP_MAGIC = Buffer.from([0x1f, 0x8b]);
/** Cheap magic-byte check: does this buffer begin a gzip stream? */
function isGzip(body) {
    if (body.length < 2) {
        return false;
    }
    return body[0] === GZIP_MAGIC[0] && body[1] === GZIP_MAGIC[1];
}
/** Decompress gzip if needed; returns body unchanged if not gzip */
function maybeDecompress(body) {
    if (!isGzip(body)) {
        return body;
    }
    try {
        return gunzipSync(body);
    }
    catch {
        // Corrupt or truncated gzip: fall back to the raw bytes.
        return body;
    }
}
|
|
50
|
+
/** True if buffer looks like JSON (starts with { or [), for responses with missing Content-Type */
function looksLikeJson(body) {
    for (let i = 0; i < body.length; i++) {
        const byte = body[i];
        // Skip leading whitespace: space, LF, CR, tab.
        if (byte === 0x20 || byte === 0x0a || byte === 0x0d || byte === 0x09) {
            continue;
        }
        return byte === 0x7b || byte === 0x5b; // '{' or '['
    }
    // Empty or all-whitespace buffers are not JSON.
    return false;
}
|
|
63
|
+
// Monotonic counter; combined with a timestamp so ids stay unique per process.
let idCounter = 0;
/** Allocate the next capture-entry id, e.g. "req-12-1700000000000". */
function nextId() {
    idCounter += 1;
    return `req-${idCounter}-${Date.now()}`;
}
|
|
67
|
+
/**
 * Normalize captured post data: undefined for missing/empty bodies,
 * truncated with a trailing marker when longer than POST_DATA_TRUNCATE.
 */
function truncatePostData(raw) {
    if (raw == null || raw === '') {
        return undefined;
    }
    return raw.length > POST_DATA_TRUNCATE
        ? `${raw.slice(0, POST_DATA_TRUNCATE)}...[truncated]`
        : raw;
}
|
|
75
|
+
/**
 * Attach network capture to a Playwright page and return the capture API.
 * Full request headers are kept in-memory only for replay; list/get return redacted summaries.
 *
 * Maintains a rolling buffer of request/response entries, bounded by
 * NETWORK_BUFFER_MAX_ENTRIES and NETWORK_BUFFER_MAX_BYTES (estimated).
 * The returned API offers list/get/find, replay via page.request,
 * clear, and export/import of individual entries.
 */
export function attachNetworkCapture(page) {
    // Rolling buffer (oldest first) and an id -> entry index into it.
    const buffer = [];
    const mapById = new Map();
    // Running estimate of bytes held; drives eviction in dropOldest().
    let totalBytesEstimate = 0;
    // Correlates in-flight Playwright Request objects with their entries
    // until the matching 'response' event fires.
    const requestToEntry = new Map();
    // Evict oldest entries while over the entry-count or byte caps.
    function dropOldest() {
        while ((buffer.length >= NETWORK_BUFFER_MAX_ENTRIES || totalBytesEstimate > NETWORK_BUFFER_MAX_BYTES) &&
            buffer.length > 0) {
            const removed = buffer.shift();
            mapById.delete(removed.id);
            totalBytesEstimate -= removed.bytesEstimate;
        }
    }
    // Build the redacted, snippet-only view returned by list()/get().
    function entryToSummary(entry) {
        // Prefer a text snippet; mark stored binary bodies; otherwise omit.
        const snippet = entry.responseBodyText != null
            ? entry.responseBodyText.slice(0, 2048)
            : entry.responseBodyBase64
                ? '[binary]'
                : undefined;
        return {
            id: entry.id,
            ts: entry.ts,
            method: entry.method,
            url: entry.url,
            resourceType: entry.resourceType,
            // Redact on the way out; the full headers stay in-memory for replay.
            requestHeaders: redactHeaders(entry.requestHeadersFull),
            postData: entry.postData,
            status: entry.status,
            responseHeaders: entry.responseHeaders,
            contentType: entry.contentType,
            responseBodySnippet: snippet,
            responseBodyAvailable: entry.responseBodyText != null || entry.responseBodyBase64 != null,
        };
    }
    // Evaluate a `where` filter against one entry. All provided clauses must
    // match (AND semantics); string matching is case-insensitive.
    function matchesWhere(entry, where) {
        if (where.urlIncludes != null && !entry.url.toLowerCase().includes(where.urlIncludes.toLowerCase()))
            return false;
        if (where.urlRegex != null) {
            try {
                const re = new RegExp(where.urlRegex);
                if (!re.test(entry.url))
                    return false;
            }
            catch {
                // Invalid user-supplied regex: treat as non-matching rather than throw.
                return false;
            }
        }
        if (where.method != null && entry.method !== where.method)
            return false;
        if (where.status != null && entry.status !== where.status)
            return false;
        if (where.contentTypeIncludes != null &&
            (!entry.contentType || !entry.contentType.toLowerCase().includes(where.contentTypeIncludes.toLowerCase())))
            return false;
        if (where.responseContains != null) {
            let bodyText = entry.responseBodyText ?? null;
            // Fall back to decoding a stored binary body (possibly gzip) to text.
            if (bodyText == null && entry.responseBodyBase64 != null) {
                try {
                    const buf = Buffer.from(entry.responseBodyBase64, 'base64');
                    const decompressed = maybeDecompress(buf);
                    bodyText = decompressed.toString('utf8');
                }
                catch {
                    bodyText = null;
                }
            }
            if (bodyText == null)
                return false;
            if (!bodyText.toLowerCase().includes(where.responseContains.toLowerCase()))
                return false;
        }
        return true;
    }
    // Record every outgoing request immediately; response data is filled in later.
    page.on('request', (request) => {
        const id = nextId();
        const url = request.url();
        const method = request.method();
        const resourceType = request.resourceType();
        const headers = request.headers();
        const headersFull = {};
        for (const [k, v] of Object.entries(headers)) {
            headersFull[k] = v;
        }
        const postData = truncatePostData(request.postData() ?? undefined);
        const entry = {
            id,
            ts: Date.now(),
            method,
            url,
            resourceType,
            requestHeaders: redactHeaders(headersFull),
            requestHeadersFull: headersFull,
            postData,
            bytesEstimate: 0,
        };
        // Rough per-entry memory estimate; +200 covers fixed per-entry overhead.
        entry.bytesEstimate =
            url.length +
                JSON.stringify(entry.requestHeaders).length +
                (postData?.length ?? 0) +
                200;
        requestToEntry.set(request, entry);
        mapById.set(id, entry);
        buffer.push(entry);
        totalBytesEstimate += entry.bytesEstimate;
        dropOldest();
    });
    // Fill in status/headers/body when the response arrives.
    page.on('response', async (response) => {
        const req = response.request();
        const entry = requestToEntry.get(req);
        // Entry may already have been evicted by dropOldest().
        if (!entry)
            return;
        entry.status = response.status();
        const respHeaders = {};
        for (const [k, v] of Object.entries(response.headers())) {
            respHeaders[k] = v;
        }
        entry.responseHeaders = redactHeaders(respHeaders);
        const ct = respHeaders['content-type'] ?? respHeaders['Content-Type'];
        entry.contentType = ct;
        try {
            let body = await response.body();
            body = maybeDecompress(body);
            const size = body.length;
            const canStoreAsText = isTextOrJsonContentType(ct) || looksLikeJson(body);
            if (size <= RESPONSE_BODY_MAX_STORE_BYTES && canStoreAsText) {
                // Textual body: store as UTF-8 string and grow the byte estimate.
                const text = body.toString('utf8');
                entry.responseBodyText = text;
                const added = Buffer.byteLength(text, 'utf8');
                entry.bytesEstimate += added;
                totalBytesEstimate += added;
            }
            else if (size <= RESPONSE_BODY_MAX_STORE_BYTES && body.length > 0) {
                // Binary body: store base64; estimate accounts for base64 expansion (~4/3).
                entry.responseBodyBase64 = body.toString('base64');
                const added = Math.ceil((body.length * 4) / 3);
                entry.bytesEstimate += added;
                totalBytesEstimate += added;
            }
        }
        catch {
            // ignore body read errors
        }
        requestToEntry.delete(req);
        dropOldest();
    });
    const api = {
        // Most recent `limit` entries, optionally filtered to API-like traffic.
        // NOTE: slice happens before filtering, so fewer than `limit` may return.
        list(limit = 50, filter = 'all') {
            let list = buffer.slice(-limit);
            if (filter === 'api' || filter === 'xhr') {
                list = list.filter((e) => e.resourceType === 'xhr' ||
                    e.resourceType === 'fetch' ||
                    /\/api\//.test(e.url.toLowerCase()) ||
                    /graphql/i.test(e.url));
            }
            return list.map(entryToSummary);
        },
        // Redacted summary for one entry, or null if unknown/evicted.
        get(requestId) {
            const entry = mapById.get(requestId);
            return entry ? api.get === undefined ? null : entryToSummary(entry) : null;
        },
        // Convenience: find by filter and return the summary directly.
        find(where, pick) {
            const id = api.getRequestIdByIndex(where, pick);
            return id ? api.get(id) : null;
        },
        // Resolve a `where` filter to an entry id; pick 'last' (newest) or first (oldest).
        getRequestIdByIndex(where, pick) {
            const matches = [];
            for (let i = buffer.length - 1; i >= 0; i--) {
                if (matchesWhere(buffer[i], where))
                    matches.push(buffer[i]);
            }
            if (pick === 'last') {
                // last = most recent = first in our reversed list
                return matches.length > 0 ? matches[0].id : null;
            }
            // first = oldest match
            return matches.length > 0 ? matches[matches.length - 1].id : null;
        },
        // Re-issue a captured request via the context's APIRequestContext
        // (shares cookies with the browser). Sensitive headers cannot be overridden.
        async replay(requestId, overrides) {
            const entry = mapById.get(requestId);
            if (!entry) {
                throw new Error(`Request not found: ${requestId}`);
            }
            if (overrides?.setHeaders) {
                for (const k of Object.keys(overrides.setHeaders)) {
                    if (SENSITIVE_HEADER_NAMES.has(k.toLowerCase())) {
                        throw new Error(`Cannot set sensitive header: ${k}`);
                    }
                }
            }
            // Playwright: page.request is APIRequestContext sharing cookies with the browser context
            const requestContext = page.request;
            if (!requestContext || typeof requestContext.fetch !== 'function') {
                throw new Error('Browser context does not support API request (replay). Playwright version may be too old.');
            }
            let url = entry.url;
            if (overrides?.urlReplace) {
                try {
                    const re = new RegExp(overrides.urlReplace.find, 'g');
                    url = url.replace(re, overrides.urlReplace.replace);
                }
                catch (e) {
                    throw new Error(`overrides.urlReplace.find is not a valid regex: ${e instanceof Error ? e.message : String(e)}`);
                }
            }
            // A literal url override wins over urlReplace.
            if (overrides?.url != null)
                url = overrides.url;
            let body = entry.postData ?? undefined;
            if (body != null && overrides?.bodyReplace) {
                try {
                    const re = new RegExp(overrides.bodyReplace.find, 'g');
                    body = body.replace(re, overrides.bodyReplace.replace);
                }
                catch (e) {
                    throw new Error(`overrides.bodyReplace.find is not a valid regex: ${e instanceof Error ? e.message : String(e)}`);
                }
            }
            // A literal body override wins over bodyReplace.
            if (overrides?.body != null)
                body = overrides.body;
            const method = entry.method;
            // Start from the full (unredacted) captured headers; apply overrides,
            // silently skipping sensitive names (already rejected above).
            const headers = { ...entry.requestHeadersFull };
            if (overrides?.setHeaders) {
                for (const [k, v] of Object.entries(overrides.setHeaders)) {
                    if (SENSITIVE_HEADER_NAMES.has(k.toLowerCase()))
                        continue;
                    headers[k] = v;
                }
            }
            if (overrides?.setQuery) {
                const u = new URL(url);
                for (const [k, v] of Object.entries(overrides.setQuery)) {
                    u.searchParams.set(k, String(v));
                }
                url = u.toString();
            }
            const response = await requestContext.fetch(url, {
                method,
                headers,
                data: body,
            });
            const respBody = await response.body();
            const bodySize = respBody.length;
            const contentType = response.headers()['content-type'] ?? response.headers()['Content-Type'];
            // Full text for small textual responses; 2KB snippet otherwise.
            const bodyText = bodySize <= RESPONSE_BODY_MAX_STORE_BYTES && isTextOrJsonContentType(contentType)
                ? respBody.toString('utf8')
                : respBody.toString('utf8').slice(0, 2048) + (bodySize > 2048 ? '...[truncated]' : '');
            return {
                status: response.status(),
                contentType,
                body: bodyText,
                bodySize,
            };
        },
        // Drop all captured entries and reset accounting.
        clear() {
            buffer.length = 0;
            mapById.clear();
            totalBytesEstimate = 0;
        },
        // Serialize one entry (INCLUDING unredacted request headers) for transfer.
        exportEntry(requestId) {
            const entry = mapById.get(requestId);
            if (!entry)
                return null;
            return {
                id: entry.id,
                ts: entry.ts,
                method: entry.method,
                url: entry.url,
                resourceType: entry.resourceType,
                requestHeaders: entry.requestHeaders,
                requestHeadersFull: entry.requestHeadersFull,
                postData: entry.postData,
                status: entry.status,
                responseHeaders: entry.responseHeaders,
                contentType: entry.contentType,
                responseBodyText: entry.responseBodyText,
                responseBodyBase64: entry.responseBodyBase64,
            };
        },
        // Re-ingest a previously exported entry into the buffer.
        importEntry(entry) {
            // Skip if entry already exists
            if (mapById.has(entry.id))
                return;
            const internal = {
                id: entry.id,
                ts: entry.ts,
                method: entry.method,
                url: entry.url,
                resourceType: entry.resourceType,
                requestHeaders: entry.requestHeaders,
                requestHeadersFull: entry.requestHeadersFull,
                postData: entry.postData,
                status: entry.status,
                responseHeaders: entry.responseHeaders,
                contentType: entry.contentType,
                responseBodyText: entry.responseBodyText,
                responseBodyBase64: entry.responseBodyBase64,
                bytesEstimate: 0,
            };
            // Calculate bytes estimate
            // NOTE(review): base64 size here uses *3/4 (decoded bytes), while the
            // live capture path uses *4/3 (encoded bytes) — confirm which is intended.
            internal.bytesEstimate =
                entry.url.length +
                    JSON.stringify(entry.requestHeaders).length +
                    (entry.postData?.length ?? 0) +
                    (entry.responseBodyText ? Buffer.byteLength(entry.responseBodyText, 'utf8') : 0) +
                    (entry.responseBodyBase64 ? Math.ceil((entry.responseBodyBase64.length * 3) / 4) : 0) +
                    200;
            mapById.set(entry.id, internal);
            buffer.push(internal);
            totalBytesEstimate += internal.bytesEstimate;
            dropOldest();
        },
    };
    return api;
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type { TaskPackManifest, InputSchema, CollectibleDefinition } from './types.js';
import type { DslStep } from './dsl/types.js';
/**
 * Sanitize a pack ID to be safe for use as a directory name.
 * Dots become dashes; other characters outside [a-zA-Z0-9_-] become underscores.
 */
export declare function sanitizePackId(packId: string): string;
/**
 * Ensure a directory exists, creating it (and any missing parents) if necessary.
 */
export declare function ensureDir(dirPath: string): void;
/**
 * Atomic write: write to temp file then rename over the destination.
 */
export declare function atomicWrite(filePath: string, content: string): void;
/**
 * Validate that a path is within an allowed directory; throws if it is not.
 */
export declare function validatePathInAllowedDir(path: string, allowedDir: string): void;
/**
 * Read JSON file safely; throws on a missing file or malformed JSON.
 */
export declare function readJsonFile<T>(filePath: string): T;
/**
 * Write taskpack.json (pretty-printed, written atomically).
 */
export declare function writeTaskPackManifest(packDir: string, manifest: TaskPackManifest): void;
/**
 * Write flow.json
 * @param skipValidation If true, skip validation (useful for empty flows during pack creation)
 */
export declare function writeFlowJson(packDir: string, flowData: {
    inputs?: InputSchema;
    collectibles?: CollectibleDefinition[];
    flow: DslStep[];
}, skipValidation?: boolean): void;
//# sourceMappingURL=packUtils.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"packUtils.d.ts","sourceRoot":"","sources":["../src/packUtils.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAC;AACvF,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,gBAAgB,CAAC;AAG9C;;GAEG;AACH,wBAAgB,cAAc,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAGrD;AAED;;GAEG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI,CAE/C;AAED;;GAEG;AACH,wBAAgB,WAAW,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAgBnE;AAED;;GAEG;AACH,wBAAgB,wBAAwB,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI,CAO/E;AAED;;GAEG;AACH,wBAAgB,YAAY,CAAC,CAAC,EAAE,QAAQ,EAAE,MAAM,GAAG,CAAC,CAUnD;AAED;;GAEG;AACH,wBAAgB,qBAAqB,CACnC,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,gBAAgB,GACzB,IAAI,CAIN;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAC3B,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE;IACR,MAAM,CAAC,EAAE,WAAW,CAAC;IACrB,YAAY,CAAC,EAAE,qBAAqB,EAAE,CAAC;IACvC,IAAI,EAAE,OAAO,EAAE,CAAC;CACjB,EACD,cAAc,UAAQ,GACrB,IAAI,CAoBN"}
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pack utilities for task pack file operations
|
|
3
|
+
*/
|
|
4
|
+
import { existsSync, mkdirSync, readFileSync, renameSync, unlinkSync, writeFileSync } from 'fs';
import { join, resolve } from 'path';
import { validateJsonTaskPack } from './jsonPackValidator.js';
|
|
7
|
+
/**
 * Make a pack ID safe for use as a directory name.
 * Dots become dashes (some MCP clients reject dots); anything else outside
 * [a-zA-Z0-9_-] becomes an underscore.
 */
export function sanitizePackId(packId) {
    const dotless = packId.replace(/\./g, '-');
    return dotless.replace(/[^a-zA-Z0-9_-]/g, '_');
}
|
|
14
|
+
/**
 * Create `dirPath` (and any missing parent directories) if it does not
 * already exist; a no-op when the directory is already present.
 */
export function ensureDir(dirPath) {
    // `recursive: true` also suppresses EEXIST for existing directories.
    mkdirSync(dirPath, { recursive: true });
}
|
|
20
|
+
/**
 * Atomically write `content` to `filePath`: write to a sibling temp file,
 * then rename it over the destination (rename is atomic on POSIX filesystems,
 * so readers never observe a partially written file).
 *
 * @param {string} filePath - Destination path.
 * @param {string} content - Full file content (written as UTF-8).
 * @throws Rethrows the original write/rename error after cleaning up.
 */
export function atomicWrite(filePath, content) {
    const tempPath = `${filePath}.tmp`;
    try {
        writeFileSync(tempPath, content, 'utf-8');
        renameSync(tempPath, filePath);
    }
    catch (error) {
        // Clean up the temp file on error.
        // Bug fix: the previous code renamed tempPath onto filePath here, which
        // would either repeat the failing rename or publish a partially written
        // file to the destination — the temp file must be deleted instead.
        try {
            if (existsSync(tempPath)) {
                unlinkSync(tempPath);
            }
        }
        catch {
            // Ignore cleanup errors; the original error is more important.
        }
        throw error;
    }
}
|
|
42
|
+
/**
 * Throw unless `path` resolves to `allowedDir` itself or to a location
 * strictly beneath it. Guards against path traversal out of the pack root.
 */
export function validatePathInAllowedDir(path, allowedDir) {
    const target = resolve(path);
    const root = resolve(allowedDir);
    const inside = target === root || target.startsWith(`${root}/`);
    if (!inside) {
        throw new Error(`Path ${target} is outside allowed directory ${root}`);
    }
}
|
|
52
|
+
/**
 * Read and parse a JSON file, with explicit errors for a missing file
 * and for malformed JSON (the parse error message is included).
 */
export function readJsonFile(filePath) {
    if (!existsSync(filePath)) {
        throw new Error(`File not found: ${filePath}`);
    }
    try {
        return JSON.parse(readFileSync(filePath, 'utf-8'));
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        throw new Error(`Failed to parse JSON file ${filePath}: ${reason}`);
    }
}
|
|
67
|
+
/**
 * Serialize `manifest` as pretty-printed JSON (with trailing newline) and
 * atomically write it to taskpack.json under `packDir`.
 */
export function writeTaskPackManifest(packDir, manifest) {
    const serialized = `${JSON.stringify(manifest, null, 2)}\n`;
    atomicWrite(join(packDir, 'taskpack.json'), serialized);
}
|
|
75
|
+
/**
 * Write flow.json
 * @param skipValidation If true, skip validation (useful for empty flows during pack creation)
 */
export function writeFlowJson(packDir, flowData, skipValidation = false) {
    // Validate before writing (unless skipping for empty flows)
    if (!skipValidation) {
        // Wrap the flow in a throwaway pack so the full-pack validator can run.
        validateJsonTaskPack({
            metadata: {
                id: 'temp',
                name: 'temp',
                version: '0.0.0',
            },
            inputs: flowData.inputs || {},
            collectibles: flowData.collectibles || [],
            flow: flowData.flow,
        });
    }
    const serialized = `${JSON.stringify(flowData, null, 2)}\n`;
    atomicWrite(join(packDir, 'flow.json'), serialized);
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import type { FlowVersion } from './types.js';
/**
 * Save a version snapshot of the current flow.json and taskpack.json.
 * Returns the manifest record describing the new snapshot.
 */
export declare function saveVersion(packDir: string, opts: {
    label?: string;
    source: FlowVersion['source'];
    conversationId?: string;
}): FlowVersion;
/**
 * List all saved versions for a pack. Returns [] if no versions exist.
 */
export declare function listVersions(packDir: string): FlowVersion[];
/**
 * Read the versioned files without restoring them.
 * `taskpack` may be null for old snapshots without a taskpack copy.
 */
export declare function getVersionFiles(packDir: string, versionNumber: number): {
    flow: unknown;
    taskpack: unknown;
};
/**
 * Restore a previous version. Auto-saves the current state first.
 */
export declare function restoreVersion(packDir: string, versionNumber: number): void;
//# sourceMappingURL=packVersioning.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"packVersioning.d.ts","sourceRoot":"","sources":["../src/packVersioning.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAE,WAAW,EAAqC,MAAM,YAAY,CAAC;AAgCjF;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,EACf,IAAI,EAAE;IACJ,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,WAAW,CAAC,QAAQ,CAAC,CAAC;IAC9B,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB,GACA,WAAW,CAgDb;AAED;;GAEG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW,EAAE,CAG3D;AAED;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,EACf,aAAa,EAAE,MAAM,GACpB;IAAE,IAAI,EAAE,OAAO,CAAC;IAAC,QAAQ,EAAE,OAAO,CAAA;CAAE,CActC;AAED;;GAEG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,EACf,aAAa,EAAE,MAAM,GACpB,IAAI,CAgCN"}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pack versioning: save, list, and restore snapshots of flow.json + taskpack.json
|
|
3
|
+
*/
|
|
4
|
+
import { join } from 'path';
|
|
5
|
+
import { existsSync, copyFileSync, unlinkSync } from 'fs';
|
|
6
|
+
import { ensureDir, readJsonFile, atomicWrite } from './packUtils.js';
|
|
7
|
+
// Directory (inside each pack) holding snapshot copies plus the manifest.
const VERSIONS_DIR = '.versions';
// Manifest file name inside the versions directory.
const MANIFEST_FILE = 'manifest.json';
// Default cap on retained snapshots; older ones are pruned beyond this.
const DEFAULT_MAX_VERSIONS = 50;
// Path of the pack's versions directory.
function versionsDir(packDir) {
    return join(packDir, VERSIONS_DIR);
}
// Path of the versions manifest file.
function manifestPath(packDir) {
    return join(versionsDir(packDir), MANIFEST_FILE);
}
|
|
16
|
+
// Load the versions manifest, or a fresh empty one when none exists yet.
function readManifest(packDir) {
    const file = manifestPath(packDir);
    return existsSync(file)
        ? readJsonFile(file)
        : { versions: [], maxVersions: DEFAULT_MAX_VERSIONS };
}
|
|
23
|
+
// Persist the versions manifest, creating the .versions directory on first use.
function writeManifest(packDir, manifest) {
    ensureDir(versionsDir(packDir));
    const body = `${JSON.stringify(manifest, null, 2)}\n`;
    atomicWrite(manifestPath(packDir), body);
}
|
|
27
|
+
// Next version number: one past the highest recorded, starting at 1.
function nextVersionNumber(manifest) {
    let highest = 0;
    for (const v of manifest.versions) {
        if (v.number > highest) {
            highest = v.number;
        }
    }
    return highest + 1;
}
|
|
32
|
+
/**
 * Save a version snapshot of the current flow.json and taskpack.json.
 *
 * Copies both files into <packDir>/.versions/ as "<n>.flow.json" and
 * "<n>.taskpack.json", appends a record to the versions manifest, and prunes
 * the oldest snapshots once more than maxVersions are retained.
 *
 * @param packDir Pack directory containing flow.json and taskpack.json.
 * @param opts Snapshot metadata: optional label, source, optional conversationId.
 * @returns The newly created version record.
 * @throws If flow.json or taskpack.json is missing from packDir.
 */
export function saveVersion(packDir, opts) {
    const flowPath = join(packDir, 'flow.json');
    const taskpackPath = join(packDir, 'taskpack.json');
    if (!existsSync(flowPath)) {
        throw new Error(`flow.json not found in ${packDir}`);
    }
    if (!existsSync(taskpackPath)) {
        throw new Error(`taskpack.json not found in ${packDir}`);
    }
    const manifest = readManifest(packDir);
    const num = nextVersionNumber(manifest);
    const vDir = versionsDir(packDir);
    ensureDir(vDir);
    // Read metadata.version from taskpack.json
    // NOTE(review): this reads the TOP-LEVEL `version` field; if the manifest
    // nests it as `metadata.version` (as writeFlowJson's temp pack suggests),
    // this always falls back to '0.0.0' — confirm the taskpack.json shape.
    const taskpackData = readJsonFile(taskpackPath);
    const metadataVersion = taskpackData.version || '0.0.0';
    // Copy current files to versioned copies
    copyFileSync(flowPath, join(vDir, `${num}.flow.json`));
    copyFileSync(taskpackPath, join(vDir, `${num}.taskpack.json`));
    const version = {
        number: num,
        version: metadataVersion,
        timestamp: new Date().toISOString(),
        label: opts.label,
        source: opts.source,
        conversationId: opts.conversationId,
    };
    manifest.versions.push(version);
    // Prune oldest versions if exceeding maxVersions
    const max = manifest.maxVersions || DEFAULT_MAX_VERSIONS;
    while (manifest.versions.length > max) {
        const oldest = manifest.versions.shift();
        // Remove old versioned files; deletion failures are non-fatal because
        // the manifest (written below) remains the source of truth.
        const oldFlow = join(vDir, `${oldest.number}.flow.json`);
        const oldTaskpack = join(vDir, `${oldest.number}.taskpack.json`);
        try {
            if (existsSync(oldFlow))
                unlinkSync(oldFlow);
        }
        catch { /* ignore */ }
        try {
            if (existsSync(oldTaskpack))
                unlinkSync(oldTaskpack);
        }
        catch { /* ignore */ }
    }
    writeManifest(packDir, manifest);
    return version;
}
|
|
84
|
+
/** List all saved versions for a pack. Returns [] if no versions exist. */
export function listVersions(packDir) {
    return readManifest(packDir).versions;
}
|
|
91
|
+
/**
 * Read the versioned files for snapshot `versionNumber` without restoring
 * them. Throws when the flow snapshot is missing; the taskpack snapshot is
 * optional (null for very old versions that predate taskpack copies).
 */
export function getVersionFiles(packDir, versionNumber) {
    const vDir = versionsDir(packDir);
    const flowPath = join(vDir, `${versionNumber}.flow.json`);
    if (!existsSync(flowPath)) {
        throw new Error(`Version ${versionNumber} not found`);
    }
    const flow = readJsonFile(flowPath);
    const taskpackPath = join(vDir, `${versionNumber}.taskpack.json`);
    const taskpack = existsSync(taskpackPath) ? readJsonFile(taskpackPath) : null;
    return { flow, taskpack };
}
|
|
106
|
+
/**
 * Restore a previous version. Auto-saves the current state first.
 *
 * Order matters: the target is verified (manifest record AND on-disk file)
 * BEFORE the auto-save, so a bad version number never pollutes history.
 *
 * @param packDir Pack directory.
 * @param versionNumber Snapshot number to restore (see listVersions()).
 * @throws If the version is not in the manifest or its files are missing.
 */
export function restoreVersion(packDir, versionNumber) {
    // Verify the version exists before auto-saving
    const manifest = readManifest(packDir);
    const target = manifest.versions.find((v) => v.number === versionNumber);
    if (!target) {
        throw new Error(`Version ${versionNumber} not found`);
    }
    const vDir = versionsDir(packDir);
    const versionedFlowPath = join(vDir, `${versionNumber}.flow.json`);
    if (!existsSync(versionedFlowPath)) {
        throw new Error(`Version ${versionNumber} files not found on disk`);
    }
    // Auto-save current state before restoring
    const flowPath = join(packDir, 'flow.json');
    if (existsSync(flowPath)) {
        saveVersion(packDir, {
            label: `Auto-saved before restoring version ${versionNumber}`,
            source: 'dashboard',
        });
    }
    // Restore flow.json
    copyFileSync(versionedFlowPath, flowPath);
    // Restore taskpack.json if the versioned copy exists
    const versionedTaskpackPath = join(vDir, `${versionNumber}.taskpack.json`);
    if (existsSync(versionedTaskpackPath)) {
        const taskpackPath = join(packDir, 'taskpack.json');
        copyFileSync(versionedTaskpackPath, taskpackPath);
    }
}
|