@effing/ffs 0.4.1 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -20
- package/dist/chunk-4N2GLGC5.js +341 -0
- package/dist/chunk-4N2GLGC5.js.map +1 -0
- package/dist/{chunk-JDRYI7SR.js → chunk-7KHGAMSG.js} +89 -74
- package/dist/chunk-7KHGAMSG.js.map +1 -0
- package/dist/{chunk-3SM6XYCZ.js → chunk-O7Z6DV2I.js} +179 -504
- package/dist/chunk-O7Z6DV2I.js.map +1 -0
- package/dist/chunk-PERB3C4S.js +342 -0
- package/dist/handlers/index.d.ts +28 -11
- package/dist/handlers/index.js +2 -2
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -1
- package/dist/{proxy-qTA69nOV.d.ts → proxy-CsZ5h2Ya.d.ts} +3 -3
- package/dist/render-IKGZZOBP.js +8 -0
- package/dist/render-IKGZZOBP.js.map +1 -0
- package/dist/render-MUKKTCF6.js +936 -0
- package/dist/server.js +101 -1333
- package/dist/server.js.map +1 -1
- package/package.json +5 -3
- package/dist/chunk-3SM6XYCZ.js.map +0 -1
- package/dist/chunk-JDRYI7SR.js.map +0 -1
package/README.md
CHANGED
|
@@ -74,12 +74,8 @@ The server uses an internal HTTP proxy for video/audio URLs to ensure reliable D
|
|
|
74
74
|
| `FFS_TRANSIENT_STORE_SECRET_KEY` | S3 secret access key |
|
|
75
75
|
| `FFS_TRANSIENT_STORE_LOCAL_DIR` | Local storage directory (when not using S3) |
|
|
76
76
|
| `FFS_SOURCE_CACHE_TTL_MS` | TTL for cached sources in ms (default: 60 min) |
|
|
77
|
-
| `
|
|
77
|
+
| `FFS_JOB_DATA_TTL_MS` | TTL for job data in ms (default: 8 hours) |
|
|
78
78
|
| `FFS_WARMUP_CONCURRENCY` | Concurrent source fetches during warmup (default: 4) |
|
|
79
|
-
| `FFS_WARMUP_BACKEND_BASE_URL` | Separate backend for warmup (see Backend Separation) |
|
|
80
|
-
| `FFS_RENDER_BACKEND_BASE_URL` | Separate backend for render (see Backend Separation) |
|
|
81
|
-
| `FFS_WARMUP_BACKEND_API_KEY` | API key for authenticating to the warmup backend |
|
|
82
|
-
| `FFS_RENDER_BACKEND_API_KEY` | API key for authenticating to the render backend |
|
|
83
79
|
|
|
84
80
|
When `FFS_TRANSIENT_STORE_BUCKET` is not set, FFS uses the local filesystem for storage (default: system temp directory). Local files are automatically cleaned up after the TTL expires.
|
|
85
81
|
|
|
@@ -343,27 +339,47 @@ events.addEventListener("render:complete", (e) => {
|
|
|
343
339
|
|
|
344
340
|
## Backend Separation
|
|
345
341
|
|
|
346
|
-
FFS supports running warmup and render on separate backends
|
|
342
|
+
FFS supports running warmup and render on separate backends via resolver callbacks.
|
|
343
|
+
When backends are configured, the transient storage must be shared between services (e.g., using S3).
|
|
347
344
|
|
|
348
|
-
|
|
345
|
+
### Setup
|
|
349
346
|
|
|
350
|
-
|
|
351
|
-
- `FFS_RENDER_BACKEND_BASE_URL` — Base URL for render backend (e.g., `https://render.your.app`)
|
|
347
|
+
Pass resolvers to `createServerContext`:
|
|
352
348
|
|
|
353
|
-
|
|
349
|
+
```typescript
|
|
350
|
+
import { createServerContext } from "@effing/ffs/handlers";
|
|
351
|
+
import type {
|
|
352
|
+
RenderBackendResolver,
|
|
353
|
+
WarmupBackendResolver,
|
|
354
|
+
} from "@effing/ffs/handlers";
|
|
355
|
+
|
|
356
|
+
const renderBackendResolver: RenderBackendResolver = (effie, metadata) => ({
|
|
357
|
+
baseUrl: "https://render.your.app",
|
|
358
|
+
apiKey: "secret",
|
|
359
|
+
});
|
|
360
|
+
|
|
361
|
+
const warmupBackendResolver: WarmupBackendResolver = (sources, metadata) => ({
|
|
362
|
+
baseUrl: "https://warmup.your.app",
|
|
363
|
+
apiKey: "secret",
|
|
364
|
+
});
|
|
365
|
+
|
|
366
|
+
const ctx = await createServerContext({
|
|
367
|
+
renderBackendResolver,
|
|
368
|
+
warmupBackendResolver,
|
|
369
|
+
});
|
|
370
|
+
```
|
|
371
|
+
|
|
372
|
+
The render resolver receives the effie data; the warmup resolver receives the source list.
|
|
373
|
+
Both receive optional metadata (passed via handler options). Return `null` to handle locally.
|
|
354
374
|
|
|
355
|
-
|
|
356
|
-
| ---------------------------- | ---------------------------------------------------- |
|
|
357
|
-
| `POST /warmup` | Returns URL pointing to local server (orchestrator) |
|
|
358
|
-
| `GET /warmup/:id` | Proxies SSE from warmup backend |
|
|
359
|
-
| `POST /render` | Returns URL pointing to local server (orchestrator) |
|
|
360
|
-
| `GET /render/:id` | Proxies from render backend (SSE or video stream) |
|
|
361
|
-
| `POST /warmup-and-render` | Returns URL pointing to local server (orchestrator) |
|
|
362
|
-
| `GET /warmup-and-render/:id` | Proxies SSE from warmup backend, then render backend |
|
|
375
|
+
### Job metadata
|
|
363
376
|
|
|
364
|
-
|
|
377
|
+
Pass server-side metadata to be stored with the job and forwarded to the resolver:
|
|
365
378
|
|
|
366
|
-
|
|
379
|
+
```typescript
|
|
380
|
+
createRenderJob(req, res, ctx, { metadata: { tenantId: "abc" } });
|
|
381
|
+
createWarmupJob(req, res, ctx, { metadata: { tenantId: "abc" } });
|
|
382
|
+
```
|
|
367
383
|
|
|
368
384
|
## Examples
|
|
369
385
|
|
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
// src/fetch.ts
|
|
2
|
+
import { fetch, Agent } from "undici";
|
|
3
|
+
// Reuse one undici Agent per (headersTimeout, bodyTimeout) pair. Agents own
// connection pools and are never closed by this module, so allocating a fresh
// Agent on every call would leak sockets under sustained load.
var ffsAgentCache = /* @__PURE__ */ new Map();
function getFfsAgent(headersTimeout, bodyTimeout) {
  const cacheKey = `${headersTimeout}:${bodyTimeout}`;
  let agent = ffsAgentCache.get(cacheKey);
  if (!agent) {
    agent = new Agent({ headersTimeout, bodyTimeout });
    ffsAgentCache.set(cacheKey, agent);
  }
  return agent;
}
/**
 * Fetch with a default FFS User-Agent and configurable undici timeouts.
 *
 * @param url URL to fetch.
 * @param options Optional { method, body, headers, headersTimeout, bodyTimeout }.
 *   Both timeouts default to 300000 ms (5 minutes). Caller-supplied headers
 *   are spread after the default, so they may override the User-Agent.
 * @returns Promise resolving to the undici Response.
 */
async function ffsFetch(url, options) {
  const {
    method,
    body,
    headers,
    headersTimeout = 3e5,
    // 5 minutes
    bodyTimeout = 3e5
    // 5 minutes
  } = options ?? {};
  return fetch(url, {
    method,
    body,
    headers: { "User-Agent": "FFS (+https://effing.dev/ffs)", ...headers },
    // Shared, cached dispatcher instead of a new Agent per request.
    dispatcher: getFfsAgent(headersTimeout, bodyTimeout)
  });
}
|
|
21
|
+
|
|
22
|
+
// src/storage.ts
|
|
23
|
+
import {
|
|
24
|
+
S3Client,
|
|
25
|
+
PutObjectCommand,
|
|
26
|
+
GetObjectCommand,
|
|
27
|
+
HeadObjectCommand,
|
|
28
|
+
DeleteObjectCommand
|
|
29
|
+
} from "@aws-sdk/client-s3";
|
|
30
|
+
import { Upload } from "@aws-sdk/lib-storage";
|
|
31
|
+
import fs from "fs/promises";
|
|
32
|
+
import { createReadStream, createWriteStream, existsSync } from "fs";
|
|
33
|
+
import { pipeline } from "stream/promises";
|
|
34
|
+
import path from "path";
|
|
35
|
+
import os from "os";
|
|
36
|
+
import crypto from "crypto";
|
|
37
|
+
// Default TTL for cached sources: 60 minutes.
var DEFAULT_SOURCE_TTL_MS = 60 * 60 * 1e3;
// Default TTL for ephemeral job data: 8 hours.
var DEFAULT_JOB_DATA_TTL_MS = 8 * 60 * 60 * 1e3;
|
|
39
|
+
var S3TransientStore = class {
  client;
  bucket;
  prefix;
  sourceTtlMs;
  jobDataTtlMs;
  /**
   * S3-compatible transient store. TTLs are advisory here: objects are
   * written with an HTTP `Expires` date, and actual removal presumably relies
   * on bucket lifecycle configuration — NOTE(review): confirm lifecycle rules
   * exist, since S3 does not auto-delete on `Expires` alone.
   */
  constructor(options) {
    const credentials = options.accessKeyId ? {
      accessKeyId: options.accessKeyId,
      secretAccessKey: options.secretAccessKey
    } : void 0;
    this.client = new S3Client({
      endpoint: options.endpoint,
      region: options.region ?? "auto",
      credentials,
      // Custom endpoints (MinIO, R2, ...) generally need path-style addressing.
      forcePathStyle: !!options.endpoint
    });
    this.bucket = options.bucket;
    this.prefix = options.prefix ?? "";
    this.sourceTtlMs = options.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
    this.jobDataTtlMs = options.jobDataTtlMs ?? DEFAULT_JOB_DATA_TTL_MS;
  }
  // Absolute expiry timestamp for a relative TTL.
  getExpires(ttlMs) {
    return new Date(Date.now() + ttlMs);
  }
  // All keys live under the configured prefix.
  getFullKey(key) {
    return this.prefix + key;
  }
  // True when an AWS SDK error means "object does not exist".
  isMissingError(err, missingName) {
    const error = err;
    return error.name === missingName || error.$metadata?.httpStatusCode === 404;
  }
  async put(key, stream, ttlMs) {
    // Upload (multipart-capable) handles streams of unknown length.
    const upload = new Upload({
      client: this.client,
      params: {
        Bucket: this.bucket,
        Key: this.getFullKey(key),
        Body: stream,
        Expires: this.getExpires(ttlMs ?? this.sourceTtlMs)
      }
    });
    await upload.done();
  }
  // Returns the object body stream, or null when the key is absent.
  async getStream(key) {
    try {
      const { Body } = await this.client.send(
        new GetObjectCommand({
          Bucket: this.bucket,
          Key: this.getFullKey(key)
        })
      );
      return Body;
    } catch (err) {
      if (this.isMissingError(err, "NoSuchKey")) {
        return null;
      }
      throw err;
    }
  }
  // HEAD-based existence probe.
  async exists(key) {
    try {
      await this.client.send(
        new HeadObjectCommand({
          Bucket: this.bucket,
          Key: this.getFullKey(key)
        })
      );
      return true;
    } catch (err) {
      // HeadObject reports missing keys as "NotFound" rather than "NoSuchKey".
      if (this.isMissingError(err, "NotFound")) {
        return false;
      }
      throw err;
    }
  }
  // Probe all keys concurrently; result maps key -> existence.
  async existsMany(keys) {
    const pairs = await Promise.all(
      keys.map(async (key) => [key, await this.exists(key)])
    );
    return new Map(pairs);
  }
  // Idempotent delete: a missing key is not an error.
  async delete(key) {
    try {
      await this.client.send(
        new DeleteObjectCommand({
          Bucket: this.bucket,
          Key: this.getFullKey(key)
        })
      );
    } catch (err) {
      if (this.isMissingError(err, "NoSuchKey")) {
        return;
      }
      throw err;
    }
  }
  async putJson(key, data, ttlMs) {
    await this.client.send(
      new PutObjectCommand({
        Bucket: this.bucket,
        Key: this.getFullKey(key),
        Body: JSON.stringify(data),
        ContentType: "application/json",
        Expires: this.getExpires(ttlMs ?? this.jobDataTtlMs)
      })
    );
  }
  // Returns the parsed JSON document, or null when absent or empty.
  async getJson(key) {
    try {
      const response = await this.client.send(
        new GetObjectCommand({
          Bucket: this.bucket,
          Key: this.getFullKey(key)
        })
      );
      const body = await response.Body?.transformToString();
      if (!body) return null;
      return JSON.parse(body);
    } catch (err) {
      if (this.isMissingError(err, "NoSuchKey")) {
        return null;
      }
      throw err;
    }
  }
  // Nothing to release: the SDK client holds no timers of its own here.
  close() {
  }
};
|
|
167
|
+
var LocalTransientStore = class {
  baseDir;
  // Set after the first successful mkdir; the sweep is a no-op before then.
  initialized = false;
  cleanupInterval;
  sourceTtlMs;
  jobDataTtlMs;
  /** For cleanup, use the longer of the two TTLs */
  maxTtlMs;
  /**
   * Filesystem-backed transient store. Per-key TTL overrides are accepted for
   * interface parity with the S3 store but ignored; expiry is enforced by a
   * periodic sweep that removes files older than the max TTL (by mtime).
   */
  constructor(options) {
    this.baseDir = options?.baseDir ?? path.join(os.tmpdir(), "ffs-transient");
    this.sourceTtlMs = options?.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
    this.jobDataTtlMs = options?.jobDataTtlMs ?? DEFAULT_JOB_DATA_TTL_MS;
    this.maxTtlMs = Math.max(this.sourceTtlMs, this.jobDataTtlMs);
    // Sweep every 5 minutes.
    this.cleanupInterval = setInterval(() => {
      this.cleanupExpired().catch(console.error);
    }, 3e5);
    // Fix: don't let the background sweep keep the Node process alive once
    // all other work is done. Optional call guards timer implementations
    // that lack unref (browsers, some test fakes).
    this.cleanupInterval.unref?.();
  }
  /**
   * Remove files older than max TTL
   */
  async cleanupExpired() {
    if (!this.initialized) return;
    const now = Date.now();
    await this.cleanupDir(this.baseDir, now);
  }
  // Depth-first sweep: delete stale files, then try to prune emptied dirs.
  async cleanupDir(dir, now) {
    let entries;
    try {
      entries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
      // Directory vanished or is unreadable — nothing to sweep.
      return;
    }
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        await this.cleanupDir(fullPath, now);
        try {
          // Only succeeds once the directory is empty; otherwise ignored.
          await fs.rmdir(fullPath);
        } catch {
        }
      } else if (entry.isFile()) {
        try {
          const stat = await fs.stat(fullPath);
          if (now - stat.mtimeMs > this.maxTtlMs) {
            await fs.rm(fullPath, { force: true });
          }
        } catch {
          // Raced with a concurrent delete — ignore.
        }
      }
    }
  }
  // Ensure the parent directory of a target file exists.
  async ensureDir(filePath) {
    await fs.mkdir(path.dirname(filePath), { recursive: true });
    this.initialized = true;
  }
  filePath(key) {
    return path.join(this.baseDir, key);
  }
  // Unique sibling temp path so concurrent writers never collide.
  tmpPathFor(finalPath) {
    const rand = crypto.randomBytes(8).toString("hex");
    return `${finalPath}.tmp-${process.pid}-${rand}`;
  }
  // Write-to-temp + rename so readers never observe a partial file.
  async put(key, stream, _ttlMs) {
    const fp = this.filePath(key);
    await this.ensureDir(fp);
    const tmpPath = this.tmpPathFor(fp);
    try {
      const writeStream = createWriteStream(tmpPath);
      await pipeline(stream, writeStream);
      await fs.rename(tmpPath, fp);
    } catch (err) {
      await fs.rm(tmpPath, { force: true }).catch(() => {
      });
      throw err;
    }
  }
  // Returns a read stream, or null when the key is absent.
  async getStream(key) {
    const fp = this.filePath(key);
    if (!existsSync(fp)) return null;
    return createReadStream(fp);
  }
  async exists(key) {
    try {
      await fs.access(this.filePath(key));
      return true;
    } catch {
      return false;
    }
  }
  // Probe all keys concurrently; result maps key -> existence.
  async existsMany(keys) {
    const results = await Promise.all(
      keys.map(async (key) => [key, await this.exists(key)])
    );
    return new Map(results);
  }
  // Idempotent: `force` suppresses missing-file errors.
  async delete(key) {
    await fs.rm(this.filePath(key), { force: true });
  }
  // Atomic JSON write via temp file + rename.
  async putJson(key, data, _ttlMs) {
    const fp = this.filePath(key);
    await this.ensureDir(fp);
    const tmpPath = this.tmpPathFor(fp);
    try {
      await fs.writeFile(tmpPath, JSON.stringify(data));
      await fs.rename(tmpPath, fp);
    } catch (err) {
      await fs.rm(tmpPath, { force: true }).catch(() => {
      });
      throw err;
    }
  }
  // Returns null for a missing or unparseable document (best-effort cache).
  async getJson(key) {
    try {
      const content = await fs.readFile(this.filePath(key), "utf-8");
      return JSON.parse(content);
    } catch {
      return null;
    }
  }
  // Stop the background sweep; safe to call more than once.
  close() {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = void 0;
    }
  }
};
|
|
293
|
+
// Parse a millisecond TTL from an environment variable. Falls back to the
// default when the variable is unset or not a finite number — previously a
// malformed value produced NaN and silently broke every expiry computation.
function parseTtlEnv(raw, fallback) {
  if (!raw) return fallback;
  const parsed = parseInt(raw, 10);
  return Number.isFinite(parsed) ? parsed : fallback;
}
/**
 * Build the transient store from FFS_* environment variables.
 *
 * Uses S3 when FFS_TRANSIENT_STORE_BUCKET is set; otherwise falls back to
 * the local filesystem store (default: system temp directory).
 */
function createTransientStore() {
  const sourceTtlMs = parseTtlEnv(
    process.env.FFS_SOURCE_CACHE_TTL_MS,
    DEFAULT_SOURCE_TTL_MS
  );
  const jobDataTtlMs = parseTtlEnv(
    process.env.FFS_JOB_DATA_TTL_MS,
    DEFAULT_JOB_DATA_TTL_MS
  );
  if (process.env.FFS_TRANSIENT_STORE_BUCKET) {
    return new S3TransientStore({
      endpoint: process.env.FFS_TRANSIENT_STORE_ENDPOINT,
      region: process.env.FFS_TRANSIENT_STORE_REGION ?? "auto",
      bucket: process.env.FFS_TRANSIENT_STORE_BUCKET,
      prefix: process.env.FFS_TRANSIENT_STORE_PREFIX,
      accessKeyId: process.env.FFS_TRANSIENT_STORE_ACCESS_KEY,
      secretAccessKey: process.env.FFS_TRANSIENT_STORE_SECRET_KEY,
      sourceTtlMs,
      jobDataTtlMs
    });
  }
  return new LocalTransientStore({
    baseDir: process.env.FFS_TRANSIENT_STORE_LOCAL_DIR,
    sourceTtlMs,
    jobDataTtlMs
  });
}
|
|
314
|
+
// Derive a short, stable identifier for a source URL: the first 16 hex
// characters of its SHA-256 digest.
function hashUrl(url) {
  const digest = crypto.createHash("sha256").update(url).digest("hex");
  return digest.slice(0, 16);
}
// Transient-store key for a cached source, namespaced by URL hash.
function sourceStoreKey(url) {
  return "sources/" + hashUrl(url);
}
// Transient-store key for a warmup job's JSON document.
function warmupJobStoreKey(jobId) {
  return "jobs/warmup/" + jobId + ".json";
}
// Transient-store key for a render job's JSON document.
function renderJobStoreKey(jobId) {
  return "jobs/render/" + jobId + ".json";
}
// Transient-store key for a combined warmup-and-render job's JSON document.
function warmupAndRenderJobStoreKey(jobId) {
  return "jobs/warmup-and-render/" + jobId + ".json";
}
// Single lookup table so callers address keys by role rather than by name.
var storeKeys = {
  source: sourceStoreKey,
  warmupJob: warmupJobStoreKey,
  renderJob: renderJobStoreKey,
  warmupAndRenderJob: warmupAndRenderJobStoreKey
};
|
|
335
|
+
|
|
336
|
+
export {
|
|
337
|
+
ffsFetch,
|
|
338
|
+
createTransientStore,
|
|
339
|
+
storeKeys
|
|
340
|
+
};
|
|
341
|
+
//# sourceMappingURL=chunk-4N2GLGC5.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/fetch.ts","../src/storage.ts"],"sourcesContent":["import { fetch, Agent, type Response, type BodyInit } from \"undici\";\n\n/**\n * Options for ffsFetch function\n */\nexport type FfsFetchOptions = {\n /** HTTP method */\n method?: \"GET\" | \"POST\" | \"PUT\" | \"DELETE\" | \"PATCH\" | \"HEAD\" | \"OPTIONS\";\n /** Request body */\n body?: BodyInit;\n /** Headers to send (merged with default User-Agent) */\n headers?: Record<string, string>;\n /** Timeout for receiving response headers in ms. @default 300000 (5 min) */\n headersTimeout?: number;\n /** Timeout between body data chunks in ms. 0 = no timeout. @default 300000 (5 min) */\n bodyTimeout?: number;\n};\n\n/**\n * Fetch with default User-Agent and configurable timeouts.\n *\n * @example\n * // Simple GET\n * const response = await ffsFetch(\"https://example.com/data.json\");\n *\n * @example\n * // Large file with infinite body timeout\n * const response = await ffsFetch(\"https://example.com/video.mp4\", {\n * bodyTimeout: 0,\n * });\n *\n * @example\n * // PUT upload\n * const response = await ffsFetch(\"https://s3.example.com/video.mp4\", {\n * method: \"PUT\",\n * body: videoBuffer,\n * bodyTimeout: 0,\n * headers: { \"Content-Type\": \"video/mp4\" },\n * });\n */\nexport async function ffsFetch(\n url: string,\n options?: FfsFetchOptions,\n): Promise<Response> {\n const {\n method,\n body,\n headers,\n headersTimeout = 300000, // 5 minutes\n bodyTimeout = 300000, // 5 minutes\n } = options ?? 
{};\n\n const agent = new Agent({ headersTimeout, bodyTimeout });\n\n return fetch(url, {\n method,\n body,\n headers: { \"User-Agent\": \"FFS (+https://effing.dev/ffs)\", ...headers },\n dispatcher: agent,\n });\n}\n","import {\n S3Client,\n PutObjectCommand,\n GetObjectCommand,\n HeadObjectCommand,\n DeleteObjectCommand,\n} from \"@aws-sdk/client-s3\";\nimport { Upload } from \"@aws-sdk/lib-storage\";\nimport fs from \"fs/promises\";\nimport { createReadStream, createWriteStream, existsSync } from \"fs\";\nimport { pipeline } from \"stream/promises\";\nimport path from \"path\";\nimport os from \"os\";\nimport crypto from \"crypto\";\nimport type { Readable } from \"stream\";\n\n/** Default TTL for sources: 60 minutes */\nconst DEFAULT_SOURCE_TTL_MS = 60 * 60 * 1000;\n/** Default TTL for job data: 8 hours */\nconst DEFAULT_JOB_DATA_TTL_MS = 8 * 60 * 60 * 1000;\n\n/**\n * Transient store interface for caching sources and storing ephemeral job data.\n */\nexport interface TransientStore {\n /** TTL for cached sources in milliseconds */\n readonly sourceTtlMs: number;\n /** TTL for job data in milliseconds */\n readonly jobDataTtlMs: number;\n /** Store a stream with the given key and optional TTL override */\n put(key: string, stream: Readable, ttlMs?: number): Promise<void>;\n /** Get a stream for the given key, or null if not found */\n getStream(key: string): Promise<Readable | null>;\n /** Check if a key exists */\n exists(key: string): Promise<boolean>;\n /** Check if multiple keys exist (batch operation) */\n existsMany(keys: string[]): Promise<Map<string, boolean>>;\n /** Delete a key */\n delete(key: string): Promise<void>;\n /** Store JSON data with optional TTL override */\n putJson(key: string, data: object, ttlMs?: number): Promise<void>;\n /** Get JSON data, or null if not found */\n getJson<T>(key: string): Promise<T | null>;\n /** Close and cleanup resources */\n close(): void;\n}\n\n/**\n * S3-compatible transient store implementation\n */\nexport 
class S3TransientStore implements TransientStore {\n private client: S3Client;\n private bucket: string;\n private prefix: string;\n public readonly sourceTtlMs: number;\n public readonly jobDataTtlMs: number;\n\n constructor(options: {\n endpoint?: string;\n region?: string;\n bucket: string;\n prefix?: string;\n accessKeyId?: string;\n secretAccessKey?: string;\n sourceTtlMs?: number;\n jobDataTtlMs?: number;\n }) {\n this.client = new S3Client({\n endpoint: options.endpoint,\n region: options.region ?? \"auto\",\n credentials: options.accessKeyId\n ? {\n accessKeyId: options.accessKeyId,\n secretAccessKey: options.secretAccessKey!,\n }\n : undefined,\n forcePathStyle: !!options.endpoint,\n });\n this.bucket = options.bucket;\n this.prefix = options.prefix ?? \"\";\n this.sourceTtlMs = options.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;\n this.jobDataTtlMs = options.jobDataTtlMs ?? DEFAULT_JOB_DATA_TTL_MS;\n }\n\n private getExpires(ttlMs: number): Date {\n return new Date(Date.now() + ttlMs);\n }\n\n private getFullKey(key: string): string {\n return `${this.prefix}${key}`;\n }\n\n async put(key: string, stream: Readable, ttlMs?: number): Promise<void> {\n const upload = new Upload({\n client: this.client,\n params: {\n Bucket: this.bucket,\n Key: this.getFullKey(key),\n Body: stream,\n Expires: this.getExpires(ttlMs ?? 
this.sourceTtlMs),\n },\n });\n await upload.done();\n }\n\n async getStream(key: string): Promise<Readable | null> {\n try {\n const response = await this.client.send(\n new GetObjectCommand({\n Bucket: this.bucket,\n Key: this.getFullKey(key),\n }),\n );\n return response.Body as Readable;\n } catch (err: unknown) {\n const error = err as {\n name?: string;\n $metadata?: { httpStatusCode?: number };\n };\n if (\n error.name === \"NoSuchKey\" ||\n error.$metadata?.httpStatusCode === 404\n ) {\n return null;\n }\n throw err;\n }\n }\n\n async exists(key: string): Promise<boolean> {\n try {\n await this.client.send(\n new HeadObjectCommand({\n Bucket: this.bucket,\n Key: this.getFullKey(key),\n }),\n );\n return true;\n } catch (err: unknown) {\n const error = err as {\n name?: string;\n $metadata?: { httpStatusCode?: number };\n };\n if (\n error.name === \"NotFound\" ||\n error.$metadata?.httpStatusCode === 404\n ) {\n return false;\n }\n throw err;\n }\n }\n\n async existsMany(keys: string[]): Promise<Map<string, boolean>> {\n const results = await Promise.all(\n keys.map(async (key) => [key, await this.exists(key)] as const),\n );\n return new Map(results);\n }\n\n async delete(key: string): Promise<void> {\n try {\n await this.client.send(\n new DeleteObjectCommand({\n Bucket: this.bucket,\n Key: this.getFullKey(key),\n }),\n );\n } catch (err: unknown) {\n const error = err as {\n name?: string;\n $metadata?: { httpStatusCode?: number };\n };\n if (\n error.name === \"NoSuchKey\" ||\n error.$metadata?.httpStatusCode === 404\n ) {\n return;\n }\n throw err;\n }\n }\n\n async putJson(key: string, data: object, ttlMs?: number): Promise<void> {\n await this.client.send(\n new PutObjectCommand({\n Bucket: this.bucket,\n Key: this.getFullKey(key),\n Body: JSON.stringify(data),\n ContentType: \"application/json\",\n Expires: this.getExpires(ttlMs ?? 
this.jobDataTtlMs),\n }),\n );\n }\n\n async getJson<T>(key: string): Promise<T | null> {\n try {\n const response = await this.client.send(\n new GetObjectCommand({\n Bucket: this.bucket,\n Key: this.getFullKey(key),\n }),\n );\n const body = await response.Body?.transformToString();\n if (!body) return null;\n return JSON.parse(body) as T;\n } catch (err: unknown) {\n const error = err as {\n name?: string;\n $metadata?: { httpStatusCode?: number };\n };\n if (\n error.name === \"NoSuchKey\" ||\n error.$metadata?.httpStatusCode === 404\n ) {\n return null;\n }\n throw err;\n }\n }\n\n close(): void {\n // nothing to do here\n }\n}\n\n/**\n * Local filesystem transient store implementation\n */\nexport class LocalTransientStore implements TransientStore {\n private baseDir: string;\n private initialized = false;\n private cleanupInterval?: ReturnType<typeof setInterval>;\n public readonly sourceTtlMs: number;\n public readonly jobDataTtlMs: number;\n /** For cleanup, use the longer of the two TTLs */\n private maxTtlMs: number;\n\n constructor(options?: {\n baseDir?: string;\n sourceTtlMs?: number;\n jobDataTtlMs?: number;\n }) {\n this.baseDir = options?.baseDir ?? path.join(os.tmpdir(), \"ffs-transient\");\n this.sourceTtlMs = options?.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;\n this.jobDataTtlMs = options?.jobDataTtlMs ?? 
DEFAULT_JOB_DATA_TTL_MS;\n this.maxTtlMs = Math.max(this.sourceTtlMs, this.jobDataTtlMs);\n\n // Cleanup expired files every 5 minutes\n this.cleanupInterval = setInterval(() => {\n this.cleanupExpired().catch(console.error);\n }, 300_000);\n }\n\n /**\n * Remove files older than max TTL\n */\n public async cleanupExpired(): Promise<void> {\n if (!this.initialized) return;\n\n const now = Date.now();\n await this.cleanupDir(this.baseDir, now);\n }\n\n private async cleanupDir(dir: string, now: number): Promise<void> {\n let entries;\n try {\n entries = await fs.readdir(dir, { withFileTypes: true });\n } catch {\n return; // Directory doesn't exist or can't be read\n }\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n\n if (entry.isDirectory()) {\n await this.cleanupDir(fullPath, now);\n // Remove empty directories\n try {\n await fs.rmdir(fullPath);\n } catch {\n // Directory not empty or other error, ignore\n }\n } else if (entry.isFile()) {\n try {\n const stat = await fs.stat(fullPath);\n if (now - stat.mtimeMs > this.maxTtlMs) {\n await fs.rm(fullPath, { force: true });\n }\n } catch {\n // File may have been deleted, ignore\n }\n }\n }\n }\n\n private async ensureDir(filePath: string): Promise<void> {\n await fs.mkdir(path.dirname(filePath), { recursive: true });\n this.initialized = true;\n }\n\n private filePath(key: string): string {\n return path.join(this.baseDir, key);\n }\n\n private tmpPathFor(finalPath: string): string {\n const rand = crypto.randomBytes(8).toString(\"hex\");\n // Keep tmp file in the same directory so rename stays atomic on POSIX filesystems.\n return `${finalPath}.tmp-${process.pid}-${rand}`;\n }\n\n async put(key: string, stream: Readable, _ttlMs?: number): Promise<void> {\n // Note: TTL is not used for local storage; cleanup uses file mtime\n const fp = this.filePath(key);\n await this.ensureDir(fp);\n\n // Write to temp file, then rename for atomicity (no partial reads).\n const tmpPath = 
this.tmpPathFor(fp);\n try {\n const writeStream = createWriteStream(tmpPath);\n await pipeline(stream, writeStream);\n await fs.rename(tmpPath, fp);\n } catch (err) {\n await fs.rm(tmpPath, { force: true }).catch(() => {});\n throw err;\n }\n }\n\n async getStream(key: string): Promise<Readable | null> {\n const fp = this.filePath(key);\n if (!existsSync(fp)) return null;\n return createReadStream(fp);\n }\n\n async exists(key: string): Promise<boolean> {\n try {\n await fs.access(this.filePath(key));\n return true;\n } catch {\n return false;\n }\n }\n\n async existsMany(keys: string[]): Promise<Map<string, boolean>> {\n const results = await Promise.all(\n keys.map(async (key) => [key, await this.exists(key)] as const),\n );\n return new Map(results);\n }\n\n async delete(key: string): Promise<void> {\n await fs.rm(this.filePath(key), { force: true });\n }\n\n async putJson(key: string, data: object, _ttlMs?: number): Promise<void> {\n // Note: TTL is not used for local storage; cleanup uses file mtime\n const fp = this.filePath(key);\n await this.ensureDir(fp);\n\n // Write to temp file, then rename for atomicity (no partial reads).\n const tmpPath = this.tmpPathFor(fp);\n try {\n await fs.writeFile(tmpPath, JSON.stringify(data));\n await fs.rename(tmpPath, fp);\n } catch (err) {\n await fs.rm(tmpPath, { force: true }).catch(() => {});\n throw err;\n }\n }\n\n async getJson<T>(key: string): Promise<T | null> {\n try {\n const content = await fs.readFile(this.filePath(key), \"utf-8\");\n return JSON.parse(content) as T;\n } catch {\n return null;\n }\n }\n\n close(): void {\n // Stop the cleanup interval\n if (this.cleanupInterval) {\n clearInterval(this.cleanupInterval);\n this.cleanupInterval = undefined;\n }\n }\n}\n\n/**\n * Create a transient store instance based on environment variables.\n * Uses S3 if FFS_TRANSIENT_STORE_BUCKET is set, otherwise uses local filesystem.\n */\nexport function createTransientStore(): TransientStore {\n // Parse TTLs from 
env\n const sourceTtlMs = process.env.FFS_SOURCE_CACHE_TTL_MS\n ? parseInt(process.env.FFS_SOURCE_CACHE_TTL_MS, 10)\n : DEFAULT_SOURCE_TTL_MS;\n const jobDataTtlMs = process.env.FFS_JOB_DATA_TTL_MS\n ? parseInt(process.env.FFS_JOB_DATA_TTL_MS, 10)\n : DEFAULT_JOB_DATA_TTL_MS;\n\n if (process.env.FFS_TRANSIENT_STORE_BUCKET) {\n return new S3TransientStore({\n endpoint: process.env.FFS_TRANSIENT_STORE_ENDPOINT,\n region: process.env.FFS_TRANSIENT_STORE_REGION ?? \"auto\",\n bucket: process.env.FFS_TRANSIENT_STORE_BUCKET,\n prefix: process.env.FFS_TRANSIENT_STORE_PREFIX,\n accessKeyId: process.env.FFS_TRANSIENT_STORE_ACCESS_KEY,\n secretAccessKey: process.env.FFS_TRANSIENT_STORE_SECRET_KEY,\n sourceTtlMs,\n jobDataTtlMs,\n });\n }\n\n return new LocalTransientStore({\n baseDir: process.env.FFS_TRANSIENT_STORE_LOCAL_DIR,\n sourceTtlMs,\n jobDataTtlMs,\n });\n}\n\nexport function hashUrl(url: string): string {\n return crypto.createHash(\"sha256\").update(url).digest(\"hex\").slice(0, 16);\n}\n\nexport type SourceStoreKey = `sources/${string}`;\nexport type WarmupJobStoreKey = `jobs/warmup/${string}.json`;\nexport type RenderJobStoreKey = `jobs/render/${string}.json`;\nexport type WarmupAndRenderJobStoreKey =\n `jobs/warmup-and-render/${string}.json`;\n\n/**\n * Build the store key for a source URL (hashing is handled internally).\n */\nexport function sourceStoreKey(url: string): SourceStoreKey {\n return `sources/${hashUrl(url)}`;\n}\n\nexport function warmupJobStoreKey(jobId: string): WarmupJobStoreKey {\n return `jobs/warmup/${jobId}.json`;\n}\n\nexport function renderJobStoreKey(jobId: string): RenderJobStoreKey {\n return `jobs/render/${jobId}.json`;\n}\n\nexport function warmupAndRenderJobStoreKey(\n jobId: string,\n): WarmupAndRenderJobStoreKey {\n return `jobs/warmup-and-render/${jobId}.json`;\n}\n\n/**\n * Centralized store key builders for known namespaces.\n * Prefer using these helpers over manual string interpolation.\n */\nexport const storeKeys = {\n 
source: sourceStoreKey,\n warmupJob: warmupJobStoreKey,\n renderJob: renderJobStoreKey,\n warmupAndRenderJob: warmupAndRenderJobStoreKey,\n} as const;\n"],"mappings":";AAAA,SAAS,OAAO,aAA2C;AAwC3D,eAAsB,SACpB,KACA,SACmB;AACnB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,iBAAiB;AAAA;AAAA,IACjB,cAAc;AAAA;AAAA,EAChB,IAAI,WAAW,CAAC;AAEhB,QAAM,QAAQ,IAAI,MAAM,EAAE,gBAAgB,YAAY,CAAC;AAEvD,SAAO,MAAM,KAAK;AAAA,IAChB;AAAA,IACA;AAAA,IACA,SAAS,EAAE,cAAc,iCAAiC,GAAG,QAAQ;AAAA,IACrE,YAAY;AAAA,EACd,CAAC;AACH;;;AC5DA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,cAAc;AACvB,OAAO,QAAQ;AACf,SAAS,kBAAkB,mBAAmB,kBAAkB;AAChE,SAAS,gBAAgB;AACzB,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,OAAO,YAAY;AAInB,IAAM,wBAAwB,KAAK,KAAK;AAExC,IAAM,0BAA0B,IAAI,KAAK,KAAK;AA+BvC,IAAM,mBAAN,MAAiD;AAAA,EAC9C;AAAA,EACA;AAAA,EACA;AAAA,EACQ;AAAA,EACA;AAAA,EAEhB,YAAY,SAST;AACD,SAAK,SAAS,IAAI,SAAS;AAAA,MACzB,UAAU,QAAQ;AAAA,MAClB,QAAQ,QAAQ,UAAU;AAAA,MAC1B,aAAa,QAAQ,cACjB;AAAA,QACE,aAAa,QAAQ;AAAA,QACrB,iBAAiB,QAAQ;AAAA,MAC3B,IACA;AAAA,MACJ,gBAAgB,CAAC,CAAC,QAAQ;AAAA,IAC5B,CAAC;AACD,SAAK,SAAS,QAAQ;AACtB,SAAK,SAAS,QAAQ,UAAU;AAChC,SAAK,cAAc,QAAQ,eAAe;AAC1C,SAAK,eAAe,QAAQ,gBAAgB;AAAA,EAC9C;AAAA,EAEQ,WAAW,OAAqB;AACtC,WAAO,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK;AAAA,EACpC;AAAA,EAEQ,WAAW,KAAqB;AACtC,WAAO,GAAG,KAAK,MAAM,GAAG,GAAG;AAAA,EAC7B;AAAA,EAEA,MAAM,IAAI,KAAa,QAAkB,OAA+B;AACtE,UAAM,SAAS,IAAI,OAAO;AAAA,MACxB,QAAQ,KAAK;AAAA,MACb,QAAQ;AAAA,QACN,QAAQ,KAAK;AAAA,QACb,KAAK,KAAK,WAAW,GAAG;AAAA,QACxB,MAAM;AAAA,QACN,SAAS,KAAK,WAAW,SAAS,KAAK,WAAW;AAAA,MACpD;AAAA,IACF,CAAC;AACD,UAAM,OAAO,KAAK;AAAA,EACpB;AAAA,EAEA,MAAM,UAAU,KAAuC;AACrD,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,OAAO;AAAA,QACjC,IAAI,iBAAiB;AAAA,UACnB,QAAQ,KAAK;AAAA,UACb,KAAK,KAAK,WAAW,GAAG;AAAA,QAC1B,CAAC;AAAA,MACH;AACA,aAAO,SAAS;AAAA,IAClB,SAAS,KAAc;AACrB,YAAM,QAAQ;AAId,UACE,MAAM,SAAS,eACf,MAAM,WAAW,mBAAmB,KACpC;AACA,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,KAA+B;AAC1C,QAAI;AACF,YAAM,KAAK,OAAO;AAAA,QAChB,IAAI,kBAAkB;AAAA,UACpB,QAAQ,KAAK;AAAA,UACb,KAA
K,KAAK,WAAW,GAAG;AAAA,QAC1B,CAAC;AAAA,MACH;AACA,aAAO;AAAA,IACT,SAAS,KAAc;AACrB,YAAM,QAAQ;AAId,UACE,MAAM,SAAS,cACf,MAAM,WAAW,mBAAmB,KACpC;AACA,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,WAAW,MAA+C;AAC9D,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,KAAK,IAAI,OAAO,QAAQ,CAAC,KAAK,MAAM,KAAK,OAAO,GAAG,CAAC,CAAU;AAAA,IAChE;AACA,WAAO,IAAI,IAAI,OAAO;AAAA,EACxB;AAAA,EAEA,MAAM,OAAO,KAA4B;AACvC,QAAI;AACF,YAAM,KAAK,OAAO;AAAA,QAChB,IAAI,oBAAoB;AAAA,UACtB,QAAQ,KAAK;AAAA,UACb,KAAK,KAAK,WAAW,GAAG;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA,IACF,SAAS,KAAc;AACrB,YAAM,QAAQ;AAId,UACE,MAAM,SAAS,eACf,MAAM,WAAW,mBAAmB,KACpC;AACA;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,QAAQ,KAAa,MAAc,OAA+B;AACtE,UAAM,KAAK,OAAO;AAAA,MAChB,IAAI,iBAAiB;AAAA,QACnB,QAAQ,KAAK;AAAA,QACb,KAAK,KAAK,WAAW,GAAG;AAAA,QACxB,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,aAAa;AAAA,QACb,SAAS,KAAK,WAAW,SAAS,KAAK,YAAY;AAAA,MACrD,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM,QAAW,KAAgC;AAC/C,QAAI;AACF,YAAM,WAAW,MAAM,KAAK,OAAO;AAAA,QACjC,IAAI,iBAAiB;AAAA,UACnB,QAAQ,KAAK;AAAA,UACb,KAAK,KAAK,WAAW,GAAG;AAAA,QAC1B,CAAC;AAAA,MACH;AACA,YAAM,OAAO,MAAM,SAAS,MAAM,kBAAkB;AACpD,UAAI,CAAC,KAAM,QAAO;AAClB,aAAO,KAAK,MAAM,IAAI;AAAA,IACxB,SAAS,KAAc;AACrB,YAAM,QAAQ;AAId,UACE,MAAM,SAAS,eACf,MAAM,WAAW,mBAAmB,KACpC;AACA,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,QAAc;AAAA,EAEd;AACF;AAKO,IAAM,sBAAN,MAAoD;AAAA,EACjD;AAAA,EACA,cAAc;AAAA,EACd;AAAA,EACQ;AAAA,EACA;AAAA;AAAA,EAER;AAAA,EAER,YAAY,SAIT;AACD,SAAK,UAAU,SAAS,WAAW,KAAK,KAAK,GAAG,OAAO,GAAG,eAAe;AACzE,SAAK,cAAc,SAAS,eAAe;AAC3C,SAAK,eAAe,SAAS,gBAAgB;AAC7C,SAAK,WAAW,KAAK,IAAI,KAAK,aAAa,KAAK,YAAY;AAG5D,SAAK,kBAAkB,YAAY,MAAM;AACvC,WAAK,eAAe,EAAE,MAAM,QAAQ,KAAK;AAAA,IAC3C,GAAG,GAAO;AAAA,EACZ;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,iBAAgC;AAC3C,QAAI,CAAC,KAAK,YAAa;AAEvB,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,KAAK,WAAW,KAAK,SAAS,GAAG;AAAA,EACzC;AAAA,EAEA,MAAc,WAAW,KAAa,KAA4B;AAChE,QAAI;AACJ,QAAI;AACF,gBAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,IACzD,QAAQ;AACN;AAAA,IACF;AAEA,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAW,KAAK,KAAK,KAA
K,MAAM,IAAI;AAE1C,UAAI,MAAM,YAAY,GAAG;AACvB,cAAM,KAAK,WAAW,UAAU,GAAG;AAEnC,YAAI;AACF,gBAAM,GAAG,MAAM,QAAQ;AAAA,QACzB,QAAQ;AAAA,QAER;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AACzB,YAAI;AACF,gBAAM,OAAO,MAAM,GAAG,KAAK,QAAQ;AACnC,cAAI,MAAM,KAAK,UAAU,KAAK,UAAU;AACtC,kBAAM,GAAG,GAAG,UAAU,EAAE,OAAO,KAAK,CAAC;AAAA,UACvC;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,UAAU,UAAiC;AACvD,UAAM,GAAG,MAAM,KAAK,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAC1D,SAAK,cAAc;AAAA,EACrB;AAAA,EAEQ,SAAS,KAAqB;AACpC,WAAO,KAAK,KAAK,KAAK,SAAS,GAAG;AAAA,EACpC;AAAA,EAEQ,WAAW,WAA2B;AAC5C,UAAM,OAAO,OAAO,YAAY,CAAC,EAAE,SAAS,KAAK;AAEjD,WAAO,GAAG,SAAS,QAAQ,QAAQ,GAAG,IAAI,IAAI;AAAA,EAChD;AAAA,EAEA,MAAM,IAAI,KAAa,QAAkB,QAAgC;AAEvE,UAAM,KAAK,KAAK,SAAS,GAAG;AAC5B,UAAM,KAAK,UAAU,EAAE;AAGvB,UAAM,UAAU,KAAK,WAAW,EAAE;AAClC,QAAI;AACF,YAAM,cAAc,kBAAkB,OAAO;AAC7C,YAAM,SAAS,QAAQ,WAAW;AAClC,YAAM,GAAG,OAAO,SAAS,EAAE;AAAA,IAC7B,SAAS,KAAK;AACZ,YAAM,GAAG,GAAG,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AACpD,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,KAAuC;AACrD,UAAM,KAAK,KAAK,SAAS,GAAG;AAC5B,QAAI,CAAC,WAAW,EAAE,EAAG,QAAO;AAC5B,WAAO,iBAAiB,EAAE;AAAA,EAC5B;AAAA,EAEA,MAAM,OAAO,KAA+B;AAC1C,QAAI;AACF,YAAM,GAAG,OAAO,KAAK,SAAS,GAAG,CAAC;AAClC,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,WAAW,MAA+C;AAC9D,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,KAAK,IAAI,OAAO,QAAQ,CAAC,KAAK,MAAM,KAAK,OAAO,GAAG,CAAC,CAAU;AAAA,IAChE;AACA,WAAO,IAAI,IAAI,OAAO;AAAA,EACxB;AAAA,EAEA,MAAM,OAAO,KAA4B;AACvC,UAAM,GAAG,GAAG,KAAK,SAAS,GAAG,GAAG,EAAE,OAAO,KAAK,CAAC;AAAA,EACjD;AAAA,EAEA,MAAM,QAAQ,KAAa,MAAc,QAAgC;AAEvE,UAAM,KAAK,KAAK,SAAS,GAAG;AAC5B,UAAM,KAAK,UAAU,EAAE;AAGvB,UAAM,UAAU,KAAK,WAAW,EAAE;AAClC,QAAI;AACF,YAAM,GAAG,UAAU,SAAS,KAAK,UAAU,IAAI,CAAC;AAChD,YAAM,GAAG,OAAO,SAAS,EAAE;AAAA,IAC7B,SAAS,KAAK;AACZ,YAAM,GAAG,GAAG,SAAS,EAAE,OAAO,KAAK,CAAC,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AACpD,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,QAAW,KAAgC;AAC/C,QAAI;AACF,YAAM,UAAU,MAAM,GAAG,SAAS,KAAK,SAAS,GAAG,GAAG,OAAO;AAC7D,aAAO,KAAK,MAAM,OAAO;AAAA,
IAC3B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,QAAc;AAEZ,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAClC,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AACF;AAMO,SAAS,uBAAuC;AAErD,QAAM,cAAc,QAAQ,IAAI,0BAC5B,SAAS,QAAQ,IAAI,yBAAyB,EAAE,IAChD;AACJ,QAAM,eAAe,QAAQ,IAAI,sBAC7B,SAAS,QAAQ,IAAI,qBAAqB,EAAE,IAC5C;AAEJ,MAAI,QAAQ,IAAI,4BAA4B;AAC1C,WAAO,IAAI,iBAAiB;AAAA,MAC1B,UAAU,QAAQ,IAAI;AAAA,MACtB,QAAQ,QAAQ,IAAI,8BAA8B;AAAA,MAClD,QAAQ,QAAQ,IAAI;AAAA,MACpB,QAAQ,QAAQ,IAAI;AAAA,MACpB,aAAa,QAAQ,IAAI;AAAA,MACzB,iBAAiB,QAAQ,IAAI;AAAA,MAC7B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,IAAI,oBAAoB;AAAA,IAC7B,SAAS,QAAQ,IAAI;AAAA,IACrB;AAAA,IACA;AAAA,EACF,CAAC;AACH;AAEO,SAAS,QAAQ,KAAqB;AAC3C,SAAO,OAAO,WAAW,QAAQ,EAAE,OAAO,GAAG,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE;AAC1E;AAWO,SAAS,eAAe,KAA6B;AAC1D,SAAO,WAAW,QAAQ,GAAG,CAAC;AAChC;AAEO,SAAS,kBAAkB,OAAkC;AAClE,SAAO,eAAe,KAAK;AAC7B;AAEO,SAAS,kBAAkB,OAAkC;AAClE,SAAO,eAAe,KAAK;AAC7B;AAEO,SAAS,2BACd,OAC4B;AAC5B,SAAO,0BAA0B,KAAK;AACxC;AAMO,IAAM,YAAY;AAAA,EACvB,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,WAAW;AAAA,EACX,oBAAoB;AACtB;","names":[]}
|