@meframe/core 0.0.22 → 0.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Meframe.d.ts +17 -0
- package/dist/Meframe.d.ts.map +1 -1
- package/dist/Meframe.js +20 -0
- package/dist/Meframe.js.map +1 -1
- package/dist/cache/CacheManager.d.ts +18 -1
- package/dist/cache/CacheManager.d.ts.map +1 -1
- package/dist/cache/CacheManager.js +19 -1
- package/dist/cache/CacheManager.js.map +1 -1
- package/dist/cache/index.d.ts +3 -3
- package/dist/cache/index.d.ts.map +1 -1
- package/dist/cache/l2/IndexedDBStore.d.ts +74 -0
- package/dist/cache/l2/IndexedDBStore.d.ts.map +1 -0
- package/dist/cache/l2/IndexedDBStore.js +180 -0
- package/dist/cache/l2/IndexedDBStore.js.map +1 -0
- package/dist/cache/{L2Cache.d.ts → l2/L2Cache.d.ts} +24 -14
- package/dist/cache/l2/L2Cache.d.ts.map +1 -0
- package/dist/cache/l2/L2Cache.js +329 -0
- package/dist/cache/l2/L2Cache.js.map +1 -0
- package/dist/cache/l2/OPFSStore.d.ts +46 -0
- package/dist/cache/l2/OPFSStore.d.ts.map +1 -0
- package/dist/cache/l2/OPFSStore.js +131 -0
- package/dist/cache/l2/OPFSStore.js.map +1 -0
- package/dist/config/defaults.d.ts.map +1 -1
- package/dist/config/defaults.js +1 -0
- package/dist/config/defaults.js.map +1 -1
- package/dist/config/types.d.ts +3 -0
- package/dist/config/types.d.ts.map +1 -1
- package/dist/model/validation.js +2 -2
- package/dist/model/validation.js.map +1 -1
- package/dist/orchestrator/Orchestrator.js +1 -1
- package/dist/orchestrator/Orchestrator.js.map +1 -1
- package/dist/orchestrator/VideoClipSession.d.ts +1 -1
- package/dist/orchestrator/VideoClipSession.d.ts.map +1 -1
- package/dist/orchestrator/VideoClipSession.js +25 -36
- package/dist/orchestrator/VideoClipSession.js.map +1 -1
- package/dist/orchestrator/types.d.ts +1 -0
- package/dist/orchestrator/types.d.ts.map +1 -1
- package/package.json +1 -1
- package/dist/cache/BatchWriter.d.ts +0 -25
- package/dist/cache/BatchWriter.d.ts.map +0 -1
- package/dist/cache/CacheStatsDecorator.d.ts +0 -27
- package/dist/cache/CacheStatsDecorator.d.ts.map +0 -1
- package/dist/cache/L2Cache.d.ts.map +0 -1
- package/dist/cache/L2Cache.js +0 -488
- package/dist/cache/L2Cache.js.map +0 -1
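The most visible change in this release is structural: the single-file L2 cache (package/dist/cache/L2Cache.js, deleted below) is replaced by a cache/l2/ directory containing a reworked L2Cache plus dedicated IndexedDBStore and OPFSStore modules, while BatchWriter and CacheStatsDecorator are dropped. The interfaces of the new modules are not shown in this diff, so the following is only a hypothetical sketch of how such a split could look; every name in it (ChunkMetadataStore, ChunkDataStore, L2CacheSketch) is invented for illustration and is not the package's API.

// Hypothetical sketch only (not the package's actual API). It separates the two
// responsibilities the deleted L2Cache.js below handles in one class:
// IndexedDB keeps per-clip batch metadata, OPFS keeps the raw chunk bytes.

interface BatchEntry {
  startUs: number;
  durationUs: number;
  byteOffset: number;
  byteLength: number;
  type: "key" | "delta";
}

interface ClipRecord {
  fileName: string;
  batches: BatchEntry[];
}

// Could be backed by IndexedDB (cf. cache/l2/IndexedDBStore).
interface ChunkMetadataStore {
  getRecord(clipId: string, track: "video" | "audio"): Promise<ClipRecord | undefined>;
  putRecord(clipId: string, track: "video" | "audio", record: ClipRecord): Promise<void>;
}

// Could be backed by OPFS (cf. cache/l2/OPFSStore).
interface ChunkDataStore {
  read(fileName: string, byteOffset: number, byteLength: number): Promise<ArrayBuffer>;
  append(fileName: string, data: ArrayBuffer): Promise<{ byteOffset: number; byteLength: number }>;
}

// A coordinating cache would then compose the two stores instead of talking to
// IndexedDB and OPFS directly, as the deleted class does.
class L2CacheSketch {
  constructor(
    private readonly meta: ChunkMetadataStore,
    private readonly data: ChunkDataStore
  ) {}

  async read(clipId: string, track: "video" | "audio", timeUs: number): Promise<ArrayBuffer | null> {
    const record = await this.meta.getRecord(clipId, track);
    const batch = record?.batches.find(
      (b) => timeUs >= b.startUs && timeUs < b.startUs + b.durationUs
    );
    if (!record || !batch) return null;
    return this.data.read(record.fileName, batch.byteOffset, batch.byteLength);
  }
}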
package/dist/cache/L2Cache.js
DELETED
@@ -1,488 +0,0 @@
-import { binarySearchRange } from "../utils/binary-search.js";
-class L2Cache {
-  db = null;
-  opfsRoot = null;
-  maxSize;
-  projectId;
-  initPromise = null;
-  constructor(config) {
-    this.maxSize = config.maxSizeMB * 1024 * 1024;
-    this.projectId = config.projectId;
-  }
-  async init() {
-    if (this.initPromise) return this.initPromise;
-    this.initPromise = this.initStorage();
-    return this.initPromise;
-  }
-  async get(timeUs, clipId) {
-    await this.init();
-    if (!this.db) return null;
-    const tx = this.db.transaction("chunks", "readonly");
-    const store = tx.objectStore("chunks");
-    const records = await this.collectRecords(store, clipId);
-    for (const record of records) {
-      const batch = binarySearchRange(record.batches, timeUs, (b) => ({
-        start: b.startUs,
-        end: b.startUs + b.durationUs
-      }));
-      if (!batch) {
-        continue;
-      }
-      const chunkData = await this.readFromOPFS(record.fileName, batch);
-      if (!chunkData) continue;
-      this.updateLastAccess(record.clipId, record.track);
-      return this.createChunk(chunkData, timeUs, record.track, batch.type, batch.durationUs);
-    }
-    return null;
-  }
-  async put(clipId, chunks, track, options) {
-    await this.init();
-    if (!this.db || !this.opfsRoot) {
-      console.warn(`[L2Cache] put aborted: db=${!!this.db}, opfsRoot=${!!this.opfsRoot}`);
-      return;
-    }
-    if (chunks.length === 0) return;
-    const fileName = `clip-${clipId}-${track[0]}1.${track === "video" ? "webm" : "m4a"}`;
-    let existingRecord;
-    {
-      const tx = this.db.transaction("chunks", "readonly");
-      const store = tx.objectStore("chunks");
-      existingRecord = await this.promisifyRequest(store.get([clipId, track]));
-    }
-    if (existingRecord) {
-      const projectDir = await this.opfsRoot.getDirectoryHandle(
-        `meframe-project-${this.projectId}`,
-        {
-          create: true
-        }
-      );
-      try {
-        await projectDir.getFileHandle(existingRecord.fileName, { create: false });
-      } catch (error) {
-        if (error?.name === "NotFoundError") {
-          await this.deleteEntry(clipId, track);
-          existingRecord = void 0;
-        }
-      }
-    }
-    let chunksToWrite = chunks;
-    if (existingRecord && existingRecord.batches.length > 0) {
-      const lastBatch = existingRecord.batches[existingRecord.batches.length - 1];
-      if (lastBatch) {
-        const lastTimestamp = lastBatch.startUs;
-        chunksToWrite = chunks.filter((chunk) => chunk.timestamp > lastTimestamp);
-        if (chunksToWrite.length === 0) {
-          return;
-        }
-      }
-    }
-    const newBatches = await this.appendToOPFS(fileName, chunksToWrite, existingRecord?.batches);
-    {
-      const tx = this.db.transaction("chunks", "readwrite");
-      const store = tx.objectStore("chunks");
-      const record = {
-        clipId,
-        track,
-        fileName,
-        batches: existingRecord?.batches ? [...existingRecord.batches, ...newBatches] : newBatches,
-        lastAccess: Date.now(),
-        totalBytes: (existingRecord?.totalBytes || 0) + newBatches.reduce((sum, b) => sum + b.byteLength, 0),
-        isComplete: options?.isComplete ?? existingRecord?.isComplete ?? false,
-        expectedDurationUs: options?.expectedDurationUs ?? existingRecord?.expectedDurationUs,
-        metadata: options?.metadata ?? existingRecord?.metadata
-      };
-      store.put(record);
-      await new Promise((resolve, reject) => {
-        tx.oncomplete = () => {
-          resolve();
-        };
-        tx.onerror = () => {
-          console.error(`[L2Cache] Transaction error for ${clipId} ${track}:`, tx.error);
-          reject(tx.error);
-        };
-      });
-    }
-    await this.enforceQuota();
-  }
-  async invalidateRange(startUs, endUs, clipId) {
-    await this.init();
-    if (!this.db) return;
-    const tx = this.db.transaction("chunks", "readwrite");
-    const store = tx.objectStore("chunks");
-    const keysToDelete = [];
-    const cursor = store.openCursor();
-    await new Promise((resolve) => {
-      cursor.onsuccess = (event) => {
-        const cursor2 = event.target.result;
-        if (!cursor2) {
-          resolve();
-          return;
-        }
-        const record = cursor2.value;
-        if (clipId && record.clipId !== clipId) {
-          cursor2.continue();
-          return;
-        }
-        const hasOverlap = record.batches.some((batch) => {
-          const batchEnd = batch.startUs + batch.durationUs;
-          return batch.startUs < endUs && batchEnd > startUs;
-        });
-        if (hasOverlap) {
-          keysToDelete.push([record.clipId, record.track]);
-        }
-        cursor2.continue();
-      };
-    });
-    for (const key of keysToDelete) {
-      await this.deleteEntry(key[0], key[1]);
-    }
-  }
-  /**
-   * Check if clip has cached data in L2
-   */
-  async hasClip(clipId, track) {
-    await this.init();
-    if (!this.db) return false;
-    const tx = this.db.transaction("chunks", "readonly");
-    const store = tx.objectStore("chunks");
-    const record = await this.promisifyRequest(store.get([clipId, track]));
-    return record !== void 0 && record.batches && record.batches.length > 0;
-  }
-  /**
-   * Check if clip has complete cached data in L2
-   */
-  async hasCompleteClip(clipId, track) {
-    await this.init();
-    if (!this.db) {
-      console.warn(`[L2Cache] hasCompleteClip: db not initialized`);
-      return false;
-    }
-    const tx = this.db.transaction("chunks", "readonly");
-    const store = tx.objectStore("chunks");
-    const record = await this.promisifyRequest(store.get([clipId, track]));
-    const result = record?.isComplete === true;
-    return result;
-  }
-  /**
-   * Mark clip as complete in L2 cache
-   */
-  async markComplete(clipId, track) {
-    await this.init();
-    if (!this.db) return;
-    const tx = this.db.transaction("chunks", "readwrite");
-    const store = tx.objectStore("chunks");
-    const record = await this.promisifyRequest(store.get([clipId, track]));
-    if (record) {
-      record.isComplete = true;
-      record.lastAccess = Date.now();
-      store.put(record);
-      await new Promise((resolve, reject) => {
-        tx.oncomplete = () => resolve();
-        tx.onerror = () => reject(tx.error);
-      });
-      console.log(`[L2Cache] markComplete(${clipId}, ${track}): marked successfully`);
-    } else {
-      console.warn(`[L2Cache] markComplete(${clipId}, ${track}): no record found, cannot mark`);
-    }
-  }
-  async invalidateClip(clipId) {
-    await this.init();
-    if (!this.db) return;
-    const recordsToDelete = [];
-    {
-      const tx = this.db.transaction("chunks", "readonly");
-      const store = tx.objectStore("chunks");
-      recordsToDelete.push(...await this.collectRecords(store, clipId));
-    }
-    for (const record of recordsToDelete) {
-      await this.deleteEntry(record.clipId, record.track);
-    }
-  }
-  /**
-   * Create a readable stream of encoded chunks for export
-   * Reads chunks in timestamp order from OPFS
-   */
-  async createReadStream(clipId, track) {
-    await this.init();
-    if (!this.db || !this.opfsRoot) return null;
-    const tx = this.db.transaction("chunks", "readonly");
-    const store = tx.objectStore("chunks");
-    const record = await this.promisifyRequest(store.get([clipId, track]));
-    if (!record || record.batches.length === 0) {
-      return null;
-    }
-    const batches = [...record.batches];
-    let batchIndex = 0;
-    return new ReadableStream({
-      pull: async (controller) => {
-        if (batchIndex >= batches.length) {
-          controller.close();
-          return;
-        }
-        const batch = batches[batchIndex];
-        if (!batch) {
-          controller.close();
-          return;
-        }
-        try {
-          const chunkData = await this.readFromOPFS(record.fileName, batch);
-          if (!chunkData) {
-            controller.close();
-            return;
-          }
-          const chunk = this.createChunk(
-            chunkData,
-            batch.startUs,
-            track,
-            batch.type,
-            batch.durationUs
-          );
-          controller.enqueue(chunk);
-          batchIndex++;
-        } catch (error) {
-          if (error instanceof DOMException && error.name === "NotFoundError") {
-            controller.close();
-          } else {
-            controller.error(error);
-          }
-        }
-      }
-    });
-  }
-  async clear() {
-    await this.init();
-    if (!this.db || !this.opfsRoot) {
-      console.warn("[L2Cache] clear() called but db or opfsRoot not available");
-      return;
-    }
-    try {
-      const tx = this.db.transaction(["chunks", "meta"], "readwrite");
-      await this.promisifyRequest(tx.objectStore("chunks").clear());
-      await this.promisifyRequest(tx.objectStore("meta").clear());
-    } catch (error) {
-      console.error("[L2Cache] Failed to clear IndexedDB:", error);
-      throw error;
-    }
-    try {
-      const projectDir = await this.opfsRoot.getDirectoryHandle(
-        `meframe-project-${this.projectId}`,
-        {
-          create: false
-        }
-      );
-      await this.opfsRoot.removeEntry(projectDir.name, { recursive: true });
-    } catch (error) {
-      if (error?.name !== "NotFoundError") {
-        console.warn("[L2Cache] Failed to clear OPFS:", error);
-      }
-    }
-  }
-  async initStorage() {
-    this.opfsRoot = await navigator.storage.getDirectory();
-    const request = indexedDB.open("meframe_cache", 1);
-    request.onupgradeneeded = (event) => {
-      const db = event.target.result;
-      if (!db.objectStoreNames.contains("chunks")) {
-        const store = db.createObjectStore("chunks", {
-          keyPath: ["clipId", "track"]
-        });
-        store.createIndex("lastAccess", "lastAccess");
-      }
-      if (!db.objectStoreNames.contains("meta")) {
-        db.createObjectStore("meta", { keyPath: "projectId" });
-      }
-    };
-    this.db = await new Promise((resolve, reject) => {
-      request.onsuccess = () => resolve(request.result);
-      request.onerror = () => reject(request.error);
-    });
-  }
-  async readFromOPFS(fileName, batch) {
-    if (!this.opfsRoot) return null;
-    const projectDir = await this.opfsRoot.getDirectoryHandle(`meframe-project-${this.projectId}`, {
-      create: false
-    });
-    const fileHandle = await projectDir.getFileHandle(fileName);
-    const file = await fileHandle.getFile();
-    const slice = file.slice(batch.byteOffset, batch.byteOffset + batch.byteLength);
-    return await slice.arrayBuffer();
-  }
-  /**
-   * Append chunks to OPFS file (or create new file)
-   * Supports incremental writing for streaming scenarios
-   */
-  async appendToOPFS(fileName, chunks, existingBatches) {
-    if (!this.opfsRoot) return [];
-    const projectDir = await this.opfsRoot.getDirectoryHandle(`meframe-project-${this.projectId}`, {
-      create: true
-    });
-    const fileHandle = await projectDir.getFileHandle(fileName, { create: true });
-    let offset = 0;
-    if (existingBatches && existingBatches.length > 0) {
-      const lastBatch = existingBatches[existingBatches.length - 1];
-      if (lastBatch) {
-        offset = lastBatch.byteOffset + lastBatch.byteLength;
-      }
-    }
-    const writable = await fileHandle.createWritable({ keepExistingData: true });
-    const batches = [];
-    for (const chunk of chunks) {
-      const data = await this.chunkToArrayBuffer(chunk);
-      await writable.write({ type: "write", position: offset, data });
-      batches.push({
-        startUs: chunk.timestamp,
-        durationUs: chunk.duration || 0,
-        byteOffset: offset,
-        byteLength: data.byteLength,
-        type: chunk.type
-      });
-      offset += data.byteLength;
-    }
-    await writable.close();
-    return batches;
-  }
-  async chunkToArrayBuffer(chunk) {
-    const buffer = new ArrayBuffer(chunk.byteLength);
-    chunk.copyTo(buffer);
-    return buffer;
-  }
-  createChunk(data, timeUs, track, chunkType = "key", durationUs = 0) {
-    if (track === "video") {
-      return new EncodedVideoChunk({
-        type: chunkType,
-        timestamp: timeUs,
-        duration: durationUs,
-        data
-      });
-    } else {
-      return new EncodedAudioChunk({
-        type: chunkType,
-        timestamp: timeUs,
-        duration: durationUs,
-        data
-      });
-    }
-  }
-  async updateLastAccess(clipId, track) {
-    if (!this.db) return;
-    const tx = this.db.transaction("chunks", "readwrite");
-    const store = tx.objectStore("chunks");
-    const record = await this.promisifyRequest(store.get([clipId, track]));
-    if (record) {
-      record.lastAccess = Date.now();
-      await this.promisifyRequest(store.put(record));
-    }
-  }
-  async deleteEntry(clipId, track) {
-    if (!this.db) return;
-    let record;
-    {
-      const tx = this.db.transaction("chunks", "readonly");
-      const store = tx.objectStore("chunks");
-      record = await this.promisifyRequest(store.get([clipId, track]));
-    }
-    if (record && this.opfsRoot) {
-      try {
-        const projectDir = await this.opfsRoot.getDirectoryHandle(
-          `meframe-project-${this.projectId}`,
-          { create: false }
-        );
-        await projectDir.removeEntry(record.fileName);
-      } catch (error) {
-        console.warn(`[L2Cache] Failed to delete OPFS file ${record.fileName}:`, error);
-      }
-    }
-    {
-      const tx = this.db.transaction("chunks", "readwrite");
-      const store = tx.objectStore("chunks");
-      await this.promisifyRequest(store.delete([clipId, track]));
-    }
-  }
-  async enforceQuota() {
-    const estimate = await navigator.storage.estimate();
-    const usage = estimate.usage || 0;
-    if (usage <= this.maxSize) return;
-    console.warn(
-      `[L2Cache] Quota exceeded! Deleting oldest entries: usage=${usage}, maxSize=${this.maxSize}`
-    );
-    if (!this.db) return;
-    const tx = this.db.transaction("chunks", "readwrite");
-    const store = tx.objectStore("chunks");
-    const index = store.index("lastAccess");
-    let bytesDeleted = 0;
-    const toDelete = usage - this.maxSize;
-    const cursor = index.openCursor();
-    await new Promise((resolve) => {
-      cursor.onsuccess = async (event) => {
-        const cursor2 = event.target.result;
-        if (!cursor2 || bytesDeleted >= toDelete) {
-          resolve();
-          return;
-        }
-        const record = cursor2.value;
-        await this.deleteEntry(record.clipId, record.track);
-        bytesDeleted += record.totalBytes;
-        cursor2.continue();
-      };
-    });
-  }
-  async collectRecords(store, clipId) {
-    const records = [];
-    const cursor = store.openCursor();
-    await new Promise((resolve) => {
-      cursor.onsuccess = (event) => {
-        const cursor2 = event.target.result;
-        if (!cursor2) {
-          resolve();
-          return;
-        }
-        const record = cursor2.value;
-        if (record.clipId === clipId) {
-          records.push(record);
-        }
-        cursor2.continue();
-      };
-    });
-    return records;
-  }
-  promisifyRequest(request) {
-    return new Promise((resolve, reject) => {
-      request.onsuccess = () => resolve(request.result);
-      request.onerror = () => reject(request.error);
-    });
-  }
-  getMetadata() {
-    return {
-      maxSizeMB: this.maxSize / (1024 * 1024),
-      usedSizeMB: 0,
-      // Would need to track actual usage
-      entries: 0,
-      // Would need to track actual entries
-      hitRate: 0
-      // Would need to track hits and misses
-    };
-  }
-  async hasAvailableQuota(sizeMB) {
-    if (typeof navigator === "undefined" || !navigator.storage?.estimate) {
-      throw new Error("Storage API not available");
-    }
-    const estimate = await navigator.storage.estimate();
-    const availableMB = ((estimate.quota || 0) - (estimate.usage || 0)) / (1024 * 1024);
-    return availableMB >= sizeMB;
-  }
-  /**
-   * Get chunk metadata (decoderConfig) for a specific clip
-   */
-  async getClipMetadata(clipId, track) {
-    await this.init();
-    if (!this.db) return null;
-    const tx = this.db.transaction("chunks", "readonly");
-    const store = tx.objectStore("chunks");
-    const record = await this.promisifyRequest(store.get([clipId, track]));
-    return record?.metadata || null;
-  }
-}
-export {
-  L2Cache
-};
-//# sourceMappingURL=L2Cache.js.map
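For orientation, below is a minimal usage sketch of the removed L2Cache API, based only on the signatures visible in the listing above (the constructor config, init, put, markComplete, get, and createReadStream). The values, the import path, and the dummy chunk are illustrative; it assumes a browser context where WebCodecs, IndexedDB, and OPFS are available.

// Minimal usage sketch of the removed L2Cache API shown above; illustrative only.
import { L2Cache } from "./L2Cache.js";

const cache = new L2Cache({ maxSizeMB: 512, projectId: "demo-project" });
await cache.init();

// Real chunks would come from a VideoEncoder; this is a dummy keyframe.
const chunk = new EncodedVideoChunk({
  type: "key",
  timestamp: 0, // microseconds
  duration: 33_333,
  data: new Uint8Array([0, 0, 0, 1]),
});

// Append chunks for a clip's video track, then mark the clip as fully cached.
await cache.put("clip-1", [chunk], "video", { isComplete: false });
await cache.markComplete("clip-1", "video");

// Look up the cached chunk covering a timestamp, or stream everything back
// in timestamp order (e.g. for export).
const hit = await cache.get(0, "clip-1");
const stream = await cache.createReadStream("clip-1", "video");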
package/dist/cache/L2Cache.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"L2Cache.js","sources":["../../src/cache/L2Cache.ts"],"sourcesContent":["import type { TimeUs } from '../model/types';\n// import type { ClipMetadata } from './types';\nimport { binarySearchRange } from '../utils/binary-search';\n\ninterface ChunkBatch {\n startUs: TimeUs;\n durationUs: TimeUs;\n byteOffset: number;\n byteLength: number;\n type: 'key' | 'delta'; // Store chunk type for correct reconstruction\n}\n\ninterface ChunkRecord {\n clipId: string; // IndexedDB keyPath uses clipId\n track: 'video' | 'audio';\n fileName: string;\n batches: ChunkBatch[];\n lastAccess: number;\n totalBytes: number;\n isComplete: boolean; // Mark if clip is fully rendered\n expectedDurationUs?: number; // Expected duration for validation\n metadata?: {\n codec?: string;\n description?: Uint8Array;\n codedWidth?: number;\n codedHeight?: number;\n displayAspectWidth?: number;\n displayAspectHeight?: number;\n colorSpace?: VideoColorSpaceInit;\n hardwareAcceleration?: HardwareAcceleration;\n optimizeForLatency?: boolean;\n sampleRate?: number;\n numberOfChannels?: number;\n };\n}\n\ninterface L2Config {\n maxSizeMB: number;\n projectId: string;\n}\n\nexport class L2Cache {\n private db: IDBDatabase | null = null;\n private opfsRoot: FileSystemDirectoryHandle | null = null;\n readonly maxSize: number;\n readonly projectId: string;\n private initPromise: Promise<void> | null = null;\n\n constructor(config: L2Config) {\n this.maxSize = config.maxSizeMB * 1024 * 1024;\n this.projectId = config.projectId;\n }\n\n async init(): Promise<void> {\n if (this.initPromise) return this.initPromise;\n\n this.initPromise = this.initStorage();\n return this.initPromise;\n }\n\n async get(timeUs: TimeUs, clipId: string): Promise<EncodedVideoChunk | EncodedAudioChunk | null> {\n await this.init();\n\n if (!this.db) return null;\n\n // Query IndexedDB for chunk metadata\n const tx = this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n const records = await this.collectRecords(store, clipId);\n\n for (const record of records) {\n const batch = binarySearchRange(record.batches, timeUs, (b) => ({\n start: b.startUs,\n end: b.startUs + b.durationUs,\n }));\n\n if (!batch) {\n continue;\n }\n\n const chunkData = await this.readFromOPFS(record.fileName, batch);\n if (!chunkData) continue;\n\n this.updateLastAccess(record.clipId, record.track);\n\n return this.createChunk(chunkData, timeUs, record.track, batch.type, batch.durationUs);\n }\n\n return null;\n }\n\n async put(\n clipId: string,\n chunks: Array<EncodedVideoChunk | EncodedAudioChunk>,\n track: 'video' | 'audio',\n options?: {\n isComplete?: boolean;\n expectedDurationUs?: number;\n metadata?: any;\n }\n ): Promise<void> {\n await this.init();\n\n if (!this.db || !this.opfsRoot) {\n console.warn(`[L2Cache] put aborted: db=${!!this.db}, opfsRoot=${!!this.opfsRoot}`);\n return;\n }\n if (chunks.length === 0) return;\n\n const fileName = `clip-${clipId}-${track[0]}1.${track === 'video' ? 
'webm' : 'm4a'}`;\n\n // Step 1: Read existing record (separate transaction)\n let existingRecord: ChunkRecord | undefined;\n {\n const tx = this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n existingRecord = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n }\n\n // Step 2: Validate consistency - if IndexedDB has record but OPFS file missing, delete the record\n if (existingRecord) {\n const projectDir = await this.opfsRoot.getDirectoryHandle(\n `meframe-project-${this.projectId}`,\n {\n create: true,\n }\n );\n try {\n await projectDir.getFileHandle(existingRecord.fileName, { create: false });\n } catch (error) {\n if ((error as any)?.name === 'NotFoundError') {\n await this.deleteEntry(clipId, track);\n existingRecord = undefined;\n }\n }\n }\n\n // Step 3: Deduplicate based on timestamp\n let chunksToWrite = chunks;\n if (existingRecord && existingRecord.batches.length > 0) {\n const lastBatch = existingRecord.batches[existingRecord.batches.length - 1];\n if (lastBatch) {\n const lastTimestamp = lastBatch.startUs;\n // Filter out chunks with timestamp <= lastTimestamp\n chunksToWrite = chunks.filter((chunk) => chunk.timestamp > lastTimestamp);\n\n if (chunksToWrite.length === 0) {\n return;\n }\n }\n }\n\n // Step 3: Write to OPFS (no active transaction)\n const newBatches = await this.appendToOPFS(fileName, chunksToWrite, existingRecord?.batches);\n\n // Step 4: Update IndexedDB (new transaction)\n {\n const tx = this.db.transaction('chunks', 'readwrite');\n const store = tx.objectStore('chunks');\n\n const record: ChunkRecord = {\n clipId,\n track,\n fileName,\n batches: existingRecord?.batches ? [...existingRecord.batches, ...newBatches] : newBatches,\n lastAccess: Date.now(),\n totalBytes:\n (existingRecord?.totalBytes || 0) + newBatches.reduce((sum, b) => sum + b.byteLength, 0),\n isComplete: options?.isComplete ?? existingRecord?.isComplete ?? false,\n expectedDurationUs: options?.expectedDurationUs ?? existingRecord?.expectedDurationUs,\n metadata: options?.metadata ?? 
existingRecord?.metadata,\n };\n\n store.put(record);\n // Wait for transaction to complete, not just the request\n await new Promise<void>((resolve, reject) => {\n tx.oncomplete = () => {\n resolve();\n };\n tx.onerror = () => {\n console.error(`[L2Cache] Transaction error for ${clipId} ${track}:`, tx.error);\n reject(tx.error);\n };\n });\n }\n\n // Check and enforce quota\n await this.enforceQuota();\n }\n\n async invalidateRange(startUs: TimeUs, endUs: TimeUs, clipId?: string): Promise<void> {\n await this.init();\n\n if (!this.db) return;\n\n const tx = this.db.transaction('chunks', 'readwrite');\n const store = tx.objectStore('chunks');\n const keysToDelete: Array<[string, string]> = [];\n\n // Iterate through all records\n const cursor = store.openCursor();\n await new Promise<void>((resolve) => {\n cursor.onsuccess = (event) => {\n const cursor = (event.target as IDBRequest).result;\n if (!cursor) {\n resolve();\n return;\n }\n\n const record: ChunkRecord = cursor.value;\n\n if (clipId && record.clipId !== clipId) {\n cursor.continue();\n return;\n }\n\n // Check if any batch overlaps with invalidation range\n const hasOverlap = record.batches.some((batch) => {\n const batchEnd = batch.startUs + batch.durationUs;\n return batch.startUs < endUs && batchEnd > startUs;\n });\n\n if (hasOverlap) {\n keysToDelete.push([record.clipId, record.track]);\n }\n\n cursor.continue();\n };\n });\n\n // Delete invalidated entries\n for (const key of keysToDelete) {\n await this.deleteEntry(key[0], key[1]);\n }\n }\n\n /**\n * Check if clip has cached data in L2\n */\n async hasClip(clipId: string, track: 'video' | 'audio'): Promise<boolean> {\n await this.init();\n\n if (!this.db) return false;\n\n const tx = this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n const record = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n\n return record !== undefined && record.batches && record.batches.length > 0;\n }\n\n /**\n * Check if clip has complete cached data in L2\n */\n async hasCompleteClip(clipId: string, track: 'video' | 'audio'): Promise<boolean> {\n await this.init();\n\n if (!this.db) {\n console.warn(`[L2Cache] hasCompleteClip: db not initialized`);\n return false;\n }\n\n const tx = this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n const record = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n\n const result = record?.isComplete === true;\n return result;\n }\n\n /**\n * Mark clip as complete in L2 cache\n */\n async markComplete(clipId: string, track: 'video' | 'audio'): Promise<void> {\n await this.init();\n\n if (!this.db) return;\n\n const tx = this.db.transaction('chunks', 'readwrite');\n const store = tx.objectStore('chunks');\n const record = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n\n if (record) {\n record.isComplete = true;\n record.lastAccess = Date.now();\n store.put(record);\n // Wait for transaction to complete\n await new Promise<void>((resolve, reject) => {\n tx.oncomplete = () => resolve();\n tx.onerror = () => reject(tx.error);\n });\n console.log(`[L2Cache] markComplete(${clipId}, ${track}): marked successfully`);\n } else {\n console.warn(`[L2Cache] markComplete(${clipId}, ${track}): no record found, cannot mark`);\n }\n }\n\n async invalidateClip(clipId: string): Promise<void> {\n await this.init();\n\n if (!this.db) return;\n\n // Collect records to delete\n const recordsToDelete: ChunkRecord[] = [];\n {\n const tx = 
this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n recordsToDelete.push(...(await this.collectRecords(store, clipId)));\n }\n\n // Delete each record\n for (const record of recordsToDelete) {\n await this.deleteEntry(record.clipId, record.track);\n }\n }\n\n /**\n * Create a readable stream of encoded chunks for export\n * Reads chunks in timestamp order from OPFS\n */\n async createReadStream(\n clipId: string,\n track: 'video' | 'audio'\n ): Promise<ReadableStream<EncodedVideoChunk | EncodedAudioChunk> | null> {\n await this.init();\n\n if (!this.db || !this.opfsRoot) return null;\n\n // Get chunk record\n const tx = this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n const record = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n\n if (!record || record.batches.length === 0) {\n return null;\n }\n\n // Clone batches array for stream iteration\n const batches = [...record.batches];\n let batchIndex = 0;\n\n return new ReadableStream<EncodedVideoChunk | EncodedAudioChunk>({\n pull: async (controller) => {\n if (batchIndex >= batches.length) {\n controller.close();\n return;\n }\n\n const batch = batches[batchIndex];\n if (!batch) {\n controller.close();\n return;\n }\n\n try {\n // Read chunk data from OPFS\n const chunkData = await this.readFromOPFS(record.fileName, batch);\n if (!chunkData) {\n controller.close();\n return;\n }\n\n // Create encoded chunk with correct type and duration\n const chunk = this.createChunk(\n chunkData,\n batch.startUs,\n track,\n batch.type,\n batch.durationUs\n );\n controller.enqueue(chunk);\n\n batchIndex++;\n } catch (error) {\n // File not found or read error - close stream gracefully\n if (error instanceof DOMException && error.name === 'NotFoundError') {\n controller.close();\n } else {\n controller.error(error);\n }\n }\n },\n });\n }\n\n async clear(): Promise<void> {\n await this.init();\n\n if (!this.db || !this.opfsRoot) {\n console.warn('[L2Cache] clear() called but db or opfsRoot not available');\n return;\n }\n\n // Clear IndexedDB\n try {\n const tx = this.db.transaction(['chunks', 'meta'], 'readwrite');\n await this.promisifyRequest(tx.objectStore('chunks').clear());\n await this.promisifyRequest(tx.objectStore('meta').clear());\n } catch (error) {\n console.error('[L2Cache] Failed to clear IndexedDB:', error);\n throw error;\n }\n\n // Clear OPFS files\n try {\n const projectDir = await this.opfsRoot.getDirectoryHandle(\n `meframe-project-${this.projectId}`,\n {\n create: false,\n }\n );\n await this.opfsRoot.removeEntry(projectDir.name, { recursive: true });\n } catch (error) {\n if ((error as any)?.name !== 'NotFoundError') {\n console.warn('[L2Cache] Failed to clear OPFS:', error);\n }\n }\n }\n\n private async initStorage(): Promise<void> {\n // Initialize OPFS\n this.opfsRoot = await navigator.storage.getDirectory();\n\n // Initialize IndexedDB\n const request = indexedDB.open('meframe_cache', 1);\n\n request.onupgradeneeded = (event) => {\n const db = (event.target as IDBOpenDBRequest).result;\n\n // chunks store with composite key [clipId, track]\n if (!db.objectStoreNames.contains('chunks')) {\n const store = db.createObjectStore('chunks', {\n keyPath: ['clipId', 'track'],\n });\n store.createIndex('lastAccess', 'lastAccess');\n }\n\n // meta store\n if (!db.objectStoreNames.contains('meta')) {\n db.createObjectStore('meta', { keyPath: 'projectId' });\n }\n };\n\n this.db = await new Promise((resolve, reject) => {\n request.onsuccess = () 
=> resolve(request.result);\n request.onerror = () => reject(request.error);\n });\n }\n\n private async readFromOPFS(fileName: string, batch: ChunkBatch): Promise<ArrayBuffer | null> {\n if (!this.opfsRoot) return null;\n\n const projectDir = await this.opfsRoot.getDirectoryHandle(`meframe-project-${this.projectId}`, {\n create: false,\n });\n const fileHandle = await projectDir.getFileHandle(fileName);\n const file = await fileHandle.getFile();\n const slice = file.slice(batch.byteOffset, batch.byteOffset + batch.byteLength);\n return await slice.arrayBuffer();\n }\n\n /**\n * Append chunks to OPFS file (or create new file)\n * Supports incremental writing for streaming scenarios\n */\n private async appendToOPFS(\n fileName: string,\n chunks: Array<EncodedVideoChunk | EncodedAudioChunk>,\n existingBatches?: ChunkBatch[]\n ): Promise<ChunkBatch[]> {\n if (!this.opfsRoot) return [];\n\n const projectDir = await this.opfsRoot.getDirectoryHandle(`meframe-project-${this.projectId}`, {\n create: true,\n });\n const fileHandle = await projectDir.getFileHandle(fileName, { create: true });\n\n // Calculate starting offset from existing batches\n let offset = 0;\n if (existingBatches && existingBatches.length > 0) {\n const lastBatch = existingBatches[existingBatches.length - 1];\n if (lastBatch) {\n offset = lastBatch.byteOffset + lastBatch.byteLength;\n }\n }\n\n const writable = await fileHandle.createWritable({ keepExistingData: true });\n\n const batches: ChunkBatch[] = [];\n\n for (const chunk of chunks) {\n const data = await this.chunkToArrayBuffer(chunk);\n await writable.write({ type: 'write', position: offset, data });\n\n batches.push({\n startUs: chunk.timestamp,\n durationUs: chunk.duration || 0,\n byteOffset: offset,\n byteLength: data.byteLength,\n type: chunk.type,\n });\n\n offset += data.byteLength;\n }\n\n await writable.close();\n return batches;\n }\n\n private async chunkToArrayBuffer(\n chunk: EncodedVideoChunk | EncodedAudioChunk\n ): Promise<ArrayBuffer> {\n const buffer = new ArrayBuffer(chunk.byteLength);\n chunk.copyTo(buffer);\n return buffer;\n }\n\n private createChunk(\n data: ArrayBuffer,\n timeUs: TimeUs,\n track: 'video' | 'audio',\n chunkType: 'key' | 'delta' = 'key',\n durationUs: TimeUs = 0\n ): EncodedVideoChunk | EncodedAudioChunk {\n if (track === 'video') {\n return new EncodedVideoChunk({\n type: chunkType,\n timestamp: timeUs,\n duration: durationUs,\n data,\n });\n } else {\n return new EncodedAudioChunk({\n type: chunkType,\n timestamp: timeUs,\n duration: durationUs,\n data,\n });\n }\n }\n\n private async updateLastAccess(clipId: string, track: string): Promise<void> {\n if (!this.db) return;\n\n const tx = this.db.transaction('chunks', 'readwrite');\n const store = tx.objectStore('chunks');\n const record = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n\n if (record) {\n record.lastAccess = Date.now();\n await this.promisifyRequest(store.put(record));\n }\n }\n\n private async deleteEntry(clipId: string, track: string): Promise<void> {\n if (!this.db) return;\n\n // Step 1: Get record info\n let record: ChunkRecord | undefined;\n {\n const tx = this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n record = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n }\n\n // Step 2: Delete OPFS file (outside transaction)\n if (record && this.opfsRoot) {\n try {\n const projectDir = await this.opfsRoot.getDirectoryHandle(\n `meframe-project-${this.projectId}`,\n { create: false 
}\n );\n await projectDir.removeEntry(record.fileName);\n } catch (error) {\n console.warn(`[L2Cache] Failed to delete OPFS file ${record.fileName}:`, error);\n }\n }\n\n // Step 3: Delete IndexedDB record (new transaction)\n {\n const tx = this.db.transaction('chunks', 'readwrite');\n const store = tx.objectStore('chunks');\n await this.promisifyRequest(store.delete([clipId, track]));\n }\n }\n\n private async enforceQuota(): Promise<void> {\n const estimate = await navigator.storage.estimate();\n const usage = estimate.usage || 0;\n\n if (usage <= this.maxSize) return;\n\n console.warn(\n `[L2Cache] Quota exceeded! Deleting oldest entries: usage=${usage}, maxSize=${this.maxSize}`\n );\n\n if (!this.db) return;\n\n // Delete oldest entries until under quota\n const tx = this.db.transaction('chunks', 'readwrite');\n const store = tx.objectStore('chunks');\n const index = store.index('lastAccess');\n\n let bytesDeleted = 0;\n const toDelete = usage - this.maxSize;\n\n const cursor = index.openCursor();\n await new Promise<void>((resolve) => {\n cursor.onsuccess = async (event) => {\n const cursor = (event.target as IDBRequest).result;\n if (!cursor || bytesDeleted >= toDelete) {\n resolve();\n return;\n }\n\n const record: ChunkRecord = cursor.value;\n await this.deleteEntry(record.clipId, record.track);\n bytesDeleted += record.totalBytes;\n\n cursor.continue();\n };\n });\n }\n\n private async collectRecords(store: IDBObjectStore, clipId: string): Promise<ChunkRecord[]> {\n const records: ChunkRecord[] = [];\n const cursor = store.openCursor();\n await new Promise<void>((resolve) => {\n cursor.onsuccess = (event) => {\n const cursor = (event.target as IDBRequest).result;\n if (!cursor) {\n resolve();\n return;\n }\n\n const record: ChunkRecord = cursor.value;\n if (record.clipId === clipId) {\n records.push(record);\n }\n\n cursor.continue();\n };\n });\n return records;\n }\n\n private promisifyRequest<T>(request: IDBRequest): Promise<T> {\n return new Promise((resolve, reject) => {\n request.onsuccess = () => resolve(request.result);\n request.onerror = () => reject(request.error);\n });\n }\n\n getMetadata(): {\n maxSizeMB: number;\n usedSizeMB: number;\n entries: number;\n hitRate: number;\n } {\n // This is a simplified implementation\n // In a real implementation, we would track actual usage\n return {\n maxSizeMB: this.maxSize / (1024 * 1024),\n usedSizeMB: 0, // Would need to track actual usage\n entries: 0, // Would need to track actual entries\n hitRate: 0, // Would need to track hits and misses\n };\n }\n\n async hasAvailableQuota(sizeMB: number): Promise<boolean> {\n if (typeof navigator === 'undefined' || !navigator.storage?.estimate) {\n // L2Cache requires storage API to function\n throw new Error('Storage API not available');\n }\n\n const estimate = await navigator.storage.estimate();\n const availableMB = ((estimate.quota || 0) - (estimate.usage || 0)) / (1024 * 1024);\n return availableMB >= sizeMB;\n }\n\n /**\n * Get chunk metadata (decoderConfig) for a specific clip\n */\n async getClipMetadata(clipId: string, track: 'video' | 'audio'): Promise<any | null> {\n await this.init();\n\n if (!this.db) return null;\n\n const tx = this.db.transaction('chunks', 'readonly');\n const store = tx.objectStore('chunks');\n const record = await this.promisifyRequest<ChunkRecord>(store.get([clipId, track]));\n\n return record?.metadata || null;\n 
}\n}\n"],"names":["cursor"],"mappings":";AAyCO,MAAM,QAAQ;AAAA,EACX,KAAyB;AAAA,EACzB,WAA6C;AAAA,EAC5C;AAAA,EACA;AAAA,EACD,cAAoC;AAAA,EAE5C,YAAY,QAAkB;AAC5B,SAAK,UAAU,OAAO,YAAY,OAAO;AACzC,SAAK,YAAY,OAAO;AAAA,EAC1B;AAAA,EAEA,MAAM,OAAsB;AAC1B,QAAI,KAAK,YAAa,QAAO,KAAK;AAElC,SAAK,cAAc,KAAK,YAAA;AACxB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,IAAI,QAAgB,QAAuE;AAC/F,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,GAAI,QAAO;AAGrB,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,UAAU,MAAM,KAAK,eAAe,OAAO,MAAM;AAEvD,eAAW,UAAU,SAAS;AAC5B,YAAM,QAAQ,kBAAkB,OAAO,SAAS,QAAQ,CAAC,OAAO;AAAA,QAC9D,OAAO,EAAE;AAAA,QACT,KAAK,EAAE,UAAU,EAAE;AAAA,MAAA,EACnB;AAEF,UAAI,CAAC,OAAO;AACV;AAAA,MACF;AAEA,YAAM,YAAY,MAAM,KAAK,aAAa,OAAO,UAAU,KAAK;AAChE,UAAI,CAAC,UAAW;AAEhB,WAAK,iBAAiB,OAAO,QAAQ,OAAO,KAAK;AAEjD,aAAO,KAAK,YAAY,WAAW,QAAQ,OAAO,OAAO,MAAM,MAAM,MAAM,UAAU;AAAA,IACvF;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,IACJ,QACA,QACA,OACA,SAKe;AACf,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,MAAM,CAAC,KAAK,UAAU;AAC9B,cAAQ,KAAK,6BAA6B,CAAC,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC,KAAK,QAAQ,EAAE;AAClF;AAAA,IACF;AACA,QAAI,OAAO,WAAW,EAAG;AAEzB,UAAM,WAAW,QAAQ,MAAM,IAAI,MAAM,CAAC,CAAC,KAAK,UAAU,UAAU,SAAS,KAAK;AAGlF,QAAI;AACJ;AACE,YAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,YAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,uBAAiB,MAAM,KAAK,iBAA8B,MAAM,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAAA,IACtF;AAGA,QAAI,gBAAgB;AAClB,YAAM,aAAa,MAAM,KAAK,SAAS;AAAA,QACrC,mBAAmB,KAAK,SAAS;AAAA,QACjC;AAAA,UACE,QAAQ;AAAA,QAAA;AAAA,MACV;AAEF,UAAI;AACF,cAAM,WAAW,cAAc,eAAe,UAAU,EAAE,QAAQ,OAAO;AAAA,MAC3E,SAAS,OAAO;AACd,YAAK,OAAe,SAAS,iBAAiB;AAC5C,gBAAM,KAAK,YAAY,QAAQ,KAAK;AACpC,2BAAiB;AAAA,QACnB;AAAA,MACF;AAAA,IACF;AAGA,QAAI,gBAAgB;AACpB,QAAI,kBAAkB,eAAe,QAAQ,SAAS,GAAG;AACvD,YAAM,YAAY,eAAe,QAAQ,eAAe,QAAQ,SAAS,CAAC;AAC1E,UAAI,WAAW;AACb,cAAM,gBAAgB,UAAU;AAEhC,wBAAgB,OAAO,OAAO,CAAC,UAAU,MAAM,YAAY,aAAa;AAExE,YAAI,cAAc,WAAW,GAAG;AAC9B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,KAAK,aAAa,UAAU,eAAe,gBAAgB,OAAO;AAG3F;AACE,YAAM,KAAK,KAAK,GAAG,YAAY,UAAU,WAAW;AACpD,YAAM,QAAQ,GAAG,YAAY,QAAQ;AAErC,YAAM,SAAsB;AAAA,QAC1B;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS,gBAAgB,UAAU,CAAC,GAAG,eAAe,SAAS,GAAG,UAAU,IAAI;AAAA,QAChF,YAAY,KAAK,IAAA;AAAA,QACjB,aACG,gBAAgB,cAAc,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,YAAY,CAAC;AAAA,QACzF,YAAY,SAAS,cAAc,gBAAgB,cAAc;AAAA,QACjE,oBAAoB,SAAS,sBAAsB,gBAAgB;AAAA,QACnE,UAAU,SAAS,YAAY,gBAAgB;AAAA,MAAA;AAGjD,YAAM,IAAI,MAAM;AAEhB,YAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,WAAG,aAAa,MAAM;AACpB,kBAAA;AAAA,QACF;AACA,WAAG,UAAU,MAAM;AACjB,kBAAQ,MAAM,mCAAmC,MAAM,IAAI,KAAK,KAAK,GAAG,KAAK;AAC7E,iBAAO,GAAG,KAAK;AAAA,QACjB;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,KAAK,aAAA;AAAA,EACb;AAAA,EAEA,MAAM,gBAAgB,SAAiB,OAAe,QAAgC;AACpF,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,GAAI;AAEd,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,WAAW;AACpD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,eAAwC,CAAA;AAG9C,UAAM,SAAS,MAAM,WAAA;AACrB,UAAM,IAAI,QAAc,CAAC,YAAY;AACnC,aAAO,YAAY,CAAC,UAAU;AAC5B,cAAMA,UAAU,MAAM,OAAsB;AAC5C,YAAI,CAACA,SAAQ;AACX,kBAAA;AACA;AAAA,QACF;AAEA,cAAM,SAAsBA,QAAO;AAEnC,YAAI,UAAU,OAAO,WAAW,QAAQ;AACtCA,kBAAO,SAAA;AACP;AAAA,QACF;AAGA,cAAM,aAAa,OAAO,QAAQ,KAAK,CAAC,UAAU;AAChD,gBAAM,WAAW,MAAM,UAAU,MAAM;AACvC,iBAAO,MAAM,UAAU,SAAS,WAAW;AAAA,QAC7C,CAAC;AAED,YAAI,YAAY;AACd,uBAAa,KAAK,CAAC,OAAO,QAAQ,OAAO,KAAK,CAAC;AAAA,QACjD;AAEAA,gBAAO,SAAA;AAAA,MACT;AAAA,IACF,CAAC;AAGD,eAAW,OAAO,cAAc;AAC9B,YAAM,KAAK,YAAY,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;AAAA,IACvC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,QAAgB,OAA4C;AACxE,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,GAAI,QAAO;AAErB,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,SAAS,MAAM,KAAK,iBAA8B,MAA
M,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAElF,WAAO,WAAW,UAAa,OAAO,WAAW,OAAO,QAAQ,SAAS;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBAAgB,QAAgB,OAA4C;AAChF,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,IAAI;AACZ,cAAQ,KAAK,+CAA+C;AAC5D,aAAO;AAAA,IACT;AAEA,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,SAAS,MAAM,KAAK,iBAA8B,MAAM,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAElF,UAAM,SAAS,QAAQ,eAAe;AACtC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,QAAgB,OAAyC;AAC1E,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,GAAI;AAEd,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,WAAW;AACpD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,SAAS,MAAM,KAAK,iBAA8B,MAAM,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAElF,QAAI,QAAQ;AACV,aAAO,aAAa;AACpB,aAAO,aAAa,KAAK,IAAA;AACzB,YAAM,IAAI,MAAM;AAEhB,YAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,WAAG,aAAa,MAAM,QAAA;AACtB,WAAG,UAAU,MAAM,OAAO,GAAG,KAAK;AAAA,MACpC,CAAC;AACD,cAAQ,IAAI,0BAA0B,MAAM,KAAK,KAAK,wBAAwB;AAAA,IAChF,OAAO;AACL,cAAQ,KAAK,0BAA0B,MAAM,KAAK,KAAK,iCAAiC;AAAA,IAC1F;AAAA,EACF;AAAA,EAEA,MAAM,eAAe,QAA+B;AAClD,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,GAAI;AAGd,UAAM,kBAAiC,CAAA;AACvC;AACE,YAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,YAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,sBAAgB,KAAK,GAAI,MAAM,KAAK,eAAe,OAAO,MAAM,CAAE;AAAA,IACpE;AAGA,eAAW,UAAU,iBAAiB;AACpC,YAAM,KAAK,YAAY,OAAO,QAAQ,OAAO,KAAK;AAAA,IACpD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBACJ,QACA,OACuE;AACvE,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,MAAM,CAAC,KAAK,SAAU,QAAO;AAGvC,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,SAAS,MAAM,KAAK,iBAA8B,MAAM,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAElF,QAAI,CAAC,UAAU,OAAO,QAAQ,WAAW,GAAG;AAC1C,aAAO;AAAA,IACT;AAGA,UAAM,UAAU,CAAC,GAAG,OAAO,OAAO;AAClC,QAAI,aAAa;AAEjB,WAAO,IAAI,eAAsD;AAAA,MAC/D,MAAM,OAAO,eAAe;AAC1B,YAAI,cAAc,QAAQ,QAAQ;AAChC,qBAAW,MAAA;AACX;AAAA,QACF;AAEA,cAAM,QAAQ,QAAQ,UAAU;AAChC,YAAI,CAAC,OAAO;AACV,qBAAW,MAAA;AACX;AAAA,QACF;AAEA,YAAI;AAEF,gBAAM,YAAY,MAAM,KAAK,aAAa,OAAO,UAAU,KAAK;AAChE,cAAI,CAAC,WAAW;AACd,uBAAW,MAAA;AACX;AAAA,UACF;AAGA,gBAAM,QAAQ,KAAK;AAAA,YACjB;AAAA,YACA,MAAM;AAAA,YACN;AAAA,YACA,MAAM;AAAA,YACN,MAAM;AAAA,UAAA;AAER,qBAAW,QAAQ,KAAK;AAExB;AAAA,QACF,SAAS,OAAO;AAEd,cAAI,iBAAiB,gBAAgB,MAAM,SAAS,iBAAiB;AACnE,uBAAW,MAAA;AAAA,UACb,OAAO;AACL,uBAAW,MAAM,KAAK;AAAA,UACxB;AAAA,QACF;AAAA,MACF;AAAA,IAAA,CACD;AAAA,EACH;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,MAAM,CAAC,KAAK,UAAU;AAC9B,cAAQ,KAAK,2DAA2D;AACxE;AAAA,IACF;AAGA,QAAI;AACF,YAAM,KAAK,KAAK,GAAG,YAAY,CAAC,UAAU,MAAM,GAAG,WAAW;AAC9D,YAAM,KAAK,iBAAiB,GAAG,YAAY,QAAQ,EAAE,OAAO;AAC5D,YAAM,KAAK,iBAAiB,GAAG,YAAY,MAAM,EAAE,OAAO;AAAA,IAC5D,SAAS,OAAO;AACd,cAAQ,MAAM,wCAAwC,KAAK;AAC3D,YAAM;AAAA,IACR;AAGA,QAAI;AACF,YAAM,aAAa,MAAM,KAAK,SAAS;AAAA,QACrC,mBAAmB,KAAK,SAAS;AAAA,QACjC;AAAA,UACE,QAAQ;AAAA,QAAA;AAAA,MACV;AAEF,YAAM,KAAK,SAAS,YAAY,WAAW,MAAM,EAAE,WAAW,MAAM;AAAA,IACtE,SAAS,OAAO;AACd,UAAK,OAAe,SAAS,iBAAiB;AAC5C,gBAAQ,KAAK,mCAAmC,KAAK;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,cAA6B;AAEzC,SAAK,WAAW,MAAM,UAAU,QAAQ,aAAA;AAGxC,UAAM,UAAU,UAAU,KAAK,iBAAiB,CAAC;AAEjD,YAAQ,kBAAkB,CAAC,UAAU;AACnC,YAAM,KAAM,MAAM,OAA4B;AAG9C,UAAI,CAAC,GAAG,iBAAiB,SAAS,QAAQ,GAAG;AAC3C,cAAM,QAAQ,GAAG,kBAAkB,UAAU;AAAA,UAC3C,SAAS,CAAC,UAAU,OAAO;AAAA,QAAA,CAC5B;AACD,cAAM,YAAY,cAAc,YAAY;AAAA,MAC9C;AAGA,UAAI,CAAC,GAAG,iBAAiB,SAAS,MAAM,GAAG;AACzC,WAAG,kBAAkB,QAAQ,EAAE,SAAS,aAAa;AAAA,MACvD;AAAA,IACF;AAEA,SAAK,KAAK,MAAM,IAAI,QAAQ,CAAC,SAAS,WAAW;AAC/C,cAAQ,YAAY,MAAM,QAAQ,QAAQ,MAAM;AAChD,cAAQ,UAAU,MAAM,OAAO,QAAQ,KAAK;AAAA,IAC9C,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,aAAa,UAAkB,OAAgD;AAC3F,QAAI,CAAC,KAAK,SAAU,QAAO;AAE3B,UAAM,aAAa,MAAM,KAAK,SAAS,mBAAmB,mBAAmB,KAAK,SAAS,IAAI;AAAA,MAC7F,QAAQ;AAA
A,IAAA,CACT;AACD,UAAM,aAAa,MAAM,WAAW,cAAc,QAAQ;AAC1D,UAAM,OAAO,MAAM,WAAW,QAAA;AAC9B,UAAM,QAAQ,KAAK,MAAM,MAAM,YAAY,MAAM,aAAa,MAAM,UAAU;AAC9E,WAAO,MAAM,MAAM,YAAA;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,aACZ,UACA,QACA,iBACuB;AACvB,QAAI,CAAC,KAAK,SAAU,QAAO,CAAA;AAE3B,UAAM,aAAa,MAAM,KAAK,SAAS,mBAAmB,mBAAmB,KAAK,SAAS,IAAI;AAAA,MAC7F,QAAQ;AAAA,IAAA,CACT;AACD,UAAM,aAAa,MAAM,WAAW,cAAc,UAAU,EAAE,QAAQ,MAAM;AAG5E,QAAI,SAAS;AACb,QAAI,mBAAmB,gBAAgB,SAAS,GAAG;AACjD,YAAM,YAAY,gBAAgB,gBAAgB,SAAS,CAAC;AAC5D,UAAI,WAAW;AACb,iBAAS,UAAU,aAAa,UAAU;AAAA,MAC5C;AAAA,IACF;AAEA,UAAM,WAAW,MAAM,WAAW,eAAe,EAAE,kBAAkB,MAAM;AAE3E,UAAM,UAAwB,CAAA;AAE9B,eAAW,SAAS,QAAQ;AAC1B,YAAM,OAAO,MAAM,KAAK,mBAAmB,KAAK;AAChD,YAAM,SAAS,MAAM,EAAE,MAAM,SAAS,UAAU,QAAQ,MAAM;AAE9D,cAAQ,KAAK;AAAA,QACX,SAAS,MAAM;AAAA,QACf,YAAY,MAAM,YAAY;AAAA,QAC9B,YAAY;AAAA,QACZ,YAAY,KAAK;AAAA,QACjB,MAAM,MAAM;AAAA,MAAA,CACb;AAED,gBAAU,KAAK;AAAA,IACjB;AAEA,UAAM,SAAS,MAAA;AACf,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,mBACZ,OACsB;AACtB,UAAM,SAAS,IAAI,YAAY,MAAM,UAAU;AAC/C,UAAM,OAAO,MAAM;AACnB,WAAO;AAAA,EACT;AAAA,EAEQ,YACN,MACA,QACA,OACA,YAA6B,OAC7B,aAAqB,GACkB;AACvC,QAAI,UAAU,SAAS;AACrB,aAAO,IAAI,kBAAkB;AAAA,QAC3B,MAAM;AAAA,QACN,WAAW;AAAA,QACX,UAAU;AAAA,QACV;AAAA,MAAA,CACD;AAAA,IACH,OAAO;AACL,aAAO,IAAI,kBAAkB;AAAA,QAC3B,MAAM;AAAA,QACN,WAAW;AAAA,QACX,UAAU;AAAA,QACV;AAAA,MAAA,CACD;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAc,iBAAiB,QAAgB,OAA8B;AAC3E,QAAI,CAAC,KAAK,GAAI;AAEd,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,WAAW;AACpD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,SAAS,MAAM,KAAK,iBAA8B,MAAM,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAElF,QAAI,QAAQ;AACV,aAAO,aAAa,KAAK,IAAA;AACzB,YAAM,KAAK,iBAAiB,MAAM,IAAI,MAAM,CAAC;AAAA,IAC/C;AAAA,EACF;AAAA,EAEA,MAAc,YAAY,QAAgB,OAA8B;AACtE,QAAI,CAAC,KAAK,GAAI;AAGd,QAAI;AACJ;AACE,YAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,YAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,eAAS,MAAM,KAAK,iBAA8B,MAAM,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAAA,IAC9E;AAGA,QAAI,UAAU,KAAK,UAAU;AAC3B,UAAI;AACF,cAAM,aAAa,MAAM,KAAK,SAAS;AAAA,UACrC,mBAAmB,KAAK,SAAS;AAAA,UACjC,EAAE,QAAQ,MAAA;AAAA,QAAM;AAElB,cAAM,WAAW,YAAY,OAAO,QAAQ;AAAA,MAC9C,SAAS,OAAO;AACd,gBAAQ,KAAK,wCAAwC,OAAO,QAAQ,KAAK,KAAK;AAAA,MAChF;AAAA,IACF;AAGA;AACE,YAAM,KAAK,KAAK,GAAG,YAAY,UAAU,WAAW;AACpD,YAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,YAAM,KAAK,iBAAiB,MAAM,OAAO,CAAC,QAAQ,KAAK,CAAC,CAAC;AAAA,IAC3D;AAAA,EACF;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,WAAW,MAAM,UAAU,QAAQ,SAAA;AACzC,UAAM,QAAQ,SAAS,SAAS;AAEhC,QAAI,SAAS,KAAK,QAAS;AAE3B,YAAQ;AAAA,MACN,4DAA4D,KAAK,aAAa,KAAK,OAAO;AAAA,IAAA;AAG5F,QAAI,CAAC,KAAK,GAAI;AAGd,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,WAAW;AACpD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,QAAQ,MAAM,MAAM,YAAY;AAEtC,QAAI,eAAe;AACnB,UAAM,WAAW,QAAQ,KAAK;AAE9B,UAAM,SAAS,MAAM,WAAA;AACrB,UAAM,IAAI,QAAc,CAAC,YAAY;AACnC,aAAO,YAAY,OAAO,UAAU;AAClC,cAAMA,UAAU,MAAM,OAAsB;AAC5C,YAAI,CAACA,WAAU,gBAAgB,UAAU;AACvC,kBAAA;AACA;AAAA,QACF;AAEA,cAAM,SAAsBA,QAAO;AACnC,cAAM,KAAK,YAAY,OAAO,QAAQ,OAAO,KAAK;AAClD,wBAAgB,OAAO;AAEvBA,gBAAO,SAAA;AAAA,MACT;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,eAAe,OAAuB,QAAwC;AAC1F,UAAM,UAAyB,CAAA;AAC/B,UAAM,SAAS,MAAM,WAAA;AACrB,UAAM,IAAI,QAAc,CAAC,YAAY;AACnC,aAAO,YAAY,CAAC,UAAU;AAC5B,cAAMA,UAAU,MAAM,OAAsB;AAC5C,YAAI,CAACA,SAAQ;AACX,kBAAA;AACA;AAAA,QACF;AAEA,cAAM,SAAsBA,QAAO;AACnC,YAAI,OAAO,WAAW,QAAQ;AAC5B,kBAAQ,KAAK,MAAM;AAAA,QACrB;AAEAA,gBAAO,SAAA;AAAA,MACT;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAoB,SAAiC;AAC3D,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,cAAQ,YAAY,MAAM,QAAQ,QAAQ,MAAM;AAChD,cAAQ,UAAU,MAAM,OAAO,QAAQ,KAAK;AAAA,IAC9C,CAAC;AAAA,EACH;AAAA,EAEA,cAKE;AAGA,WAAO;AAAA,MACL,WAAW,KAAK,WAAW,OAAO;AAAA,MAClC,YAAY;AAAA;AAAA,MACZ,SAAS;AAAA;AAAA,MACT,SAAS;AAAA;AAAA,IAAA;AAAA,EAEb;A
AAA,EAEA,MAAM,kBAAkB,QAAkC;AACxD,QAAI,OAAO,cAAc,eAAe,CAAC,UAAU,SAAS,UAAU;AAEpE,YAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAEA,UAAM,WAAW,MAAM,UAAU,QAAQ,SAAA;AACzC,UAAM,gBAAgB,SAAS,SAAS,MAAM,SAAS,SAAS,OAAO,OAAO;AAC9E,WAAO,eAAe;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBAAgB,QAAgB,OAA+C;AACnF,UAAM,KAAK,KAAA;AAEX,QAAI,CAAC,KAAK,GAAI,QAAO;AAErB,UAAM,KAAK,KAAK,GAAG,YAAY,UAAU,UAAU;AACnD,UAAM,QAAQ,GAAG,YAAY,QAAQ;AACrC,UAAM,SAAS,MAAM,KAAK,iBAA8B,MAAM,IAAI,CAAC,QAAQ,KAAK,CAAC,CAAC;AAElF,WAAO,QAAQ,YAAY;AAAA,EAC7B;AACF;"}
|