@talmolab/sleap-io.js 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-CDU5QGU6.js → chunk-23DE7GPK.js} +3 -0
- package/dist/index.d.ts +172 -1
- package/dist/index.js +701 -1
- package/dist/lite.js +1 -1
- package/package.json +1 -1
|
@@ -364,6 +364,9 @@ function parseJsonAttr(attr) {
|
|
|
364
364
|
if (value && typeof value === "object" && "buffer" in value) {
|
|
365
365
|
return JSON.parse(textDecoder.decode(new Uint8Array(value.buffer)));
|
|
366
366
|
}
|
|
367
|
+
if (value && typeof value === "object") {
|
|
368
|
+
return value;
|
|
369
|
+
}
|
|
367
370
|
return JSON.parse(String(value));
|
|
368
371
|
}
|
|
369
372
|
function trimHdf5String(str) {
|
package/dist/index.d.ts
CHANGED
|
@@ -241,13 +241,147 @@ declare class Mp4BoxVideoBackend implements VideoBackend {
|
|
|
241
241
|
private addToCache;
|
|
242
242
|
}
|
|
243
243
|
|
|
244
|
+
/**
|
|
245
|
+
* Streaming HDF5 file access via Web Worker.
|
|
246
|
+
*
|
|
247
|
+
* This module provides a high-level API for accessing remote HDF5 files
|
|
248
|
+
* using HTTP range requests for efficient streaming. The actual HDF5
|
|
249
|
+
* operations run in a Web Worker where synchronous XHR is allowed.
|
|
250
|
+
*
|
|
251
|
+
* @module
|
|
252
|
+
*/
|
|
253
|
+
/**
|
|
254
|
+
* Options for opening a streaming HDF5 file.
|
|
255
|
+
*/
|
|
256
|
+
interface StreamingH5Options {
|
|
257
|
+
/** URL to h5wasm IIFE bundle. Defaults to CDN. */
|
|
258
|
+
h5wasmUrl?: string;
|
|
259
|
+
/** Filename hint for the HDF5 file. */
|
|
260
|
+
filenameHint?: string;
|
|
261
|
+
}
|
|
262
|
+
/**
|
|
263
|
+
* A streaming HDF5 file handle that uses a Web Worker for range request access.
|
|
264
|
+
*
|
|
265
|
+
* This class provides an API similar to h5wasm.File but operates via message
|
|
266
|
+
* passing to a worker where createLazyFile enables HTTP range requests.
|
|
267
|
+
*/
|
|
268
|
+
declare class StreamingH5File {
|
|
269
|
+
private worker;
|
|
270
|
+
private messageId;
|
|
271
|
+
private pendingMessages;
|
|
272
|
+
private _keys;
|
|
273
|
+
private _isOpen;
|
|
274
|
+
constructor();
|
|
275
|
+
private handleMessage;
|
|
276
|
+
private handleError;
|
|
277
|
+
private send;
|
|
278
|
+
/**
|
|
279
|
+
* Initialize the h5wasm module in the worker.
|
|
280
|
+
*/
|
|
281
|
+
init(options?: StreamingH5Options): Promise<void>;
|
|
282
|
+
/**
|
|
283
|
+
* Open a remote HDF5 file for streaming access.
|
|
284
|
+
*
|
|
285
|
+
* @param url - URL to the HDF5 file (must support HTTP range requests)
|
|
286
|
+
* @param options - Optional settings
|
|
287
|
+
*/
|
|
288
|
+
open(url: string, options?: StreamingH5Options): Promise<void>;
|
|
289
|
+
/**
|
|
290
|
+
* Whether a file is currently open.
|
|
291
|
+
*/
|
|
292
|
+
get isOpen(): boolean;
|
|
293
|
+
/**
|
|
294
|
+
* Get the root-level keys in the file.
|
|
295
|
+
*/
|
|
296
|
+
keys(): string[];
|
|
297
|
+
/**
|
|
298
|
+
* Get the keys (children) at a given path.
|
|
299
|
+
*/
|
|
300
|
+
getKeys(path: string): Promise<string[]>;
|
|
301
|
+
/**
|
|
302
|
+
* Get an attribute value.
|
|
303
|
+
*/
|
|
304
|
+
getAttr(path: string, name: string): Promise<unknown>;
|
|
305
|
+
/**
|
|
306
|
+
* Get all attributes at a path.
|
|
307
|
+
*/
|
|
308
|
+
getAttrs(path: string): Promise<Record<string, unknown>>;
|
|
309
|
+
/**
|
|
310
|
+
* Get dataset metadata (shape, dtype) without reading values.
|
|
311
|
+
*/
|
|
312
|
+
getDatasetMeta(path: string): Promise<{
|
|
313
|
+
shape: number[];
|
|
314
|
+
dtype: string;
|
|
315
|
+
}>;
|
|
316
|
+
/**
|
|
317
|
+
* Read a dataset's value.
|
|
318
|
+
*
|
|
319
|
+
* @param path - Path to the dataset
|
|
320
|
+
* @param slice - Optional slice specification (array of [start, end] pairs)
|
|
321
|
+
*/
|
|
322
|
+
getDatasetValue(path: string, slice?: Array<[number, number] | []>): Promise<{
|
|
323
|
+
value: unknown;
|
|
324
|
+
shape: number[];
|
|
325
|
+
dtype: string;
|
|
326
|
+
}>;
|
|
327
|
+
/**
|
|
328
|
+
* Close the file and terminate the worker.
|
|
329
|
+
*/
|
|
330
|
+
close(): Promise<void>;
|
|
331
|
+
}
|
|
332
|
+
/**
|
|
333
|
+
* Check if streaming via Web Worker is supported in the current environment.
|
|
334
|
+
*/
|
|
335
|
+
declare function isStreamingSupported(): boolean;
|
|
336
|
+
/**
|
|
337
|
+
* Open a remote HDF5 file with streaming support.
|
|
338
|
+
*
|
|
339
|
+
* @param url - URL to the HDF5 file
|
|
340
|
+
* @param options - Optional settings
|
|
341
|
+
* @returns A StreamingH5File instance
|
|
342
|
+
*/
|
|
343
|
+
declare function openStreamingH5(url: string, options?: StreamingH5Options): Promise<StreamingH5File>;
|
|
344
|
+
|
|
244
345
|
type SlpSource = string | ArrayBuffer | Uint8Array | File | FileSystemFileHandle;
|
|
245
346
|
type StreamMode = "auto" | "range" | "download";
|
|
246
347
|
type OpenH5Options = {
|
|
348
|
+
/**
|
|
349
|
+
* Streaming mode for remote files:
|
|
350
|
+
* - "auto": Try range requests, fall back to download
|
|
351
|
+
* - "range": Use HTTP range requests (requires Worker support in browser)
|
|
352
|
+
* - "download": Always download the entire file
|
|
353
|
+
*/
|
|
247
354
|
stream?: StreamMode;
|
|
355
|
+
/** Filename hint for the HDF5 file */
|
|
248
356
|
filenameHint?: string;
|
|
249
357
|
};
|
|
250
358
|
|
|
359
|
+
/**
|
|
360
|
+
* Load an SLP file.
|
|
361
|
+
*
|
|
362
|
+
* When loading from a URL in a browser with `h5.stream` set to 'range' or 'auto',
|
|
363
|
+
* this function automatically uses HTTP range requests for efficient streaming.
|
|
364
|
+
* Only the annotation data needed is downloaded, not the entire file.
|
|
365
|
+
*
|
|
366
|
+
* @param source - Path, URL, ArrayBuffer, File, or FileSystemFileHandle
|
|
367
|
+
* @param options - Loading options
|
|
368
|
+
* @param options.openVideos - Whether to open video backends (default: true, but false for streaming)
|
|
369
|
+
* @param options.h5 - HDF5 options including streaming mode
|
|
370
|
+
* @param options.h5.stream - 'auto' | 'range' | 'download' (default: 'auto')
|
|
371
|
+
*
|
|
372
|
+
* @example
|
|
373
|
+
* ```typescript
|
|
374
|
+
* // Load from URL with streaming (uses range requests automatically)
|
|
375
|
+
* const labels = await loadSlp('https://example.com/labels.slp', {
|
|
376
|
+
* h5: { stream: 'range' }
|
|
377
|
+
* });
|
|
378
|
+
*
|
|
379
|
+
* // Force full download
|
|
380
|
+
* const labels = await loadSlp('https://example.com/labels.slp', {
|
|
381
|
+
* h5: { stream: 'download' }
|
|
382
|
+
* });
|
|
383
|
+
* ```
|
|
384
|
+
*/
|
|
251
385
|
declare function loadSlp(source: SlpSource, options?: {
|
|
252
386
|
openVideos?: boolean;
|
|
253
387
|
h5?: OpenH5Options;
|
|
@@ -550,4 +684,41 @@ declare function checkFfmpeg(): Promise<boolean>;
|
|
|
550
684
|
*/
|
|
551
685
|
declare function renderVideo(source: Labels | LabeledFrame[], outputPath: string, options?: VideoOptions): Promise<void>;
|
|
552
686
|
|
|
553
|
-
|
|
687
|
+
/**
|
|
688
|
+
* Streaming SLP file reader using HTTP range requests.
|
|
689
|
+
*
|
|
690
|
+
* This module provides a streaming alternative to `readSlp` that uses
|
|
691
|
+
* `StreamingH5File` for efficient range request-based file access.
|
|
692
|
+
* Only the data actually needed is downloaded, rather than the entire file.
|
|
693
|
+
*
|
|
694
|
+
* @module
|
|
695
|
+
*/
|
|
696
|
+
|
|
697
|
+
/**
|
|
698
|
+
* Options for streaming SLP file loading.
|
|
699
|
+
*/
|
|
700
|
+
interface StreamingSlpOptions {
|
|
701
|
+
/** URL hint for h5wasm CDN */
|
|
702
|
+
h5wasmUrl?: string;
|
|
703
|
+
/** Filename hint for the HDF5 file */
|
|
704
|
+
filenameHint?: string;
|
|
705
|
+
}
|
|
706
|
+
/**
|
|
707
|
+
* Read an SLP file using HTTP range requests for efficient streaming.
|
|
708
|
+
*
|
|
709
|
+
* This function downloads only the data needed (metadata, frames, instances, points)
|
|
710
|
+
* rather than the entire file. Embedded videos are NOT loaded - only metadata.
|
|
711
|
+
*
|
|
712
|
+
* @param url - URL to the SLP file (must support HTTP range requests)
|
|
713
|
+
* @param options - Optional settings
|
|
714
|
+
* @returns Labels object with all annotation data
|
|
715
|
+
*
|
|
716
|
+
* @example
|
|
717
|
+
* ```typescript
|
|
718
|
+
* const labels = await readSlpStreaming('https://example.com/labels.slp');
|
|
719
|
+
* console.log(`Loaded ${labels.labeledFrames.length} frames`);
|
|
720
|
+
* ```
|
|
721
|
+
*/
|
|
722
|
+
declare function readSlpStreaming(url: string, options?: StreamingSlpOptions): Promise<Labels>;
|
|
723
|
+
|
|
724
|
+
export { Camera, CameraGroup, type ColorScheme, type ColorSpec, FrameGroup, Instance, InstanceContext, InstanceGroup, LabeledFrame, Labels, type LabelsDict, LabelsSet, MARKER_FUNCTIONS, type MarkerShape, Mp4BoxVideoBackend, NAMED_COLORS, PALETTES, type PaletteName, PredictedInstance, type RGB, type RGBA, RecordingSession, RenderContext, type RenderOptions, Skeleton, StreamingH5File, SuggestionFrame, Track, Video, type VideoBackend, type VideoFrame, type VideoOptions, checkFfmpeg, decodeYamlSkeleton, determineColorScheme, drawCircle, drawCross, drawDiamond, drawSquare, drawTriangle, encodeYamlSkeleton, fromDict, fromNumpy, getMarkerFunction, getPalette, isStreamingSupported, labelsFromNumpy, loadSlp, loadVideo, makeCameraFromDict, openStreamingH5, readSlpStreaming, renderImage, renderVideo, resolveColor, rgbToCSS, rodriguesTransformation, saveImage, saveSlp, toDataURL, toDict, toJPEG, toNumpy, toPNG };
|
package/dist/index.js
CHANGED
|
@@ -8,13 +8,16 @@ import {
|
|
|
8
8
|
Track,
|
|
9
9
|
parseJsonAttr,
|
|
10
10
|
parseSkeletons,
|
|
11
|
+
parseSuggestions,
|
|
12
|
+
parseTracks,
|
|
13
|
+
parseVideosMetadata,
|
|
11
14
|
pointsEmpty,
|
|
12
15
|
pointsFromArray,
|
|
13
16
|
pointsFromDict,
|
|
14
17
|
predictedPointsEmpty,
|
|
15
18
|
predictedPointsFromArray,
|
|
16
19
|
predictedPointsFromDict
|
|
17
|
-
} from "./chunk-
|
|
20
|
+
} from "./chunk-23DE7GPK.js";
|
|
18
21
|
|
|
19
22
|
// src/model/labeled-frame.ts
|
|
20
23
|
var LabeledFrame = class {
|
|
@@ -1203,6 +1206,383 @@ var Mp4BoxVideoBackend = class {
|
|
|
1203
1206
|
}
|
|
1204
1207
|
};
|
|
1205
1208
|
|
|
1209
|
+
// src/codecs/slp/h5-worker.ts
|
|
1210
|
+
var H5_WORKER_CODE = `
|
|
1211
|
+
// h5wasm streaming worker
|
|
1212
|
+
// Uses createLazyFile for HTTP range request streaming
|
|
1213
|
+
|
|
1214
|
+
let h5wasmModule = null;
|
|
1215
|
+
let FS = null;
|
|
1216
|
+
let currentFile = null;
|
|
1217
|
+
let mountPath = null;
|
|
1218
|
+
|
|
1219
|
+
self.onmessage = async function(e) {
|
|
1220
|
+
const { type, payload, id } = e.data;
|
|
1221
|
+
|
|
1222
|
+
try {
|
|
1223
|
+
switch (type) {
|
|
1224
|
+
case 'init':
|
|
1225
|
+
await initH5Wasm(payload?.h5wasmUrl);
|
|
1226
|
+
respond(id, { success: true });
|
|
1227
|
+
break;
|
|
1228
|
+
|
|
1229
|
+
case 'openUrl':
|
|
1230
|
+
const result = await openRemoteFile(payload.url, payload.filename);
|
|
1231
|
+
respond(id, result);
|
|
1232
|
+
break;
|
|
1233
|
+
|
|
1234
|
+
case 'getKeys':
|
|
1235
|
+
const keys = getKeys(payload.path);
|
|
1236
|
+
respond(id, { success: true, keys });
|
|
1237
|
+
break;
|
|
1238
|
+
|
|
1239
|
+
case 'getAttr':
|
|
1240
|
+
const attr = getAttr(payload.path, payload.name);
|
|
1241
|
+
respond(id, { success: true, value: attr });
|
|
1242
|
+
break;
|
|
1243
|
+
|
|
1244
|
+
case 'getAttrs':
|
|
1245
|
+
const attrs = getAttrs(payload.path);
|
|
1246
|
+
respond(id, { success: true, attrs });
|
|
1247
|
+
break;
|
|
1248
|
+
|
|
1249
|
+
case 'getDatasetMeta':
|
|
1250
|
+
const meta = getDatasetMeta(payload.path);
|
|
1251
|
+
respond(id, { success: true, meta });
|
|
1252
|
+
break;
|
|
1253
|
+
|
|
1254
|
+
case 'getDatasetValue':
|
|
1255
|
+
const data = getDatasetValue(payload.path, payload.slice);
|
|
1256
|
+
respond(id, { success: true, data }, data.transferables);
|
|
1257
|
+
break;
|
|
1258
|
+
|
|
1259
|
+
case 'close':
|
|
1260
|
+
closeFile();
|
|
1261
|
+
respond(id, { success: true });
|
|
1262
|
+
break;
|
|
1263
|
+
|
|
1264
|
+
default:
|
|
1265
|
+
respond(id, { success: false, error: 'Unknown message type: ' + type });
|
|
1266
|
+
}
|
|
1267
|
+
} catch (error) {
|
|
1268
|
+
respond(id, { success: false, error: error.message || String(error) });
|
|
1269
|
+
}
|
|
1270
|
+
};
|
|
1271
|
+
|
|
1272
|
+
function respond(id, data, transferables) {
|
|
1273
|
+
if (transferables) {
|
|
1274
|
+
self.postMessage({ id, ...data }, transferables);
|
|
1275
|
+
} else {
|
|
1276
|
+
self.postMessage({ id, ...data });
|
|
1277
|
+
}
|
|
1278
|
+
}
|
|
1279
|
+
|
|
1280
|
+
async function initH5Wasm(h5wasmUrl) {
|
|
1281
|
+
if (h5wasmModule) return;
|
|
1282
|
+
|
|
1283
|
+
// Default to CDN if no URL provided
|
|
1284
|
+
const url = h5wasmUrl || 'https://cdn.jsdelivr.net/npm/h5wasm@0.8.8/dist/iife/h5wasm.js';
|
|
1285
|
+
|
|
1286
|
+
// Import h5wasm IIFE
|
|
1287
|
+
importScripts(url);
|
|
1288
|
+
|
|
1289
|
+
// Wait for module to be ready
|
|
1290
|
+
const Module = await h5wasm.ready;
|
|
1291
|
+
h5wasmModule = h5wasm;
|
|
1292
|
+
FS = Module.FS;
|
|
1293
|
+
}
|
|
1294
|
+
|
|
1295
|
+
async function openRemoteFile(url, filename = 'data.h5') {
|
|
1296
|
+
if (!h5wasmModule) {
|
|
1297
|
+
throw new Error('h5wasm not initialized');
|
|
1298
|
+
}
|
|
1299
|
+
|
|
1300
|
+
// Close any existing file
|
|
1301
|
+
closeFile();
|
|
1302
|
+
|
|
1303
|
+
// Create mount point
|
|
1304
|
+
mountPath = '/remote-' + Date.now();
|
|
1305
|
+
FS.mkdir(mountPath);
|
|
1306
|
+
|
|
1307
|
+
// Create lazy file - this enables range request streaming!
|
|
1308
|
+
FS.createLazyFile(mountPath, filename, url, true, false);
|
|
1309
|
+
|
|
1310
|
+
// Open with h5wasm
|
|
1311
|
+
const filePath = mountPath + '/' + filename;
|
|
1312
|
+
currentFile = new h5wasm.File(filePath, 'r');
|
|
1313
|
+
|
|
1314
|
+
return {
|
|
1315
|
+
success: true,
|
|
1316
|
+
path: currentFile.path,
|
|
1317
|
+
filename: currentFile.filename,
|
|
1318
|
+
keys: currentFile.keys()
|
|
1319
|
+
};
|
|
1320
|
+
}
|
|
1321
|
+
|
|
1322
|
+
function getKeys(path) {
|
|
1323
|
+
if (!currentFile) throw new Error('No file open');
|
|
1324
|
+
const item = path === '/' || !path ? currentFile : currentFile.get(path);
|
|
1325
|
+
if (!item) throw new Error('Path not found: ' + path);
|
|
1326
|
+
return item.keys ? item.keys() : [];
|
|
1327
|
+
}
|
|
1328
|
+
|
|
1329
|
+
function getAttr(path, name) {
|
|
1330
|
+
if (!currentFile) throw new Error('No file open');
|
|
1331
|
+
const item = path === '/' || !path ? currentFile : currentFile.get(path);
|
|
1332
|
+
if (!item) throw new Error('Path not found: ' + path);
|
|
1333
|
+
const attrs = item.attrs;
|
|
1334
|
+
return attrs?.[name] || null;
|
|
1335
|
+
}
|
|
1336
|
+
|
|
1337
|
+
function getAttrs(path) {
|
|
1338
|
+
if (!currentFile) throw new Error('No file open');
|
|
1339
|
+
const item = path === '/' || !path ? currentFile : currentFile.get(path);
|
|
1340
|
+
if (!item) throw new Error('Path not found: ' + path);
|
|
1341
|
+
return item.attrs || {};
|
|
1342
|
+
}
|
|
1343
|
+
|
|
1344
|
+
function getDatasetMeta(path) {
|
|
1345
|
+
if (!currentFile) throw new Error('No file open');
|
|
1346
|
+
const dataset = currentFile.get(path);
|
|
1347
|
+
if (!dataset) throw new Error('Dataset not found: ' + path);
|
|
1348
|
+
return {
|
|
1349
|
+
shape: dataset.shape,
|
|
1350
|
+
dtype: dataset.dtype,
|
|
1351
|
+
metadata: dataset.metadata
|
|
1352
|
+
};
|
|
1353
|
+
}
|
|
1354
|
+
|
|
1355
|
+
function getDatasetValue(path, slice) {
|
|
1356
|
+
if (!currentFile) throw new Error('No file open');
|
|
1357
|
+
const dataset = currentFile.get(path);
|
|
1358
|
+
if (!dataset) throw new Error('Dataset not found: ' + path);
|
|
1359
|
+
|
|
1360
|
+
// Get value or slice
|
|
1361
|
+
let value;
|
|
1362
|
+
if (slice && Array.isArray(slice)) {
|
|
1363
|
+
value = dataset.slice(slice);
|
|
1364
|
+
} else {
|
|
1365
|
+
value = dataset.value;
|
|
1366
|
+
}
|
|
1367
|
+
|
|
1368
|
+
// Prepare for transfer
|
|
1369
|
+
const transferables = [];
|
|
1370
|
+
let transferValue = value;
|
|
1371
|
+
|
|
1372
|
+
if (ArrayBuffer.isView(value)) {
|
|
1373
|
+
// TypedArray - transfer the underlying buffer
|
|
1374
|
+
transferValue = {
|
|
1375
|
+
type: 'typedarray',
|
|
1376
|
+
dtype: value.constructor.name,
|
|
1377
|
+
buffer: value.buffer,
|
|
1378
|
+
byteOffset: value.byteOffset,
|
|
1379
|
+
length: value.length
|
|
1380
|
+
};
|
|
1381
|
+
transferables.push(value.buffer);
|
|
1382
|
+
} else if (value instanceof ArrayBuffer) {
|
|
1383
|
+
transferValue = { type: 'arraybuffer', buffer: value };
|
|
1384
|
+
transferables.push(value);
|
|
1385
|
+
}
|
|
1386
|
+
|
|
1387
|
+
return {
|
|
1388
|
+
value: transferValue,
|
|
1389
|
+
shape: dataset.shape,
|
|
1390
|
+
dtype: dataset.dtype,
|
|
1391
|
+
transferables
|
|
1392
|
+
};
|
|
1393
|
+
}
|
|
1394
|
+
|
|
1395
|
+
function closeFile() {
|
|
1396
|
+
if (currentFile) {
|
|
1397
|
+
try { currentFile.close(); } catch (e) {}
|
|
1398
|
+
currentFile = null;
|
|
1399
|
+
}
|
|
1400
|
+
if (mountPath && FS) {
|
|
1401
|
+
try { FS.rmdir(mountPath); } catch (e) {}
|
|
1402
|
+
mountPath = null;
|
|
1403
|
+
}
|
|
1404
|
+
}
|
|
1405
|
+
`;
|
|
1406
|
+
function createH5Worker() {
|
|
1407
|
+
const blob = new Blob([H5_WORKER_CODE], { type: "application/javascript" });
|
|
1408
|
+
const url = URL.createObjectURL(blob);
|
|
1409
|
+
const worker = new Worker(url);
|
|
1410
|
+
worker.addEventListener(
|
|
1411
|
+
"error",
|
|
1412
|
+
() => {
|
|
1413
|
+
URL.revokeObjectURL(url);
|
|
1414
|
+
},
|
|
1415
|
+
{ once: true }
|
|
1416
|
+
);
|
|
1417
|
+
return worker;
|
|
1418
|
+
}
|
|
1419
|
+
|
|
1420
|
+
// src/codecs/slp/h5-streaming.ts
|
|
1421
|
+
function reconstructValue(data) {
|
|
1422
|
+
if (data && typeof data === "object" && "type" in data) {
|
|
1423
|
+
const typed = data;
|
|
1424
|
+
if (typed.type === "typedarray" && typed.buffer) {
|
|
1425
|
+
const TypedArrayConstructor = getTypedArrayConstructor(typed.dtype || "Uint8Array");
|
|
1426
|
+
return new TypedArrayConstructor(typed.buffer, typed.byteOffset || 0, typed.length);
|
|
1427
|
+
}
|
|
1428
|
+
if (typed.type === "arraybuffer" && typed.buffer) {
|
|
1429
|
+
return typed.buffer;
|
|
1430
|
+
}
|
|
1431
|
+
}
|
|
1432
|
+
return data;
|
|
1433
|
+
}
|
|
1434
|
+
function getTypedArrayConstructor(name) {
|
|
1435
|
+
const constructors = {
|
|
1436
|
+
Int8Array,
|
|
1437
|
+
Uint8Array,
|
|
1438
|
+
Uint8ClampedArray,
|
|
1439
|
+
Int16Array,
|
|
1440
|
+
Uint16Array,
|
|
1441
|
+
Int32Array,
|
|
1442
|
+
Uint32Array,
|
|
1443
|
+
Float32Array,
|
|
1444
|
+
Float64Array,
|
|
1445
|
+
BigInt64Array,
|
|
1446
|
+
BigUint64Array
|
|
1447
|
+
};
|
|
1448
|
+
return constructors[name] || Uint8Array;
|
|
1449
|
+
}
|
|
1450
|
+
var StreamingH5File = class {
|
|
1451
|
+
worker;
|
|
1452
|
+
messageId = 0;
|
|
1453
|
+
pendingMessages = /* @__PURE__ */ new Map();
|
|
1454
|
+
_keys = [];
|
|
1455
|
+
_isOpen = false;
|
|
1456
|
+
constructor() {
|
|
1457
|
+
this.worker = createH5Worker();
|
|
1458
|
+
this.worker.onmessage = this.handleMessage.bind(this);
|
|
1459
|
+
this.worker.onerror = this.handleError.bind(this);
|
|
1460
|
+
}
|
|
1461
|
+
handleMessage(e) {
|
|
1462
|
+
const { id, ...data } = e.data;
|
|
1463
|
+
const pending = this.pendingMessages.get(id);
|
|
1464
|
+
if (pending) {
|
|
1465
|
+
this.pendingMessages.delete(id);
|
|
1466
|
+
if (data.success) {
|
|
1467
|
+
pending.resolve(e.data);
|
|
1468
|
+
} else {
|
|
1469
|
+
pending.reject(new Error(data.error || "Worker operation failed"));
|
|
1470
|
+
}
|
|
1471
|
+
}
|
|
1472
|
+
}
|
|
1473
|
+
handleError(e) {
|
|
1474
|
+
console.error("[StreamingH5File] Worker error:", e.message);
|
|
1475
|
+
for (const [id, pending] of this.pendingMessages) {
|
|
1476
|
+
pending.reject(new Error(`Worker error: ${e.message}`));
|
|
1477
|
+
this.pendingMessages.delete(id);
|
|
1478
|
+
}
|
|
1479
|
+
}
|
|
1480
|
+
send(type, payload) {
|
|
1481
|
+
return new Promise((resolve, reject) => {
|
|
1482
|
+
const id = ++this.messageId;
|
|
1483
|
+
this.pendingMessages.set(id, { resolve, reject });
|
|
1484
|
+
this.worker.postMessage({ type, payload, id });
|
|
1485
|
+
});
|
|
1486
|
+
}
|
|
1487
|
+
/**
|
|
1488
|
+
* Initialize the h5wasm module in the worker.
|
|
1489
|
+
*/
|
|
1490
|
+
async init(options) {
|
|
1491
|
+
await this.send("init", { h5wasmUrl: options?.h5wasmUrl });
|
|
1492
|
+
}
|
|
1493
|
+
/**
|
|
1494
|
+
* Open a remote HDF5 file for streaming access.
|
|
1495
|
+
*
|
|
1496
|
+
* @param url - URL to the HDF5 file (must support HTTP range requests)
|
|
1497
|
+
* @param options - Optional settings
|
|
1498
|
+
*/
|
|
1499
|
+
async open(url, options) {
|
|
1500
|
+
await this.init(options);
|
|
1501
|
+
const filename = options?.filenameHint || url.split("/").pop()?.split("?")[0] || "data.h5";
|
|
1502
|
+
const result = await this.send("openUrl", { url, filename });
|
|
1503
|
+
this._keys = result.keys || [];
|
|
1504
|
+
this._isOpen = true;
|
|
1505
|
+
}
|
|
1506
|
+
/**
|
|
1507
|
+
* Whether a file is currently open.
|
|
1508
|
+
*/
|
|
1509
|
+
get isOpen() {
|
|
1510
|
+
return this._isOpen;
|
|
1511
|
+
}
|
|
1512
|
+
/**
|
|
1513
|
+
* Get the root-level keys in the file.
|
|
1514
|
+
*/
|
|
1515
|
+
keys() {
|
|
1516
|
+
return this._keys;
|
|
1517
|
+
}
|
|
1518
|
+
/**
|
|
1519
|
+
* Get the keys (children) at a given path.
|
|
1520
|
+
*/
|
|
1521
|
+
async getKeys(path) {
|
|
1522
|
+
const result = await this.send("getKeys", { path });
|
|
1523
|
+
return result.keys || [];
|
|
1524
|
+
}
|
|
1525
|
+
/**
|
|
1526
|
+
* Get an attribute value.
|
|
1527
|
+
*/
|
|
1528
|
+
async getAttr(path, name) {
|
|
1529
|
+
const result = await this.send("getAttr", { path, name });
|
|
1530
|
+
return result.value?.value ?? result.value;
|
|
1531
|
+
}
|
|
1532
|
+
/**
|
|
1533
|
+
* Get all attributes at a path.
|
|
1534
|
+
*/
|
|
1535
|
+
async getAttrs(path) {
|
|
1536
|
+
const result = await this.send("getAttrs", { path });
|
|
1537
|
+
return result.attrs || {};
|
|
1538
|
+
}
|
|
1539
|
+
/**
|
|
1540
|
+
* Get dataset metadata (shape, dtype) without reading values.
|
|
1541
|
+
*/
|
|
1542
|
+
async getDatasetMeta(path) {
|
|
1543
|
+
const result = await this.send("getDatasetMeta", { path });
|
|
1544
|
+
const meta = result.meta;
|
|
1545
|
+
return meta;
|
|
1546
|
+
}
|
|
1547
|
+
/**
|
|
1548
|
+
* Read a dataset's value.
|
|
1549
|
+
*
|
|
1550
|
+
* @param path - Path to the dataset
|
|
1551
|
+
* @param slice - Optional slice specification (array of [start, end] pairs)
|
|
1552
|
+
*/
|
|
1553
|
+
async getDatasetValue(path, slice) {
|
|
1554
|
+
const result = await this.send("getDatasetValue", { path, slice });
|
|
1555
|
+
const data = result.data;
|
|
1556
|
+
return {
|
|
1557
|
+
value: reconstructValue(data.value),
|
|
1558
|
+
shape: data.shape,
|
|
1559
|
+
dtype: data.dtype
|
|
1560
|
+
};
|
|
1561
|
+
}
|
|
1562
|
+
/**
|
|
1563
|
+
* Close the file and terminate the worker.
|
|
1564
|
+
*/
|
|
1565
|
+
async close() {
|
|
1566
|
+
if (this._isOpen) {
|
|
1567
|
+
await this.send("close");
|
|
1568
|
+
this._isOpen = false;
|
|
1569
|
+
}
|
|
1570
|
+
this.worker.terminate();
|
|
1571
|
+
this._keys = [];
|
|
1572
|
+
}
|
|
1573
|
+
};
|
|
1574
|
+
function isStreamingSupported() {
|
|
1575
|
+
return typeof Worker !== "undefined" && typeof Blob !== "undefined" && typeof URL !== "undefined";
|
|
1576
|
+
}
|
|
1577
|
+
async function openStreamingH5(url, options) {
|
|
1578
|
+
if (!isStreamingSupported()) {
|
|
1579
|
+
throw new Error("Streaming HDF5 requires Web Worker support");
|
|
1580
|
+
}
|
|
1581
|
+
const file = new StreamingH5File();
|
|
1582
|
+
await file.open(url, options);
|
|
1583
|
+
return file;
|
|
1584
|
+
}
|
|
1585
|
+
|
|
1206
1586
|
// src/codecs/slp/h5.ts
|
|
1207
1587
|
var isNode = typeof process !== "undefined" && !!process.versions?.node;
|
|
1208
1588
|
var modulePromise = null;
|
|
@@ -1846,6 +2226,302 @@ function slicePoints(data, start, end, predicted = false) {
|
|
|
1846
2226
|
return points;
|
|
1847
2227
|
}
|
|
1848
2228
|
|
|
2229
|
+
// src/codecs/slp/read-streaming.ts
|
|
2230
|
+
async function readSlpStreaming(url, options) {
|
|
2231
|
+
if (!isStreamingSupported()) {
|
|
2232
|
+
throw new Error("Streaming HDF5 requires Web Worker support (browser environment)");
|
|
2233
|
+
}
|
|
2234
|
+
const file = await openStreamingH5(url, {
|
|
2235
|
+
h5wasmUrl: options?.h5wasmUrl,
|
|
2236
|
+
filenameHint: options?.filenameHint
|
|
2237
|
+
});
|
|
2238
|
+
try {
|
|
2239
|
+
return await readFromStreamingFile(file, url, options?.filenameHint);
|
|
2240
|
+
} finally {
|
|
2241
|
+
await file.close();
|
|
2242
|
+
}
|
|
2243
|
+
}
|
|
2244
|
+
async function readFromStreamingFile(file, url, filenameHint) {
|
|
2245
|
+
const metadataAttrs = await file.getAttrs("metadata");
|
|
2246
|
+
const formatId = Number(
|
|
2247
|
+
metadataAttrs["format_id"]?.value ?? metadataAttrs["format_id"] ?? 1
|
|
2248
|
+
);
|
|
2249
|
+
const metadataJson = parseJsonAttr(metadataAttrs["json"]);
|
|
2250
|
+
const labelsPath = filenameHint ?? url.split("/").pop()?.split("?")[0] ?? "slp-data.slp";
|
|
2251
|
+
const skeletons = parseSkeletons(metadataJson);
|
|
2252
|
+
const tracks = await readTracksStreaming(file);
|
|
2253
|
+
const videos = await readVideosStreaming(file, labelsPath);
|
|
2254
|
+
const suggestions = await readSuggestionsStreaming(file, videos);
|
|
2255
|
+
const framesData = await readStructDatasetStreaming(file, "frames");
|
|
2256
|
+
const instancesData = await readStructDatasetStreaming(file, "instances");
|
|
2257
|
+
const pointsData = await readStructDatasetStreaming(file, "points");
|
|
2258
|
+
const predPointsData = await readStructDatasetStreaming(file, "pred_points");
|
|
2259
|
+
const labeledFrames = buildLabeledFrames2({
|
|
2260
|
+
framesData,
|
|
2261
|
+
instancesData,
|
|
2262
|
+
pointsData,
|
|
2263
|
+
predPointsData,
|
|
2264
|
+
skeletons,
|
|
2265
|
+
tracks,
|
|
2266
|
+
videos,
|
|
2267
|
+
formatId
|
|
2268
|
+
});
|
|
2269
|
+
return new Labels({
|
|
2270
|
+
labeledFrames,
|
|
2271
|
+
videos,
|
|
2272
|
+
skeletons,
|
|
2273
|
+
tracks,
|
|
2274
|
+
suggestions,
|
|
2275
|
+
sessions: [],
|
|
2276
|
+
// Sessions require complex parsing, skip for now
|
|
2277
|
+
provenance: metadataJson?.provenance ?? {}
|
|
2278
|
+
});
|
|
2279
|
+
}
|
|
2280
|
+
async function readTracksStreaming(file) {
|
|
2281
|
+
try {
|
|
2282
|
+
const keys = file.keys();
|
|
2283
|
+
if (!keys.includes("tracks_json")) return [];
|
|
2284
|
+
const data = await file.getDatasetValue("tracks_json");
|
|
2285
|
+
const values = normalizeDatasetArray(data.value);
|
|
2286
|
+
return parseTracks(values);
|
|
2287
|
+
} catch {
|
|
2288
|
+
return [];
|
|
2289
|
+
}
|
|
2290
|
+
}
|
|
2291
|
+
async function readVideosStreaming(file, labelsPath) {
|
|
2292
|
+
try {
|
|
2293
|
+
const keys = file.keys();
|
|
2294
|
+
if (!keys.includes("videos_json")) return [];
|
|
2295
|
+
const data = await file.getDatasetValue("videos_json");
|
|
2296
|
+
const values = normalizeDatasetArray(data.value);
|
|
2297
|
+
const metadataList = parseVideosMetadata(values, labelsPath);
|
|
2298
|
+
return metadataList.map((meta) => new Video({
|
|
2299
|
+
filename: meta.filename,
|
|
2300
|
+
backend: null,
|
|
2301
|
+
// No backend in streaming mode
|
|
2302
|
+
backendMetadata: {
|
|
2303
|
+
dataset: meta.dataset,
|
|
2304
|
+
format: meta.format,
|
|
2305
|
+
shape: meta.frameCount && meta.height && meta.width && meta.channels ? [meta.frameCount, meta.height, meta.width, meta.channels] : void 0,
|
|
2306
|
+
fps: meta.fps,
|
|
2307
|
+
channel_order: meta.channelOrder
|
|
2308
|
+
},
|
|
2309
|
+
sourceVideo: meta.sourceVideo ? new Video({ filename: meta.sourceVideo.filename }) : null,
|
|
2310
|
+
openBackend: false
|
|
2311
|
+
}));
|
|
2312
|
+
} catch {
|
|
2313
|
+
return [];
|
|
2314
|
+
}
|
|
2315
|
+
}
|
|
2316
|
+
async function readSuggestionsStreaming(file, videos) {
|
|
2317
|
+
try {
|
|
2318
|
+
const keys = file.keys();
|
|
2319
|
+
if (!keys.includes("suggestions_json")) return [];
|
|
2320
|
+
const data = await file.getDatasetValue("suggestions_json");
|
|
2321
|
+
const values = normalizeDatasetArray(data.value);
|
|
2322
|
+
const metadataList = parseSuggestions(values);
|
|
2323
|
+
return metadataList.map((meta) => {
|
|
2324
|
+
const video = videos[meta.video];
|
|
2325
|
+
if (!video) return null;
|
|
2326
|
+
return new SuggestionFrame({
|
|
2327
|
+
video,
|
|
2328
|
+
frameIdx: meta.frameIdx,
|
|
2329
|
+
metadata: meta.metadata
|
|
2330
|
+
});
|
|
2331
|
+
}).filter((s) => s !== null);
|
|
2332
|
+
} catch {
|
|
2333
|
+
return [];
|
|
2334
|
+
}
|
|
2335
|
+
}
|
|
2336
|
+
async function readStructDatasetStreaming(file, path) {
|
|
2337
|
+
try {
|
|
2338
|
+
const keys = file.keys();
|
|
2339
|
+
if (!keys.includes(path)) return {};
|
|
2340
|
+
const meta = await file.getDatasetMeta(path);
|
|
2341
|
+
const data = await file.getDatasetValue(path);
|
|
2342
|
+
const fieldNames = getFieldNamesFromMeta(meta);
|
|
2343
|
+
return normalizeStructData(data.value, data.shape, fieldNames);
|
|
2344
|
+
} catch {
|
|
2345
|
+
return {};
|
|
2346
|
+
}
|
|
2347
|
+
}
|
|
2348
|
+
function getFieldNamesFromMeta(meta) {
|
|
2349
|
+
const dtype = meta.dtype;
|
|
2350
|
+
if (typeof dtype === "string") {
|
|
2351
|
+
const namesMatch = dtype.match(/'names':\s*\[([^\]]+)\]/);
|
|
2352
|
+
if (namesMatch) {
|
|
2353
|
+
const namesStr = namesMatch[1];
|
|
2354
|
+
const names = namesStr.match(/'([^']+)'/g);
|
|
2355
|
+
if (names) {
|
|
2356
|
+
return names.map((n) => n.replace(/'/g, ""));
|
|
2357
|
+
}
|
|
2358
|
+
}
|
|
2359
|
+
}
|
|
2360
|
+
if (typeof dtype === "object" && dtype !== null) {
|
|
2361
|
+
const dtypeObj = dtype;
|
|
2362
|
+
if (dtypeObj.compound_type && typeof dtypeObj.compound_type === "object") {
|
|
2363
|
+
const compound = dtypeObj.compound_type;
|
|
2364
|
+
if (compound.members) {
|
|
2365
|
+
return compound.members.map((m) => m.name).filter((n) => !!n);
|
|
2366
|
+
}
|
|
2367
|
+
}
|
|
2368
|
+
}
|
|
2369
|
+
return [];
|
|
2370
|
+
}
|
|
2371
|
+
function normalizeStructData(value, shape, fieldNames) {
|
|
2372
|
+
if (!value) return {};
|
|
2373
|
+
if (value && typeof value === "object" && !Array.isArray(value) && !ArrayBuffer.isView(value)) {
|
|
2374
|
+
const obj = value;
|
|
2375
|
+
const firstKey = Object.keys(obj)[0];
|
|
2376
|
+
if (firstKey && Array.isArray(obj[firstKey])) {
|
|
2377
|
+
return obj;
|
|
2378
|
+
}
|
|
2379
|
+
}
|
|
2380
|
+
if (ArrayBuffer.isView(value) && shape.length === 2) {
|
|
2381
|
+
const [rowCount, colCount] = shape;
|
|
2382
|
+
const arr = value;
|
|
2383
|
+
if (fieldNames.length === colCount) {
|
|
2384
|
+
const result = {};
|
|
2385
|
+
for (let col = 0; col < colCount; col++) {
|
|
2386
|
+
const colData = [];
|
|
2387
|
+
for (let row = 0; row < rowCount; row++) {
|
|
2388
|
+
colData.push(arr[row * colCount + col]);
|
|
2389
|
+
}
|
|
2390
|
+
result[fieldNames[col]] = colData;
|
|
2391
|
+
}
|
|
2392
|
+
return result;
|
|
2393
|
+
}
|
|
2394
|
+
}
|
|
2395
|
+
if (Array.isArray(value) && value.length > 0 && Array.isArray(value[0])) {
|
|
2396
|
+
const rows = value;
|
|
2397
|
+
if (fieldNames.length) {
|
|
2398
|
+
const result = {};
|
|
2399
|
+
fieldNames.forEach((field, colIdx) => {
|
|
2400
|
+
result[field] = rows.map((row) => row[colIdx]);
|
|
2401
|
+
});
|
|
2402
|
+
return result;
|
|
2403
|
+
}
|
|
2404
|
+
}
|
|
2405
|
+
return {};
|
|
2406
|
+
}
|
|
2407
|
+
// Coerce an HDF5 dataset read result into a plain JS array.
// Plain arrays pass through untouched (same reference); typed-array views
// are copied element-by-element; anything else becomes an empty array.
function normalizeDatasetArray(value) {
  if (Array.isArray(value)) {
    return value;
  }
  return ArrayBuffer.isView(value) ? Array.from(value) : [];
}
|
|
2414
|
+
// Reconstruct LabeledFrame objects from the parallel-array ("struct of
// arrays") tables read out of an SLP/HDF5 file.
//
// options carries:
//   framesData     — per-frame columns: frame_id, video, frame_idx,
//                    instance_id_start/end (row ranges into instancesData)
//   instancesData  — per-instance columns: instance_type, skeleton, track,
//                    point_id_start/end (row ranges into a point table),
//                    score, tracking_score, from_predicted
//   pointsData     — point columns for user-labeled instances
//   predPointsData — point columns for predicted instances (with scores)
//   skeletons, tracks, videos — already-decoded objects indexed by id
//   formatId       — SLP format version of the file being read
function buildLabeledFrames2(options) {
  const frames = [];
  const { framesData, instancesData, pointsData, predPointsData, skeletons, tracks, videos, formatId } = options;
  const frameIds = framesData.frame_id ?? [];
  // Raw video ids in framesData may not match positions in `videos`;
  // buildVideoIdMap2 resolves raw id -> index into `videos`.
  const videoIdToIndex = buildVideoIdMap2(framesData, videos);
  // Instances keyed by their global row id so from_predicted links can be
  // resolved in a second pass, after every instance has been created.
  const instanceById = /* @__PURE__ */ new Map();
  const fromPredictedPairs = [];
  for (let frameIdx = 0; frameIdx < frameIds.length; frameIdx += 1) {
    const rawVideoId = Number(framesData.video?.[frameIdx] ?? 0);
    // Fall back to the raw id as a direct index when it has no mapping.
    const videoIndex = videoIdToIndex.get(rawVideoId) ?? rawVideoId;
    const frameIndex = Number(framesData.frame_idx?.[frameIdx] ?? 0);
    const instStart = Number(framesData.instance_id_start?.[frameIdx] ?? 0);
    const instEnd = Number(framesData.instance_id_end?.[frameIdx] ?? 0);
    const video = videos[videoIndex];
    // Frames referencing a video we could not resolve are silently dropped.
    if (!video) continue;
    const instances = [];
    for (let instIdx = instStart; instIdx < instEnd; instIdx += 1) {
      const instanceType = Number(instancesData.instance_type?.[instIdx] ?? 0);
      const skeletonId = Number(instancesData.skeleton?.[instIdx] ?? 0);
      const trackId = Number(instancesData.track?.[instIdx] ?? -1);
      const pointStart = Number(instancesData.point_id_start?.[instIdx] ?? 0);
      const pointEnd = Number(instancesData.point_id_end?.[instIdx] ?? 0);
      const score = Number(instancesData.score?.[instIdx] ?? 0);
      const trackingScore = Number(instancesData.tracking_score?.[instIdx] ?? 0);
      // -1 (or a missing column) means "no linked predicted instance".
      const fromPredicted = Number(instancesData.from_predicted?.[instIdx] ?? -1);
      const skeleton = skeletons[skeletonId] ?? skeletons[0] ?? new Skeleton({ nodes: [] });
      const track = trackId >= 0 ? tracks[trackId] : null;
      let instance;
      if (instanceType === 0) {
        // instance_type 0: user-labeled instance.
        const points = slicePoints2(pointsData, pointStart, pointEnd);
        instance = new Instance({ points: pointsFromArray(points, skeleton.nodeNames), skeleton, track, trackingScore });
        if (formatId < 1.1) {
          // Pre-1.1 files store coordinates offset by 0.5 px relative to the
          // current convention — presumably a pixel-center vs pixel-corner
          // change in the format; TODO confirm against the SLP format spec.
          instance.points.forEach((point) => {
            point.xy = [point.xy[0] - 0.5, point.xy[1] - 0.5];
          });
        }
        if (fromPredicted >= 0) {
          fromPredictedPairs.push([instIdx, fromPredicted]);
        }
      } else {
        // Any other instance_type: predicted instance (points carry scores).
        const points = slicePoints2(predPointsData, pointStart, pointEnd, true);
        instance = new PredictedInstance({ points: predictedPointsFromArray(points, skeleton.nodeNames), skeleton, track, score, trackingScore });
        if (formatId < 1.1) {
          // Same legacy 0.5 px coordinate adjustment as above.
          instance.points.forEach((point) => {
            point.xy = [point.xy[0] - 0.5, point.xy[1] - 0.5];
          });
        }
      }
      instanceById.set(instIdx, instance);
      instances.push(instance);
    }
    frames.push(new LabeledFrame({ video, frameIdx: frameIndex, instances }));
  }
  // Second pass: wire user instances to the predicted instances they were
  // derived from, now that every instance id is registered in the map.
  for (const [instanceId, fromPredictedId] of fromPredictedPairs) {
    const instance = instanceById.get(instanceId);
    const predicted = instanceById.get(fromPredictedId);
    if (instance && predicted instanceof PredictedInstance && instance instanceof Instance) {
      instance.fromPredicted = predicted;
    }
  }
  return frames;
}
|
|
2476
|
+
// Map the raw video ids referenced by the frames table onto indices into
// the `videos` array. When the ids already form the contiguous range
// 0..videos.length-1 an identity map is returned; otherwise ids are
// recovered from each video's HDF5 dataset path ("videoN/...").
// Returns an empty map when the frames table references no videos.
function buildVideoIdMap2(framesData, videos) {
  const seenIds = new Set();
  for (const raw of framesData.video ?? []) {
    seenIds.add(Number(raw));
  }
  if (seenIds.size === 0) {
    return new Map();
  }
  const highest = Math.max(...seenIds);
  const isContiguous = seenIds.size === videos.length && highest === videos.length - 1;
  if (isContiguous) {
    // Ids line up with array positions — identity mapping.
    const identity = new Map();
    videos.forEach((_video, index) => identity.set(index, index));
    return identity;
  }
  // Otherwise recover each video's id from its dataset path.
  const idToIndex = new Map();
  videos.forEach((video, index) => {
    const datasetPath = video.backendMetadata?.dataset ?? "";
    const videoId = parseVideoIdFromDataset2(datasetPath);
    if (videoId != null) {
      idToIndex.set(videoId, index);
    }
  });
  return idToIndex;
}
|
|
2501
|
+
// Extract the numeric video id from an HDF5 dataset path of the form
// "video<N>/...". Returns null for empty paths, paths whose top-level
// group lacks the "video" prefix, or non-numeric suffixes. Note a bare
// "video" group (no digits) yields 0, since Number("") is 0.
function parseVideoIdFromDataset2(dataset) {
  if (!dataset) {
    return null;
  }
  const [topGroup] = dataset.split("/");
  if (!topGroup.startsWith("video")) {
    return null;
  }
  const parsed = Number(topGroup.slice("video".length));
  return Number.isNaN(parsed) ? null : parsed;
}
|
|
2508
|
+
// Slice rows [start, end) out of a column-oriented points table into
// per-point tuples. Regular points are [x, y, visible, complete]; with
// `predicted` set, a score is inserted: [x, y, score, visible, complete].
// Missing columns are treated as empty (entries come out undefined).
function slicePoints2(data, start, end, predicted = false) {
  const xs = data.x ?? [];
  const ys = data.y ?? [];
  const vis = data.visible ?? [];
  const comp = data.complete ?? [];
  const scr = data.score ?? [];
  const rows = [];
  for (let i = start; i < end; i += 1) {
    rows.push(
      predicted
        ? [xs[i], ys[i], scr[i], vis[i], comp[i]]
        : [xs[i], ys[i], vis[i], comp[i]]
    );
  }
  return rows;
}
|
|
2524
|
+
|
|
1849
2525
|
// src/codecs/slp/write.ts
// True when running under Node.js (a `process` global exposing
// `versions.node`); presumably gates Node-specific I/O paths in the
// writer — usage not visible in this chunk.
var isNode2 = typeof process !== "undefined" && !!process.versions?.node;
// SLP format version identifier used by the writer (the read path in this
// file branches on `formatId < 1.1` for a legacy coordinate adjustment).
var FORMAT_ID = 1.4;
|
|
@@ -2148,7 +2824,27 @@ function createMatrixDataset(file, name, rows, fieldNames, dtype) {
|
|
|
2148
2824
|
}
|
|
2149
2825
|
|
|
2150
2826
|
// src/io/main.ts
|
|
2827
|
+
// True when `source` is a string beginning with an http or https scheme
// (case-insensitive). Non-strings are never treated as URLs.
function isProbablyUrl2(source) {
  if (typeof source !== "string") {
    return false;
  }
  return /^https?:\/\//i.test(source);
}
|
|
2830
|
+
// True in a browser-like environment: both a `window` global and the
// `Worker` constructor must exist (streaming reads require a Web Worker).
function isBrowser4() {
  const hasWindow = typeof window !== "undefined";
  const hasWorker = typeof Worker !== "undefined";
  return hasWindow && hasWorker;
}
|
|
2151
2833
|
// Load a .slp labels file.
//
// URL sources in a capable browser are first attempted via streaming HTTP
// range requests when the stream mode is "range" or "auto" (the default).
// In "auto" mode a streaming failure logs a warning and falls back to the
// regular full-download reader; in "range" mode the failure is rethrown.
// All other sources go straight to the regular reader.
async function loadSlp(source, options) {
  const streamMode = options?.h5?.stream ?? "auto";
  const wantsStreaming = streamMode === "range" || streamMode === "auto";
  const canStream = isProbablyUrl2(source) && isBrowser4() && isStreamingSupported();
  if (canStream && wantsStreaming) {
    try {
      const filenameHint = options?.h5?.filenameHint;
      return await readSlpStreaming(source, { filenameHint });
    } catch (e) {
      if (streamMode !== "auto") {
        throw e;
      }
      console.warn("Streaming failed, falling back to full download:", e);
    }
  }
  const openVideos = options?.openVideos ?? true;
  return readSlp(source, { openVideos, h5: options?.h5 });
}
|
|
2154
2850
|
async function saveSlp(labels, filename, options) {
|
|
@@ -3017,6 +3713,7 @@ export {
|
|
|
3017
3713
|
RecordingSession,
|
|
3018
3714
|
RenderContext,
|
|
3019
3715
|
Skeleton,
|
|
3716
|
+
StreamingH5File,
|
|
3020
3717
|
SuggestionFrame,
|
|
3021
3718
|
Symmetry,
|
|
3022
3719
|
Track,
|
|
@@ -3034,16 +3731,19 @@ export {
|
|
|
3034
3731
|
fromNumpy,
|
|
3035
3732
|
getMarkerFunction,
|
|
3036
3733
|
getPalette,
|
|
3734
|
+
isStreamingSupported,
|
|
3037
3735
|
labelsFromNumpy,
|
|
3038
3736
|
loadSlp,
|
|
3039
3737
|
loadVideo,
|
|
3040
3738
|
makeCameraFromDict,
|
|
3739
|
+
openStreamingH5,
|
|
3041
3740
|
pointsEmpty,
|
|
3042
3741
|
pointsFromArray,
|
|
3043
3742
|
pointsFromDict,
|
|
3044
3743
|
predictedPointsEmpty,
|
|
3045
3744
|
predictedPointsFromArray,
|
|
3046
3745
|
predictedPointsFromDict,
|
|
3746
|
+
readSlpStreaming,
|
|
3047
3747
|
renderImage,
|
|
3048
3748
|
renderVideo,
|
|
3049
3749
|
resolveColor,
|
package/dist/lite.js
CHANGED