@norskvideo/norsk-sdk 1.0.365 → 1.0.367

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -315,6 +315,7 @@ export declare class AudioBuildMultichannelNode extends AutoProcessorMediaNode<"
315
315
  export declare interface AudioBuildMultichannelSettings extends ProcessorNodeSettings<AudioBuildMultichannelNode> {
316
316
  /** The channel layout of the built outgoing stream */
317
317
  channelLayout: ChannelLayout;
318
+ /** The sample rate of the built outgoing stream */
318
319
  sampleRate: SampleRate;
319
320
  /**
320
321
  * Stream keys specifying the source for each channel, where the order is
@@ -422,6 +423,7 @@ export declare interface AudioMeasureLevelsSettings extends ProcessorNodeSetting
422
423
  * @eventProperty
423
424
  */
424
425
  onData: (levels: AudioMeasureLevels) => void;
426
+ /** Optionally control the sample frequency */
425
427
  intervalFrames?: number;
426
428
  }
427
429
 
@@ -637,7 +639,16 @@ export declare class AudioTranscribeAzureNode extends AutoProcessorMediaNode<"au
637
639
  * */
638
640
  export declare interface AudioTranscribeAzureSettings extends ProcessorNodeSettings<AudioTranscribeAzureNode> {
639
641
  outputStreamId: number;
642
+ /** The source language to recognise - an IETF BCP 47 language tag, eg en-US, en-GB, de-DE. Supported languages are
643
+ * found at https://learn.microsoft.com/en-us/azure/ai-services/speech-service/language-support?tabs=stt
644
+ */
640
645
  sourceLanguage: string;
646
+ /** The target output languages for translation - technically a BCP 47 language tag but in most cases omitting region, e.g. en, de, zh-Hant.
647
+
648
+ * Leave this field absent/empty to use the transcription service without
649
+ * translation, while if any target languages are present the translation service will be used even if this is the same as the
650
+ * source language.
651
+ */
641
652
  targetLanguages?: string[];
642
653
  /** Key for the Azure Speech Service endpoint */
643
654
  azureKey: string;
@@ -672,8 +683,6 @@ export declare interface AudioTranscribeWhisperSettings extends ProcessorNodeSet
672
683
  * Default 3000ms.
673
684
  */
674
685
  stepMs?: number;
675
- /** The full audio buffer size. Default 10000ms */
676
- lengthMs?: number;
677
686
  /** Duration of audio to keep when clearing the buffer to allow for partial-word recognition. Default 400ms */
678
687
  keepMs?: number;
679
688
  /** Max tokens per segment */
@@ -1415,7 +1424,11 @@ export declare type DeckLinkVideoIOSupport = "capture" | "playback";
1415
1424
  /** @public */
1416
1425
  export declare type DeferredVideoComposeSettings<Pins extends string> = (streams: StreamMetadata[]) => VideoComposeSettings<Pins> | undefined;
1417
1426
 
1427
+ /** Various options for de-interlacing video either in software or hardware (where available) */
1418
1428
  export declare type DeinterlaceSettings = {
1429
+ /** Use the CPU to perform deinterlace. Note: if the video has been decoded on hardware
1430
+ * it will need to be transferred to system memory and reuploaded again
1431
+ */
1419
1432
  type: 'software';
1420
1433
  algorithm: 'bwdif' | 'kerndeint' | 'nnedi' | 'w3fdif' | 'yadif';
1421
1434
  };
@@ -1443,6 +1456,10 @@ export declare interface DeltacastInputSettings extends InputSettings<DeltacastI
1443
1456
  deviceId: number;
1444
1457
  /** Which channel on the device to use */
1445
1458
  channelIndex: number;
1459
+ /** Should video be captured? Defaults to true, but for applications that only require the audio
1460
+ * there is a small efficiency gain by setting it to false
1461
+ */
1462
+ captureVideo?: boolean;
1446
1463
  /**
1447
1464
  * An SDI signal can contain up to 16 (3G) or 32 (6G / 12G) audio channels, which are
1448
1465
  * transmitted as groups of 4. For example, a single stereo signal would typically be
@@ -1567,8 +1584,14 @@ export declare class FileMp4InputNode extends SourceMediaNode {
1567
1584
  * */
1568
1585
  nudge(nudge: number): void;
1569
1586
  updateSettings(settings: FileMp4InputSettingsUpdate): void;
1587
+ /** Pause the MP4 playback. This may not take immediate effect; frames already in flight within the Mp4Input workflow will still be emitted */
1570
1588
  pause(): void;
1589
+ /** Start/resume the MP4 playback */
1571
1590
  play(): void;
1591
+ /** Start MP4 playback, pausing once the given offset is reached */
1592
+ playUntil(offsetMs: number): void;
1593
+ /** Seek to a given point, without starting playback. When the stream is played/resumed, it will start from (about) this offset */
1594
+ seek(offsetMs: number): void;
1572
1595
  }
1573
1596
 
1574
1597
  /**
@@ -1583,6 +1606,10 @@ export declare interface FileMp4InputSettings extends SourceNodeSettings<FileMp4
1583
1606
  fileName: string;
1584
1607
  /** Callback to be notified when the file ends */
1585
1608
  onEof?: () => void;
1609
+ /** Callback to be notified when the file loops on hitting the end */
1610
+ onLoop?: () => void;
1611
+ /** Callback to be notified when the file reaches the specified stop position */
1612
+ onStopPosition?: () => void;
1586
1613
  /** Callback to be notified when the file is initially read */
1587
1614
  onInfo?: (info: FileMp4Info) => void;
1588
1615
  /** Whether to loop back to the start of the file after reaching the end (default false) */
@@ -1812,6 +1839,10 @@ export declare interface HlsPushDestinationSettings {
1812
1839
  this should take into account the end to end latency from source capture, to the part and playlist being published
1813
1840
  */
1814
1841
  partHoldBackSeconds?: number;
1842
+ /** Whether the server supports gzip *request* compression for PUT/POST requests */
1843
+ supportsGzip?: boolean;
1844
+ /** Whether to use TLS or plain TCP transport; by default TLS is used if the port is 443 */
1845
+ tlsTransport?: boolean;
1815
1846
  }
1816
1847
 
1817
1848
  /** @public */
@@ -2038,12 +2069,6 @@ export declare interface Interval {
2038
2069
  d: bigint;
2039
2070
  }
2040
2071
 
2041
- /** @public */
2042
- export declare interface IntervalTimestamp {
2043
- n: bigint;
2044
- d: bigint;
2045
- }
2046
-
2047
2072
  /** @public */
2048
2073
  export declare function isAdMarker(seg: MediaPlaylistPart): seg is AdMarker;
2049
2074
 
@@ -2303,9 +2328,10 @@ export declare interface MediaSegment {
2303
2328
  export declare class MediaStoreActiveCut {
2304
2329
  /**
2305
2330
  * @public
2306
- * Promise that resolves when the asset import is complete
2331
+ * Promise that resolves when the cut is complete
2307
2332
  */
2308
2333
  complete(): Promise<bigint>;
2334
+ cancel(): void;
2309
2335
  }
2310
2336
 
2311
2337
  /**
@@ -2313,48 +2339,70 @@ export declare class MediaStoreActiveCut {
2313
2339
  * see: {@link NorskOutput.mediaStore}
2314
2340
  */
2315
2341
  export declare class MediaStoreAsset {
2316
- /**
2317
- * @public
2318
- * Promise that resolves when the asset import is complete
2319
- */
2320
- ready(): Promise<{
2321
- durationMs: bigint;
2322
- size: bigint;
2323
- }>;
2342
+ /** @public */
2343
+ durationMs?: bigint;
2344
+ /** @public */
2345
+ size?: bigint;
2346
+ cutListEntry(settings: MediaStoreAssetCut): MediaStoreCut;
2324
2347
  }
2325
2348
 
2326
2349
  /** @public */
2327
- export declare type MediaStoreAssetSettings = {
2328
- name: string;
2329
- file: string;
2330
- path: string;
2331
- progressCb?: (progress: number) => void;
2332
- };
2333
-
2334
- /** @public */
2335
- export declare type MediaStoreCut = {
2350
+ export declare class MediaStoreAssetCut {
2336
2351
  /**
2337
- * Required: Name of the media store instance that this cut is from
2352
+ * Required: The streams you want to select from the asset
2338
2353
  */
2339
- mediaStoreName: string;
2354
+ streamSelection: MediaStoreStreamSelection[];
2340
2355
  /**
2341
- * Required: The streams you want to playback from the media store
2356
+ * Optional: The start point for the cut in milliseconds - defaults to zero
2342
2357
  */
2343
- streamSelection: MediaStoreStreamSelection[];
2358
+ startTimeMs?: number;
2344
2359
  /**
2345
- * Required: The start date/time for the cut. For a local recoder or a remote view,
2346
- * this is UTC. For an asset store, this starts at zero
2360
+ * Optional: The duration of the cut in milliseconds - defaults to the entire asset
2347
2361
  */
2348
- startDateTime: Date;
2362
+ durationMs?: number | "all";
2349
2363
  /**
2350
- * Required: The duration of the cut in milliseconds
2364
+ * Optional: How many times to repeat the cut - defaults to 1
2351
2365
  */
2352
- durationMs: number;
2366
+ repeatCount?: number | "infinity";
2353
2367
  /**
2354
2368
  * Required: In a video stream, if a start time or end time falls mid-gop, should the gop be trimmed to be frame accurate?
2355
2369
  */
2356
2370
  trimPartialGops: boolean;
2357
- sessionNum?: number;
2371
+ }
2372
+
2373
+ /** @public */
2374
+ export declare type MediaStoreAssetFile = {
2375
+ file: string;
2376
+ errorOnHashMismatch?: boolean;
2377
+ };
2378
+
2379
+ /** @public */
2380
+ export declare type MediaStoreAssetFileWithHash = {
2381
+ file: string;
2382
+ hash: string;
2383
+ errorOnHashMismatch?: boolean;
2384
+ };
2385
+
2386
+ /** @public */
2387
+ export declare type MediaStoreAssetSettings = {
2388
+ name: string;
2389
+ source?: MediaStoreAssetSource;
2390
+ path: string;
2391
+ progressCb?: (progress: number) => void;
2392
+ };
2393
+
2394
+ /** @public */
2395
+ export declare type MediaStoreAssetSource = MediaStoreAssetFileWithHash | MediaStoreAssetFile;
2396
+
2397
+ declare type MediaStoreCut = {
2398
+ mediaStoreName: string;
2399
+ cut: {
2400
+ type: "asset";
2401
+ cut: MediaStoreAssetCut;
2402
+ } | {
2403
+ type: "recorder";
2404
+ cut: MediaStoreRecorderCut;
2405
+ };
2358
2406
  };
2359
2407
 
2360
2408
  /** @public */
@@ -2402,11 +2450,38 @@ export declare interface MediaStorePlayerSettings extends InputSettings<MediaSto
2402
2450
  onEof?: () => void;
2403
2451
  }
2404
2452
 
2453
+ /** @public */
2454
+ export declare class MediaStoreRecorderCut {
2455
+ /**
2456
+ * Required: The streams you want to select from the asset
2457
+ */
2458
+ streamSelection: MediaStoreStreamSelection[];
2459
+ /**
2460
+ * Required: The start point for the cut in UTC
2461
+ */
2462
+ startDateTime: Date;
2463
+ /**
2464
+ * Required: The duration of the cut in milliseconds
2465
+ */
2466
+ durationMs: number;
2467
+ /**
2468
+ * Required: In a video stream, if a start time or end time falls mid-gop, should the gop be trimmed to be frame accurate?
2469
+ */
2470
+ trimPartialGops: boolean;
2471
+ /**
2472
+ * Optional: If it's ambiguous which session contains the start time (for example, due to a system clock change), then
2473
+ * specify the correct session
2474
+ */
2475
+ sessionNum?: number;
2476
+ }
2477
+
2405
2478
  /**
2406
2479
  * @public
2407
2480
  * see: {@link NorskOutput.mediaStore}
2408
2481
  */
2409
2482
  export declare class MediaStoreRecorderNode extends AutoSinkMediaNode<"audio" | "video"> {
2483
+ metadata(): Promise<MediaStoreSession[]>;
2484
+ cutListEntry(settings: MediaStoreRecorderCut): MediaStoreCut;
2410
2485
  }
2411
2486
 
2412
2487
  /**
@@ -2439,6 +2514,32 @@ export declare type MediaStoreSession = {
2439
2514
  streams: MediaStoreStream[];
2440
2515
  };
2441
2516
 
2517
+ /**
2518
+ * @public
2519
+ * see: {@link NorskOutput.mediaStore}
2520
+ */
2521
+ export declare class MediaStoreSnapshot {
2522
+ update(): Promise<void>;
2523
+ metadata(): Promise<MediaStoreSession[]>;
2524
+ cutListEntry(settings: MediaStoreRecorderCut): MediaStoreCut;
2525
+ }
2526
+
2527
+ /**
2528
+ * @public
2529
+ * Settings to configure a media store snapshot
2530
+ * see {@link NorskOutput.mediaStore}
2531
+ */
2532
+ export declare interface MediaStoreSnapshotSettings {
2533
+ /**
2534
+ * Required: Name of this media store instance.
2535
+ */
2536
+ name: string;
2537
+ /**
2538
+ * Required: Path for the media store database.
2539
+ */
2540
+ path: string;
2541
+ }
2542
+
2442
2543
  /** @public */
2443
2544
  export declare type MediaStoreStream = {
2444
2545
  streamKey: StreamKey;
@@ -2806,17 +2907,15 @@ export declare interface NorskMediaStore {
2806
2907
  *
2807
2908
  * @param cut request - Configuration for the cut
2808
2909
  */
2809
- makeCut(request: MediaStoreCutRequest): Promise<MediaStoreActiveCut>;
2910
+ makeCut(request: MediaStoreCutRequest): MediaStoreActiveCut;
2810
2911
  /**
2811
- * Get the metadata for a store
2812
- *
2813
- * @param Media Store name
2912
+ * Create a Media Store for an on-disk MP4 asset
2814
2913
  */
2815
- metadata(mediaStoreName: string): Promise<MediaStoreSession[]>;
2914
+ asyncLoadAsset(settings: MediaStoreAssetSettings): Promise<MediaStoreAsset>;
2816
2915
  /**
2817
- * Create a Media Store for an on-disk MP4 asset
2916
+ * Create a Media Store snapshot on an existing store
2818
2917
  */
2819
- asset(settings: MediaStoreAssetSettings): Promise<MediaStoreAsset>;
2918
+ snapshot(settings: MediaStoreSnapshotSettings): Promise<MediaStoreSnapshot>;
2820
2919
  }
2821
2920
 
2822
2921
  /**
@@ -4847,6 +4946,7 @@ export declare class StreamTimestampNudgeNode extends AutoProcessorMediaNode<"au
4847
4946
  * see: {@link NorskTransform.streamTimestampNudge}
4848
4947
  * */
4849
4948
  export declare interface StreamTimestampNudgeSettings extends ProcessorNodeSettings<StreamTimestampNudgeNode> {
4949
+ /** the initial nudge to apply, in milliseconds */
4850
4950
  nudge?: number;
4851
4951
  }
4852
4952
 
@@ -4863,7 +4963,7 @@ export declare class StreamTimestampReportNode extends AutoSinkMediaNode<string>
4863
4963
  * see {@link NorskOutput.fileMp4}
4864
4964
  */
4865
4965
  export declare interface StreamTimestampReportSettings extends SinkNodeSettings<StreamTimestampReportNode> {
4866
- onTimestamp?: (streamKey: StreamKey, timestamp: IntervalTimestamp) => Promise<void>;
4966
+ onTimestamp?: (streamKey: StreamKey, timestamp: Interval) => Promise<void>;
4867
4967
  }
4868
4968
 
4869
4969
  /**
@@ -1,12 +1,12 @@
1
1
  import { AutoSinkMediaNode, SinkNodeSettings } from "./common";
2
- import { IntervalTimestamp, StreamKey } from "./types";
2
+ import { Interval, StreamKey } from "./types";
3
3
  /**
4
4
  * @public
5
5
  * Settings to control MP4 file output
6
6
  * see {@link NorskOutput.fileMp4}
7
7
  */
8
8
  export interface StreamTimestampReportSettings extends SinkNodeSettings<StreamTimestampReportNode> {
9
- onTimestamp?: (streamKey: StreamKey, timestamp: IntervalTimestamp) => Promise<void>;
9
+ onTimestamp?: (streamKey: StreamKey, timestamp: Interval) => Promise<void>;
10
10
  }
11
11
  /**
12
12
  * @public
@@ -43,7 +43,7 @@ class StreamTimestampReportNode extends common_1.AutoSinkMediaNode {
43
43
  case "report": {
44
44
  if (settings.onTimestamp) {
45
45
  if (data.message.value.pts && data.message.value.stream)
46
- settings.onTimestamp((0, types_1.fromStreamKey)(data.message.value.stream), data.message.value.pts);
46
+ settings.onTimestamp((0, types_1.fromStreamKey)(data.message.value.stream), (0, types_1.fromInterval)(data.message.value.pts));
47
47
  }
48
48
  break;
49
49
  }
@@ -281,6 +281,10 @@ export interface DeltacastInputSettings extends InputSettings<DeltacastInputNode
281
281
  deviceId: number;
282
282
  /** Which channel on the device to use */
283
283
  channelIndex: number;
284
+ /** Should video be captured? Defaults to true, but for applications that only require the audio
285
+ * there is a small efficiency gain by setting it to false
286
+ */
287
+ captureVideo?: boolean;
284
288
  /**
285
289
  * An SDI signal can contain up to 16 (3G) or 32 (6G / 12G) audio channels, which are
286
290
  * transmitted as groups of 4. For example, a single stereo signal would typically be
@@ -672,6 +676,10 @@ export interface FileMp4InputSettings extends SourceNodeSettings<FileMp4InputNod
672
676
  fileName: string;
673
677
  /** Callback to be notified when the file ends */
674
678
  onEof?: () => void;
679
+ /** Callback to be notified when the file loops on hitting the end */
680
+ onLoop?: () => void;
681
+ /** Callback to be notified when the file reaches the specified stop position */
682
+ onStopPosition?: () => void;
675
683
  /** Callback to be notified when the file is initially read */
676
684
  onInfo?: (info: FileMp4Info) => void;
677
685
  /** Whether to loop back to the start of the file after reaching the end (default false) */
@@ -704,8 +712,14 @@ export declare class FileMp4InputNode extends SourceMediaNode {
704
712
  * */
705
713
  nudge(nudge: number): void;
706
714
  updateSettings(settings: FileMp4InputSettingsUpdate): void;
715
+ /** Pause the MP4 playback. This may not take immediate effect; frames already in flight within the Mp4Input workflow will still be emitted */
707
716
  pause(): void;
717
+ /** Start/resume the MP4 playback */
708
718
  play(): void;
719
+ /** Start MP4 playback, pausing once the given offset is reached */
720
+ playUntil(offsetMs: number): void;
721
+ /** Seek to a given point, without starting playback. When the stream is played/resumed, it will start from (about) this offset */
722
+ seek(offsetMs: number): void;
709
723
  }
710
724
  /**
711
725
  * @public
@@ -8,6 +8,7 @@ const types_3 = require("../types");
8
8
  const utils_1 = require("../shared/utils");
9
9
  const common_1 = require("./common");
10
10
  const protobuf_1 = require("@bufbuild/protobuf");
11
+ const common_pb_1 = require("@norskvideo/norsk-api/lib/shared/common_pb");
11
12
  /**
12
13
  * @public
13
14
  * see: {@link NorskInput.rtp}
@@ -515,6 +516,7 @@ class DeltacastInputNode extends common_1.SourceMediaNode {
515
516
  sourceName: settings.sourceName,
516
517
  deviceId: settings.deviceId,
517
518
  channelIndex: settings.channelIndex,
519
+ captureVideo: (!('captureVideo' in settings) || settings.captureVideo === undefined) ? true : settings.captureVideo,
518
520
  audioChannelMask: settings.audioChannelMask ? settings.audioChannelMask : 0xffffffff,
519
521
  statsSampling: settings.statsSampling
520
522
  ? (0, utils_1.provideFull)(media_pb_1.StreamStatisticsSampling, settings.statsSampling)
@@ -1165,7 +1167,8 @@ class VideoTestcardGeneratorNode extends common_1.SourceMediaNode {
1165
1167
  ? (0, utils_1.provideFull)(media_pb_1.MediaNodeId, { id: settings.id })
1166
1168
  : undefined;
1167
1169
  const config = (0, utils_1.provideFull)(media_pb_1.TestCardVideoConfiguration, {
1168
- numberOfFrames: settings.numberOfFrames ?? 0,
1170
+ numberOfFrames: settings.numberOfFrames ? (0, utils_1.provideFull)(common_pb_1.UInt32OrInfinity, (0, utils_1.mkMessageCase)({ intValue: settings.numberOfFrames }))
1171
+ : (0, utils_1.provideFull)(common_pb_1.UInt32OrInfinity, (0, utils_1.mkMessageCase)({ infinity: new protobuf_1.Empty() })),
1169
1172
  realtime: true,
1170
1173
  sourceName: settings.sourceName,
1171
1174
  frameRate: (0, utils_1.provideFull)(media_pb_1.FrameRate, settings.frameRate),
@@ -1318,10 +1321,16 @@ class FileMp4InputNode extends common_1.SourceMediaNode {
1318
1321
  break;
1319
1322
  }
1320
1323
  case "status": {
1321
- if (data.message.value.state ==
1322
- media_pb_1.FileMp4InputStatus_State.INPUT_STATUS_EOF &&
1323
- settings.onEof) {
1324
- settings.onEof();
1324
+ switch (data.message.value.state) {
1325
+ case media_pb_1.FileMp4InputStatus_State.INPUT_STATUS_EOF:
1326
+ settings.onEof && settings.onEof();
1327
+ break;
1328
+ case media_pb_1.FileMp4InputStatus_State.INPUT_STATUS_EOF_LOOPING:
1329
+ settings.onLoop && settings.onLoop();
1330
+ break;
1331
+ case media_pb_1.FileMp4InputStatus_State.INPUT_STATUS_STOP_POSITION:
1332
+ settings.onStopPosition && settings.onStopPosition();
1333
+ break;
1325
1334
  }
1326
1335
  break;
1327
1336
  }
@@ -1363,12 +1372,22 @@ class FileMp4InputNode extends common_1.SourceMediaNode {
1363
1372
  updateSettings(settings) {
1364
1373
  this.grpcStream.write((0, utils_1.provideFull)(media_pb_1.FileMp4InputMessage, (0, utils_1.mkMessageCase)({ updateConfig: (0, utils_1.provideFull)(media_pb_1.FileMp4InputConfigurationUpdate, { loop: settings.loop !== undefined ? (0, types_1.mkOptBool)(settings.loop) : undefined }) })));
1365
1374
  }
1375
+ /** Pause the MP4 playback. This may not take immediate effect; frames already in flight within the Mp4Input workflow will still be emitted */
1366
1376
  pause() {
1367
1377
  this.grpcStream.write((0, utils_1.provideFull)(media_pb_1.FileMp4InputMessage, (0, utils_1.mkMessageCase)({ pause: (0, utils_1.provideFull)(protobuf_1.Empty, {}) })));
1368
1378
  }
1379
+ /** Start/resume the MP4 playback */
1369
1380
  play() {
1370
1381
  this.grpcStream.write((0, utils_1.provideFull)(media_pb_1.FileMp4InputMessage, (0, utils_1.mkMessageCase)({ play: (0, utils_1.provideFull)(protobuf_1.Empty, {}) })));
1371
1382
  }
1383
+ /** Start MP4 playback, pausing once the given offset is reached */
1384
+ playUntil(offsetMs) {
1385
+ this.grpcStream.write((0, utils_1.provideFull)(media_pb_1.FileMp4InputMessage, (0, utils_1.mkMessageCase)({ playUntil: (0, utils_1.provideFull)(media_pb_1.FileMp4InputPlayUntil, { offsetMs }) })));
1386
+ }
1387
+ /** Seek to a given point, without starting playback. When the stream is played/resumed, it will start from (about) this offset */
1388
+ seek(offsetMs) {
1389
+ this.grpcStream.write((0, utils_1.provideFull)(media_pb_1.FileMp4InputMessage, (0, utils_1.mkMessageCase)({ seek: (0, utils_1.provideFull)(media_pb_1.FileMp4InputSeek, { offsetMs }) })));
1390
+ }
1372
1391
  }
1373
1392
  exports.FileMp4InputNode = FileMp4InputNode;
1374
1393
  //# sourceMappingURL=input.js.map
@@ -1,6 +1,6 @@
1
1
  import { AutoSinkMediaNode, SinkNodeSettings, SourceMediaNode, StreamStatisticsMixin } from "./common";
2
2
  import { InputSettings } from "./input";
3
- import { MediaStoreCut, StreamKey, StreamMetadata } from "./types";
3
+ import { MediaStoreStreamSelection, StreamKey, StreamMetadata } from "./types";
4
4
  /** @public */
5
5
  export declare type MediaStoreExpireByTime = {
6
6
  expire: "byTime";
@@ -40,11 +40,61 @@ export declare type MediaStoreSession = {
40
40
  streams: MediaStoreStream[];
41
41
  };
42
42
  /** @public */
43
- export declare type MediaStoreAssetSettings = {
44
- name: string;
45
- file: string;
46
- path: string;
47
- progressCb?: (progress: number) => void;
43
+ export declare class MediaStoreAssetCut {
44
+ /**
45
+ * Required: The streams you want to select from the asset
46
+ */
47
+ streamSelection: MediaStoreStreamSelection[];
48
+ /**
49
+ * Optional: The start point for the cut in milliseconds - defaults to zero
50
+ */
51
+ startTimeMs?: number;
52
+ /**
53
+ * Optional: The duration of the cut in milliseconds - defaults to the entire asset
54
+ */
55
+ durationMs?: number | "all";
56
+ /**
57
+ * Optional: How many times to repeat the cut - defaults to 1
58
+ */
59
+ repeatCount?: number | "infinity";
60
+ /**
61
+ * Required: In a video stream, if a start time or end time falls mid-gop, should the gop be trimmed to be frame accurate?
62
+ */
63
+ trimPartialGops: boolean;
64
+ }
65
+ /** @public */
66
+ export declare class MediaStoreRecorderCut {
67
+ /**
68
+ * Required: The streams you want to select from the asset
69
+ */
70
+ streamSelection: MediaStoreStreamSelection[];
71
+ /**
72
+ * Required: The start point for the cut in UTC
73
+ */
74
+ startDateTime: Date;
75
+ /**
76
+ * Required: The duration of the cut in milliseconds
77
+ */
78
+ durationMs: number;
79
+ /**
80
+ * Required: In a video stream, if a start time or end time falls mid-gop, should the gop be trimmed to be frame accurate?
81
+ */
82
+ trimPartialGops: boolean;
83
+ /**
84
+ * Optional: If it's ambiguous which session contains the start time (for example, due to a system clock change), then
85
+ * specify the correct session
86
+ */
87
+ sessionNum?: number;
88
+ }
89
+ declare type MediaStoreCut = {
90
+ mediaStoreName: string;
91
+ cut: {
92
+ type: "asset";
93
+ cut: MediaStoreAssetCut;
94
+ } | {
95
+ type: "recorder";
96
+ cut: MediaStoreRecorderCut;
97
+ };
48
98
  };
49
99
  /**
50
100
  * @public
@@ -74,6 +124,8 @@ export interface MediaStoreRecorderSettings extends SinkNodeSettings<MediaStoreR
74
124
  * see: {@link NorskOutput.mediaStore}
75
125
  */
76
126
  export declare class MediaStoreRecorderNode extends AutoSinkMediaNode<"audio" | "video"> {
127
+ metadata(): Promise<MediaStoreSession[]>;
128
+ cutListEntry(settings: MediaStoreRecorderCut): MediaStoreCut;
77
129
  }
78
130
  /**
79
131
  * @public
@@ -101,23 +153,65 @@ export declare class MediaStorePlayerNode extends SourceMediaNode {
101
153
  export declare class MediaStoreActiveCut {
102
154
  /**
103
155
  * @public
104
- * Promise that resolves when the asset import is complete
156
+ * Promise that resolves when the cut is complete
105
157
  */
106
158
  complete(): Promise<bigint>;
159
+ cancel(): void;
107
160
  }
161
+ /** @public */
162
+ export declare type MediaStoreAssetFileWithHash = {
163
+ file: string;
164
+ hash: string;
165
+ errorOnHashMismatch?: boolean;
166
+ };
167
+ /** @public */
168
+ export declare type MediaStoreAssetFile = {
169
+ file: string;
170
+ errorOnHashMismatch?: boolean;
171
+ };
172
+ /** @public */
173
+ export declare type MediaStoreAssetSource = MediaStoreAssetFileWithHash | MediaStoreAssetFile;
174
+ /** @public */
175
+ export declare type MediaStoreAssetSettings = {
176
+ name: string;
177
+ source?: MediaStoreAssetSource;
178
+ path: string;
179
+ progressCb?: (progress: number) => void;
180
+ };
108
181
  /**
109
182
  * @public
110
183
  * see: {@link NorskOutput.mediaStore}
111
184
  */
112
185
  export declare class MediaStoreAsset {
186
+ /** @public */
187
+ durationMs?: bigint;
188
+ /** @public */
189
+ size?: bigint;
190
+ cutListEntry(settings: MediaStoreAssetCut): MediaStoreCut;
191
+ }
192
+ /**
193
+ * @public
194
+ * Settings to configure a media store snapshot
195
+ * see {@link NorskOutput.mediaStore}
196
+ */
197
+ export interface MediaStoreSnapshotSettings {
113
198
  /**
114
- * @public
115
- * Promise that resolves when the asset import is complete
199
+ * Required: Name of this media store instance.
200
+ */
201
+ name: string;
202
+ /**
203
+ * Required: Path for the media store database.
116
204
  */
117
- ready(): Promise<{
118
- durationMs: bigint;
119
- size: bigint;
120
- }>;
205
+ path: string;
206
+ }
207
+ /**
208
+ * @public
209
+ * see: {@link NorskOutput.mediaStore}
210
+ */
211
+ export declare class MediaStoreSnapshot {
212
+ update(): Promise<void>;
213
+ metadata(): Promise<MediaStoreSession[]>;
214
+ cutListEntry(settings: MediaStoreRecorderCut): MediaStoreCut;
121
215
  }
122
216
  /**
123
217
  * @public
@@ -139,16 +233,15 @@ export interface NorskMediaStore {
139
233
  *
140
234
  * @param cut request - Configuration for the cut
141
235
  */
142
- makeCut(request: MediaStoreCutRequest): Promise<MediaStoreActiveCut>;
236
+ makeCut(request: MediaStoreCutRequest): MediaStoreActiveCut;
143
237
  /**
144
- * Get the metadata for a store
145
- *
146
- * @param Media Store name
238
+ * Create a Media Store for an on-disk MP4 asset
147
239
  */
148
- metadata(mediaStoreName: string): Promise<MediaStoreSession[]>;
240
+ asyncLoadAsset(settings: MediaStoreAssetSettings): Promise<MediaStoreAsset>;
149
241
  /**
150
- * Create a Media Store for an on-disk MP4 asset
242
+ * Create a Media Store snapshot on an existing store
151
243
  */
152
- asset(settings: MediaStoreAssetSettings): Promise<MediaStoreAsset>;
244
+ snapshot(settings: MediaStoreSnapshotSettings): Promise<MediaStoreSnapshot>;
153
245
  }
246
+ export {};
154
247
  //# sourceMappingURL=mediaStore.d.ts.map
@@ -23,11 +23,70 @@ var __importStar = (this && this.__importStar) || function (mod) {
23
23
  return result;
24
24
  };
25
25
  Object.defineProperty(exports, "__esModule", { value: true });
26
- exports.mediaStoreMetadata = exports.MediaStoreAsset = exports.MediaStoreActiveCut = exports.MediaStorePlayerNode = exports.MediaStoreRecorderNode = void 0;
26
+ exports.mediaStoreMetadata = exports.MediaStoreSnapshot = exports.MediaStoreAsset = exports.MediaStoreActiveCut = exports.MediaStorePlayerNode = exports.MediaStoreRecorderNode = exports.MediaStoreRecorderCut = exports.MediaStoreAssetCut = void 0;
27
27
  const media_pb_1 = require("@norskvideo/norsk-api/lib/media_pb");
28
28
  const common_1 = require("./common");
29
29
  const types_1 = require("./types");
30
30
  const util = __importStar(require("util"));
31
+ const protobuf_1 = require("@bufbuild/protobuf");
32
+ const common_pb_1 = require("@norskvideo/norsk-api/lib/shared/common_pb");
33
+ /** @public */
34
+ class MediaStoreAssetCut {
35
+ }
36
+ exports.MediaStoreAssetCut = MediaStoreAssetCut;
37
+ /** @public */
38
+ class MediaStoreRecorderCut {
39
+ }
40
+ exports.MediaStoreRecorderCut = MediaStoreRecorderCut;
41
+ function toSingleCuts(cuts) {
42
+ return cuts.map(({ mediaStoreName, cut }) => {
43
+ const cutType = cut.type;
44
+ let streamSelection;
45
+ let startDateTime;
46
+ let durationMs;
47
+ let trimPartialGops;
48
+ let sessionNum;
49
+ let repeatCount;
50
+ switch (cutType) {
51
+ case "asset": {
52
+ const cut2 = cut.cut;
53
+ streamSelection = cut2.streamSelection;
54
+ startDateTime = new Date(cut2.startTimeMs ? cut2.startTimeMs : 0);
55
+ durationMs = cut2.durationMs ? (cut2.durationMs == "all" ? 0 : cut2.durationMs) : 0; // this looks dumb, but by cutListEntry in asset store will already have fixed it
56
+ trimPartialGops = cut2.trimPartialGops;
57
+ repeatCount = cut2.repeatCount ? (cut2.repeatCount == "infinity"
58
+ ? (0, common_1.provideFull)(common_pb_1.UInt32OrInfinity, (0, common_1.mkMessageCase)({ infinity: new protobuf_1.Empty() }))
59
+ : (0, common_1.provideFull)(common_pb_1.UInt32OrInfinity, (0, common_1.mkMessageCase)({ intValue: cut2.repeatCount })))
60
+ : (0, common_1.provideFull)(common_pb_1.UInt32OrInfinity, (0, common_1.mkMessageCase)({ intValue: 1 }));
61
+ break;
62
+ }
63
+ case "recorder": {
64
+ const cut2 = cut.cut;
65
+ streamSelection = cut2.streamSelection;
66
+ startDateTime = cut2.startDateTime;
67
+ durationMs = cut2.durationMs;
68
+ trimPartialGops = cut2.trimPartialGops;
69
+ sessionNum = cut2.sessionNum;
70
+ repeatCount = (0, common_1.provideFull)(common_pb_1.UInt32OrInfinity, (0, common_1.mkMessageCase)({ intValue: 1 }));
71
+ break;
72
+ }
73
+ default:
74
+ (0, common_1.exhaustiveCheck)(cutType);
75
+ }
76
+ return (0, common_1.provideFull)(media_pb_1.SingleCut, {
77
+ mediaStoreName: mediaStoreName,
78
+ streamSelection: streamSelection.map(([sourceStreamKey, outputStreamKey]) => (0, common_1.provideFull)(media_pb_1.StreamSelection, {
79
+ sourceStreamKey: (0, types_1.mkStreamKey)(sourceStreamKey),
80
+ outputStreamKey: (0, types_1.mkStreamKey)(outputStreamKey)
81
+ })),
82
+ startDateTime: protobuf_1.Timestamp.fromDate(startDateTime),
83
+ durationMs,
84
+ trimPartialGops: trimPartialGops,
85
+ sessionNum: sessionNum ? (0, common_1.provideFull)(common_pb_1.OptionalInt, { value: sessionNum }) : undefined,
86
+ repeatCount: repeatCount
87
+ });
88
+ });
89
+ }
31
90
  /**
32
91
  * @public
33
92
  * see: {@link NorskOutput.mediaStore}
@@ -37,6 +96,8 @@ class MediaStoreRecorderNode extends common_1.AutoSinkMediaNode {
37
96
  constructor(settings, client, unregisterNode) {
38
97
  super(client, () => this.grpcStream, async (subscription) => this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStoreRecorderMessage, (0, common_1.mkMessageCase)({ subscription }))), settings.onSubscriptionError);
39
98
  let expiryPb = undefined;
99
+ this.client = client;
100
+ this.settings = settings;
40
101
  if (settings.expiry) {
41
102
  const expiryType = settings.expiry.expire;
42
103
  switch (expiryType) {
@@ -58,8 +119,8 @@ class MediaStoreRecorderNode extends common_1.AutoSinkMediaNode {
58
119
  expiry: expiryPb
59
120
  });
60
121
  this.grpcStream = this.client.media.createMediaStoreRecorder();
61
- this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStoreRecorderMessage, (0, common_1.mkMessageCase)({ configuration: config })));
62
122
  this.initialised = new Promise((resolve, reject) => {
123
+ this.grpcStream.on("error", (err) => reject(err));
63
124
  this.grpcStream.on("data", (data) => {
64
125
  const messageCase = data.message.case;
65
126
  switch (messageCase) {
@@ -71,7 +132,7 @@ class MediaStoreRecorderNode extends common_1.AutoSinkMediaNode {
71
132
  case "nodeId": {
72
133
  this.id = data.message.value.id;
73
134
  settings.onCreate && settings.onCreate(this);
74
- resolve();
135
+ resolve(this);
75
136
  break;
76
137
  }
77
138
  case "inboundContext": {
@@ -85,12 +146,25 @@ class MediaStoreRecorderNode extends common_1.AutoSinkMediaNode {
85
146
  });
86
147
  (0, common_1.registerStreamHandlers)(this.grpcStream, () => unregisterNode(this), "mediaStoreRecorder", reject, settings);
87
148
  });
149
+ this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStoreRecorderMessage, (0, common_1.mkMessageCase)({ configuration: config })));
88
150
  }
89
151
  /** @internal */
90
152
  static async create(settings, client, unregisterNode) {
91
153
  const node = new MediaStoreRecorderNode(settings, client, unregisterNode);
92
- await node.initialised;
93
- return node;
154
+ return node.initialised;
155
+ }
156
+ async metadata() {
157
+ const fn = util
158
+ .promisify(this.client.media.mediaStoreMetadata)
159
+ .bind(this.client.media);
160
+ return fn((0, common_1.provideFull)(media_pb_1.MediaStoreMetadataRequest, { mediaStoreName: this.settings.name }))
161
+ .then(fromMediaStoreMetadataResponse);
162
+ }
163
+ cutListEntry(settings) {
164
+ return {
165
+ mediaStoreName: this.settings.name,
166
+ cut: { type: "recorder", cut: settings }
167
+ };
94
168
  }
95
169
  }
96
170
  exports.MediaStoreRecorderNode = MediaStoreRecorderNode;
@@ -107,11 +181,11 @@ class MediaStorePlayerNode extends common_1.SourceMediaNode {
107
181
  ? (0, common_1.provideFull)(media_pb_1.MediaNodeId, { id: settings.id })
108
182
  : undefined,
109
183
  sourceName: settings.sourceName,
110
- cuts: (0, types_1.toSingleCuts)(settings.cuts)
184
+ cuts: toSingleCuts(settings.cuts)
111
185
  });
112
186
  this.grpcStream = client.media.createMediaStorePlayer();
113
- this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStorePlayerMessage, (0, common_1.mkMessageCase)({ initialConfig: config })));
114
187
  this.initialised = new Promise((resolve, reject) => {
188
+ this.grpcStream.on("error", (err) => reject(err));
115
189
  this.grpcStream.on("data", (data) => {
116
190
  const messageCase = data.message.case;
117
191
  switch (messageCase) {
@@ -120,7 +194,7 @@ class MediaStorePlayerNode extends common_1.SourceMediaNode {
120
194
  case "nodeId": {
121
195
  this.id = data.message.value.id;
122
196
  settings.onCreate && settings.onCreate(this);
123
- resolve();
197
+ resolve(this);
124
198
  break;
125
199
  }
126
200
  case "outboundContext": {
@@ -138,12 +212,12 @@ class MediaStorePlayerNode extends common_1.SourceMediaNode {
138
212
  });
139
213
  (0, common_1.registerStreamHandlers)(this.grpcStream, () => unregisterNode(this), "mediaStorePlayer", reject, settings);
140
214
  });
215
+ this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStorePlayerMessage, (0, common_1.mkMessageCase)({ initialConfig: config })));
141
216
  }
142
217
  /** @internal */
143
218
  static async create(settings, client, unregisterNode) {
144
219
  const node = new MediaStorePlayerNode(settings, client, unregisterNode);
145
- await node.initialised;
146
- return node;
220
+ return node.initialised;
147
221
  }
148
222
  }
149
223
  exports.MediaStorePlayerNode = MediaStorePlayerNode;
@@ -157,7 +231,7 @@ class MediaStoreActiveCut {
157
231
  let fileFormat;
158
232
  switch (cutRequest.fileFormat) {
159
233
  case "mp4": {
160
- fileFormat = media_pb_1.MediaStoreCutRequest_FileFormat.CUT_MP4;
234
+ fileFormat = media_pb_1.StartMediaStoreCut_FileFormat.CUT_MP4;
161
235
  break;
162
236
  }
163
237
  default:
@@ -165,12 +239,10 @@ class MediaStoreActiveCut {
165
239
  }
166
240
  this.client = client;
167
241
  this.cutRequest = cutRequest;
168
- this.grpcStream = this.client.media.createMediaStoreCut((0, common_1.provideFull)(media_pb_1.MediaStoreCutRequest, {
169
- ...cutRequest,
170
- fileFormat: fileFormat,
171
- cuts: (0, types_1.toSingleCuts)(cutRequest.cuts),
172
- }));
242
+ this.grpcStream = this.client.media.createMediaStoreCut();
243
+ this._isComplete = false;
173
244
  this._complete = new Promise((resolve, reject) => {
245
+ this.grpcStream.on("error", (err) => { reject(err); });
174
246
  this.grpcStream.on("data", (data) => {
175
247
  const messageCase = data.message.case;
176
248
  switch (messageCase) {
@@ -180,7 +252,13 @@ class MediaStoreActiveCut {
180
252
  this.cutRequest.progressCb && this.cutRequest.progressCb(data.message.value.progress);
181
253
  break;
182
254
  }
255
+ case "cutCancelled": {
256
+ this._isComplete = true;
257
+ reject(new Error("cancelled"));
258
+ break;
259
+ }
183
260
  case "cutComplete": {
261
+ this._isComplete = true;
184
262
  resolve(data.message.value.size);
185
263
  break;
186
264
  }
@@ -189,21 +267,35 @@ class MediaStoreActiveCut {
189
267
  }
190
268
  });
191
269
  });
270
+ this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStoreCutRequest, (0, common_1.mkMessageCase)({
271
+ startCut: (0, common_1.provideFull)(media_pb_1.StartMediaStoreCut, {
272
+ ...cutRequest,
273
+ fileFormat: fileFormat,
274
+ cuts: toSingleCuts(cutRequest.cuts),
275
+ })
276
+ })));
192
277
  }
193
278
  /** @internal */
194
- static async create(cutRequest, client) {
195
- const cut = new MediaStoreActiveCut(cutRequest, client);
196
- return cut;
279
+ static create(cutRequest, client) {
280
+ return new MediaStoreActiveCut(cutRequest, client);
197
281
  }
198
282
  /**
199
283
  * @public
200
- * Promise that resolves when the asset import is complete
284
+ * Promise that resolves when the cut is complete
201
285
  */
202
286
  async complete() {
203
287
  return this._complete;
204
288
  }
289
+ cancel() {
290
+ if (!this._isComplete) {
291
+ this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStoreCutRequest, (0, common_1.mkMessageCase)({ cancelCut: (0, common_1.provideFull)(media_pb_1.CancelMediaStoreCut, {}) })));
292
+ }
293
+ }
205
294
  }
206
295
  exports.MediaStoreActiveCut = MediaStoreActiveCut;
296
+ function isMediaStoreAssetFileWithHash(asset) {
297
+ return 'file' in asset && 'hash' in asset;
298
+ }
207
299
  /**
208
300
  * @public
209
301
  * see: {@link NorskOutput.mediaStore}
@@ -213,11 +305,27 @@ class MediaStoreAsset {
213
305
  constructor(settings, client) {
214
306
  this.client = client;
215
307
  this.settings = settings;
216
- this.grpcStream = this.client.media.createMediaStoreAsset((0, common_1.provideFull)(media_pb_1.MediaStoreAssetSettings, { mediaStoreName: settings.name,
217
- fileName: settings.file,
308
+ this.grpcStream = this.client.media.createMediaStoreAsset((0, common_1.provideFull)(media_pb_1.MediaStoreAssetSettings, {
309
+ mediaStoreName: settings.name,
310
+ assetSource: (0, common_1.provideFull)(media_pb_1.MediaStoreAssetSource, settings.source ?
311
+ (isMediaStoreAssetFileWithHash(settings.source) ?
312
+ (0, common_1.mkMessageCase)({
313
+ fileWithHash: (0, common_1.provideFull)(media_pb_1.MediaStoreAssetFileWithHash, {
314
+ file: settings.source.file,
315
+ hash: settings.source.hash,
316
+ errorOnHashMismatch: settings.source.errorOnHashMismatch ?? true
317
+ })
318
+ }) :
319
+ (0, common_1.mkMessageCase)({
320
+ file: (0, common_1.provideFull)(media_pb_1.MediaStoreAssetFile, {
321
+ file: settings.source.file,
322
+ errorOnHashMismatch: settings.source.errorOnHashMismatch ?? true
323
+ })
324
+ })) : (0, common_1.mkMessageCase)({ noLoad: (0, common_1.provideFull)(protobuf_1.Empty, {}) })),
218
325
  path: settings.path
219
326
  }));
220
327
  this._ready = new Promise((resolve, reject) => {
328
+ this.grpcStream.on("error", (err) => reject(err));
221
329
  this.grpcStream.on("data", (data) => {
222
330
  const messageCase = data.message.case;
223
331
  switch (messageCase) {
@@ -228,7 +336,9 @@ class MediaStoreAsset {
228
336
  break;
229
337
  }
230
338
  case "importComplete": {
231
- resolve({ durationMs: data.message.value.durationMs, size: data.message.value.size });
339
+ this.durationMs = data.message.value.durationMs;
340
+ this.size = data.message.value.size;
341
+ resolve(this);
232
342
  break;
233
343
  }
234
344
  default:
@@ -239,20 +349,95 @@ class MediaStoreAsset {
239
349
  }
240
350
  /** @internal */
241
351
  static async create(settings, client) {
242
- return new MediaStoreAsset(settings, client);
352
+ const asset = new MediaStoreAsset(settings, client);
353
+ return asset._ready;
243
354
  }
244
- /**
245
- * @public
246
- * Promise that resolves when the asset import is complete
247
- */
248
- async ready() {
249
- return this._ready;
355
+ cutListEntry(settings) {
356
+ if (this.durationMs == null) {
357
+ throw new Error("Attempt to cut from an asset that isn't yet loaded");
358
+ }
359
+ if (this.durationMs < Number.MIN_SAFE_INTEGER || this.durationMs > Number.MAX_SAFE_INTEGER) {
360
+ (0, common_1.debuglog)('The asset duration is longer than the maximum cut, you may not get a cut file of the duration that you expected');
361
+ }
362
+ settings.durationMs = settings.durationMs ? (settings.durationMs == "all" ? Number(this.durationMs) : settings.durationMs) : Number(this.durationMs);
363
+ return {
364
+ mediaStoreName: this.settings.name,
365
+ cut: { type: "asset", cut: settings }
366
+ };
250
367
  }
251
368
  }
252
369
  exports.MediaStoreAsset = MediaStoreAsset;
370
+ /**
371
+ * @public
372
+ * see: {@link NorskOutput.mediaStore}
373
+ */
374
+ class MediaStoreSnapshot {
375
+ /** @internal */
376
+ constructor(settings, client) {
377
+ this.client = client;
378
+ this.settings = settings;
379
+ this.grpcStream = this.client.media.createMediaStoreSnapshot();
380
+ this._ready = new Promise((resolve, reject) => {
381
+ this.grpcStream.on("error", (err) => reject(err));
382
+ this.grpcStream.on("data", (data) => {
383
+ const messageCase = data.message.case;
384
+ switch (messageCase) {
385
+ case undefined:
386
+ break;
387
+ case "snapshotLoaded":
388
+ resolve(this);
389
+ break;
390
+ case "snapshotUpdated":
391
+ this._updated?.resolve();
392
+ this._updated = undefined;
393
+ break;
394
+ case "snapshotUpdateFailed":
395
+ this._updated?.reject(new Error("Update failed"));
396
+ this._updated = undefined;
397
+ break;
398
+ default:
399
+ (0, common_1.exhaustiveCheck)(messageCase);
400
+ }
401
+ });
402
+ });
403
+ this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStoreSnapshotMessage, (0, common_1.mkMessageCase)({
404
+ configuration: (0, common_1.provideFull)(media_pb_1.MediaStoreSnapshotConfiguration, {
405
+ name: settings.name,
406
+ path: settings.path
407
+ })
408
+ })));
409
+ }
410
+ /** @internal */
411
+ static async create(settings, client) {
412
+ const snapshot = new MediaStoreSnapshot(settings, client);
413
+ return snapshot._ready;
414
+ }
415
+ async update() {
416
+ if (this._updated) {
417
+ return new Promise((_resolve, reject) => { reject(new Error("Update in progress")); });
418
+ }
419
+ this.grpcStream.write((0, common_1.provideFull)(media_pb_1.MediaStoreSnapshotMessage, (0, common_1.mkMessageCase)({ update: (0, common_1.provideFull)(protobuf_1.Empty, {}) })));
420
+ return new Promise((resolve, reject) => { this._updated = { resolve, reject }; });
421
+ }
422
+ async metadata() {
423
+ const fn = util
424
+ .promisify(this.client.media.mediaStoreMetadata)
425
+ .bind(this.client.media);
426
+ return fn((0, common_1.provideFull)(media_pb_1.MediaStoreMetadataRequest, { mediaStoreName: this.settings.name }))
427
+ .then(fromMediaStoreMetadataResponse);
428
+ }
429
+ cutListEntry(settings) {
430
+ return {
431
+ mediaStoreName: this.settings.name,
432
+ cut: { type: "recorder", cut: settings }
433
+ };
434
+ }
435
+ }
436
+ exports.MediaStoreSnapshot = MediaStoreSnapshot;
253
437
  /** @internal */
254
438
  function fromMediaStoreStreamVersion(version) {
255
- return { versionNum: version.versionNum,
439
+ return {
440
+ versionNum: version.versionNum,
256
441
  startDateTime: version.startDateTime,
257
442
  durationMs: version.durationMs,
258
443
  metadata: (0, types_1.fromStreamMetadata)((0, common_1.mandatory)(version.metadata)),
@@ -261,13 +446,15 @@ function fromMediaStoreStreamVersion(version) {
261
446
  }
262
447
  /** @internal */
263
448
  function fromMediaStoreStream(stream) {
264
- return { streamKey: (0, types_1.fromStreamKey)(stream.streamKey),
449
+ return {
450
+ streamKey: (0, types_1.fromStreamKey)(stream.streamKey),
265
451
  versions: stream.versions.map(fromMediaStoreStreamVersion)
266
452
  };
267
453
  }
268
454
  /** @internal */
269
455
  function fromMediaStoreSession(session) {
270
- return { sessionNum: session.sessionNum,
456
+ return {
457
+ sessionNum: session.sessionNum,
271
458
  streams: session.streams.map(fromMediaStoreStream)
272
459
  };
273
460
  }
@@ -270,6 +270,10 @@ export interface HlsPushDestinationSettings {
270
270
  this should take into account the end to end latency from source capture, to the part and playlist being published
271
271
  */
272
272
  partHoldBackSeconds?: number;
273
+ /** Whether the server supports gzip *request* compression for PUT/POST requests */
274
+ supportsGzip?: boolean;
275
+ /** Whether to use TLS or plain TCP transport, by default TLS used if port is 443 */
276
+ tlsTransport?: boolean;
273
277
  }
274
278
  /**
275
279
  * @public
@@ -1448,6 +1448,8 @@ function mkCmafDestination(dest) {
1448
1448
  holdBackSeconds: dest.holdBackSeconds ?? 0.0,
1449
1449
  partHoldBackSeconds: dest.partHoldBackSeconds ?? 0.0,
1450
1450
  sessionId: dest.sessionId ? dest.sessionId : "", defaultSegmentCount: dest.defaultSegmentCount ? (0, utils_1.provideFull)(common_pb_1.OptionalInt, { value: dest.defaultSegmentCount }) : undefined,
1451
+ supportsGzip: !!dest.supportsGzip,
1452
+ tlsTransport: (0, types_1.toOptBool)(dest.tlsTransport)
1451
1453
  };
1452
1454
  return (0, utils_1.provideFull)(media_pb_1.CMAFDestination, (0, utils_1.mkMessageCase)({ hlsPushDestination: (0, utils_1.provideFull)(media_pb_1.HlsPushDestination, conf) }));
1453
1455
  }
@@ -131,6 +131,7 @@ export interface AudioMeasureLevelsSettings extends ProcessorNodeSettings<AudioM
131
131
  * @eventProperty
132
132
  */
133
133
  onData: (levels: AudioMeasureLevels) => void;
134
+ /** Optionally control the sample frequency */
134
135
  intervalFrames?: number;
135
136
  }
136
137
  /**
@@ -145,6 +146,7 @@ export declare class AudioMeasureLevelsNode extends AutoProcessorMediaNode<"audi
145
146
  * see: {@link NorskTransform.streamTimestampNudge}
146
147
  * */
147
148
  export interface StreamTimestampNudgeSettings extends ProcessorNodeSettings<StreamTimestampNudgeNode> {
149
+ /** the initial nudge to apply, in milliseconds */
148
150
  nudge?: number;
149
151
  }
150
152
  /**
@@ -714,6 +716,7 @@ export declare class AudioSplitMultichannelNode extends AutoProcessorMediaNode<"
714
716
  export interface AudioBuildMultichannelSettings extends ProcessorNodeSettings<AudioBuildMultichannelNode> {
715
717
  /** The channel layout of the built outgoing stream */
716
718
  channelLayout: ChannelLayout;
719
+ /** The sample rate of the built outgoing stream */
717
720
  sampleRate: SampleRate;
718
721
  /**
719
722
  * Stream keys specifying the source for each channel, where the order is
@@ -845,7 +848,16 @@ export declare class SubtitleConvertNode extends AutoProcessorMediaNode<"subtitl
845
848
  * */
846
849
  export interface AudioTranscribeAzureSettings extends ProcessorNodeSettings<AudioTranscribeAzureNode> {
847
850
  outputStreamId: number;
851
+ /** The source language to recognise - an IETF BCP 47 language tag, eg en-US, en-GB, de-DE. Supported languages are
852
+ * found at https://learn.microsoft.com/en-us/azure/ai-services/speech-service/language-support?tabs=stt
853
+ */
848
854
  sourceLanguage: string;
855
+ /** The target output languages for translation - technically a BCP 47 language tag but but in most cases omitting region, e.g. en, de, zh-Hant.
856
+
857
+ * Leave this field absent/empty to use the transcription service without
858
+ * translation, while if any target languages are present the translation service will be used even if this is the same as the
859
+ * source language.
860
+ */
849
861
  targetLanguages?: string[];
850
862
  /** Key for the Azure Speech Service endpoint */
851
863
  azureKey: string;
@@ -878,8 +890,6 @@ export interface AudioTranscribeWhisperSettings extends ProcessorNodeSettings<Au
878
890
  * Default 3000ms.
879
891
  */
880
892
  stepMs?: number;
881
- /** The full audio buffer size. Default 10000ms */
882
- lengthMs?: number;
883
893
  /** Duration of audio to keep when clearing the buffer to allow for partial-word recognition. Default 400ms */
884
894
  keepMs?: number;
885
895
  /** Max tokens per segment */
@@ -1918,7 +1918,6 @@ class AudioTranscribeWhisperNode extends AutoProcessorMediaNode {
1918
1918
  : undefined,
1919
1919
  outputStreamId: settings.outputStreamId,
1920
1920
  stepMs: (0, types_2.toOptInt)(settings.stepMs),
1921
- lengthMs: (0, types_2.toOptInt)(settings.lengthMs),
1922
1921
  keepMs: (0, types_2.toOptInt)(settings.keepMs),
1923
1922
  maxTokens: (0, types_2.toOptInt)(settings.maxTokens),
1924
1923
  speedUp: (0, types_2.toOptBool)(settings.speedUp),
@@ -2,11 +2,6 @@ import { ExplicitChannel, AudioCodec, FrameRate, VideoTransformConfiguration } f
2
2
  /** @public */
3
3
  export declare type MediaNodeId = string;
4
4
  /** @public */
5
- export interface IntervalTimestamp {
6
- n: bigint;
7
- d: bigint;
8
- }
9
- /** @public */
10
5
  export interface StreamKey {
11
6
  streamId: number;
12
7
  programNumber: number;
@@ -113,7 +108,11 @@ export interface Resolution {
113
108
  width: number;
114
109
  height: number;
115
110
  }
111
+ /** Various options for de-interlacing video either in software or hardware (where available) */
116
112
  export declare type DeinterlaceSettings = {
113
+ /** Use the CPU to perform deinterlace, Note: if the video has been decoded on hardware
114
+ * it will need to be transferred to system memory and reuploaded again
115
+ */
117
116
  type: 'software';
118
117
  algorithm: 'bwdif' | 'kerndeint' | 'nnedi' | 'w3fdif' | 'yadif';
119
118
  };
@@ -1338,29 +1337,4 @@ export declare type Scte35GenericDescriptor = {
1338
1337
  };
1339
1338
  /** @public */
1340
1339
  export declare type MediaStoreStreamSelection = [StreamKey, StreamKey];
1341
- /** @public */
1342
- export declare type MediaStoreCut = {
1343
- /**
1344
- * Required: Name of the media store instance that this cut is from
1345
- */
1346
- mediaStoreName: string;
1347
- /**
1348
- * Required: The streams you want to playback from the media store
1349
- */
1350
- streamSelection: MediaStoreStreamSelection[];
1351
- /**
1352
- * Required: The start date/time for the cut. For a local recoder or a remote view,
1353
- * this is UTC. For an asset store, this starts at zero
1354
- */
1355
- startDateTime: Date;
1356
- /**
1357
- * Required: The duration of the cut in milliseconds
1358
- */
1359
- durationMs: number;
1360
- /**
1361
- * Required: In a video stream, if a start time or end time falls mid-gop, should the gop be trimmed to be frame accurate?
1362
- */
1363
- trimPartialGops: boolean;
1364
- sessionNum?: number;
1365
- };
1366
1340
  //# sourceMappingURL=types.d.ts.map
@@ -1,13 +1,12 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.toSrtMode = exports.fromSrtInputState = exports.fromRtmpServerInputState = exports.toBitDepth = exports.toAmdMA35DHevc = exports.toAmdMA35DH264 = exports.toTrinary = exports.toAmdMA35DEncodeCommon = exports.toAmdMA35DH264Profile = exports.toAmdMA35DHevcProfile = exports.toAmdMA35DHevcTier = exports.toAmdU30Hevc = exports.toAmdU30H264 = exports.toAmdU30H264Profile = exports.toAmdU30HevcProfile = exports.toAmdU30HevcTier = exports.toQuadraHevc = exports.toQuadraH264 = exports.toQuadraH264Profile = exports.toQuadraHevcProfile = exports.toQuadraHevcTier = exports.toLoganHevc = exports.toLoganH264 = exports.toLoganH264Profile = exports.toLoganHevcProfile = exports.toLoganHevcTier = exports.toNvidiaH264 = exports.toNvidiaHevc = exports.toNvidiaH264Profile = exports.toNvidiaHevcProfile = exports.toNvidiaHevcTier = exports.toNvidiaPreset = exports.toNvidiaRateControl = exports.toNvidiaRateControlMode = exports.toX265Codec = exports.toX265Tune = exports.toX264Codec = exports.toDeinterlaceSettings = exports.fromContext = exports.fromStreamMetadata = exports.metadataForStreamKey = exports.fromStreamKey = exports.mkStreamKey = exports.toMediaNodeId = exports.toOptString = exports.mkOptString = exports.toOptBool = exports.mkOptBool = exports.toOptInt = exports.mkOptInt = void 0;
4
- exports.toSingleCuts = exports.fromScte35PB = exports.toScte35PB = exports.mkWebrtcServerSettings = exports.mkEncryptionSettings = exports.fromSourceSubscriptionError = exports.fromSubscriptionError = exports.fromStreamStatistics = exports.fromAudioMeasureLevels = exports.mkDb = exports.toInterval = exports.fromInterval = exports.addInterval = exports.fromChannelName = exports.toChannelName = exports.getAmountOfChannels = exports.fromChannelLayout = exports.toChannelLayout = exports.fromSampleRate = exports.toSampleRate = exports.toSampleFormat = exports.toSsHardwareAcceleration = exports.toComposeHardwareAcceleration = exports.toComposeMissingStreamBehaviour = exports.toPixelFormat = exports.toSimpleEasing = exports.toAacProfile = exports.toImageFormat = exports.toStabilizationMode = exports.toSentenceBuildMode = exports.fromBrowserEvent = void 0;
5
- const protobuf_1 = require("@bufbuild/protobuf");
4
+ exports.fromScte35PB = exports.toScte35PB = exports.mkWebrtcServerSettings = exports.mkEncryptionSettings = exports.fromSourceSubscriptionError = exports.fromSubscriptionError = exports.fromStreamStatistics = exports.fromAudioMeasureLevels = exports.mkDb = exports.toInterval = exports.fromInterval = exports.addInterval = exports.fromChannelName = exports.toChannelName = exports.getAmountOfChannels = exports.fromChannelLayout = exports.toChannelLayout = exports.fromSampleRate = exports.toSampleRate = exports.toSampleFormat = exports.toSsHardwareAcceleration = exports.toComposeHardwareAcceleration = exports.toComposeMissingStreamBehaviour = exports.toPixelFormat = exports.toSimpleEasing = exports.toAacProfile = exports.toImageFormat = exports.toStabilizationMode = exports.toSentenceBuildMode = exports.fromBrowserEvent = void 0;
6
5
  const media_pb_1 = require("@norskvideo/norsk-api/lib/media_pb");
7
6
  const common_pb_1 = require("@norskvideo/norsk-api/lib/shared/common_pb");
8
7
  const utils_1 = require("../shared/utils");
9
8
  const media_pb_2 = require("@norskvideo/norsk-api/lib/media_pb");
10
- const protobuf_2 = require("@bufbuild/protobuf");
9
+ const protobuf_1 = require("@bufbuild/protobuf");
11
10
  /** @internal */
12
11
  function mkOptInt(i) {
13
12
  return (0, utils_1.provideFull)(common_pb_1.OptionalInt, { value: i });
@@ -2105,9 +2104,9 @@ function toScte35PB(scte35) {
2105
2104
  const t = cmd.type;
2106
2105
  switch (t) {
2107
2106
  case "null":
2108
- return (0, utils_1.provideFull)(media_pb_2.Scte35SpliceCommand, (0, utils_1.mkMessageCase)({ null: (0, utils_1.provideFull)(protobuf_2.Empty, {}) }));
2107
+ return (0, utils_1.provideFull)(media_pb_2.Scte35SpliceCommand, (0, utils_1.mkMessageCase)({ null: (0, utils_1.provideFull)(protobuf_1.Empty, {}) }));
2109
2108
  case "bandwidthReservation":
2110
- return (0, utils_1.provideFull)(media_pb_2.Scte35SpliceCommand, (0, utils_1.mkMessageCase)({ bandwidthReservation: (0, utils_1.provideFull)(protobuf_2.Empty, {}) }));
2109
+ return (0, utils_1.provideFull)(media_pb_2.Scte35SpliceCommand, (0, utils_1.mkMessageCase)({ bandwidthReservation: (0, utils_1.provideFull)(protobuf_1.Empty, {}) }));
2111
2110
  case "privateCommand":
2112
2111
  return (0, utils_1.provideFull)(media_pb_2.Scte35SpliceCommand, (0, utils_1.mkMessageCase)({ privateCommand: (0, utils_1.provideFull)(media_pb_2.Scte35PrivateCommand, cmd.value) }));
2113
2112
  case "timeSignal":
@@ -2316,7 +2315,7 @@ function fromScte35PB(infoPB) {
2316
2315
  }
2317
2316
  const fromBreakDuration = (breakDuration) => {
2318
2317
  if (breakDuration) {
2319
- return breakDuration;
2318
+ return { autoReturn: breakDuration.autoReturn, duration: breakDuration.duration };
2320
2319
  }
2321
2320
  else {
2322
2321
  console.warn("Break duration was missing in splice_insert", spliceCommand);
@@ -2371,19 +2370,4 @@ function fromScte35PB(infoPB) {
2371
2370
  return { ...infoPB, spliceCommand, descriptors: infoPB.descriptors.map(mkDescriptor).filter((x) => x !== undefined) };
2372
2371
  }
2373
2372
  exports.fromScte35PB = fromScte35PB;
2374
- /** @internal */
2375
- function toSingleCuts(cuts) {
2376
- return cuts.map(({ mediaStoreName, streamSelection, startDateTime, durationMs, trimPartialGops, sessionNum }) => (0, utils_1.provideFull)(media_pb_1.SingleCut, {
2377
- mediaStoreName: mediaStoreName,
2378
- streamSelection: streamSelection.map(([sourceStreamKey, outputStreamKey]) => (0, utils_1.provideFull)(media_pb_1.StreamSelection, {
2379
- sourceStreamKey: mkStreamKey(sourceStreamKey),
2380
- outputStreamKey: mkStreamKey(outputStreamKey)
2381
- })),
2382
- startDateTime: protobuf_1.Timestamp.fromDate(startDateTime),
2383
- durationMs,
2384
- trimPartialGops: trimPartialGops,
2385
- sessionNum: sessionNum ? (0, utils_1.provideFull)(common_pb_1.OptionalInt, { value: sessionNum }) : undefined
2386
- }));
2387
- }
2388
- exports.toSingleCuts = toSingleCuts;
2389
2373
  //# sourceMappingURL=types.js.map
package/lib/src/sdk.js CHANGED
@@ -124,9 +124,9 @@ class Norsk {
124
124
  this.mediaStore = {
125
125
  player: async (settings) => mediaStore_1.MediaStorePlayerNode.create(settings, this._client(), unregisterNode).then(registerNode),
126
126
  recorder: async (settings) => mediaStore_1.MediaStoreRecorderNode.create(settings, this._client(), unregisterNode).then(registerNode),
127
- makeCut: async (cutRequest) => mediaStore_1.MediaStoreActiveCut.create(cutRequest, this._client()),
128
- metadata: async (mediaStoreName) => (0, mediaStore_1.mediaStoreMetadata)(mediaStoreName, this._client()),
129
- asset: async (settings) => mediaStore_1.MediaStoreAsset.create(settings, this._client())
127
+ makeCut: (cutRequest) => mediaStore_1.MediaStoreActiveCut.create(cutRequest, this._client()),
128
+ asyncLoadAsset: async (settings) => mediaStore_1.MediaStoreAsset.create(settings, this._client()),
129
+ snapshot: async (settings) => mediaStore_1.MediaStoreSnapshot.create(settings, this._client())
130
130
  };
131
131
  this.debug = {
132
132
  streamTimestampReport: async (settings) => debug_1.StreamTimestampReportNode.create(settings, this._client(), unregisterNode).then(registerNode),
package/package.json CHANGED
@@ -1,11 +1,11 @@
1
1
  {
2
2
  "license": "MIT",
3
3
  "name": "@norskvideo/norsk-sdk",
4
- "version": "1.0.365",
4
+ "version": "1.0.367",
5
5
  "dependencies": {
6
6
  "@bufbuild/protobuf": "^0.3.0",
7
7
  "@grpc/grpc-js": "^1.2.2",
8
- "@norskvideo/norsk-api": "1.0.365",
8
+ "@norskvideo/norsk-api": "1.0.367",
9
9
  "lodash": "^4.17.21",
10
10
  "typescript-nullable": "^0.6.0"
11
11
  },
package/src/sdk.ts CHANGED
@@ -104,10 +104,10 @@ import {
104
104
  MediaStoreRecorderSettings,
105
105
  MediaStoreCutRequest,
106
106
  MediaStoreActiveCut,
107
- MediaStoreSession,
108
- mediaStoreMetadata,
109
107
  MediaStoreAssetSettings,
110
108
  MediaStoreAsset,
109
+ MediaStoreSnapshotSettings,
110
+ MediaStoreSnapshot,
111
111
  } from "./media_nodes/mediaStore";
112
112
 
113
113
  export * from "./types";
@@ -577,12 +577,12 @@ export class Norsk {
577
577
  MediaStorePlayerNode.create(settings, this._client(), unregisterNode).then(registerNode),
578
578
  recorder: async (settings: MediaStoreRecorderSettings) =>
579
579
  MediaStoreRecorderNode.create(settings, this._client(), unregisterNode).then(registerNode),
580
- makeCut: async (cutRequest: MediaStoreCutRequest) =>
580
+ makeCut: (cutRequest: MediaStoreCutRequest) =>
581
581
  MediaStoreActiveCut.create(cutRequest, this._client()),
582
- metadata: async (mediaStoreName: string) : Promise<MediaStoreSession[]> =>
583
- mediaStoreMetadata(mediaStoreName, this._client()),
584
- asset: async(settings: MediaStoreAssetSettings) =>
585
- MediaStoreAsset.create(settings, this._client())
582
+ asyncLoadAsset: async(settings: MediaStoreAssetSettings) =>
583
+ MediaStoreAsset.create(settings, this._client()),
584
+ snapshot: async(settings: MediaStoreSnapshotSettings) =>
585
+ MediaStoreSnapshot.create(settings, this._client())
586
586
  }
587
587
  this.debug = {
588
588
  streamTimestampReport: async (settings: StreamTimestampReportSettings) =>