@norskvideo/norsk-sdk 1.0.338 → 1.0.340

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4128 @@
1
+ /// <reference types="node" />
2
+
3
+ import { AudioCodec } from '@norskvideo/norsk-api/lib/media_pb';
4
+ import { CmafAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
5
+ import { CmafMultiVariantMessage } from '@norskvideo/norsk-api/lib/media_pb';
6
+ import { CmafVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
7
+ import { CmafWebVttMessage } from '@norskvideo/norsk-api/lib/media_pb';
8
+ import { Context } from '@norskvideo/norsk-api/lib/media_pb';
9
+ import { CurrentLoad } from '@norskvideo/norsk-api/lib/shared/common_pb';
10
+ import { ExplicitChannel } from '@norskvideo/norsk-api/lib/media_pb';
11
+ import { FileTsInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
12
+ import { GopStructure } from '@norskvideo/norsk-api/lib/media_pb';
13
+ import * as grpc from '@grpc/grpc-js';
14
+ import { HlsOutputEvent } from '@norskvideo/norsk-api/lib/media_pb';
15
+ import { HlsTsAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
16
+ import { HlsTsCombinedPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
17
+ import { HlsTsVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
18
+ import { MediaClient } from '@norskvideo/norsk-api/lib/media_grpc_pb';
19
+ import { Nullable } from 'typescript-nullable';
20
+ import { PlainMessage } from '@bufbuild/protobuf';
21
+ import { Readable } from 'stream';
22
+ import { RtmpError_UnsupportedAudio } from '@norskvideo/norsk-api/lib/media_pb';
23
+ import { RtmpError_UnsupportedVideo } from '@norskvideo/norsk-api/lib/media_pb';
24
+ import { Scte35SpliceInfoSection } from '@norskvideo/norsk-api/lib/media_pb';
25
+ import { StreamKey as StreamKey_2 } from '@norskvideo/norsk-api/lib/media_pb';
26
+ import { StreamStatisticsSampling } from '@norskvideo/norsk-api/lib/media_pb';
27
+ import { Subscription } from '@norskvideo/norsk-api/lib/media_pb';
28
+ import { TimestampProgramNudge } from '@norskvideo/norsk-api/lib/media_pb';
29
+ import { TsInputEvent } from '@norskvideo/norsk-api/lib/media_pb';
30
+ import { UdpTsInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
31
+ import { VancPayloadFormat as VancPayloadFormat_2 } from '@norskvideo/norsk-api/lib/media_pb';
32
+ import { Version } from '@norskvideo/norsk-api/lib/shared/common_pb';
33
+ import { Wave } from '@norskvideo/norsk-api/lib/media_pb';
34
+ import { Writable } from 'stream';
35
+
36
+ /** @public */
37
+ export declare type AacProfile = "lc" | "main" | "high";
38
+
39
+ /**
40
+ * @public
41
+ * Settings for an AAC encode
42
+ * see: {@link NorskTransform.audioEncode}
43
+ */
44
+ export declare interface AacSettings {
45
+ kind: "aac";
46
+ /** The output sample rate of this AAC encode */
47
+ sampleRate: SampleRate;
48
+ /** The AAC profile of this AAC encode */
49
+ profile: AacProfile;
50
+ }
51
+
52
+ export declare class AncillaryNode extends AutoProcessorMediaNode<"ancillary"> {
53
+ close(): void;
54
+ sendScte35(key: StreamKey, info: Scte35SpliceInfoSection): void;
55
+ }
56
+
57
+ export declare interface AncillarySettings extends ProcessorNodeSettings<AncillaryNode> {
58
+ onScte35?: (stream: StreamKey, message: Scte35SpliceInfoSection) => void;
59
+ onSmpte2038?: (stream: StreamKey, message: Smpte2038Message) => void;
60
+ }
61
+
62
+ /**
63
+ * @public
64
+ * Returns the stream keys for ancillary streams in a media context
65
+ * @param streams - The media context from which to return the stream keys
66
+ * @returns The ancillary stream keys in the media context
67
+ */
68
+ export declare function ancillaryStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
69
+
70
+ /** @public */
71
+ export declare interface AncillaryStreamMetadata {
72
+ }
73
+
74
+ /**
75
+ * @public
76
+ * Filters a context to only the ancillary streams within it
77
+ * @param streams - The media context from which to return the streams
78
+ * @returns The ancillary streams in the media context
79
+ */
80
+ export declare function ancillaryStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
81
+
82
+ /**
83
+ * @public
84
+ * see: {@link NorskTransform.audioBuildMultichannel}
85
+ */
86
+ export declare class AudioBuildMultichannelNode extends AutoProcessorMediaNode<"audio"> {
87
+ close(): void;
88
+ }
89
+
90
+ /**
91
+ * @public
92
+ * Settings for an Audio Build Multichannel Node
93
+ * see: {@link NorskTransform.audioBuildMultichannel}
94
+ * */
95
+ export declare interface AudioBuildMultichannelSettings extends ProcessorNodeSettings<AudioBuildMultichannelNode> {
96
+ /** The channel layout of the built outgoing stream */
97
+ channelLayout: ChannelLayout;
98
+ /**
99
+ * Stream keys specifying the source for each channel, where the order is
100
+ * significant. The streams must all have the same sample format and sample
101
+ * rate.
102
+ */
103
+ channelList: readonly StreamKey[];
104
+ /** The stream key to use for the outoging stream*/
105
+ outputStreamKey: StreamKey;
106
+ }
107
+
108
+ export { AudioCodec }
109
+
110
+ /**
111
+ * @public
112
+ * see: {@link NorskTransform.audioEncode}
113
+ */
114
+ export declare class AudioEncodeNode extends AutoProcessorMediaNode<"audio"> {
115
+ close(): void;
116
+ }
117
+
118
+ /**
119
+ * @public
120
+ * Settings for an audio encode
121
+ * see: {@link NorskTransform.audioEncode}
122
+ * */
123
+ export declare interface AudioEncodeSettings extends ProcessorNodeSettings<AudioEncodeNode> {
124
+ /**
125
+ * The channel layout of this encode
126
+ * Note: If the channel layout doesn't match then it will be automatically converted
127
+ * to gain greater control over this process, see {@link NorskTransform.audioMix} and {@link NorskTransform.audioMixMatrix}
128
+ * */
129
+ channelLayout: ChannelLayout;
130
+ /** The target bitrate of this encode */
131
+ bitrate: number;
132
+ /** The name given to the rendition portion of the stream key assigned to this node's output */
133
+ outputRenditionName: string;
134
+ /** What codec to (re) encode the audio to */
135
+ codec: OpusSettings | AacSettings;
136
+ }
137
+
138
+ /**
139
+ * @public
140
+ * see: {@link NorskTransform.audioGain}
141
+ */
142
+ export declare class AudioGainNode extends AutoProcessorMediaNode<"audio"> {
143
+ /**
144
+ * @public
145
+ * Updates the config of this AudioGain node for all subsequent frames
146
+ * this allows the user to change the gains in the outgoing stream
147
+ * dynamically as the stream progresses
148
+ * @param settings - The updated settings
149
+ */
150
+ updateConfig(settings: AudioGainSettingsUpdate): void;
151
+ close(): void;
152
+ }
153
+
154
+ /**
155
+ * @public
156
+ * Settings for an Audio Gain node
157
+ * see: {@link NorskTransform.audioGain}
158
+ * */
159
+ export declare interface AudioGainSettings extends ProcessorNodeSettings<AudioGainNode> {
160
+ /** A vector of gains for this source, one for each channel */
161
+ channelGains: readonly Gain[];
162
+ }
163
+
164
+ /**
165
+ * @public
166
+ * An update operation for an Audio Gain node
167
+ * see: {@link AudioGainNode.updateConfig}
168
+ * */
169
+ export declare interface AudioGainSettingsUpdate {
170
+ /** A vector of gains for this source, one for each channel */
171
+ channelGains?: readonly Gain[];
172
+ }
173
+
174
+ /** @public */
175
+ export declare interface AudioMeasureLevels {
176
+ stream: StreamKey;
177
+ pts: Interval;
178
+ channelLevels: ChannelLevels[];
179
+ }
180
+
181
+ /**
182
+ * @public
183
+ * see: {@link NorskControl.audioMeasureLevels}.
184
+ */
185
+ export declare class AudioMeasureLevelsNode extends AutoProcessorMediaNode<"audio"> {
186
+ close(): void;
187
+ }
188
+
189
+ /**
190
+ * @public
191
+ * Settings for an AudioMeasureLevelsNode
192
+ * see: {@link NorskControl.audioMeasureLevels}
193
+ */
194
+ export declare interface AudioMeasureLevelsSettings extends ProcessorNodeSettings<AudioMeasureLevelsNode> {
195
+ /**
196
+ * Called with the audio level data
197
+ * @param levels - The level data for the audio stream
198
+ * @eventProperty
199
+ */
200
+ onData: (levels: AudioMeasureLevels) => void;
201
+ intervalFrames?: number;
202
+ }
203
+
204
+ /**
205
+ * @public
206
+ * see: {@link NorskTransform.audioMixMatrix}
207
+ */
208
+ export declare class AudioMixMatrixNode extends AutoProcessorMediaNode<"audio"> {
209
+ /**
210
+ * @public
211
+ * Updates the config of this AudioMixMatrix node for all subsequent frames
212
+ * this allows the user to change the gains in the outgoing mix
213
+ * dynamically as the stream progresses
214
+ * @param settings - The updated settings
215
+ */
216
+ updateConfig(settings: AudioMixMatrixSettingsUpdate): void;
217
+ close(): void;
218
+ }
219
+
220
+ /**
221
+ * @public
222
+ * Settings for the Audio Mix Matrix Node
223
+ * see: {@link NorskTransform.audioMixMatrix}
224
+ * */
225
+ export declare interface AudioMixMatrixSettings extends ProcessorNodeSettings<AudioMixMatrixNode> {
226
+ /** The NxM matrix of gains from N input channels to M output channels */
227
+ channelGains: readonly Gain[][];
228
+ /** The desired output channel layout, such as "5.1" */
229
+ outputChannelLayout: ChannelLayout;
230
+ }
231
+
232
+ /**
233
+ * @public
234
+ * Config update for the {@link AudioMixMatrixNode}.
235
+ * Call {@link AudioMixMatrixNode.updateConfig} for updating the config.
236
+ */
237
+ export declare interface AudioMixMatrixSettingsUpdate {
238
+ /** The NxM updated matrix of gains from N input channels to M output channels */
239
+ channelGains: readonly Gain[][];
240
+ }
241
+
242
+ /**
243
+ * @public
244
+ * see: {@link NorskTransform.audioMix}
245
+ */
246
+ export declare class AudioMixNode<Pins extends string> extends ProcessorMediaNode<Pins> {
247
+ /**
248
+ * @public
249
+ * Updates the config of this AudioMix for all subsequent frames
250
+ * this allows the user to change the levels and sources in the outgoing mix
251
+ * dynamically as the stream progresses
252
+ * @param settings - The updated settings
253
+ */
254
+ updateConfig(settings: AudioMixSettingsUpdate<Pins>): void;
255
+ close(): void;
256
+ }
257
+
258
+ /**
259
+ * @public
260
+ * The settings for an AudioMix operation
261
+ * see: {@link NorskTransform.audioMix}
262
+ * */
263
+ export declare interface AudioMixSettings<Pins extends string> extends ProcessorNodeSettings<AudioMixNode<Pins>> {
264
+ /** The audio sources to mix */
265
+ sources: readonly AudioMixSource<Pins>[];
266
+ /** The source name to use for the output stream */
267
+ outputSource: string;
268
+ /** The sample rate that the mixer runs at
269
+ * all audio streams will be normalised to this value and therefore
270
+ * this will be the output sample rate of this node */
271
+ sampleRate?: SampleRate;
272
+ }
273
+
274
+ /**
275
+ * @public
276
+ * An update operation for an AudioMix node
277
+ * see: {@link AudioMixNode.updateConfig}
278
+ * */
279
+ export declare interface AudioMixSettingsUpdate<Pins extends string> {
280
+ /** The audio sources to mix along with their potentially new gain values */
281
+ sources: readonly AudioMixSource<Pins>[];
282
+ }
283
+
284
+ /**
285
+ * @public
286
+ * The settings for a single source within an AudioMix operation
287
+ * see: {@link NorskTransform.audioMix}
288
+ * */
289
+ export declare interface AudioMixSource<Pins> {
290
+ /** The name of the InputPin for this source */
291
+ pin: Pins;
292
+ /** A vector of gains for this source, one for each channel */
293
+ channelGains?: readonly Gain[];
294
+ }
295
+
296
+ /**
297
+ * @public
298
+ * see: {@link NorskInput.audioSignal}
299
+ */
300
+ export declare class AudioSignalGeneratorNode extends SourceMediaNode {
301
+ close(): void;
302
+ }
303
+
304
+ /**
305
+ * @public
306
+ * Settings for an Audio Signal Generator
307
+ * see: {@link NorskInput.audioSignal}
308
+ * */
309
+ export declare interface AudioSignalGeneratorSettings extends SourceNodeSettings<AudioSignalGeneratorNode> {
310
+ /** The source name to set in the stream key of the outgoing stream */
311
+ sourceName: string;
312
+ /** The audio channel layout of the generated stream */
313
+ channelLayout: ChannelLayout;
314
+ /** The sample rate of the generated stream */
315
+ sampleRate: SampleRate;
316
+ /** The sample format to use. Default: "fltp" */
317
+ sampleFormat?: SampleFormat;
318
+ /**
319
+ * Waveform - create one with {@link mkSine}
320
+ * */
321
+ wave?: Wave;
322
+ }
323
+
324
+ /**
325
+ * @public
326
+ * see: {@link NorskTransform.audioSplitMultichannel}
327
+ */
328
+ export declare class AudioSplitMultichannelNode extends AutoProcessorMediaNode<"audio"> {
329
+ close(): void;
330
+ }
331
+
332
+ /**
333
+ * @public
334
+ * Settings for an Audio Split Multichannel node
335
+ * see: {@link NorskTransform.audioSplitMultichannel}
336
+ * */
337
+ export declare interface AudioSplitMultichannelSettings extends ProcessorNodeSettings<AudioSplitMultichannelNode> {
338
+ /**
339
+ * The output stream key of the first channel
340
+ * subsequent channels will have streamId incremented by N
341
+ */
342
+ outputStreamKey: StreamKey;
343
+ }
344
+
345
+ /**
346
+ * @public
347
+ * Returns the stream keys for audio streams in a media context
348
+ * @param streams - The media context from which to return the stream keys
349
+ * @returns The audio stream keys in the media context
350
+ */
351
+ export declare function audioStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
352
+
353
+ /** @public */
354
+ export declare interface AudioStreamMetadata {
355
+ codec: AudioCodec;
356
+ sampleRate: SampleRate;
357
+ channelLayout?: ChannelLayout;
358
+ }
359
+
360
+ /**
361
+ * @public
362
+ * Filters a context to only the audio streams within it
363
+ * @param streams - The media context from which to return the streams
364
+ * @returns The audio streams in the media context
365
+ */
366
+ export declare function audioStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
367
+
368
+ /** @public */
369
+ export declare function audioToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
370
+
371
+ /**
372
+ * @public
373
+ * see: {@link NorskTransform.audioTranscribeAws}
374
+ */
375
+ export declare class AudioTranscribeAwsNode extends AutoProcessorMediaNode<"audio"> {
376
+ close(): void;
377
+ }
378
+
379
+ /**
380
+ * @public
381
+ * Settings for an Audio Transcribe operation using AWS
382
+ * see: {@link NorskTransform.audioTranscribeAws}
383
+ * */
384
+ export declare interface AudioTranscribeAwsSettings extends ProcessorNodeSettings<AudioTranscribeAwsNode> {
385
+ /** Region for the transcribe endpoint */
386
+ awsRegion: string;
387
+ /** the stream id to allocate to the outgoing stream*/
388
+ outputStreamId: number;
389
+ /** the language that we want to transcribe (also put in the outgoing metadata) */
390
+ language: string;
391
+ /** The mode to be used for building sentences */
392
+ sentenceBuildMode: SentenceBuildMode;
393
+ /** The mode to be used for stabilising sentences */
394
+ sentenceStabilizationMode: StabilizationMode;
395
+ /** The AWS credentials to use for this operation
396
+ * If not supplied, the standard environment variables will be used if present
397
+ * */
398
+ awsCredentials?: AwsCredentials;
399
+ }
400
+
401
+ export declare interface AutoProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
402
+ }
403
+
404
+ export declare class AutoProcessorMediaNode<Pins extends string> {
405
+ constructor(client: MediaClient, getGrpcStream: () => (Readable | Writable), subscribeFn: (subscription: Subscription) => void, subscribeErrorFn?: (error: SubscriptionError) => void, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
406
+ }
407
+
408
+ /** @public */
409
+ export declare class AutoSinkMediaNode<Pins extends string> extends SinkMediaNode<Pins | "auto"> {
410
+ /** Subscribe to the given sources.
411
+ *
412
+ * This version of subscribe simply requires a list of stream keys to be
413
+ * returned from each selector, and the server will automatically
414
+ * assign each stream to the appropriate pin on the sink node.
415
+ * This is the appropriate method for most cases.
416
+ *
417
+ * @param done - will be called with no arguments if the subscription succeeds,
418
+ * or an error if it failed. This error indicates the specific reason it
419
+ * failed, so you can take appropriate actions in response. It will be called
420
+ * before the `subscribedStreamsChangedFn` or `subscribeErrorFn` callbacks
421
+ * provided in the config for the node.
422
+ *
423
+ * Errors are also logged to the debug log.
424
+ */
425
+ subscribe(sources: ReceiveFromAddressAuto[], validation?: (context: Context) => boolean, done?: (error?: SubscriptionError) => void): void;
426
+ }
427
+
428
+ /** @public */
429
+ export declare function avToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
430
+
431
+ /** @public */
432
+ export declare interface AwsCredentials {
433
+ accessKey: string;
434
+ secretKey: string;
435
+ sessionToken: string;
436
+ }
437
+
438
+ /**
439
+ * @public
440
+ * Configuration for pushing a segmented media stream directly to AWS S3
441
+ * */
442
+ export declare interface AwsS3PushDestinationSettings {
443
+ type: "s3";
444
+ /** The hostname of the s3 server being pushed to. */
445
+ host: string;
446
+ /** the port of the s3 server being pushed to. */
447
+ port: number;
448
+ /** the path under which segments and playlists will be pushed to */
449
+ pathPrefix: string;
450
+ /**
451
+ * Optionally supply a string that will be inserted into the path structure for segments published in this stream
452
+ *
453
+ * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
454
+ * cacheing directives
455
+ */
456
+ sessionId?: string;
457
+ /**
458
+ * A unique identifier for this destination
459
+ *
460
+ * This can be used for supplying updates to configuration to this destination specifically
461
+ * see: {@link UpdateCredentials}
462
+ */
463
+ id: string;
464
+ /**
465
+ * The AWS region being pushed to
466
+ */
467
+ awsRegion: string;
468
+ /**
469
+ * AWS credentials to be used for connecting to S3
470
+ * Standard environment variables will be read if these are not provided
471
+ */
472
+ awsCredentials?: AwsCredentials;
473
+ /**
474
+ * Informs the playlist generation how long segments will be retained for on the remote server
475
+ * in order to generate an accurate playlist
476
+ */
477
+ retentionPeriodSeconds: number;
478
+ }
479
+
480
+ /**
481
+ * @public
482
+ * There are three possible modes:
483
+ *
484
+ * - "abr": encode in average bitrate mode, specified in kilobits/sec (note, 1
485
+ * kilobit is 1000 bits). You can make use of the vbv settings to control
486
+ * the bounds on how much the actual bitrate can fluctuate within the bounds
487
+ * of the average
488
+ *
489
+ * - "cqp": encode in constant quantizer mode. In general, crf will give better
490
+ * results, although cqp can be faster to encode
491
+ *
492
+ * - "crf": encode in constant rate factor mode. This will give a constant 'quality'
493
+ * to the encode, but with a variable bitrate
494
+ */
495
+ export declare interface BitrateMode {
496
+ value: number;
497
+ mode: "abr" | "cqp" | "crf";
498
+ }
499
+
500
+ /** @public */
501
+ export declare type BrowserEvent = {
502
+ case: "onLoaded";
503
+ value: BrowserOnLoaded;
504
+ } | {
505
+ case: "onLoadStart";
506
+ value: BrowserOnLoadStart;
507
+ } | {
508
+ case: "onLoadEnd";
509
+ value: BrowserOnLoadEnd;
510
+ } | {
511
+ case: "onLoadError";
512
+ value: BrowserOnLoadError;
513
+ };
514
+
515
+ /**
516
+ * @public
517
+ * see: {@link NorskInput.browser}
518
+ */
519
+ export declare class BrowserInputNode extends SourceMediaNode {
520
+ /**
521
+ * @public
522
+ * Supply new config for an active web browser session
523
+ * */
524
+ updateConfig(settings: BrowserInputSettingsUpdate): void;
525
+ /**
526
+ * @public
527
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
528
+ * */
529
+ nudge(nudge: number): void;
530
+ close(): void;
531
+ }
532
+
533
+ /**
534
+ * @public
535
+ * Settings for a Browser Input
536
+ * see: {@link NorskInput.browser}
537
+ * */
538
+ export declare interface BrowserInputSettings extends SourceNodeSettings<BrowserInputNode>, StreamStatisticsMixin {
539
+ /** The url to load in the browser session */
540
+ url: string;
541
+ /** This is the resolution of the window opened to render the page
542
+ * This is therefore also output resolution of the generated video
543
+ */
544
+ resolution: {
545
+ width: number;
546
+ height: number;
547
+ };
548
+ /** The source name to populate the outgoing stream key with */
549
+ sourceName: string;
550
+ /** The frame rate at which to generate video from the web page
551
+ * Note: If the web page is static, this will just mean the initial frame
552
+ * is duplicated at the required frame rate
553
+ * */
554
+ frameRate: FrameRate;
555
+ /** An optional callback for reacting to events from the embedded browser
556
+ * At the very least this is useful for logging events (such as a 404)
557
+ * */
558
+ onBrowserEvent?: (event: BrowserEvent) => void;
559
+ }
560
+
561
+ /**
562
+ * @public
563
+ * A settings update for a running browser
564
+ * see: {@link BrowserInputNode.updateConfig}
565
+ * */
566
+ export declare interface BrowserInputSettingsUpdate {
567
+ /** Optionally, a new URL to load within the active session */
568
+ url?: string;
569
+ /** Optionally, a new resolution to use for outgoing video */
570
+ resolution?: {
571
+ width: number;
572
+ height: number;
573
+ };
574
+ }
575
+
576
+ /** @public */
577
+ export declare interface BrowserOnLoaded {
578
+ url: string;
579
+ }
580
+
581
+ /** @public */
582
+ export declare interface BrowserOnLoadEnd {
583
+ url: string;
584
+ statusCode: number;
585
+ }
586
+
587
+ /** @public */
588
+ export declare interface BrowserOnLoadError {
589
+ url: string;
590
+ errorText: string;
591
+ errorCode: number;
592
+ }
593
+
594
+ /** @public */
595
+ export declare interface BrowserOnLoadStart {
596
+ url: string;
597
+ }
598
+
599
+ /** @public Channel layout for an audio stream */
600
+ export declare type ChannelLayout = "mono" | "stereo" | "surround" | "4.0" | "5.0" | "5.1" | "7.1" | "5.1.4" | "7.1.4" | (ChannelName | ExplicitChannel)[];
601
+
602
+ /** @public */
603
+ export declare interface ChannelLevels {
604
+ rms?: Db;
605
+ peak?: Db;
606
+ }
607
+
608
+ /** @public */
609
+ export declare type ChannelName =
610
+ /** Left front */
611
+ "l"
612
+ /** Right front */
613
+ | "r"
614
+ /** Centre front */
615
+ | "c"
616
+ /** Low frequency enhancement */
617
+ | "lfe"
618
+ /** Left surround */
619
+ | "ls"
620
+ /** Right surround */
621
+ | "rs"
622
+ /** Left front centre */
623
+ | "lc"
624
+ /** Right front centre */
625
+ | "rc"
626
+ /** Rear surround left */
627
+ | "lsr"
628
+ /** Rear Surround Right */
629
+ | "rsr"
630
+ /** Rear centre */
631
+ | "cs"
632
+ /** Left surround direct */
633
+ | "lsd"
634
+ /** Right surround direct */
635
+ | "rsd"
636
+ /** Left side surround */
637
+ | "lss"
638
+ /** Right side surround */
639
+ | "rss"
640
+ /** Left wide front */
641
+ | "lw"
642
+ /** Right wide front */
643
+ | "rw"
644
+ /** Left front vertical height */
645
+ | "lv"
646
+ /** Right front vertical height */
647
+ | "rv"
648
+ /** Centre front vertical height */
649
+ | "cv"
650
+ /** Left surround vertical height rear */
651
+ | "lvr"
652
+ /** Right surround vertical height rear */
653
+ | "rvr"
654
+ /** Centre vertical height rear */
655
+ | "cvr"
656
+ /** Left vertical height side surround */
657
+ | "lvss"
658
+ /** Right vertical height side surround */
659
+ | "rvss"
660
+ /** Top centre surround */
661
+ | "ts"
662
+ /** Low frequency enhancement 2 */
663
+ | "lfe2"
664
+ /** Left front vertical bottom */
665
+ | "lb"
666
+ /** Right front vertical bottom */
667
+ | "rb"
668
+ /** Centre front vertical bottom */
669
+ | "cb"
670
+ /** Left vertical height surround */
671
+ | "lvs"
672
+ /** Right vertical height surround */
673
+ | "rvs"
674
+ /** Low frequency enhancement 3 */
675
+ | "lfe3"
676
+ /** Left edge of screen */
677
+ | "leos"
678
+ /** Right edge of screen */
679
+ | "reos"
680
+ /** Halfway between centre of screen and left edge of screen */
681
+ | "hwbcal"
682
+ /** Halfway between centre of screen and right edge of screen */
683
+ | "hwbcar"
684
+ /** Left back surround */
685
+ | "lbs"
686
+ /** Right back surround */
687
+ | "rbs"
688
+ /** Unknown */
689
+ | "unknown";
690
+
691
+ /** @public */
692
+ export declare function clientHostExternal(): string;
693
+
694
+ /** @public */
695
+ export declare function clientHostInternal(): string;
696
+
697
+ /** @public */
698
+ export declare function clientPortExternal(): string;
699
+
700
+ /** @public */
701
+ export declare function clientPortInternal(): string;
702
+
703
+ /**
704
+ * @public
705
+ * see: {@link NorskOutput.cmafAudio}
706
+ */
707
+ export declare class CmafAudioOutputNode extends CmafNodeWithPlaylist<CmafAudioMessage, "audio", CmafAudioOutputNode> {
708
+ /**
709
+ * @public
710
+ * Updates the credentials for a specific destination within this output by id
711
+ * see: {@link UpdateCredentials}
712
+ * see: {@link CmafDestinationSettings}
713
+ */
714
+ updateCredentials(settings: UpdateCredentials): void;
715
+ }
716
+
717
+ /**
718
+ * @public
719
+ * Possible destinations for a segmented media stream
720
+ * - {@link HlsPushDestinationSettings}: Push to a generic HTTP server
721
+ * - {@link AwsS3PushDestinationSettings}: Push to Amazon S3
722
+ * - {@link LocalPullDestinationSettings}: Serve directly from the Norsk Web Server
723
+ * */
724
+ export declare type CmafDestinationSettings = HlsPushDestinationSettings | AwsS3PushDestinationSettings | LocalPullDestinationSettings;
725
+
726
+ /**
727
+ * @public
728
+ * see: {@link NorskOutput.cmafMultiVariant}
729
+ */
730
+ export declare class CmafMultiVariantOutputNode extends CmafNodeWithPlaylist<CmafMultiVariantMessage, "video" | "audio" | "subtitle", CmafMultiVariantOutputNode> {
731
+ /** @public The URL of the file based multi variant playlist */
732
+ playlistUrl: string;
733
+ /**
734
+ * @public
735
+ * Updates the credentials for a specific destination within this output by id
736
+ * see: {@link UpdateCredentials}
737
+ * see: {@link CmafDestinationSettings}
738
+ */
739
+ updateCredentials(settings: UpdateCredentials): void;
740
+ }
741
+
742
+ /**
743
+ * @public
744
+ * Settings for a CMAF Multi Variant Playlist
745
+ * see {@link NorskOutput.cmafMultiVariant}
746
+ */
747
+ export declare interface CmafMultiVariantOutputSettings extends SinkNodeSettings<CmafMultiVariantOutputNode> {
748
+ /**
749
+ * The name of this multi variant playlist (.m3u8 will be added onto this field to generate a filename)
750
+ */
751
+ playlistName: string;
752
+ /**
753
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
754
+ */
755
+ destinations: CmafDestinationSettings[];
756
+ /**
757
+ * Directives to add to the m3u multi variant playlist
758
+ */
759
+ m3uAdditions?: string;
760
+ /**
761
+ * XML fragment to add to the (top-level) MPD element
762
+ */
763
+ mpdAdditions?: string;
764
+ }
765
+
766
+ declare class CmafNodeWithPlaylist<ClientMessage, Pins extends string, T extends MediaNodeState> extends AutoProcessorMediaNode<Pins> {
767
+ constructor(client: MediaClient, settings: ProcessorNodeSettings<T> & StreamStatisticsMixin, grpcInit: () => grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, subscribeFn: (subscription: Subscription) => void, playlistPath: PlaylistPath);
768
+ /**
769
+ * @public
770
+ * Returns the URL to the HLS playlist entry. Note this can only be evaluated once the stream is active as it
771
+ * varies with the stream subscribed to. Useful during development, but you probably want to
772
+ * use {@link NorskOutput.cmafMultiVariant} for production.
773
+ */
774
+ url(): Promise<string>;
775
+ close(): void;
776
+ }
777
+
778
+ /**
779
+ * @public
780
+ * Settings for a CMAF Audio and Video Outputs
781
+ * see {@link NorskOutput.cmafAudio}, {@link NorskOutput.cmafVideo}
782
+ */
783
+ export declare interface CmafOutputSettings extends SinkNodeSettings<CmafAudioOutputNode | CmafVideoOutputNode> {
784
+ /**
785
+ * The target segment duration in seconds. Norsk will make the largest segments it can
786
+ * without going over this target
787
+ */
788
+ segmentDurationSeconds: number;
789
+ /**
790
+ * The target part duration in seconds. Norsk will make the largest parts it can
791
+ * without going over this target
792
+ */
793
+ partDurationSeconds: number;
794
+ /**
795
+ * By default, the program date time or event start time will be based on the
796
+ * timestamp of the first video packet received by Norsk in a stream.
797
+ *
798
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
799
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
800
+ *
801
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
802
+ *
803
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
804
+ */
805
+ delayOutputMs?: number;
806
+ /**
807
+ * Settings for encrypting the content.
808
+ */
809
+ encryption?: EncryptionSettings;
810
+ /**
811
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
812
+ */
813
+ destinations: CmafDestinationSettings[];
814
+ /**
815
+ * Directives to add to the m3u media playlist
816
+ */
817
+ m3uAdditions?: string;
818
+ /**
819
+ * XML fragment to add to the mpd Representation element
820
+ */
821
+ mpdAdditions?: string;
822
+ }
823
+
824
+ /**
825
+ * @public
826
+ * see: {@link NorskOutput.cmafVideo}
827
+ */
828
+ export declare class CmafVideoOutputNode extends CmafNodeWithPlaylist<CmafVideoMessage, "video", CmafVideoOutputNode> {
829
+ /**
830
+ * @public
831
+ * Updates the credentials for a specific destination within this output by id
832
+ * see: {@link UpdateCredentials}
833
+ * see: {@link CmafDestinationSettings}
834
+ */
835
+ updateCredentials(settings: UpdateCredentials): void;
836
+ }
837
+
838
+ /**
839
+ * @public
840
+ * see: {@link NorskOutput.cmafWebVtt}
841
+ */
842
+ export declare class CmafWebVttOutputNode extends CmafNodeWithPlaylist<CmafWebVttMessage, "subtitle", CmafWebVttOutputNode> {
843
+ /**
844
+ * @public
845
+ * Updates the credentials for a specific destination within this output by id
846
+ * see: {@link UpdateCredentials}
847
+ * see: {@link CmafDestinationSettings}
848
+ */
849
+ updateCredentials(settings: UpdateCredentials): void;
850
+ }
851
+
852
+ /**
853
+ * @public
854
+ * Settings for a CMAF WebVTT Output
855
+ * see {@link NorskOutput.cmafWebVtt}
856
+ */
857
+ export declare interface CmafWebVttOutputSettings extends SinkNodeSettings<CmafWebVttOutputNode> {
858
+ /**
859
+ * The target segment duration in seconds, Norsk will split subtitles over multiple segments
860
+ * in a compliant manner if necessary
861
+ */
862
+ segmentDurationSeconds: number;
863
+ /**
864
+ * By default, the program date time or event start time will be based on the
865
+ * timestamp of the first video packet received by Norsk in a stream.
866
+ *
867
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
868
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
869
+ *
870
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
871
+ *
872
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
873
+ */
874
+ delayOutputMs?: number;
875
+ /**
876
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
877
+ */
878
+ destinations: CmafDestinationSettings[];
879
+ /**
880
+ * A local directory in which to store the segments while they're available
881
+ */
882
+ hlsCacheDirectory: string;
883
+ /**
884
+ * The maximum number of segments to display in a single generated playlist
885
+ */
886
+ maximumPlaylistSegments: number;
887
+ }
888
+
889
+ /** @public */
890
+ export declare type ComposeHardwareAcceleration =
891
+ /**
892
+ * Use the quadra overlay functionality to perform the compose
893
+ */
894
+ "quadra";
895
+
896
+ /** @public */
897
+ export declare type ComposeMissingStreamBehaviour =
898
+ /**
899
+ * Produce frames on output by dropping the part(s) of the composition which
900
+ * cannot be fulfilled.
901
+ */
902
+ "drop_part"
903
+ /**
904
+ * Wait for all streams to be present before producing (further) output
905
+ */
906
+ | "wait_for_all";
907
+
908
+ /**
909
+ * @public
910
+ * A single layer of a video compose operation
911
+ * see {@link NorskTransform.videoCompose}
912
+ * */
913
+ export declare interface ComposePart<Pins> {
914
+ /** Input pin for this source */
915
+ pin: Pins;
916
+ /**
917
+ * The area within the source picture to include. This may be the full picture
918
+ * or cropped, and will be rescaled if necessary.
919
+ * If a referenceResolution is specified, then this is within that coordinate system, otherwise
920
+ * this is taken to be within the coordinate system of the input image
921
+ */
922
+ sourceRect: OffsetRect;
923
+ /**
924
+ * The area within the destination picture to place this part of the
925
+ * composition.
926
+ * If a referenceResolution is specified, then this is within that coordinate system, otherwise
927
+ * this is taken to be within the coordinate system of the destination image
928
+ */
929
+ destRect: OffsetRect;
930
+ /**
931
+ * Z-index to determine ordering by which the sources are overlaid
932
+ * (higher layers appear on top)
933
+ */
934
+ zIndex: number;
935
+ /**
936
+ * Opacity multiplier of this overlay (where 0.0 is fully transparent and 1.0
937
+ * is fully opaque)
938
+ */
939
+ opacity: number;
940
+ /** Optionally identify the part to enable transitions */
941
+ id?: string;
942
+ /**
943
+ * Optionally specify a transition for this part. A transition is applied only
944
+ * if the part is specified in both the existing and the current/new
945
+ * configuration, identified by having the same id specified, and a transition
946
+ * is specified for the new configuration.
947
+ */
948
+ transition?: PartTransition;
949
+ /**
950
+ * Optionally supply a reference resolution. This allows description of the
951
+ * composition in a desired coordinate system, e.g. a resolution of 100x100
952
+ * can be specified to allow the source and destination areas to be described
953
+ * in percentage terms, or a notional resolution can be used that is
954
+ * independent of the source resolutions that may be provided.
955
+ *
956
+ * If unset, this will be overridden by a global reference resolution if that is present
957
+ *
958
+ * This is useful to set if you don't know the input resolution of a part but want to be able to describe
959
+ * an operation on that part.
960
+ */
961
+ referenceResolution?: Resolution;
962
+ }
963
+
964
+ /** @public */
965
+ export declare type ContextType = "full" | "singleSource" | "singleProgram" | "singleStream" | "singleRendition";
966
+
967
+ /** @public */
968
+ export declare interface Core {
969
+ logicalCpuIds: LogicalCpuId[];
970
+ }
971
+
972
+ /** @public */
973
+ export declare interface CpuTopology {
974
+ numaNodes: NumaNode[];
975
+ }
976
+
977
+ /** @public A decibel (dB). A null value represents -inf. */
978
+ export declare type Db = number | null;
979
+
980
+ /** @public */
981
+ export declare interface DeckLinkCard {
982
+ index: number;
983
+ displayName: string;
984
+ inputConnections: DeckLinkVideoConnection[];
985
+ outputConnections: DeckLinkVideoConnection[];
986
+ ioSupport: DeckLinkVideoIOSupport[];
987
+ }
988
+
989
+ /** @public */
990
+ export declare interface DeckLinkDisplayMode {
991
+ id: DeckLinkDisplayModeId;
992
+ name: string;
993
+ width: number;
994
+ height: number;
995
+ frameRate: FrameRate;
996
+ }
997
+
998
+ /** @public */
999
+ export declare type DeckLinkDisplayModeId = "auto" | "sd_ntsc" | "sd_nt23" | "sd_pal" | "sd_ntsp" | "sd_palp" | "hd1080_23ps" | "hd1080_24ps" | "hd1080_p25" | "hd1080_p29" | "hd1080_p30" | "hd1080_p47" | "hd1080_p48" | "hd1080_i50" | "hd1080_i59" | "hd1080_i60" | "hd1080_p95" | "hd1080_p96" | "hd1080_p10" | "hd1080_p11" | "hd1080_p12" | "hd1080_p50" | "hd1080_p59" | "hd1080_p60" | "hd720_p50" | "hd720_p59" | "hd720_p60" | "two_k_23" | "two_k_24" | "two_k_25" | "two_k_dci_23" | "two_k_dci_24" | "two_k_dci_25" | "two_k_dci_29" | "two_k_dci_30" | "two_k_dci_47" | "two_k_dci_48" | "two_k_dci_50" | "two_k_dci_59" | "two_k_dci_60" | "two_k_dci_95" | "two_k_dci_96" | "two_k_dci_10" | "two_k_dci_11" | "two_k_dci_12" | "four_k_23" | "four_k_24" | "four_k_25" | "four_k_29" | "four_k_30" | "four_k_47" | "four_k_48" | "four_k_50" | "four_k_59" | "four_k_60" | "four_k_95" | "four_k_96" | "four_k_10" | "four_k_11" | "four_k_12" | "four_k_dci_23" | "four_k_dci_24" | "four_k_dci_25" | "four_k_dci_29" | "four_k_dci_30" | "four_k_dci_47" | "four_k_dci_48" | "four_k_dci_50" | "four_k_dci_59" | "four_k_dci_60" | "four_k_dci_95" | "four_k_dci_96" | "four_k_dci_10" | "four_k_dci_11" | "four_k_dci_12" | "eight_k_23" | "eight_k_24" | "eight_k_25" | "eight_k_29" | "eight_k_30" | "eight_k_47" | "eight_k_48" | "eight_k_50" | "eight_k_59" | "eight_k_60" | "eight_k_dci_23" | "eight_k_dci_24" | "eight_k_dci_25" | "eight_k_dci_29" | "eight_k_dci_30" | "eight_k_dci_47" | "eight_k_dci_48" | "eight_k_dci_50" | "eight_k_dci_59" | "eight_k_dci_60" | "pc_vga6" | "pc_svg6" | "pc_wxg5" | "pc_wxg6" | "pc_sxg5" | "pc_sxg6" | "pc_uxg5" | "pc_uxg6" | "pc_wux5" | "pc_wux6" | "pc_1945" | "pc_1946" | "pc_wqh5" | "pc_wqh6" | "pc_wqx5" | "pc_wqx6" | "special_iunk";
1000
+
1001
+ /**
1002
+ * @public
1003
+ * SDI capture through a DeckLink card.
1004
+ * see: {@link NorskInput.deckLink}.
1005
+ */
1006
+ export declare class DeckLinkInputNode extends SourceMediaNode {
1007
+ /**
1008
+ * @public
1009
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
1010
+ * */
1011
+ nudge(nudge: number): void;
1012
+ close(): void;
1013
+ }
1014
+
1015
+ /**
1016
+ * @public
1017
+ * Settings to control SDI capture through a DeckLink card
1018
+ * see: {@link NorskInput.deckLink}
1019
+ */
1020
+ export declare interface DeckLinkInputSettings extends InputSettings<DeckLinkInputNode>, StreamStatisticsMixin {
1021
+ /** Which card to use */
1022
+ cardIndex: number;
1023
+ /** The audio channel layout for the input */
1024
+ channelLayout: ChannelLayout;
1025
+ /** SDI or HDMI capture */
1026
+ videoConnection: DeckLinkVideoConnection;
1027
+ /** Typically left undefined, but can be used to force capture for a specific {@link DeckLinkDisplayModeId}. If
1028
+ * the source is not currently in this mode, then no capture will occur. */
1029
+ displayModeId?: DeckLinkDisplayModeId;
1030
+ pixelFormat?: DeckLinkPixelFormat;
1031
+ }
1032
+
1033
+ /** @public */
1034
+ export declare type DeckLinkPixelFormat = "uyvy" | "argb";
1035
+
1036
+ /** @public */
1037
+ export declare type DeckLinkVideoConnection = "sdi" | "hdmi" | "optical_sdi" | "component" | "composite" | "svideo";
1038
+
1039
+ /** @public */
1040
+ export declare type DeckLinkVideoIOSupport = "capture" | "playback";
1041
+
1042
+ /**
1043
+ * @public
1044
+ * Drop every N frames from an incoming video stream
1045
+ * */
1046
+ export declare interface DropEvery {
1047
+ kind: "every";
1048
+ every: number;
1049
+ }
1050
+
1051
+ /**
1052
+ * @public
1053
+ * Randomly drop frames on a stream
1054
+ * - 0.0 means don't drop any frames
1055
+ * - 1.0 means drop every single frame
1056
+ * */
1057
+ export declare interface DropRandom {
1058
+ kind: "random";
1059
+ percentage: number;
1060
+ }
1061
+
1062
+ /** @public */
1063
+ export declare interface EncryptionSettings {
1064
+ /**
1065
+ * The 16-byte key ID used to identify the key, hexadecimal or GUID encoded.
1066
+ */
1067
+ encryptionKeyId: string;
1068
+ /**
1069
+ * The 16-byte key used to encrypt the data, hexadecimal encoded.
1070
+ */
1071
+ encryptionKey: string;
1072
+ /**
1073
+ * The PSSH box(es) to include in the MP4, base64 encoded.
1074
+ * This is typically given by the DRM provider.
1075
+ */
1076
+ encryptionPssh: string;
1077
+ /**
1078
+ * The common encryption scheme used to encrypt data, as per ISO/IEC 23001-7:2016.
1079
+ *
1080
+ * - Default: CBCS encryption scheme (AES-CBC 10% pattern encryption).
1081
+ * Full-sample encryption for audio tracks, subsample encryption for video
1082
+ * tracks.
1083
+ *
1084
+ * - CENC encryption scheme (AES-CTR). Full-sample encryption for audio tracks,
1085
+ * subsample encryption for video tracks.
1086
+ */
1087
+ encryptionScheme?: "cbcs" | "cenc";
1088
+ }
1089
+
1090
+ /**
1091
+ * @public
1092
+ * see: {@link NorskInput.fileImage}
1093
+ */
1094
+ export declare class FileImageInputNode extends SourceMediaNode {
1095
+ close(): void;
1096
+ }
1097
+
1098
+ /**
1099
+ * @public
1100
+ * Settings for an image file source
1101
+ * see: {@link NorskInput.fileImage}
1102
+ * */
1103
+ export declare interface FileImageInputSettings extends SourceNodeSettings<FileImageInputNode>, StreamStatisticsMixin {
1104
+ /** The source name to set in the stream key of the outgoing stream */
1105
+ sourceName: string;
1106
+ /** the filename to read the image from */
1107
+ fileName: string;
1108
+ /** The file format for the image. Will be inferred from the file name if not specified. */
1109
+ imageFormat?: ImageFormat;
1110
+ }
1111
+
1112
+ /**
1113
+ * @public
1114
+ * Information about an Mp4 File
1115
+ * */
1116
+ export declare interface FileMp4Info {
1117
+ /** The duration of the Mp4 file in milliseconds (if known) */
1118
+ durationMs?: number;
1119
+ /** The total length of the mp4 file in bytes, if known */
1120
+ byteLength?: number;
1121
+ }
1122
+
1123
+ /**
1124
+ * @public
1125
+ * see: {@link NorskInput.fileMp4}
1126
+ */
1127
+ export declare class FileMp4InputNode extends SourceMediaNode {
1128
+ /**
1129
+ * @public
1130
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
1131
+ * */
1132
+ nudge(nudge: number): void;
1133
+ updateSettings(settings: FileMp4InputSettingsUpdate): void;
1134
+ close(): void;
1135
+ }
1136
+
1137
+ /**
1138
+ * @public
1139
+ * Settings for an File Based Mp4 Input
1140
+ * see: {@link NorskInput.fileMp4}
1141
+ */
1142
+ export declare interface FileMp4InputSettings extends SourceNodeSettings<FileMp4InputNode>, StreamStatisticsMixin {
1143
+ /** The source name to set in the stream key of the outgoing stream */
1144
+ sourceName: string;
1145
+ /** Path to the MP4 file to read */
1146
+ fileName: string;
1147
+ /** Callback to be notified when the file ends */
1148
+ onEof?: () => void;
1149
+ /** Callback to be notified when the file is initially read */
1150
+ onInfo?: (info: FileMp4Info) => void;
1151
+ /** Whether to loop back to the start of the file after reaching the end (default false) */
1152
+ loop?: boolean;
1153
+ }
1154
+
1155
+ /**
+ * @public
+ * Settings that can be changed on a running file-based MP4 input
+ * see: {@link FileMp4InputNode.updateSettings}
+ */
+ export declare interface FileMp4InputSettingsUpdate {
1156
+ /** Whether to loop back to the start of the file after reaching the end */
1157
+ loop?: boolean;
1158
+ }
1159
+
1160
+ /**
1161
+ * @public
1162
+ * see: {@link NorskOutput.fileMp4}
1163
+ */
1164
+ export declare class FileMp4OutputNode extends AutoSinkMediaNode<"audio" | "video"> {
1165
+ close(): void;
1166
+ /**
1167
+ * @public
1168
+ * Writes a non-fragmented MP4 file containing the data received so far to the
1169
+ * supplied filename
1170
+ */
1171
+ writeFile(nonfragmentedFileName: string): void;
1172
+ }
1173
+
1174
+ /**
1175
+ * @public
1176
+ * Settings to control MP4 file output
1177
+ * see {@link NorskOutput.fileMp4}
1178
+ */
1179
+ export declare interface FileMp4OutputSettings extends SinkNodeSettings<FileMp4OutputNode>, StreamStatisticsMixin {
1180
+ /**
1181
+ * Required: stream fragmented MP4 to this file.
1182
+ */
1183
+ fragmentedFileName: string;
1184
+ /**
1185
+ * Write non-fragmented MP4 to this file on close, creates a `.tmp` file to
1186
+ * store the frame data.
1187
+ */
1188
+ nonfragmentedFileName?: string;
1189
+ /**
1190
+ * Settings for encrypting the audio track.
1191
+ */
1192
+ audioEncryption?: EncryptionSettings;
1193
+ /**
1194
+ * Settings for encrypting the video track.
1195
+ */
1196
+ videoEncryption?: EncryptionSettings;
1197
+ }
1198
+
1199
+ /**
1200
+ * @public
1201
+ * see: {@link NorskInput.fileTs}
1202
+ */
1203
+ export declare class FileTsInputNode extends TsCommonInputNode<FileTsInputMessage, FileTsInputNode> {
1204
+ updateSettings(settings: FileTsInputSettingsUpdate): void;
1205
+ }
1206
+
1207
+ /** @public */
1208
+ export declare interface FileTsInputSettings extends LocalFileInputSettings, StreamStatisticsMixin {
1209
+ /** Whether to loop back to the start of the file after reaching the end */
1210
+ loop?: boolean;
1211
+ }
1212
+
1213
+ /** @public */
1214
+ export declare interface FileTsInputSettingsUpdate {
1215
+ /** Whether to loop back to the start of the file after reaching the end */
1216
+ loop?: boolean;
1217
+ }
1218
+
1219
+ /**
1220
+ * @public
1221
+ * see: {@link NorskOutput.fileTs}
1222
+ */
1223
+ export declare class FileTsOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
1224
+ close(): void;
1225
+ }
1226
+
1227
+ /**
1228
+ * @public
1229
+ * The settings for an output Transport Stream written to file
1230
+ * see: {@link NorskOutput.fileTs}
1231
+ */
1232
+ export declare interface FileTsOutputSettings extends SinkNodeSettings<FileTsOutputNode>, StreamStatisticsMixin {
1233
+ /** The file to write - this will be truncated if it already exists */
1234
+ fileName: string;
1235
+ }
1236
+
1237
+ /**
1238
+ * @public
1239
+ * see: {@link NorskInput.fileWebVtt}
1240
+ */
1241
+ export declare class FileWebVttInputNode extends SourceMediaNode {
1242
+ close(): void;
1243
+ }
1244
+
1245
+ /** @public A frame rate expressed as a rational: `frames` frames every `seconds` seconds */
1246
+ export declare interface FrameRate {
1247
+ /** Frame count (numerator) */
+ frames: number;
1248
+ /** Time period in seconds (denominator) */
+ seconds: number;
1249
+ }
1250
+
1251
+ /**
+ * Converts a wire-format (protobuf) VANC payload format value into the SDK's VancPayloadFormat.
+ * NOTE(review): not tagged `@public`, unlike neighbouring exports — presumably an internal helper; confirm.
+ */
+ export declare function fromVancPayloadFormat(format: VancPayloadFormat_2): VancPayloadFormat;
1252
+
1253
+ /**
1254
+ * @public
1255
+ * A relative change in decibels, expressing a power ratio.
1256
+ *
1257
+ * A value of 0dB means no change, positive values mean an increase in power, and negative values mean a decrease in power.
1258
+ */
1259
+ export declare type Gain = Db;
1260
+
1261
+ /** @public */
1262
+ export declare function getAmountOfChannels(layout: ChannelLayout): number;
1263
+
1264
+ /**
+ * @public
+ * Describes the hardware discovered on the host machine
+ */
+ export declare interface HardwareInfo {
1272
+ /** The CPU layout of the host (NUMA nodes, cores and logical CPU ids) */
+ cpuTopology: CpuTopology;
1273
+ /** Total system memory (units not stated in this declaration — confirm against runtime docs) */
+ systemMemory: number;
1274
+ /** DeckLink cards present, with their connections and capture/playback support */
+ deckLinkCards: DeckLinkCard[];
1275
+ }
1276
+
1277
+ /**
1278
+ * @public
1279
+ * Configuration for pushing a segmented media stream directly to a generic http server
1280
+ * */
1281
+ export declare interface HlsPushDestinationSettings {
1282
+ type: "generic";
1283
+ /** The hostname of the web server being pushed to.
1284
+ * This will be used to re-resolve the IP address on failures
1285
+ * */
1286
+ host: string;
1287
+ /** the port of the web server being pushed to. */
1288
+ port: number;
1289
+ /** the path under which segments and playlists will be pushed to */
1290
+ pathPrefix: string;
1291
+ /**
1292
+ * Optionally supply a string that will be inserted into the path structure for segments published in this stream
1293
+ *
1294
+ * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
1295
+ * caching directives
1296
+ */
1297
+ sessionId?: string;
1298
+ /**
1299
+ * A unique identifier for this destination
1300
+ *
1301
+ * This can be used for supplying updates to configuration to this destination specifically
1302
+ * see: {@link UpdateCredentials}
1303
+ */
1304
+ id: string;
1305
+ /**
1306
+ * Informs the playlist generation how long segments will be retained for on the remote server
1307
+ * in order to generate an accurate playlist
1308
+ */
1309
+ retentionPeriodSeconds: number;
1310
+ }
1311
+
1312
+ /**
1313
+ * @public
1314
+ * see: {@link NorskOutput.hlsTsAudio}
1315
+ */
1316
+ export declare class HlsTsAudioOutputNode extends CmafNodeWithPlaylist<HlsTsAudioMessage, "audio", HlsTsAudioOutputNode> {
1317
+ /**
1318
+ * @public
1319
+ * Updates the credentials for a specific destination within this output by id
1320
+ * see: {@link UpdateCredentials}
1321
+ * see: {@link CmafDestinationSettings}
1322
+ */
1323
+ updateCredentials(settings: UpdateCredentials): void;
1324
+ }
1325
+
1326
+ /**
1327
+ * @public
1328
+ * Settings for a HLS TS Audio Output
1329
+ * see {@link NorskOutput.hlsTsAudio}
1330
+ */
1331
+ export declare interface HlsTsAudioOutputSettings extends SinkNodeSettings<HlsTsAudioOutputNode> {
1332
+ /**
1333
+ * The target segment duration in seconds. Norsk will make the largest segments it can
1334
+ * without going over this target using the durations of the individual audio frames
1335
+ */
1336
+ segmentDurationSeconds: number;
1337
+ /**
1338
+ * By default, the program date time or event start time will be based on the
1339
+ * timestamp of the first video packet received by Norsk in a stream.
1340
+ *
1341
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
1342
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
1343
+ *
1344
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
1345
+ *
1346
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
1347
+ */
1348
+ delayOutputMs?: number;
1349
+ /**
1350
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
1351
+ */
1352
+ destinations: CmafDestinationSettings[];
1353
+ /**
1354
+ * Directives to add to the m3u media playlist
1355
+ */
1356
+ m3uAdditions?: string;
1357
+ /**
1358
+ * XML fragment to add to the mpd Representation element
1359
+ */
1360
+ mpdAdditions?: string;
1361
+ }
1362
+
1363
+ /**
1364
+ * @public
1365
+ * see: {@link NorskOutput.hlsTsCombinedPush}
1366
+ */
1367
+ export declare class HlsTsCombinedPushOutputNode extends CmafNodeWithPlaylist<HlsTsCombinedPushMessage, "audio" | "video", HlsTsCombinedPushOutputNode> {
1368
+ }
1369
+
1370
+ /**
1371
+ * @public
1372
+ * Settings for a HLS Transport Stream Combined Push Output
1373
+ * see {@link NorskOutput.hlsTsCombinedPush}
1374
+ */
1375
+ export declare interface HlsTsCombinedPushOutputSettings extends SinkNodeSettings<HlsTsCombinedPushOutputNode> {
1376
+ /**
1377
+ * The target segment duration in seconds. Norsk will use the framerate of the video stream in order
1378
+ * to produce compliant segments that are less than or equal to this in duration, with audio packaged alongside
1379
+ * using timestamps to line them up
1380
+ */
1381
+ segmentDurationSeconds: number;
1382
+ /**
1383
+ * By default, the program date time or event start time will be based on the
1384
+ * timestamp of the first video packet received by Norsk in a stream.
1385
+ *
1386
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
1387
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
1388
+ *
1389
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
1390
+ *
1391
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
1392
+ */
1393
+ delayOutputMs?: number;
1394
+ /**
1395
+ * The destination {@link CmafDestinationSettings} for this stream to be published to
1396
+ */
1397
+ destination: CmafDestinationSettings;
1398
+ /**
1399
+ * The name of this media playlist (.m3u8 will be added onto this field to generate a filename)
1400
+ */
1401
+ playlistName: string;
1402
+ /**
1403
+ * Directives to add to the m3u media playlists
1404
+ */
1405
+ m3uAdditions?: string;
1406
+ /**
1407
+ * XML fragment to add to the mpd Representation elements
1408
+ */
1409
+ mpdAdditions?: string;
1410
+ }
1411
+
1412
+ /**
1413
+ * @public
1414
+ * see: {@link NorskOutput.hlsTsMultiVariant}
1415
+ */
1416
+ export declare class HlsTsMultiVariantOutputNode extends CmafNodeWithPlaylist<CmafMultiVariantMessage, "video" | "audio" | "subtitle", HlsTsMultiVariantOutputNode> {
1417
+ /** @public The URL of the file based multi variant playlist */
1418
+ playlistUrl: string;
1419
+ /**
1420
+ * @public
1421
+ * Updates the credentials for a specific destination within this output by id
1422
+ * see: {@link UpdateCredentials}
1423
+ * see: {@link CmafDestinationSettings}
1424
+ */
1425
+ updateCredentials(settings: UpdateCredentials): void;
1426
+ }
1427
+
1428
+ /**
1429
+ * @public
1430
+ * Settings for a Hls Ts Multivariant Playlist
1431
+ * see {@link NorskOutput.hlsTsMultiVariant}
1432
+ */
1433
+ export declare interface HlsTsMultiVariantOutputSettings extends SinkNodeSettings<HlsTsMultiVariantOutputNode> {
1434
+ /**
1435
+ * The name of this multi variant playlist (.m3u8 will be added onto this field to generate a filename)
1436
+ */
1437
+ playlistName: string;
1438
+ /**
1439
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
1440
+ */
1441
+ destinations: CmafDestinationSettings[];
1442
+ /**
1443
+ * Directives to add to the m3u multi variant playlist
1444
+ */
1445
+ m3uAdditions?: string;
1446
+ }
1447
+
1448
+ /**
1449
+ * @public
1450
+ * see: {@link NorskOutput.hlsTsVideo}
1451
+ */
1452
+ export declare class HlsTsVideoOutputNode extends CmafNodeWithPlaylist<HlsTsVideoMessage, "video", HlsTsVideoOutputNode> {
1453
+ /**
1454
+ * @public
1455
+ * Updates the credentials for a specific destination within this output by id
1456
+ * see: {@link UpdateCredentials}
1457
+ * see: {@link CmafDestinationSettings}
1458
+ */
1459
+ updateCredentials(settings: UpdateCredentials): void;
1460
+ }
1461
+
1462
+ /**
1463
+ * @public
1464
+ * Settings for a HLS TS Video Output
1465
+ * see {@link NorskOutput.hlsTsVideo}
1466
+ */
1467
+ export declare interface HlsTsVideoOutputSettings extends SinkNodeSettings<HlsTsVideoOutputNode> {
1468
+ /**
1469
+ * The target segment duration in seconds. Norsk will use the framerate of the stream in order
1470
+ * to produce compliant segments that are less than or equal to this in duration
1471
+ */
1472
+ segmentDurationSeconds: number;
1473
+ /**
1474
+ * By default, the program date time or event start time will be based on the
1475
+ * timestamp of the first video packet received by Norsk in a stream.
1476
+ *
1477
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
1478
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
1479
+ *
1480
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
1481
+ *
1482
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
1483
+ */
1484
+ delayOutputMs?: number;
1485
+ /**
1486
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
1487
+ */
1488
+ destinations: CmafDestinationSettings[];
1489
+ /**
1490
+ * Directives to add to the m3u media playlist
1491
+ */
1492
+ m3uAdditions?: string;
1493
+ /**
1494
+ * XML fragment to add to the mpd Representation element
1495
+ */
1496
+ mpdAdditions?: string;
1497
+ }
1498
+
1499
+ /** @public */
1500
+ export declare type IceServerSettings = {
1501
+ urls: string[];
1502
+ username?: string;
1503
+ credential?: string;
1504
+ };
1505
+
1506
+ /** @public */
1507
+ export declare type ImageFormat = "png" | "jpeg" | "gif" | "webp" | "pnm" | "tiff" | "tga" | "dds" | "bmp" | "ico" | "hdr" | "openexr" | "farbfeld" | "avif";
1508
+
1509
+ /**
1510
+ * @public
1511
+ * Base settings for most input nodes
1512
+ * */
1513
+ export declare interface InputSettings<T extends MediaNodeState> extends SourceNodeSettings<T> {
1514
+ /** The source name to set on the stream key on the outgoing stream from this node */
1515
+ sourceName: string;
1516
+ }
1517
+
1518
+ /** @public A time interval measured as ticks / (ticks per second) */
1519
+ export declare interface Interval {
1520
+ /** Number of ticks (numerator) */
+ n: number;
1521
+ /** Ticks per second (denominator) */
+ d: number;
1522
+ }
1523
+
1524
+ /**
1525
+ * @public
1526
+ * see: {@link NorskTransform.jitterBuffer}
1527
+ */
1528
+ export declare class JitterBufferNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
1529
+ close(): void;
1530
+ }
1531
+
1532
+ /**
1533
+ * @public
1534
+ * Settings for a Jitter Buffer
1535
+ * see: {@link NorskTransform.jitterBuffer}
1536
+ * */
1537
+ export declare interface JitterBufferSettings extends ProcessorNodeSettings<JitterBufferNode> {
1538
+ /** Buffer delay in milliseconds */
1539
+ delayMs: number;
1540
+ }
1541
+
1542
+ /**
1543
+ * @public
1544
+ * The standard settings for any node reading from a file
1545
+ * */
1546
+ export declare interface LocalFileInputSettings extends InputSettings<SourceMediaNode> {
1547
+ /** The file to be read from */
1548
+ fileName: string;
1549
+ /** An optional callback that will be invoked when file end is reached */
1550
+ onEof?: () => void;
1551
+ }
1552
+
1553
+ /**
1554
+ * @public
1555
+ * Configuration for the serving of segments and playlists directly from the Norsk Web Server
1556
+ * Note: While this is both useful for local testing and for sitting behind a reverse caching proxy / CDN
1557
+ * it is not expected that Norsk serve as the edge server in most scenarios
1558
+ * */
1559
+ export declare interface LocalPullDestinationSettings {
1560
+ type: "local";
1561
+ /**
1562
+ * Optionally supply a string that will be inserted into the path structure for segments published in this stream
1563
+ *
1564
+ * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
1565
+ * caching directives
1566
+ */
1567
+ sessionId?: string;
1568
+ /**
1569
+ * Informs the playlist generation how long segments will be retained for
1570
+ * and informs the local web server how long to retain those segments
1571
+ */
1572
+ retentionPeriodSeconds: number;
1573
+ }
1574
+
1575
+ /** @public */
1576
+ export declare type Log = {
1577
+ level: "emergency" | "alert" | "critical" | "error" | "warning" | "notice" | "info" | "debug";
1578
+ timestamp: Date;
1579
+ message: string;
1580
+ };
1581
+
1582
+ /**
1583
+ * @public
1584
+ * Settings for a H264 Encode using Netint Logan hardware
1585
+ * A detailed description of these params can be found
1586
+ * on the Netint Logan Encoder Documentation
1587
+ *
1588
+ * These fields have deliberately been written to maintain the same semantics as the
1589
+ * Logan documentation where possible.
1590
+ *
1591
+ * If left undefined, all will default to Logan's own defaults
1592
+ * */
1593
+ export declare interface LoganH264 {
1594
+ type: "logan-h264";
1595
+ /** This (for convenience) takes the xcoder string that Logan's
1596
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
1597
+ * quickly and will override any values set manually in the rest of this interface.
1598
+ *
1599
+ * It is expected that developers will choose to use the typed fields for most things instead
1600
+ * when moving to production, as they offer a degree of validation and type safety
1601
+ * */
1602
+ extraOpts?: string;
1603
+ enableAud?: boolean;
1604
+ gpuIndex?: number;
1605
+ bitrate?: number;
1606
+ flushGop?: boolean;
1607
+ enableVfr?: boolean;
1608
+ crf?: number;
1609
+ cbr?: boolean;
1610
+ gopPresetIndex?: number;
1611
+ intraPeriod?: number;
1612
+ rcEnable?: boolean;
1613
+ intraQp?: number;
1614
+ rcInitDelay?: number;
1615
+ profile?: LoganH264Profile;
1616
+ level?: LoganH264Level;
1617
+ }
1618
+
1619
+ /** @public */
1620
+ export declare type LoganH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1621
+
1622
+ /** @public */
1623
+ export declare type LoganH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
1624
+
1625
+ /**
1626
+ * @public
1627
+ * Settings for a HEVC Encode using Netint Logan hardware
1628
+ * A detailed description of these params can be found
1629
+ * on the Netint Logan Encoder Documentation
1630
+ *
1631
+ * These fields have deliberately been written to maintain the same semantics as the
1632
+ * Logan documentation where possible.
1633
+ *
1634
+ * If left undefined, all will default to Logan's own defaults
1635
+ * */
1636
+ export declare interface LoganHevc {
1637
+ type: "logan-hevc";
1638
+ /** This (for convenience) takes the xcoder string that Logan's
1639
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
1640
+ * quickly and will override any values set manually in the rest of this interface.
1641
+ *
1642
+ * It is expected that developers will choose to use the typed fields for most things instead
1643
+ * when moving to production, as they offer a degree of validation and type safety
1644
+ * */
1645
+ extraOpts?: string;
1646
+ enableAud?: boolean;
1647
+ gpuIndex?: number;
1648
+ bitrate?: number;
1649
+ flushGop?: boolean;
1650
+ enableVfr?: boolean;
1651
+ crf?: number;
1652
+ cbr?: boolean;
1653
+ gopPresetIndex?: number;
1654
+ intraPeriod?: number;
1655
+ rcEnable?: boolean;
1656
+ intraQp?: number;
1657
+ rcInitDelay?: number;
1658
+ profile?: LoganHevcProfile;
1659
+ level?: LoganHevcLevel;
1660
+ tier?: LoganHevcTier;
1661
+ lossless?: boolean;
1662
+ hrdEnable?: boolean;
1663
+ dolbyVisionProfile?: number;
1664
+ }
1665
+
1666
+ /** @public */
1667
+ export declare type LoganHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1668
+
1669
+ /** @public */
1670
+ export declare type LoganHevcProfile = "main" | "main10";
1671
+
1672
+ /** @public */
1673
+ export declare type LoganHevcTier = "main" | "high";
1674
+
1675
+ /** @public */
1676
+ export declare type LogicalCpuId = number;
1677
+
1678
+ /** @public */
1679
+ export declare type MediaNodeId = string;
1680
+
1681
+ /** @public */
1682
+ export declare class MediaNodeState {
1683
+ id: MediaNodeId | undefined;
1684
+ constructor(client: MediaClient);
1685
+ close(): void;
1686
+ }
1687
+
1688
+ /**
1689
+ * @public
1690
+ * Generate encryption parameters from an encryption KeyID and Key,
1691
+ * in the form KEYID:KEY, both 16-byte hexadecimal
1692
+ */
1693
+ export declare function mkEncryption(encryption: string | undefined, pssh?: string | undefined): EncryptionSettings | undefined;
1694
+
1695
+ /** @public */
1696
+ export declare function mkSine(freq: number): Wave;
1697
+
1698
+ /** @public */
1699
+ export declare interface MultiStreamStatistics {
1700
+ allStreams: SingleStreamStatistics[];
1701
+ sampleSizeSeconds: number;
1702
+ /**
1703
+ * Either `"default"`, if there is only one direction, or `"input"`/`"output"`
1704
+ * (for duplex nodes, where there are two directions)
1705
+ */
1706
+ label: string;
1707
+ total: StreamStatistics;
1708
+ audio: StreamStatistics;
1709
+ video: StreamStatistics;
1710
+ }
1711
+
1712
+ /** @public */
1713
+ export declare function newSilentMatrix(rows: number, cols: number): Gain[][];
1714
+
1715
+ /**
1716
+ * @public
1717
+ * Settings common to all media nodes
1718
+ */
1719
+ export declare interface NodeSettings<T extends MediaNodeState> {
1720
+ /**
1721
+ * Media Node identifier. If one is not specified, a random identifier will be generated.
1722
+ */
1723
+ id?: string;
1724
+ /**
1725
+ * Called when the Norsk session ends
1726
+ */
1727
+ onEnd?: () => void;
1728
+ /**
1729
+ * Called with any errors from the Node
1730
+ */
1731
+ onError?: (error: Error) => void;
1732
+ /**
1733
+ * Called when the Node closes
1734
+ */
1735
+ onClose?: () => void;
1736
+ /**
1737
+ * Callback to synchronously perform an action when node creation is complete
1738
+ * (e.g. subscribe a downstream node before the first context/frame might arrive)
1739
+ */
1740
+ onCreate?: (node: T) => void;
1741
+ }
1742
+
1743
+ /**
1744
+ * @public
1745
+ * The entrypoint for all Norsk Media applications
1746
+ *
1747
+ * @example
1748
+ * ```ts
1749
+ * const norsk = new Norsk();
1750
+ * ```
1751
+ */
1752
+ export declare class Norsk {
1753
+ /**
1754
+ * Implements the {@link NorskInput} interface
1755
+ */
1756
+ input: NorskInput;
1757
+ /**
1758
+ * Implements the {@link NorskOutput} interface
1759
+ */
1760
+ output: NorskOutput;
1761
+ /**
1762
+ * Implements the {@link NorskDuplex} interface
1763
+ */
1764
+ duplex: NorskDuplex;
1765
+ /**
1766
+ * Implements the {@link NorskProcessor} interface
1767
+ */
1768
+ processor: NorskProcessor;
1769
+ /**
1770
+ * Implements the {@link NorskSystem} interface
1771
+ */
1772
+ system: NorskSystem;
1773
+ /**
1774
+ * Norsk Runtime version information
1775
+ */
1776
+ version: Version;
1777
+ close(): void;
1778
+ /** @public */
1779
+ static connect(settings?: NorskSettings): Promise<Norsk>;
1780
+ }
1781
+
1782
+ /**
1783
+ * @public
1784
+ * Methods that allow you to control and monitor media streams
1785
+ */
1786
+ export declare interface NorskControl {
1787
+ /**
1788
+ * Switch between multiple input sources via a hard cut. May be used to switch between
1789
+ * sources of possibly different configurations or without decoding.
1790
+ * @param settings - Options for the switcher
1791
+ */
1792
+ streamSwitchHard<Pins extends string>(settings: StreamSwitchHardSettings<Pins>): Promise<StreamSwitchHardNode<Pins>>;
1793
+ /**
1794
+ * Switch between multiple input sources without interruption, via a transition.
1795
+ * @param settings - Options for the switcher
1796
+ */
1797
+ streamSwitchSmooth<Pins extends string>(settings: StreamSwitchSmoothSettings<Pins>): Promise<StreamSwitchSmoothNode<Pins>>;
1798
+ /**
1799
+ * Record statistical information about media streams, including bitrate,
1800
+ * frame rate, and number of keyframes, measured over some configurable
1801
+ * sampling windows.
1802
+ *
1803
+ * Corresponding settings are found on many input and output nodes.
1804
+ * @param settings - Callback and sampling intervals
1805
+ */
1806
+ streamStatistics(settings: StreamStatisticsSettings): Promise<StreamStatisticsNode>;
1807
+ /**
1808
+ * Monitor the volume of an audio stream
1809
+ * @param settings - Callback and options for the level data
1810
+ */
1811
+ audioMeasureLevels(settings: AudioMeasureLevelsSettings): Promise<AudioMeasureLevelsNode>;
1812
+ }
1813
+
1814
+ /**
1815
+ * @public
1816
+ * Methods that allow you to both ingest and egest media from your application
1817
+ * at the same time
1818
+ */
1819
+ export declare interface NorskDuplex {
1820
+ /**
1821
+ * Playback audio/video via webrtc to a browser, and accept audio/video input from a browser.
1822
+ * The browser client must conform to a custom protocol as implemented in the hosted test page.
1823
+ * (Available from {@link WebRTCBrowserNode.playerUrl}.)
1824
+ * For general WebRTC ingest prefer the WHIP input node, and for egest to a downstream media server
1825
+ * use the WHIP output node.
1826
+ * @param settings - Options for the webrtc node
1827
+ */
1828
+ webRtcBrowser(settings: WebRTCBrowserSettings): Promise<WebRTCBrowserNode>;
1829
+ }
1830
+
1831
+ /** @public */
1832
+ export declare function norskHost(): string;
1833
+
1834
+ /**
1835
+ * @public
1836
+ * Methods that allow you to ingest media into your application
1837
+ */
1838
+ export declare interface NorskInput {
1839
+ /** Create an RTMP Server to receive RTMP streams into your application
1840
+ * @param settings - Configuration for the RTMP server
1841
+ */
1842
+ rtmpServer(settings: RtmpServerInputSettings): Promise<RtmpServerInputNode>;
1843
+ /**
1844
+ * Read from a Transport Stream file with realtime playback.
1845
+ * @param settings - Configuration for the file input
1846
+ */
1847
+ fileTs(settings: FileTsInputSettings): Promise<FileTsInputNode>;
1848
+ /**
1849
+ * Stream from a SRT source
1850
+ * @param settings - Configuration for the SRT input
1851
+ */
1852
+ srt(settings: SrtInputSettings): Promise<SrtInputNode>;
1853
+ /**
1854
+ * Receive media via WebRTC via the WHIP standard.
1855
+ *
1856
+ * Here Norsk acts as the Media Server receiving from a remote WHIP client, to act as the
1857
+ * WHIP client sending to a remote media server see {@link NorskOutput.whip}. For a duplex
1858
+ * connection to a browser peer see {@link NorskDuplex.webRtcBrowser}.
1859
+ *
1860
+ * @param settings - Configuration for the WHIP input
1861
+ */
1862
+ whip(settings: WhipInputSettings): Promise<WhipInputNode>;
1863
+ /**
1864
+ * Read from a Transport Stream on the network
1865
+ * This can be multicast/unicast or broadcast
1866
+ * @param settings - Configuration for the UDP input
1867
+ */
1868
+ udpTs(settings: RemoteInputSettings<UdpTsInputNode>): Promise<UdpTsInputNode>;
1869
+ fileWebVtt(settings: LocalFileInputSettings): Promise<FileWebVttInputNode>;
1870
+ /**
1871
+ * Read an image from a file. Various image formats are supported, see the
1872
+ * documentation for {@link FileImageInputSettings} for more details.
1873
+ * @param settings - Configuration for the file input
1874
+ *
1875
+ * The image will then be provided into Norsk as a video at 25fps for use
1876
+ * in other operations
1877
+ */
1878
+ fileImage(settings: FileImageInputSettings): Promise<FileImageInputNode>;
1879
+ /**
1880
+ * Read a MP4 (fragmented or not) from a file with realtime playback.
1881
+ * This will not play frames that are written to the file after the node
1882
+ * starts.
1883
+ * @param settings - Configuration for the file input
1884
+ */
1885
+ fileMp4(settings: FileMp4InputSettings): Promise<FileMp4InputNode>;
1886
+ /**
1887
+ * Stream from a remote RTP source
1888
+ * @param settings - Configuration for the RTP input
1889
+ */
1890
+ rtp(settings: RtpInputSettings): Promise<RtpInputNode>;
1891
+ /**
1892
+ * Generate a test audio signal with a configurable waveform.
1893
+ * @param settings - Configuration for the audio signal
1894
+ */
1895
+ audioSignal(settings: AudioSignalGeneratorSettings): Promise<AudioSignalGeneratorNode>;
1896
+ /**
1897
+ * Generates a video source by rendering an HTML page
1898
+ * @param settings - Settings for the web page
1899
+ */
1900
+ browser(settings: BrowserInputSettings): Promise<BrowserInputNode>;
1901
+ /**
1902
+ * SDI/HDMI Input using a BlackMagic DeckLink card.
1903
+ * The available cards on the machine can be enumerated using the {@link NorskSystem.hardwareInfo} API.
1904
+ *
1905
+ * Multiple cards and both SDI and HDMI inputs are supported, with all DeckLink-supported
1906
+ * input resolutions and framerates. The capture format is currently 8-bit only,
1907
+ * but 10-bit captures will be supported soon. All supported audio channels can be captured.
1908
+ * At present, additional data such as closed-captions and HDR metadata is not captured.
1909
+ * @param settings - Settings for the SDI capture
1910
+ */
1911
+ deckLink(settings: DeckLinkInputSettings): Promise<DeckLinkInputNode>;
1912
+ }
1913
+
1914
+ /**
1915
+ * @public
1916
+ * Methods that allow you to egest media from your application
1917
+ */
1918
+ export declare interface NorskOutput {
1919
+ /**
1920
+ * Produces video segments with the supplied settings for use in
1921
+ * HLS or DASH manifests.
1922
+ *
1923
+ * These can optionally be served via the Norsk web server or be pushed
1924
+ * to other locations - see {@link CmafDestinationSettings}
1925
+ *
1926
+ * @param settings - Configuration for the CMAF Video Stream
1927
+ */
1928
+ cmafVideo(settings: CmafOutputSettings): Promise<CmafVideoOutputNode>;
1929
+ /**
1930
+ * Produces audio segments with the supplied settings for use in
1931
+ * HLS or DASH manifests.
1932
+ *
1933
+ * These can optionally be served via the Norsk web server or be pushed
1934
+ * to other locations - see {@link CmafDestinationSettings}
1935
+ *
1936
+ * @param settings - Configuration for the CMAF Audio Stream
1937
+ */
1938
+ cmafAudio(settings: CmafOutputSettings): Promise<CmafAudioOutputNode>;
1939
+ /**
1940
+ * Produces WebVTT segments with the supplied settings for use in
1941
+ * HLS or DASH manifests. These are served via the Norsk web server
1942
+ *
1943
+ * @param settings - Configuration for the CMAF WebVTT Stream
1944
+ */
1945
+ cmafWebVtt(settings: CmafWebVttOutputSettings): Promise<CmafWebVttOutputNode>;
1946
+ /**
1947
+ * Produces a multi variant (formerly known as master) hls and/or dash manifest for a collection of media streams
1948
+ *
1949
+ * This can optionally be served via the Norsk web server or be pushed
1950
+ * to other locations - see {@link CmafDestinationSettings}
1951
+ *
1952
+ * @param settings - Configuration for the CMAF Multi Variant Manifest
1953
+ */
1954
+ cmafMultiVariant(settings: CmafMultiVariantOutputSettings): Promise<CmafMultiVariantOutputNode>;
1955
+ /**
1956
+ * Produces Transport Stream video segments with the supplied settings for use in
1957
+ * HLS manifests and builds a playlist served locally from the Norsk Web Server
1958
+ * or from other locations - see {@link CmafDestinationSettings}
1959
+ *
1960
+ * @param settings - Configuration for the HLS TS Stream
1961
+ */
1962
+ hlsTsVideo(settings: HlsTsVideoOutputSettings): Promise<HlsTsVideoOutputNode>;
1963
+ /**
1964
+ * Produces Transport Stream audio segments with the supplied settings for use in
1965
+ * HLS manifests and builds a playlist served locally from the Norsk Web Server
1966
+ * or from other locations - see {@link CmafDestinationSettings}
1967
+ *
1968
+ * @param settings - Configuration for the HLS TS Stream
1969
+ */
1970
+ hlsTsAudio(settings: HlsTsAudioOutputSettings): Promise<HlsTsAudioOutputNode>;
1971
+ /**
1972
+ * Produces Transport Stream segments containing both video and audio with the supplied settings for use in
1973
+ * HLS manifests and pushes them to the configured location (see {@link CmafDestinationSettings})
1974
+ *
1975
+ * @param settings - Configuration for the HLS TS Stream
1976
+ */
1977
+ hlsTsCombinedPush(settings: HlsTsCombinedPushOutputSettings): Promise<HlsTsCombinedPushOutputNode>;
1978
+ /**
1979
+ * Produces a multi variant Hls Ts manifest for a collection of media streams
1980
+ *
1981
+ * This can optionally be served via the Norsk web server or be pushed
1982
+ * to other locations - see {@link CmafDestinationSettings}
1983
+ *
1984
+ * @param settings - Configuration for the Hls Ts Multivariant Playlist
1985
+ */
1986
+ hlsTsMultiVariant(settings: HlsTsMultiVariantOutputSettings): Promise<HlsTsMultiVariantOutputNode>;
1987
+ /**
1988
+ * Produces a Transport Stream optionally containing both video and audio
1989
+ * and sends it out over UDP
1990
+ *
1991
+ * @param settings - Configuration for the TS Stream
1992
+ */
1993
+ udpTs(settings: UdpTsOutputSettings): Promise<UdpTsOutputNode>;
1994
+ /**
1995
+ * Produces a Transport Stream, and allows Norsk to either connect to an existing
1996
+ * SRT server or act as an SRT server itself
1997
+ *
1998
+ * @param settings - Configuration for the SRT Stream
1999
+ */
2000
+ srt(settings: SrtOutputSettings): Promise<SrtOutputNode>;
2001
+ /**
2002
+ * Connects and sends media to a remote server via WebRTC using the WHIP standard.
2003
+ *
2004
+ * Here Norsk acts as the WHIP client sending to a remote Media Server; to
2005
+ * have Norsk act as the Media Server ingesting from some other WHIP client, see
2006
+ * {@link NorskInput.whip}
2007
+ *
2008
+ * @param settings - Configuration for the WebRTC Stream
2009
+ */
2010
+ whip(settings: WhipOutputSettings): Promise<WhipOutputNode>;
2011
+ /**
2012
+ * Hosts media for clients connecting via WebRTC using the WHEP standard.
2013
+ *
2014
+ * To send media to a remote Media Server via WebRTC see {@link NorskOutput.whip}.
2015
+ * See also {@link NorskInput.whip}, {@link NorskDuplex.webRtcBrowser}.
2016
+ *
2017
+ * @param settings - Configuration for the WebRTC Stream
2018
+ */
2019
+ whep(settings: WhepOutputSettings): Promise<WhepOutputNode>;
2020
+ /**
2021
+ * Connects and sends media to a remote RTMP server
2022
+ *
2023
+ * @param settings - Configuration for the RTMP Stream
2024
+ */
2025
+ rtmp(settings: RtmpOutputSettings): Promise<RtmpOutputNode>;
2026
+ /**
2027
+ * Stream to a Transport Stream file.
2028
+ *
2029
+ * @param settings - Configuration for the Transport Stream output
2030
+ */
2031
+ fileTs(settings: FileTsOutputSettings): Promise<FileTsOutputNode>;
2032
+ /**
2033
+ * Output MP4 files to disk, both fragmented and non-fragmented.
2034
+ *
2035
+ * The fragmented output is required.
2036
+ *
2037
+ * The optional non-fragmented filename will be written when calling
2038
+ * {@link FileMp4OutputNode.close} and will be fully written by the time
2039
+ * {@link NodeSettings.onClose} is called. This sets up a temp file to
2040
+ * store the frame data by appending the extension `.tmp`.
2041
+ *
2042
+ * A non-fragmented MP4 file can be written on request with
2043
+ * {@link FileMp4OutputNode.writeFile}, which uses the frame data store if
2044
+ * {@link FileMp4OutputSettings.nonfragmentedFileName} was given or reads
2045
+ * back the fragmented mp4 if there is no non-fragmented file.
2046
+ *
2047
+ * @param settings - Configuration for the MP4 output.
2048
+ */
2049
+ fileMp4(settings: FileMp4OutputSettings): Promise<FileMp4OutputNode>;
2050
+ }
2051
+
2052
+ /** @public */
2053
+ export declare function norskPort(): string;
2054
+
2055
+ /** @public */
2056
+ export declare class NorskProcessor {
2057
+ /**
2058
+ * Implements the {@link NorskControl} interface
2059
+ */
2060
+ control: NorskControl;
2061
+ /**
2062
+ * Implements the {@link NorskTransform} interface
2063
+ */
2064
+ transform: NorskTransform;
2065
+ close(): Promise<void>;
2066
+ constructor(client: MediaClient);
2067
+ }
2068
+
2069
+ /**
2070
+ * @public
2071
+ * Top level Norsk configuration
2072
+ */
2073
+ export declare interface NorskSettings {
2074
+ /**
2075
+ * Callback URL to listen on for gRPC session with Norsk Media
2076
+ * Defaults to $NORSK_HOST:$NORSK_PORT if the environment variables are set
2077
+ * where NORSK_HOST defaults to "127.0.0.1" and NORSK_PORT to "6790"
2078
+ * (so "127.0.0.1:6790" if neither variable is set)
2079
+ */
2080
+ url?: string;
2081
+ onAttemptingToConnect?: () => void;
2082
+ onConnecting?: () => void;
2083
+ onReady?: () => void;
2084
+ onFailedToConnect?: () => void;
2085
+ /** Code to execute if the Norsk node is shut down - by default it logs and exits the client application */
2086
+ onShutdown?: () => void;
2087
+ onCurrentLoad?: (load: CurrentLoad) => void;
2088
+ onHello?: (version: Version) => void;
2089
+ onLogEvent?: (log: Log) => void;
2090
+ /**
2091
+ * Manually handle license events, such as missing/invalid licenses and
2092
+ * sandbox timeout. (Logs messages to console by default.)
2093
+ */
2094
+ onLicenseEvent?: (message: string) => void;
2095
+ }
2096
+
2097
+ /**
2098
+ * @public
2099
+ * Methods that allow you to query the features of the system that Norsk is running in
2100
+ */
2101
+ export declare interface NorskSystem {
2102
+ hardwareInfo(): Promise<HardwareInfo>;
2103
+ }
2104
+
2105
+ /**
2106
+ * @public
2107
+ * Methods that allow you to manipulate your media streams
2108
+ */
2109
+ export declare interface NorskTransform {
2110
+ /**
2111
+ * Encode a video stream to one or more renditions
2112
+ * using either software or appropriate hardware if available
2113
+ * @param settings - Encode ladder settings
2114
+ */
2115
+ videoEncode(settings: VideoEncodeSettings): Promise<VideoEncodeNode>;
2116
+ /**
2117
+ * Transform a single video stream (rescale, frame rate, etc)
2118
+ * @param settings - Transform settings
2119
+ */
2120
+ videoTransform(settings: VideoTransformSettings): Promise<VideoTransformNode>;
2121
+ /**
2122
+ * Interferes with a stream by dropping frames
2123
+ * Why would you want this? Stick one of these after a decoder and before
2124
+ * anything else in order to simulate what the world is going to look like if you
2125
+ * have network problems (packet drops for example) in your ingest
2126
+ *
2127
+ * *Just don't forget to remove it again when you've finished testing!*
2128
+ * @param settings - Chaos monkey settings
2129
+ */
2130
+ streamChaosMonkey(settings: StreamChaosMonkeySettings): Promise<StreamChaosMonkeyNode>;
2131
+ /**
2132
+ * Compose multiple video streams together into a single output
2133
+ * @param settings - Composition settings
2134
+ */
2135
+ videoCompose<Pins extends string>(settings: VideoComposeSettings<Pins>): Promise<VideoComposeNode<Pins>>;
2136
+ /**
2137
+ * Create a Media Node performing transcription into subtitles using the
2138
+ * Amazon Transcribe AWS service.
2139
+ * @param settings - Settings and credentials for AWS transcribe
2140
+ */
2141
+ audioTranscribeAws(settings: AudioTranscribeAwsSettings): Promise<AudioTranscribeAwsNode>;
2142
+ /**
2143
+ * Mix multiple audio streams together into a single output,
2144
+ * with optional gain control on each input.
2145
+ * @param settings - Settings for the mixer, including the gain vectors
2146
+ */
2147
+ audioMix<Pins extends string>(settings: AudioMixSettings<Pins>): Promise<AudioMixNode<Pins>>;
2148
+ /**
2149
+ * Given an audio stream of N channels, mix it down to M channels through a matrix of NxM gains.
2150
+ * @param settings - Settings for the mixer, including the gain matrix
2151
+ */
2152
+ audioMixMatrix(settings: AudioMixMatrixSettings): Promise<AudioMixMatrixNode>;
2153
+ /**
2154
+ * Apply gain to an audio stream
2155
+ * @param settings - Settings for the gain node
2156
+ */
2157
+ audioGain(settings: AudioGainSettings): Promise<AudioGainNode>;
2158
+ /**
2159
+ * Aggregate many single-channel audio streams into a stream with the
2160
+ * specified channel layout. The streams must all have the same sample format
2161
+ * and sample rate. The order of the streams provided for the channels is
2162
+ * important.
2163
+ * @param settings - Settings for the builder, including the channel layout
2164
+ * and stream keys specifying the sources for each channel.
2165
+ */
2166
+ audioBuildMultichannel(settings: AudioBuildMultichannelSettings): Promise<AudioBuildMultichannelNode>;
2167
+ /**
2168
+ * Split a multichannel audio stream into its individual channels. The first
2169
+ * channel receives the specified stream key, and each subsequent channel
2170
+ * increments the stream id on the stream key.
2171
+ * @param settings - Settings for the splitter
2172
+ */
2173
+ audioSplitMultichannel(settings: AudioSplitMultichannelSettings): Promise<AudioSplitMultichannelNode>;
2174
+ /**
2175
+ * Encode an audio stream.
2176
+ * @param settings - Settings for the encoder, including channel layout and
2177
+ * bitrate.
2178
+ */
2179
+ audioEncode(settings: AudioEncodeSettings): Promise<AudioEncodeNode>;
2180
+ /**
2181
+ * A node to nudge the timestamps on a stream, which affects how it syncs
2182
+ * with other streams. Useful for correcting for drift between different
2183
+ * sources.
2184
+ *
2185
+ * Subsequent nudges, via the `nudge` method, are applied gradually.
2186
+ *
2187
+ * This functionality is also provided by a `nudge` method on many sources.
2188
+ * @param settings - Initial nudge plus general node settings.
2189
+ */
2190
+ streamTimestampNudge(settings: StreamTimestampNudgeSettings): Promise<StreamTimestampNudgeNode>;
2191
+ /**
2192
+ * Provide a new stream key for a single stream. Cannot be subscribed to
2193
+ * multiple streams at once.
2194
+ *
2195
+ * The stream key is used for identifying streams within multiplexed sources
2196
+ * and also is translated into URIs for HLS playlists and other resources.
2197
+ *
2198
+ * This can be useful if changing sources and wanting to maintain a consistent
2199
+ * streamkey going into an output
2200
+ * @param settings - New stream key plus general node settings.
2201
+ */
2202
+ streamKeyOverride(settings: StreamKeyOverrideSettings): Promise<StreamKeyOverrideNode>;
2203
+ /**
2204
+ * Override bitrate and language metadata on streams.
2205
+ *
2206
+ * Audio and video bitrate metadata is required for playlists for the
2207
+ * {@link NorskOutput.cmafMultiVariant} node.
2208
+ * It is automatically configured for some sources (like RTMP) and in
2209
+ * cases where re-encoding is done, but is unset for other sources (like SRT).
2210
+ * @param settings - Bitrate and language metadata plus general node settings.
2211
+ */
2212
+ streamMetadataOverride(settings: StreamMetadataOverrideSettings): Promise<StreamMetadataOverrideNode>;
2213
+ /**
2214
+ * Buffer a stream for the specified number of milliseconds. This can be used
2215
+ * to reduce or eliminate jitter.
2216
+ * @param settings - Buffer delay time.
2217
+ */
2218
+ jitterBuffer(settings: JitterBufferSettings): Promise<JitterBufferNode>;
2219
+ /**
2220
+ * Sync multiple streams together by timestamps, queuing frames from streams
2221
+ * that are behind the others. This is already included in most nodes,
2222
+ * especially outputs.
2223
+ */
2224
+ streamSync(settings: StreamSyncSettings): Promise<StreamSyncNode>;
2225
+ ancillary(settings: AncillarySettings): Promise<AncillaryNode>;
2226
+ }
2227
+
2228
+ /** @public */
2229
+ export declare interface NumaNode {
2230
+ processors: Processor[];
2231
+ }
2232
+
2233
+ /**
2234
+ * @public
2235
+ * Settings for a H264 Encode using Nvidia hardware
2236
+ * A detailed description of these params can be found
2237
+ * on the Nvidia Encoder Documentation
2238
+ *
2239
+ * If left undefined, all will default to Nvidia's own defaults
2240
+ * If a preset is configured, then all will default to the values provided
2241
+ * by that preset
2242
+ * */
2243
+ export declare interface NvidiaH264 {
2244
+ type: "nv-h264";
2245
+ /** The preset to use for this encode */
2246
+ preset?: NvidiaPreset;
2247
+ /** The IDR period */
2248
+ idrPeriod?: number;
2249
+ /** The gopInterval to use for this encode
2250
+ * Note: This is different from the idrPeriod but usually you want
2251
+ * them set to the same value regardless
2252
+ * */
2253
+ gopInterval?: number;
2254
+ /** This is the gop structure to be used, and again it's best to look this up
2255
+ * in the Nvidia documentation
2256
+ * */
2257
+ frameIntervalP?: number;
2258
+ /** This is somewhat related to the gop structure and again, care should be taken when overriding this from
2259
+ * the preset */
2260
+ maxNumRefFrames?: number;
2261
+ /** The target level of this H264 encode
2262
+ * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
2263
+ * this differs from other codecs which just silently change the outgoing level/profile for example */
2264
+ level?: NvidiaH264Level;
2265
+ /** The target profile of this H264 encode
2266
+ * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
2267
+ * this differs from other codecs which just silently change the outgoing level/profile for example */
2268
+ profile?: NvidiaH264Profile;
2269
+ /** Output Access Unit Delimiters */
2270
+ outputAud?: boolean;
2271
+ /** Rate Control Settings */
2272
+ rateControl?: NvidiaRateControl;
2273
+ }
2274
+
2275
+ /**
2276
+ * @public
2277
+ * See the Nvidia Encoder Docs for a description of this value
2278
+ * */
2279
+ export declare type NvidiaH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2280
+
2281
+ /**
2282
+ * @public
2283
+ * See the Nvidia Encoder Docs for a description of this value
2284
+ * */
2285
+ export declare type NvidiaH264Profile = "baseline" | "main" | "high" | "high444";
2286
+
2287
+ /**
2288
+ * @public
2289
+ * Settings for a HEVC Encode using Nvidia hardware
2290
+ * A detailed description of these params can be found
2291
+ * on the Nvidia Encoder Documentation
2292
+ *
2293
+ * If left undefined, all will default to Nvidia's own defaults
2294
+ * If a preset is configured, then all will default to the values provided
2295
+ * by that preset
2296
+ * */
2297
+ export declare interface NvidiaHevc {
2298
+ type: "nv-hevc";
2299
+ /** The preset to use for this encode */
2300
+ preset?: NvidiaPreset;
2301
+ /** The IDR period */
2302
+ idrPeriod?: number;
2303
+ /** The gopInterval to use for this encode
2304
+ * Note: This is different from the idrPeriod but usually you want
2305
+ * them set to the same value regardless
2306
+ * */
2307
+ gopInterval?: number;
2308
+ /** This is the gop structure to be used, and again it's best to look this up
2309
+ * in the Nvidia documentation
2310
+ * */
2311
+ frameIntervalP?: number;
2312
+ /** This is somewhat related to the gop structure and again, care should be taken when overriding this from
2313
+ * the preset */
2314
+ maxNumRefFrames?: number;
2315
+ /** The target level of this HEVC encode
2316
+ * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
2317
+ * this differs from other codecs which just silently change the outgoing level/profile for example */
2318
+ level?: NvidiaHevcLevel;
2319
+ /** The target profile of this HEVC encode
2320
+ * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
2321
+ * this differs from other codecs which just silently change the outgoing level/profile for example */
2322
+ profile?: NvidiaHevcProfile;
2323
+ /** Output Access Unit Delimiters */
2324
+ outputAud?: boolean;
2325
+ /** The target tier of this HEVC encode
2326
+ * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
2327
+ * this differs from other codecs which just silently change the outgoing level/profile for example */
2328
+ tier?: NvidiaHevcTier;
2329
+ /** Rate Control Settings */
2330
+ rateControl?: NvidiaRateControl;
2331
+ }
2332
+
2333
+ /**
2334
+ * @public
2335
+ * See the Nvidia Encoder Docs for a description of this value
2336
+ * */
2337
+ export declare type NvidiaHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2338
+
2339
+ /**
2340
+ * @public
2341
+ * See the Nvidia Encoder Docs for a description of this value
2342
+ * */
2343
+ export declare type NvidiaHevcProfile = "main" | "main10";
2344
+
2345
+ /**
2346
+ * @public
2347
+ * See the Nvidia Encoder Docs for a description of this value
2348
+ * */
2349
+ export declare type NvidiaHevcTier = "main" | "high";
2350
+
2351
+ /**
2352
+ * @public
2353
+ * See the Nvidia Encoder Docs for a description of this value
2354
+ * */
2355
+ export declare type NvidiaPreset = "p1" | "p2" | "p3" | "p4" | "p5" | "p6" | "p7";
2356
+
2357
+ /**
2358
+ * @public
2359
+ * The rate control options for an nvidia encode
2360
+ * For further info, consult the Nvidia Encoder docs
2361
+ * */
2362
+ export declare interface NvidiaRateControl {
2363
+ /** the mode to use for this rate control operation */
2364
+ mode: NvidiaRateControlMode;
2365
+ /** The average bitrate of this encode */
2366
+ averageBitrate: number;
2367
+ /** The max bitrate of this encode */
2368
+ maxBitrate?: number;
2369
+ /** The vbv buffer size used for this encode */
2370
+ vbvBufferSize?: number;
2371
+ /** The vbv initial delay used for this encode */
2372
+ vbvInitialDelay?: number;
2373
+ /** Enable lookahead or not:
2374
+ * Note: enabling lookahead will introduce scene cuts unless this is specifically disabled
2375
+ * elsewhere in the codec settings
2376
+ * */
2377
+ enableLookahead?: boolean;
2378
+ strictGopTarget?: boolean;
2379
+ lookaheadDepth?: number;
2380
+ }
2381
+
2382
+ /**
2383
+ * @public
2384
+ * See the Nvidia Encoder Docs for a description of this value
2385
+ * */
2386
+ export declare type NvidiaRateControlMode = "con_stqp" | "vbr" | "cbr";
2387
+
2388
+ /**
2389
+ * @public
2390
+ * A rectangle used for describing a subset of an image
2391
+ * */
2392
+ export declare interface OffsetRect {
2393
+ /** The leftmost coordinate of the rect, where 0,0 is top left */
2394
+ x: number;
2395
+ /** The topmost coordinate of the rect, where 0,0 is top left */
2396
+ y: number;
2397
+ /** the width of this rectangle */
2398
+ width: number;
2399
+ /** the height of this rectangle */
2400
+ height: number;
2401
+ }
2402
+
2403
+ /**
2404
+ * @public
2405
+ * Return type to enable control of an RTMP stream once media arrives on it
2406
+ */
2407
+ export declare type OnStreamResult =
2408
+ /** Accept the stream */
2409
+ {
2410
+ accept: true;
2411
+ videoStreamKey: StreamKey_2 | StreamKeySettings;
2412
+ audioStreamKey: StreamKey_2 | StreamKeySettings;
2413
+ }
2414
+ /** Reject the stream */
2415
+ | {
2416
+ accept: false;
2417
+ reason: string;
2418
+ };
2419
+
2420
+ /**
2421
+ * @public
2422
+ * Settings for an Opus encode
2423
+ * see: {@link NorskTransform.audioEncode}
2424
+ * */
2425
+ export declare interface OpusSettings {
2426
+ kind: "opus";
2427
+ }
2428
+
2429
+ /** @public
2430
+ * A transition for a video composition part.
2431
+ *
2432
+ * A transition interpolates the source_rect, dest_rect, and opacity properties
2433
+ * over the specified duration according to the specified easing function.
2434
+ *
2435
+ * As a special case, if a transition is specified and the input pin of the part
2436
+ * changes, an opacity fade from one to the other will occur.
2437
+ */
2438
+ export declare interface PartTransition {
2439
+ /** Duration for the transition */
2440
+ durationMs: number;
2441
+ /**
2442
+ * Easing function to apply to the transition. If not specified will be
2443
+ * linear.
2444
+ */
2445
+ easing?: SimpleEasing;
2446
+ }
2447
+
2448
+ /** @public */
2449
+ export declare type PinToKey<Pins extends string> = Nullable<Partial<Record<Pins, StreamKey[]>>>;
2450
+
2451
+ /** @public */
2452
+ export declare type PixelFormat = "bgra" | "rgba" | "yuv420p" | "yuv422p" | "yuv444p" | "yuva420p" | "yuva422p" | "yuva444p";
2453
+
2454
+ /** @public */
2455
+ declare enum PlaylistPath {
2456
+ Cmaf = 0,
2457
+ Ts = 1
2458
+ }
2459
+
2460
+ /**
2461
+ * @public
2462
+ * Returns the stream keys for playlist streams in a media context
2463
+ * @param streams - The media context from which to return the stream keys
2464
+ * @returns The playlist stream keys in the media context
2465
+ */
2466
+ export declare function playlistStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
2467
+
2468
+ /** @public */
2469
+ export declare interface PlaylistStreamMetadata {
2470
+ }
2471
+
2472
+ /**
2473
+ * @public
2474
+ * Filters a context to only the playlist streams within it
2475
+ * @param streams - The media context from which to return the streams
2476
+ * @returns The playlist streams in the media context
2477
+ */
2478
+ export declare function playlistStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
2479
+
2480
+ /** @public */
2481
+ export declare interface Processor {
2482
+ cores: Core[];
2483
+ }
2484
+
2485
+ export declare interface ProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
2486
+ }
2487
+
2488
+ export declare class ProcessorMediaNode<Pins extends string> {
2489
+ constructor(client: MediaClient, getGrpcStream: () => (Readable | Writable), subscribeFn: (subscription: Subscription) => void, subscribeErrorFn?: (error: SubscriptionError) => void, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
2490
+ }
2491
+
2492
+ /** @public */
2493
+ export declare interface ProcessorNodeSettings<T extends MediaNodeState> extends SinkNodeSettings<T>, SourceNodeSettings<T> {
2494
+ }
2495
+
2496
+ /** @public */
2497
+ export declare function publicUrlPrefix(): string;
2498
+
2499
+ /**
2500
+ * @public
2501
+ * Settings for a H264 Encode using Netint Quadra hardware
2502
+ * A detailed description of these params can be found
2503
+ * on the Netint Quadra Encoder Documentation
2504
+ *
2505
+ * These fields have deliberately been written to maintain the same semantics as the
2506
+ * Quadra documentation where possible.
2507
+ *
2508
+ * If left undefined, all will default to Quadra's own defaults
2509
+ * */
2510
+ export declare interface QuadraH264 {
2511
+ type: "quadra-h264";
2512
+ /** This (for convenience) takes the xcoder string that Quadra's
2513
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
2514
+ * quickly and will override any values set manually in the rest of this interface.
2515
+ *
2516
+ * It is expected that developers will choose to use the typed fields for most things instead
2517
+ * when moving to production, as they offer a degree of validation and type safety
2518
+ * */
2519
+ extraOpts?: string;
2520
+ enableAud?: boolean;
2521
+ gpuIndex?: number;
2522
+ bitrate?: number;
2523
+ enableVfr?: boolean;
2524
+ crf?: number;
2525
+ gopPresetIndex?: number;
2526
+ intraPeriod?: number;
2527
+ rcEnable?: boolean;
2528
+ intraQp?: number;
2529
+ rcInitDelay?: number;
2530
+ profile?: QuadraH264Profile;
2531
+ level?: QuadraH264Level;
2532
+ fillerEnable?: boolean;
2533
+ minQp?: number;
2534
+ maxQp?: number;
2535
+ maxDeltaQp?: number;
2536
+ cuLevelRCEnable?: boolean;
2537
+ lookAheadDepth?: number;
2538
+ vbvBufferSize?: number;
2539
+ vbvMaxRate?: number;
2540
+ }
2541
+
2542
+ /** @public */
2543
+ export declare type QuadraH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2544
+
2545
+ /** @public */
2546
+ export declare type QuadraH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
2547
+
2548
+ /**
2549
+ * @public
2550
+ * Settings for a HEVC Encode using Netint Quadra hardware
2551
+ * A detailed description of these params can be found
2552
+ * on the Netint Quadra Encoder Documentation
2553
+ *
2554
+ * These fields have deliberately been written to maintain the same semantics as the
2555
+ * Quadra documentation where possible.
2556
+ *
2557
+ * If left undefined, all will default to Quadra's own defaults
2558
+ * */
2559
+ export declare interface QuadraHevc {
2560
+ type: "quadra-hevc";
2561
+ /** This (for convenience) takes the xcoder string that Quadra's
2562
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
2563
+ * quickly and will override any values set manually in the rest of this interface.
2564
+ *
2565
+ * It is expected that developers will choose to use the typed fields for most things instead
2566
+ * when moving to production, as they offer a degree of validation and type safety
2567
+ * */
2568
+ extraOpts?: string;
2569
+ enableAud?: boolean;
2570
+ gpuIndex?: number;
2571
+ bitrate?: number;
2572
+ enableVfr?: boolean;
2573
+ crf?: number;
2574
+ gopPresetIndex?: number;
2575
+ intraPeriod?: number;
2576
+ rcEnable?: boolean;
2577
+ intraQp?: number;
2578
+ rcInitDelay?: number;
2579
+ profile?: QuadraHevcProfile;
2580
+ level?: QuadraHevcLevel;
2581
+ tier?: QuadraHevcTier;
2582
+ lossless?: boolean;
2583
+ hrdEnable?: boolean;
2584
+ dolbyVisionProfile?: number;
2585
+ fillerEnable?: boolean;
2586
+ minQp?: number;
2587
+ maxQp?: number;
2588
+ maxDeltaQp?: number;
2589
+ cuLevelRCEnable?: boolean;
2590
+ lookAheadDepth?: number;
2591
+ vbvBufferSize?: number;
2592
+ vbvMaxRate?: number;
2593
+ }
2594
+
2595
+ /** @public */
2596
+ export declare type QuadraHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2597
+
2598
+ /** @public */
2599
+ export declare type QuadraHevcProfile = "main" | "main10";
2600
+
2601
+ /** @public */
2602
+ export declare type QuadraHevcTier = "main" | "high";
2603
+
2604
+ /** @public */
2605
+ export declare type ReceiveFromAddress<Pins extends string> = {
2606
+ source: SourceMediaNode;
2607
+ sourceSelector: (streams: StreamMetadata[]) => PinToKey<Pins>;
2608
+ };
2609
+
2610
+ /** @public */
2611
+ export declare type ReceiveFromAddressAuto = {
2612
+ source: SourceMediaNode;
2613
+ sourceSelector: (streams: StreamMetadata[]) => StreamKey[];
2614
+ };
2615
+
2616
+ /**
2617
+ * @public
2618
+ * Base settings for any input node requiring access to a host:port pair
2619
+ * */
2620
+ export declare interface RemoteInputSettings<T extends MediaNodeState> extends InputSettings<T> {
2621
+ /** The IP of the remote server*/
2622
+ ip: string;
2623
+ /** The port the remote server is listening on*/
2624
+ port: number;
2625
+ }
2626
+
2627
+ /**
2628
+ * @public
2629
+ * Validation function to require at least one audio and at least one video stream. Often the default validation
2630
+ * will happen to ensure this, as audio and video are subscribed from separate media nodes, but when one media node
2631
+ * will produce both audio and video, default validation cannot know that both are required.
2632
+ */
2633
+ export declare const requireAV: (ctx: Context) => boolean;
2634
+
2635
+ /**
2636
+ * @public
2637
+ * Validation function to require exactly N audio and exactly N video streams. Often the default validation
2638
+ * will happen to ensure this, as audio and video are subscribed from separate media nodes, but when one media node
2639
+ * will produce both audio and video, default validation cannot know that both are required.
2640
+ */
2641
+ export declare const requireExactAV: ({ audio, video }: {
2642
+ audio: number;
2643
+ video: number;
2644
+ }) => (ctx: Context) => boolean;
2645
+
2646
+ /**
2647
+ * @public
2648
+ * The resolution of a video within Norsk
2649
+ * */
2650
+ export declare interface Resolution {
2651
+ width: number;
2652
+ height: number;
2653
+ }
2654
+
2655
+ /**
2656
+ * @public
2657
+ * see: {@link NorskOutput.rtmp}
2658
+ */
2659
+ export declare class RtmpOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
2660
+ close(): void;
2661
+ }
2662
+
2663
+ /**
2664
+ * @public
2665
+ * The settings for an RTMP output
2666
+ * see: {@link NorskOutput.rtmp}
2667
+ * */
2668
+ export declare interface RtmpOutputSettings extends SinkNodeSettings<RtmpOutputNode>, StreamStatisticsMixin {
2669
+ /**
2670
+ * The URL of the remote RTMP server to connect to, including the full stream path and credentials
2671
+ */
2672
+ url: string;
2673
+ /** Jitter buffer delay in milliseconds */
2674
+ bufferDelayMs?: number;
2675
+ }
2676
+
2677
+ /**
2678
+ * @public
2679
+ * see: {@link NorskInput.rtmpServer}
2680
+ */
2681
+ export declare class RtmpServerInputNode extends SourceMediaNode {
2682
+ close(): void;
2683
+ /**
2684
+ * @public
2685
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
2686
+ * */
2687
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
2688
+ }
2689
+
2690
+ /**
2691
+ * @public
2692
+ * Settings to control how RTMP streams can be included as sources in your media workflow
2693
+ * see: {@link NorskInput.rtmpServer}
2694
+ */
2695
+ export declare interface RtmpServerInputSettings extends SourceNodeSettings<RtmpServerInputNode>, StreamStatisticsMixin {
2696
+ /** The port the RTMP server should listen on */
2697
+ port?: number;
2698
+ /**
2699
+ * On connect callback, use to accept/reject connections given app/url in use
2700
+ * @eventProperty
2701
+ */
2702
+ onConnection?: (
2703
+ /** The connection ID, unique to this RtmpServer node */
2704
+ connectionId: string,
2705
+ /** The RTMP "app" field from the connection string */
2706
+ app: string,
2707
+ /** The full URL of the RTMP connection string */
2708
+ url: string) => {
2709
+ accept: true;
2710
+ } | {
2711
+ accept: false;
2712
+ reason?: string;
2713
+ };
2714
+ /**
2715
+ * On stream callback, set up the stream keys for a given stream or reject the stream
2716
+ * @eventProperty
2717
+ */
2718
+ onStream?: (
2719
+ /** The connection ID, unique to this RtmpServer node */
2720
+ connectionId: string,
2721
+ /** The RTMP "app" field from the connection string */
2722
+ app: string,
2723
+ /** The full URL of the RTMP connection string */
2724
+ url: string,
2725
+ /** The Norsk streamId of this media stream */
2726
+ streamId: number,
2727
+ /** TODO - publishingName */
2728
+ publishingName: string) => OnStreamResult;
2729
+ /**
2730
+ * Called when the connection status has changed (e.g. when the RTMP connection drops)
2731
+ * @eventProperty
2732
+ */
2733
+ onConnectionStatusChange?: (
2734
+ /** The connection ID, unique to this RtmpServer node */
2735
+ connectionId: string,
2736
+ /** The new connection state */
2737
+ status: RtmpServerInputStatus,
2738
+ /** The audio and video stream keys that were present in the stream at the time of the status change */
2739
+ streamKeys: {
2740
+ audioStreamKey: StreamKey_2;
2741
+ videoStreamKey: StreamKey_2;
2742
+ }[]) => void;
2743
+ onConnectionError?: (
2744
+ /** The connection ID, unique to this RtmpServer node */
2745
+ connectionId: string,
2746
+ /** The error */
2747
+ error: RtmpError_UnsupportedVideo | RtmpError_UnsupportedAudio) => void;
2748
+ onConnectionBytesRead?: (
2749
+ /** The connection ID, unique to this RtmpServer node */
2750
+ connectionId: string,
2751
+ /** The number of bytes read, as reported by the peer */
2752
+ bytesRead: bigint) => void;
2753
+ }
2754
+
2755
+ /** @public */
2756
+ export declare type RtmpServerInputStatus = "disconnected";
2757
+
2758
+ /**
2759
+ * @public
2760
+ * The stream keys in an RTMP input stream
2761
+ */
2762
+ export declare type RtmpServerStreamKeys = {
2763
+ audioStreamKey: StreamKey_2;
2764
+ videoStreamKey: StreamKey_2;
2765
+ }[];
2766
+
2767
+ /**
2768
+ * @public
2769
+ * A description of an Eac3 stream being delivered via RTP
2770
+ * */
2771
+ export declare interface RtpEac3 {
2772
+ kind: "eac3";
2773
+ /** The clock rate of the stream */
2774
+ clockRate: number;
2775
+ /** The language code (this will end up in outgoing metadata) */
2776
+ languageCode?: string;
2777
+ ec3Extension: boolean;
2778
+ complexityIndex: number;
2779
+ }
2780
+
2781
+ /**
2782
+ * @public
2783
+ * A description of an H264 stream delivered over RTP
2784
+ * */
2785
+ export declare interface RtpH264 {
2786
+ kind: "h264";
2787
+ /** The clock rate of the stream */
2788
+ clockRate: number;
2789
+ }
2790
+
2791
+ /**
2792
+ * @public
2793
+ * A description of an HEVC stream delivered over RTP
2794
+ * */
2795
+ export declare interface RtpHEVC {
2796
+ kind: "hevc";
2797
+ /** The clock rate of the stream */
2798
+ clockRate: number;
2799
+ }
2800
+
2801
+ /**
2802
+ * @public
2803
+ * see: {@link NorskInput.rtp}
2804
+ */
2805
+ export declare class RtpInputNode extends SourceMediaNode {
2806
+ /**
2807
+ * @public
2808
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
2809
+ * */
2810
+ nudge(nudge: number): void;
2811
+ close(): void;
2812
+ }
2813
+
2814
+ /**
2815
+ * @public
2816
+ * Settings for an RTP input
2817
+ * see: {@link NorskInput.rtp}
2818
+ * */
2819
+ export declare interface RtpInputSettings extends SourceNodeSettings<RtpInputNode>, StreamStatisticsMixin {
2820
+ sourceName: string;
2821
+ streams: readonly RtpStreamSettings[];
2822
+ }
2823
+
2824
+ /**
2825
+ * @public
2826
+ * A description of a LinearPCM stream being delivered via RTP
2827
+ * */
2828
+ export declare interface RtpLinearPcm {
2829
+ kind: "linearpcm";
2830
+ /** The sample rate of the stream */
2831
+ sampleRate: SampleRate;
2832
+ /** The channel layotu the stream */
2833
+ channelLayout: ChannelLayout;
2834
+ /** The bit depth of the stream */
2835
+ bitDepth: RtpLinearPcmBitDepth;
2836
+ }
2837
+
2838
+ /** @public */
2839
+ export declare type RtpLinearPcmBitDepth = 16 | 24;
2840
+
2841
+ /**
2842
+ * @public
2843
+ * A description of a Mpeg4 Generic Aac stream
2844
+ * */
2845
+ export declare interface RtpMpeg4GenericAacHbr {
2846
+ kind: "mpeg4-generic-aac-hbr";
2847
+ config: string;
2848
+ }
2849
+
2850
+ /**
2851
+ * @public
2852
+ * A description of an incoming RTP stream
2853
+ * */
2854
+ export declare interface RtpStreamSettings {
2855
+ /** A streamID to assign to the outgoing stream key */
2856
+ streamId: number;
2857
+ /** The IP Address to join the RTP stream on */
2858
+ ip: string;
2859
+ /** The interface to bind to, "loopback" and "any" are special cases
2860
+ * and anything else will be interpreted as the name of a network interface */
2861
+ iface: string;
2862
+ /** The port to connect to for the RTP stream itself */
2863
+ rtpPort: number;
2864
+ /** The port to connect to for the associated RTCP stream */
2865
+ rtcpPort: number;
2866
+ /** A description of the stream being joined */
2867
+ streamType: RtpLinearPcm | RtpEac3 | RtpMpeg4GenericAacHbr | RtpH264 | RtpHEVC;
2868
+ }
2869
+
2870
+ /**
2871
+ * @public
2872
+ * This is the SAR/PAR for a video stream and is an expression of what shape each pixel has within a video stream
2873
+ * x:1, y:1 being a square and the most common value for this
2874
+ * */
2875
+ export declare interface SampleAspectRatio {
2876
+ x: number;
2877
+ y: number;
2878
+ }
2879
+
2880
+ /** @public */
2881
+ export declare type SampleFormat =
2882
+ /** Signed 16 bits, non-planar */
2883
+ "s16"
2884
+ /** Signed 16 bits, planar */
2885
+ | "s16p"
2886
+ /** 32bit floating point, non-planar */
2887
+ | "flt"
2888
+ /** 32bit floating point, planar */
2889
+ | "fltp";
2890
+
2891
+ /** @public Audio sample rate, in Hz */
2892
+ export declare type SampleRate = 8000 | 11025 | 12000 | 16000 | 22050 | 24000 | 32000 | 44100 | 48000 | 64000 | 88200 | 96000;
2893
+
2894
+ /** @public */
2895
+ export declare function selectAllAudios(count: number): (streams: readonly StreamMetadata[]) => StreamKey[];
2896
+
2897
+ /** @public */
2898
+ export declare function selectAllPlaylists(count: number): (streams: readonly StreamMetadata[]) => StreamKey[];
2899
+
2900
+ /** @public */
2901
+ export declare function selectAllVideos(count: number): (streams: readonly StreamMetadata[]) => StreamKey[];
2902
+
2903
+ /** @public */
2904
+ export declare function selectAudio(streams: readonly StreamMetadata[]): StreamKey[];
2905
+
2906
+ /**
2907
+ * @public
2908
+ * Select all the audio and video streams from the input
2909
+ * @param streams - The streams from the inbound Context
2910
+ * @returns Array of selected StreamKeys
2911
+ */
2912
+ export declare function selectAV(streams: readonly StreamMetadata[]): StreamKey[];
2913
+
2914
+ /** @public */
2915
+ export declare function selectPlaylist(streams: readonly StreamMetadata[]): StreamKey[];
2916
+
2917
+ /** @public */
2918
+ export declare function selectSubtitles(streams: readonly StreamMetadata[]): StreamKey[];
2919
+
2920
+ /** @public */
2921
+ export declare function selectVideo(streams: readonly StreamMetadata[]): StreamKey[];
2922
+
2923
+ /** @public */
2924
+ export declare type SentenceBuildMode = "raw" | "stable" | "partial" | "complete";
2925
+
2926
+ /** @public */
2927
+ export declare type SimpleEasing = "linear" | "ease_in" | "ease_in_out" | "ease_out";
2928
+
2929
+ /** @public */
2930
+ export declare interface SingleStreamStatistics extends StreamStatistics {
2931
+ streamKey: StreamKey;
2932
+ metadata: StreamMetadataMessage;
2933
+ }
2934
+
2935
+ /** @public */
2936
+ export declare class SinkMediaNode<Pins extends string> extends MediaNodeState {
2937
+ /** Subscribe to the given sources.
2938
+ *
2939
+ * This version of the function call accepts the target pins of an output
2940
+ * and is suitable for advanced use where a node is capable of subscribing to
2941
+ * multiple video streams and provides a means of distinguishing them via pins
2942
+ * discarding any existing subscriptions.
2943
+ *
2944
+ * @param done - will be called with no arguments if the subscription succeeds,
2945
+ * or an error if it failed. This error indicates the specific reason it
2946
+ * failed, so you can take appropriate actions in response. It will be called
2947
+ * before the `subscribedStreamsChangedFn` or `subscribeErrorFn` callbacks
2948
+ * provided in the config for the node.
2949
+ *
2950
+ * Errors are also logged to the debug log.
2951
+ */
2952
+ subscribeToPins(sources: ReceiveFromAddress<Pins>[], validation?: (context: Context) => boolean, done?: (error?: SubscriptionError) => void): void;
2953
+ sourceContextChange(responseCallback: (error?: SubscriptionError) => void): Promise<boolean>;
2954
+ }
2955
+
2956
+ /** @public */
2957
+ export declare interface SinkNodeSettings<T extends MediaNodeState> extends NodeSettings<T> {
2958
+ onSubscriptionError?: (error: SubscriptionError) => void;
2959
+ }
2960
+
2961
+ export declare interface Smpte2038Message {
2962
+ cNotYChannelFlag: boolean;
2963
+ lineNumber: number;
2964
+ horizontalOffset: number;
2965
+ payloadFormat: VancPayloadFormat;
2966
+ ancillaryId: VancType2AncillaryId;
2967
+ userData: Uint8Array;
2968
+ }
2969
+
2970
+ /** @public */
2971
+ export declare class SourceMediaNode extends MediaNodeState {
2972
+ outputStreams: StreamMetadata[];
2973
+ registerForContextChange(subscriber: SinkMediaNode<string>): void;
2974
+ unregisterForContextChange(subscriber: SinkMediaNode<string>): void;
2975
+ }
2976
+
2977
+ /** @public */
2978
+ export declare interface SourceNodeSettings<T extends MediaNodeState> extends NodeSettings<T> {
2979
+ onOutboundContextChange?: (streams: StreamMetadata[]) => Promise<void>;
2980
+ }
2981
+
2982
+ /**
2983
+ * @public
2984
+ * Errors found while subscribing to a particular source, separated out by reason:
2985
+ *
2986
+ * - `internal`: An opaque internal error
2987
+ *
2988
+ * - `unknownSourceId`: The media node does not exist (maybe it crashed)
2989
+ *
2990
+ * - `unknownSourceStream`: The media node exists, but does not have the stream key
2991
+ *
2992
+ * - `noSubscriberPin`: The media node is not set up to receive data on this pin (which may be auto-detected)
2993
+ *
2994
+ * - `unsupportedConversion`: Norsk does not support conversion from the media types of the source to the media types accepted by the subscriber
2995
+ */
2996
+ export declare type SourceSubscriptionError = {
2997
+ info: string;
2998
+ reason: "internal";
2999
+ } | {
3000
+ mediaNodeId: MediaNodeId;
3001
+ reason: "unknownSourceId";
3002
+ } | {
3003
+ mediaNodeId: MediaNodeId;
3004
+ streamKey: StreamKey;
3005
+ reason: "unknownSourceStream";
3006
+ } | {
3007
+ mediaNodeId: MediaNodeId;
3008
+ streamKey: StreamKey;
3009
+ pin: string;
3010
+ subscriberPins: string[];
3011
+ reason: "noSubscriberPin";
3012
+ } | {
3013
+ mediaNodeId: MediaNodeId;
3014
+ streamKey: StreamKey;
3015
+ sourceTypes: string[];
3016
+ subscriberTypes: string[];
3017
+ reason: "unsupportedConversion";
3018
+ };
3019
+
3020
+ /**
3021
+ * @public
3022
+ * The return value for the {@link SrtInputSettings.onConnection} callback
3023
+ * determining what to do with an incoming stream
3024
+ */
3025
+ export declare type SrtConnectionResult =
3026
+ /** Accept the stream */
3027
+ {
3028
+ accept: true;
3029
+ /** The source name to assign to the connection */
3030
+ sourceName: string;
3031
+ }
3032
+ /** Reject the stream */
3033
+ | {
3034
+ accept: false;
3035
+ };
3036
+
3037
+ /**
3038
+ * @public
3039
+ * see: {@link NorskInput.srt}
3040
+ */
3041
+ export declare class SrtInputNode extends SourceMediaNode {
3042
+ /**
3043
+ * @public
3044
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
3045
+ * */
3046
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
3047
+ /**
3048
+ * @public
3049
+ * Closes a connected stream as specified by 'streamIndex'
3050
+ * @param streamIndex - the index of the stream to be terminated
3051
+ * */
3052
+ closeStream(streamIndex: number): void;
3053
+ close(): void;
3054
+ }
3055
+
3056
+ /**
3057
+ * @public
3058
+ * Settings for an SRT Input node
3059
+ * see: {@link NorskInput.srt}
3060
+ */
3061
+ export declare interface SrtInputSettings extends RemoteInputSettings<SrtInputNode>, StreamStatisticsMixin {
3062
+ /**
3063
+ * The mode to act in (caller or listener)
3064
+ */
3065
+ mode: SrtMode;
3066
+ /**
3067
+ * Passphrase for encryption
3068
+ */
3069
+ passphrase?: string;
3070
+ /**
3071
+ * Stream ID to set on the socket when acting in caller mode
3072
+ */
3073
+ streamId?: string;
3074
+ /**
3075
+ * On connect callback, notifying that a new caller has connected (in listener mode) and set the source name accordingly
3076
+ * @eventProperty
3077
+ */
3078
+ onConnection?: (
3079
+ /** The stream_id sent on the SRT socket (or empty if none was set) */
3080
+ streamId: string,
3081
+ /**
3082
+ * Identifier indicating which connection this message refers to (for a
3083
+ * listener which may have multiple connections)
3084
+ */
3085
+ index: number,
3086
+ /** The address of the remote host */
3087
+ remoteHost: string) => SrtConnectionResult;
3088
+ /**
3089
+ * Called when the connection status has changed (e.g. when the SRT socket is closed)
3090
+ * @eventProperty
3091
+ */
3092
+ onConnectionStatusChange?: (
3093
+ /** The new connection state */
3094
+ status: SrtInputStatus,
3095
+ /** The source name assigned to the connection which changed status */
3096
+ sourceName: string | undefined) => void;
3097
+ }
3098
+
3099
+ /** @public */
3100
+ export declare type SrtInputStatus = "disconnected";
3101
+
3102
+ /** @public */
3103
+ export declare type SrtMode = "listener" | "caller";
3104
+
3105
+ /**
3106
+ * @public
3107
+ * see: {@link NorskOutput.srt}
3108
+ */
3109
+ export declare class SrtOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
3110
+ close(): void;
3111
+ }
3112
+
3113
+ /**
3114
+ * @public
3115
+ * The settings for an SRT output
3116
+ * see: {@link NorskOutput.srt}
3117
+ * */
3118
+ export declare interface SrtOutputSettings extends SinkNodeSettings<SrtOutputNode>, StreamStatisticsMixin {
3119
+ /**
3120
+ * Passphrase for encryption
3121
+ */
3122
+ passphrase?: string;
3123
+ /**
3124
+ * Stream ID to set on the socket when acting in caller mode
3125
+ */
3126
+ streamId?: string;
3127
+ /**
3128
+ * The mode to act in (see {@link SrtMode})
3129
+ */
3130
+ mode: SrtMode;
3131
+ /**
3132
+ * The IP address to listen on in listener mode, or to connect to in caller mode
3133
+ */
3134
+ ip: string;
3135
+ /**
3136
+ * The port to listen on in listener mode, or to connect to in caller mode
3137
+ */
3138
+ port: number;
3139
+ /** Jitter buffer delay in milliseconds */
3140
+ bufferDelayMs?: number;
3141
+ /**
3142
+ * On connect callback, notifying that a new caller has connected (in listener mode) and providing the stream_id that was set on the socket
3143
+ * @eventProperty
3144
+ */
3145
+ onConnection?: (
3146
+ /** The stream_id sent on the SRT socket (or empty if none was set) */
3147
+ streamId: string,
3148
+ /** The stream index (count of connections that have been made) */
3149
+ streamIndex: number,
3150
+ /** The remote host address */
3151
+ remoteHost: string) => void;
3152
+ }
3153
+
3154
+ /** @public */
3155
+ export declare type StabilizationMode = "low" | "medium" | "high";
3156
+
3157
+ /**
3158
+ * @public
3159
+ * see: {@link NorskTransform.streamChaosMonkey}
3160
+ */
3161
+ export declare class StreamChaosMonkeyNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3162
+ close(): void;
3163
+ }
3164
+
3165
+ /**
3166
+ * @public
3167
+ * The settings for a Chaos Monkey
3168
+ * see: {@link NorskTransform.streamChaosMonkey}
3169
+ * */
3170
+ export declare interface StreamChaosMonkeySettings extends ProcessorNodeSettings<StreamChaosMonkeyNode> {
3171
+ /** Optional configuration to drop frames from a stream
3172
+ * leaving this undefined means don't drop any frames
3173
+ * */
3174
+ frameDrop?: DropRandom | DropEvery;
3175
+ /**
3176
+ * Introduce random jitter
3177
+ */
3178
+ jitterMs?: number;
3179
+ }
3180
+
3181
+ /** @public */
3182
+ export declare interface StreamKey {
3183
+ streamId: number;
3184
+ programNumber: number;
3185
+ sourceName: string;
3186
+ renditionName: string;
3187
+ }
3188
+
3189
+ /**
3190
+ * @public
3191
+ * see: {@link NorskTransform.streamKeyOverride}
3192
+ */
3193
+ export declare class StreamKeyOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3194
+ close(): void;
3195
+ }
3196
+
3197
+ /**
3198
+ * @public
3199
+ * Settings for a Stream Key Override
3200
+ * see: {@link NorskTransform.streamKeyOverride}
3201
+ * */
3202
+ export declare interface StreamKeyOverrideSettings extends ProcessorNodeSettings<StreamKeyOverrideNode> {
3203
+ /** The stream key that all frames passing through this node will be assigned */
3204
+ streamKey: StreamKey;
3205
+ }
3206
+
3207
+ /** @public */
3208
+ export declare interface StreamKeySettings {
3209
+ /** Source name. Default: the rtmp app */
3210
+ sourceName?: string;
3211
+ /** Program number. Default: 1 */
3212
+ programNumber?: number;
3213
+ /** Stream Id. Default: 1 for audio, 2 for video */
3214
+ streamId?: number;
3215
+ /** Rendition name. Default: the stream publishing name */
3216
+ renditionName?: string;
3217
+ }
3218
+
3219
+ /** @public */
3220
+ export declare interface StreamMetadata {
3221
+ streamKey?: StreamKey;
3222
+ message: StreamMetadataMessage;
3223
+ }
3224
+
3225
+ /** @public */
3226
+ export declare type StreamMetadataMessage = {
3227
+ case: "audio";
3228
+ value: AudioStreamMetadata;
3229
+ } | {
3230
+ case: "video";
3231
+ value: VideoStreamMetadata;
3232
+ } | {
3233
+ case: "subtitle";
3234
+ value: SubtitleStreamMetadata;
3235
+ } | {
3236
+ case: "playlist";
3237
+ value: PlaylistStreamMetadata;
3238
+ } | {
3239
+ case: "ancillary";
3240
+ value: AncillaryStreamMetadata;
3241
+ } | {
3242
+ case: undefined;
3243
+ value?: undefined;
3244
+ };
3245
+
3246
+ /**
3247
+ * @public
3248
+ * see: {@link NorskTransform.streamMetadataOverride}
3249
+ */
3250
+ export declare class StreamMetadataOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3251
+ /**
3252
+ * @public
3253
+ * Updates the config used by this metadata override node for all subsequent frames
3254
+ * @param settings - The new settings
3255
+ */
3256
+ updateConfig(settings: StreamMetadataOverrideSettingsUpdate): void;
3257
+ close(): void;
3258
+ }
3259
+
3260
+ /**
3261
+ * @public
3262
+ * Settings for a Stream Key Metadata Override Node
3263
+ * see: {@link NorskTransform.streamMetadataOverride}
3264
+ * */
3265
+ export declare interface StreamMetadataOverrideSettings extends ProcessorNodeSettings<StreamMetadataOverrideNode>, StreamMetadataOverrideSettingsUpdate {
3266
+ }
3267
+
3268
+ /** @public */
3269
+ export declare interface StreamMetadataOverrideSettingsUpdate {
3270
+ video?: {
3271
+ /** Override the bitrate metadata of a compressed video stream, or `0` to clear */
3272
+ bitrate?: number;
3273
+ };
3274
+ audio?: {
3275
+ /** Override the bitrate metadata of a compressed audio stream, or `0` to clear */
3276
+ bitrate?: number;
3277
+ /** Override the language metadata of an audio stream, or `""` to clear */
3278
+ language?: string;
3279
+ };
3280
+ subtitles?: {
3281
+ /** Override the language metadata of a subtitles stream, or `""` to clear */
3282
+ language?: string;
3283
+ };
3284
+ }
3285
+
3286
+ /** @public */
3287
+ export declare interface StreamStatistics {
3288
+ /** The size of the sample window in seconds */
3289
+ sampleSizeSeconds: number;
3290
+ /** The number of bits over the sample window */
3291
+ bitsForSample: number;
3292
+ /** The bitrate, in bits per second */
3293
+ bitrate: number;
3294
+ /** The number of frames over the sample window */
3295
+ framesForSample: number;
3296
+ /** The frame rate, in frames per second */
3297
+ framerate: number;
3298
+ /** The number of key frames over the sample window */
3299
+ keyFramesForSample: number;
3300
+ }
3301
+
3302
+ /** @public */
3303
+ export declare interface StreamStatisticsMixin {
3304
+ /**
3305
+ * Sampling rates for stream stats, in seconds
3306
+ */
3307
+ statsSampling?: PlainMessage<StreamStatisticsSampling>;
3308
+ /**
3309
+ * Called at periodic intervals when stream statistics are ready.
3310
+ * @eventProperty
3311
+ */
3312
+ onStreamStatistics?: (
3313
+ /** The stats */
3314
+ stats: MultiStreamStatistics) => void;
3315
+ onGopStructure?: (structure: GopStructure) => void;
3316
+ }
3317
+
3318
+ /**
3319
+ * @public
3320
+ * see {@link NorskControl.streamStatistics}.
3321
+ */
3322
+ export declare class StreamStatisticsNode extends AutoProcessorMediaNode<"audio" | "video"> {
3323
+ close(): void;
3324
+ }
3325
+
3326
+ /**
3327
+ * @public
3328
+ * Settings for a Stream Statistics Node
3329
+ * see: {@link NorskControl.streamStatistics}
3330
+ */
3331
+ export declare interface StreamStatisticsSettings extends ProcessorNodeSettings<StreamStatisticsNode>, StreamStatisticsMixin {
3332
+ /**
3333
+ * Called periodically with the stream stats
3334
+ * @param stats - The statistics for the stream
3335
+ * @eventProperty
3336
+ */
3337
+ onStreamStatistics: (stats: MultiStreamStatistics) => void;
3338
+ /**
3339
+ * Sampling rates for stream stats, in seconds
3340
+ */
3341
+ statsSampling?: PlainMessage<StreamStatisticsSampling>;
3342
+ }
3343
+
3344
+ /**
3345
+ * @public
3346
+ * see: {@link NorskControl.streamSwitchHard}
3347
+ */
3348
+ export declare class StreamSwitchHardNode<Pins extends string> extends ProcessorMediaNode<Pins> {
3349
+ switchSource(newSource: Pins): void;
3350
+ close(): void;
3351
+ }
3352
+
3353
+ /**
3354
+ * @public
3355
+ * Settings for the Hard Stream Switch
3356
+ * see: {@link NorskControl.streamSwitchHard}
3357
+ * */
3358
+ export declare interface StreamSwitchHardSettings<Pins extends string> extends ProcessorNodeSettings<StreamSwitchHardNode<Pins>> {
3359
+ /** The currently active source to display on the output */
3360
+ activeSource: Pins;
3361
+ /** the source name to give the output of this switch operation */
3362
+ outputSource: string;
3363
+ }
3364
+
3365
+ /**
3366
+ * @public
3367
+ * see: {@link NorskControl.streamSwitchSmooth}
3368
+ */
3369
+ export declare class StreamSwitchSmoothNode<Pins extends string> extends ProcessorMediaNode<Pins> {
3370
+ /**
3371
+ * @public
3372
+ * Switches the source used for the current output of this node
3373
+ */
3374
+ switchSource(newSource: Pins): void;
3375
+ close(): void;
3376
+ }
3377
+
3378
+ /**
3379
+ * @public
3380
+ * Settings for the Smooth Source Switch
3381
+ * see {@link NorskControl.streamSwitchSmooth}
3382
+ * */
3383
+ export declare interface StreamSwitchSmoothSettings<Pins extends string> extends ProcessorNodeSettings<StreamSwitchSmoothNode<Pins>> {
3384
+ /** The presently active source being used to generate output for this node */
3385
+ activeSource?: Pins;
3386
+ /** The source name given to the output of this node */
3387
+ outputSource: string;
3388
+ /** How many milliseconds to use for the fade operation between two sources */
3389
+ transitionDurationMs?: number;
3390
+ /** The constant resolution that all output video will be scaled to */
3391
+ outputResolution: Resolution;
3392
+ /** The constant samplerate that all output audio will be resampled to */
3393
+ sampleRate: SampleRate;
3394
+ /** Callback which will be called if a switch request cannot be fulfilled */
3395
+ onSwitchError?: (message: string, inputPin?: Pins) => void;
3396
+ onInboundContextChange?: (allStreams: Map<Pins, StreamMetadata[]>) => Promise<void>;
3397
+ }
3398
+
3399
+ /**
3400
+ * @public
3401
+ * see: {@link NorskTransform.streamSync}
3402
+ */
3403
+ export declare class StreamSyncNode extends AutoProcessorMediaNode<"audio" | "video"> {
3404
+ close(): void;
3405
+ }
3406
+
3407
+ /**
3408
+ * @public
3409
+ * Settings for a StreamSync node
3410
+ * see {@link NorskTransform.streamSync}
3411
+ * */
3412
+ export declare interface StreamSyncSettings extends ProcessorNodeSettings<StreamSyncNode> {
3413
+ }
3414
+
3415
+ /**
3416
+ * @public
3417
+ * see: {@link NorskTransform.streamTimestampNudge}
3418
+ */
3419
+ export declare class StreamTimestampNudgeNode extends AutoProcessorMediaNode<"audio" | "video"> {
3420
+ /**
3421
+ * @public
3422
+ * Applies a gradual nudge to the stream timestamps by the specified number of milliseconds
3423
+ * */
3424
+ nudge(nudge: number): void;
3425
+ close(): void;
3426
+ }
3427
+
3428
+ /**
3429
+ * @public
3430
+ * Settings for a Stream Timestamp Nudge
3431
+ * see: {@link NorskTransform.streamTimestampNudge}
3432
+ * */
3433
+ export declare interface StreamTimestampNudgeSettings extends ProcessorNodeSettings<StreamTimestampNudgeNode> {
3434
+ nudge?: number;
3435
+ }
3436
+
3437
+ /**
3438
+ * @public
3439
+ * Errors found while setting up subscriptions, separated out by reason:
3440
+ *
3441
+ * - `internal`: An opaque internal error
3442
+ *
3443
+ * - `unknownSubscriber`: The media node requesting the subscription does not exist
3444
+ *
3445
+ * - `multipleStreams`: Multiple stream keys found for the context type
3446
+ *
3447
+ * - `sourceSubscriptionError`: Per-source errors
3448
+ */
3449
+ export declare type SubscriptionError = {
3450
+ info: string;
3451
+ reason: "internal";
3452
+ } | {
3453
+ mediaNodeId: MediaNodeId;
3454
+ reason: "unknownSubscriber";
3455
+ } | {
3456
+ contextType: ContextType;
3457
+ streamKeys: StreamKey[];
3458
+ reason: "multipleStreams";
3459
+ } | {
3460
+ sourceErrors: SourceSubscriptionError[];
3461
+ reason: "sourceSubscriptionError";
3462
+ };
3463
+
3464
+ /** @public */
3465
+ export declare function subtitlesToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
3466
+
3467
+ /**
3468
+ * @public
3469
+ * Returns the stream keys for subtitle streams in a media context
3470
+ * @param streams - The media context from which to return the stream keys
3471
+ * @returns The subtitle stream keys in the media context
3472
+ */
3473
+ export declare function subtitleStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
3474
+
3475
+ /** @public */
3476
+ export declare interface SubtitleStreamMetadata {
3477
+ }
3478
+
3479
+ /**
3480
+ * @public
3481
+ * Filters a context to only the subtitle streams within it
3482
+ * @param streams - The media context from which to return the streams
3483
+ * @returns The subtitle streams in the media context
3484
+ */
3485
+ export declare function subtitleStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
3486
+
3487
+ declare class TsCommonInputNode<SourceMessage, T extends SourceMediaNode> extends SourceMediaNode {
3488
+ constructor(tsType: TsInputType, client: MediaClient, settings: SourceNodeSettings<T> & StreamStatisticsMixin, nudgeFn: (nudge: TimestampProgramNudge) => SourceMessage, onEof: (() => void) | undefined, grpcStartFn: () => grpc.ClientDuplexStream<SourceMessage, TsInputEvent>);
3489
+ /**
3490
+ * @public
3491
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
3492
+ * */
3493
+ nudge(programNumber: number, nudge: number): void;
3494
+ close(): void;
3495
+ }
3496
+
3497
+ declare enum TsInputType {
3498
+ TsFile = 0,
3499
+ Srt = 1,
3500
+ Udp = 2,
3501
+ M3u8 = 3
3502
+ }
3503
+
3504
+ /**
3505
+ * @public
3506
+ * see: {@link NorskInput.udpTs}
3507
+ */
3508
+ export declare class UdpTsInputNode extends TsCommonInputNode<UdpTsInputMessage, UdpTsInputNode> {
3509
+ }
3510
+
3511
+ /**
3512
+ * @public
3513
+ * Settings for a UDP Transport Stream input
3514
+ * see: {@link NorskInput.udpTs}
3515
+ * */
3516
+ export declare interface UdpTsInputSettings extends RemoteInputSettings<UdpTsInputNode> {
3517
+ }
3518
+
3519
+ /**
3520
+ * @public
3521
+ * see: {@link NorskOutput.udpTs}
3522
+ */
3523
+ export declare class UdpTsOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
3524
+ close(): void;
3525
+ }
3526
+
3527
+ /**
3528
+ * @public
3529
+ * The settings for an output Transport Stream over UDP
3530
+ * see: {@link NorskOutput.udpTs}
3531
+ */
3532
+ export declare interface UdpTsOutputSettings extends SinkNodeSettings<UdpTsOutputNode>, StreamStatisticsMixin {
3533
+ /**
3534
+ * The IP address to publish to
3535
+ * This can be multicast, unicast or broadcast
3536
+ */
3537
+ destinationIp: string;
3538
+ /**
3539
+ * The interface to bind to for publishing
3540
+ * This can be 'any', 'loopback' or any named interface on the machine
3541
+ * Note: If running inside docker this may be different to expected
3542
+ */
3543
+ iface: string;
3544
+ /** The port to send to */
3545
+ port: number;
3546
+ /** Jitter buffer delay in milliseconds */
3547
+ bufferDelayMs?: number;
3548
+ }
3549
+
3550
+ /**
3551
+ * @public
3552
+ * An update request for credentials on a CMAF output
3553
+ */
3554
+ export declare interface UpdateCredentials {
3555
+ /**
3556
+ * The id of the destination that is to be updated (see {@link HlsPushDestinationSettings.id})
3557
+ */
3558
+ destinationId: string;
3559
+ /**
3560
+ * the new credentials to be used by the destination
3561
+ */
3562
+ awsCredentials: AwsCredentials;
3563
+ }
3564
+
3565
+ /** @public */
3566
+ export declare type VancPayloadFormat = "other" | "afd_bar" | "pan_scan" | "scte104" | "dvb_scte_vbi" | "op47_sdp" | "op47_vanc_multipacket" | "ancillary_time_code" | "eia_708" | "eia_608";
3567
+
3568
+ /** @public */
3569
+ export declare interface VancType2AncillaryId {
3570
+ did: number;
3571
+ sdid: number;
3572
+ }
3573
+
3574
+ export { Version }
3575
+
3576
+ /**
3577
+ * @public
3578
+ * see: {@link NorskTransform.videoCompose}
3579
+ */
3580
+ export declare class VideoComposeNode<Pins extends string> extends ProcessorMediaNode<Pins> {
3581
+ /**
3582
+ * @public
3583
+ * Updates the config used for a video compose operation
3584
+ * If transitions are specified, animations will be provided, otherwise
3585
+ * the change will be immediate
3586
+ *
3587
+ * Note: This is not a 'cheap' operation and care should be taken not to
3588
+ * do this too often (more than once a second for example!)
3589
+ */
3590
+ updateConfig(settings: VideoComposeSettingsUpdate<Pins>): void;
3591
+ close(): void;
3592
+ }
3593
+
3594
+ /** @public */
3595
+ export declare interface VideoComposeSettings<Pins extends string> extends ProcessorNodeSettings<VideoComposeNode<Pins>> {
3596
+ /**
3597
+ * Required. Stream key of the reference stream. This is the video stream
3598
+ * which defines the output frame timing, which will typically be part of the
3599
+ * composition, e.g. the main picture in the case of a simple
3600
+ * overlay/picture-in-picture, or the top left quadrant of a 4-way split
3601
+ * screen.
3602
+ */
3603
+ referenceStream: Pins;
3604
+ /** The parts (images/overlays) to include in the composition */
3605
+ parts: readonly ComposePart<Pins>[];
3606
+ /**
3607
+ * Optionally supply a fallback reference resolution. This allows description of the
3608
+ * composition in a desired coordinate system, e.g. a resolution of 100x100
3609
+ * can be specified to allow the source and destination areas to be described
3610
+ * in percentage terms, or a notional resolution can be used that is
3611
+ * independent of the source resolutions that may be provided.
3612
+ *
3613
+ * If set here, this reference resolution will be applied to
3614
+ * any parts that do not have their own reference resolution specified
3615
+ *
3616
+ * If not provided the source and destination rectangles are in terms of the
3617
+ * source and output resolutions respectively.
3618
+ */
3619
+ referenceResolution?: Resolution;
3620
+ /** The resolution of the output video */
3621
+ outputResolution: Resolution;
3622
+ /**
3623
+ * Output pixel format to use. If not specified, this will be chosen
3624
+ * automatically based on the sources present in the initial composition
3625
+ */
3626
+ outputPixelFormat?: PixelFormat;
3627
+ /**
3628
+ * Behaviour in the case of a missing stream used in an active composition
3629
+ * part. Note that this does not apply to the reference stream, but to every
3630
+ * part which does not use the reference stream, whether at startup or on
3631
+ * context change.
3632
+ *
3633
+ * Missing means not present in the context or never having sent a frame.
3634
+ */
3635
+ missingStreamBehaviour?: ComposeMissingStreamBehaviour;
3636
+ /**
3637
+ * Optionally attempt to perform the compose operation on hardware
3638
+ */
3639
+ hardwareAcceleration?: ComposeHardwareAcceleration;
3640
+ /**
3641
+ * Called when the transitions specified in the last config update have
3642
+ * completed (in the case of multiple parts with specified transitions of
3643
+ * different duration, this means that the last remaining transitions have
3644
+ * completed
3645
+ */
3646
+ onTransitionComplete?: () => void;
3647
+ }
3648
+
3649
+ /**
3650
+ * @public
3651
+ * An update operation for a VideoCompose operation
3652
+ * see: {@link VideoComposeNode.updateConfig}
3653
+ * */
3654
+ export declare interface VideoComposeSettingsUpdate<Pins extends string> {
3655
+ /** Update the parts (images/overlays) to include in the composition */
3656
+ parts: readonly ComposePart<Pins>[];
3657
+ }
3658
+
3659
+ /**
3660
+ * @public
3661
+ * see: {@link NorskTransform.videoEncode}
3662
+ */
3663
+ export declare class VideoEncodeNode extends AutoProcessorMediaNode<"video"> {
3664
+ close(): void;
3665
+ }
3666
+
3667
+ /**
3668
+ * @public
3669
+ * A single rung in a video encode ladder
3670
+ * see: {@link NorskTransform.videoEncode}
3671
+ * */
3672
+ export declare interface VideoEncodeRung {
3673
+ /** The name of this rung, this should be unique across the ladder
3674
+ * and will end up in the renditionName of the outgoing StreamKey
3675
+ */
3676
+ name: string;
3677
+ /** The width of the outgoing video resolution */
3678
+ width: number;
3679
+ /** The height of the outgoing video resolution */
3680
+ height: number;
3681
+ /**
3682
+ * Optionally change the frameRate for this rendition
3683
+ * This can be useful if the input is 50FPS for example and some
3684
+ * lower rungs need to be 25fps
3685
+ *
3686
+ * Note: If you wish to apply the same frame rate across all rungs, it is
3687
+ * more efficient to use a single {@link VideoTransformNode} before the ladder
3688
+ * created with {@link NorskTransform.videoTransform} and leave this value undefined
3689
+ * */
3690
+ frameRate?: FrameRate;
3691
+ /**
3692
+ * Specifies the input video's Sample Aspect Ratio (SAR) to be used by the
3693
+ * encoder in width:height
3694
+ */
3695
+ sar?: SampleAspectRatio;
3696
+ /**
3697
+ * The codec (and detailed configuration) to use for the encoding operation.
3698
+ *
3699
+ * Note: Nvidia, Logan/Quadra, and Xilinx require Nvidia, Logan/Quadra and Xilinx hardware to be set up and
3700
+ * made available to Norsk
3701
+ *
3702
+ * A ladder can use several different codecs across its various rungs and the
3703
+ * VideoEncode node will attempt to build a pipeline that uses the hardware efficiently
3704
+ */
3705
+ codec: X264Codec | X265Codec | NvidiaH264 | NvidiaHevc | LoganH264 | LoganHevc | QuadraH264 | QuadraHevc | XilinxH264 | XilinxHevc;
3706
+ }
3707
+
3708
+ /**
3709
+ * @public
3710
+ * Settings for a VideoEncode operation
3711
+ * see: {@link NorskTransform.videoEncode}
3712
+ * */
3713
+ export declare interface VideoEncodeSettings extends ProcessorNodeSettings<VideoEncodeNode> {
3714
+ rungs: readonly VideoEncodeRung[];
3715
+ }
3716
+
3717
+ /**
3718
+ * @public
3719
+ * Returns the stream keys for video streams in a media context
3720
+ * @param streams - The media context from which to return the stream keys
3721
+ * @returns The video stream keys in the media context
3722
+ */
3723
+ export declare function videoStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
3724
+
3725
+ /** @public */
3726
+ export declare interface VideoStreamMetadata {
3727
+ codec: string;
3728
+ width: number;
3729
+ height: number;
3730
+ }
3731
+
3732
+ /**
3733
+ * @public
3734
+ * Filters a context to only the video streams within it
3735
+ * @param streams - The media context from which to return the streams
3736
+ * @returns The video streams in the media context
3737
+ */
3738
+ export declare function videoStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
3739
+
3740
+ /** @public */
3741
+ export declare function videoToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
3742
+
3743
+ /**
3744
+ * @public
3745
+ * see: {@link NorskTransform.videoTransform}
3746
+ */
3747
+ export declare class VideoTransformNode extends AutoProcessorMediaNode<"video"> {
3748
+ close(): void;
3749
+ }
3750
+
3751
+ /**
3752
+ * @public
3753
+ * Settings for a Video Transform node
3754
+ * see: {@link NorskTransform.videoTransform}
3755
+ * */
3756
+ export declare interface VideoTransformSettings extends ProcessorNodeSettings<VideoTransformNode> {
3757
+ /** An optional resolution to rescale this single stream to */
3758
+ resolution?: Resolution;
3759
+ /** An optional framerate to resample this single stream to */
3760
+ frameRate?: FrameRate;
3761
+ /** An optional SAR to set on the outgoing stream
3762
+ * Note: You can set this and only this if the SAR on your incoming stream is incorrect
3763
+ * for example (An often-seen problem with sources)
3764
+ * */
3765
+ sar?: SampleAspectRatio;
3766
+ }
3767
+
3768
+ /**
3769
+ * @public
3770
+ * see: {@link NorskDuplex.webRtcBrowser}
3771
+ */
3772
+ export declare class WebRTCBrowserNode extends AutoProcessorMediaNode<"audio" | "video"> {
3773
+ /** @public The URL of the local player */
3774
+ playerUrl: string;
3775
+ close(): void;
3776
+ }
3777
+
3778
+ /**
3779
+ * @public
3780
+ * Settings for a WebRTC browser session
3781
+ * see: {@link NorskDuplex.webRtcBrowser}
3782
+ * */
3783
+ export declare interface WebRTCBrowserSettings extends ProcessorNodeSettings<WebRTCBrowserNode>, StreamStatisticsMixin {
3784
+ iceServers?: IceServerSettings[];
3785
+ reportedIceServers?: IceServerSettings[];
3786
+ hostIps?: string[];
3787
+ serverReflexiveIps?: string[];
3788
+ /** Jitter buffer delay in milliseconds */
3789
+ bufferDelayMs?: number;
3790
+ }
3791
+
3792
+ /**
3793
+ * @public
3794
+ * see: {@link NorskOutput.whep}
3795
+ */
3796
+ export declare class WhepOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
3797
+ /** @public The URL of the local player */
3798
+ playerUrl: string;
3799
+ /** @public The URL of the WHEP endpoint */
3800
+ endpointUrl: string;
3801
+ close(): void;
3802
+ }
3803
+
3804
+ /**
3805
+ * @public
3806
+ * The settings for a WebRTC WHEP Output
3807
+ * see {@link NorskOutput.whep}
3808
+ */
3809
+ export declare interface WhepOutputSettings extends SinkNodeSettings<WhepOutputNode>, StreamStatisticsMixin {
3810
+ iceServers?: IceServerSettings[];
3811
+ reportedIceServers?: IceServerSettings[];
3812
+ hostIps?: string[];
3813
+ serverReflexiveIps?: string[];
3814
+ /** Jitter buffer delay in milliseconds */
3815
+ bufferDelayMs?: number;
3816
+ }
3817
+
3818
+ /**
3819
+ * @public
3820
+ * see: {@link NorskInput.whip}
3821
+ */
3822
+ export declare class WhipInputNode extends SourceMediaNode {
3823
+ /** @public The URL of the local test client */
3824
+ clientUrl: string;
3825
+ /** @public The URL of the WHIP endpoint */
3826
+ endpointUrl: string;
3827
+ /**
3828
+ * @public
3829
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
3830
+ * */
3831
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
3832
+ close(): void;
3833
+ }
3834
+
3835
+ /** @public */
3836
+ export declare interface WhipInputSettings extends InputSettings<WhipInputNode>, StreamStatisticsMixin {
3837
+ /** List of ice servers to use as part of session negotiation */
3838
+ iceServers?: IceServerSettings[];
3839
+ /** Internal addresses for the ice servers (defaults to iceServers) */
3840
+ reportedIceServers?: IceServerSettings[];
3841
+ /**
3842
+ * List of IPs to advertise as your host address - useful e.g. when on a cloud server
3843
+ * so that the public rather than private IP is used.
3844
+ */
3845
+ hostIps?: string[];
3846
+ /**
3847
+ * Similar to hostIps, but a list of server reflexive candidates so that ICE negotiations can be
3848
+ * sped up
3849
+ */
3850
+ serverReflexiveIps?: string[];
3851
+ }
3852
+
3853
+ /**
3854
+ * @public
3855
+ * see: {@link NorskOutput.whip}
3856
+ */
3857
+ export declare class WhipOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
3858
+ close(): void;
3859
+ }
3860
+
3861
+ /**
3862
+ * @public
3863
+ * The settings for a WebRTC Whip Output
3864
+ * see {@link NorskOutput.whip}
3865
+ */
3866
+ export declare interface WhipOutputSettings extends SinkNodeSettings<WhipOutputNode>, StreamStatisticsMixin {
3867
+ /**
3868
+ * The URI to make the initial publish request to (as per the WHIP protocol)
3869
+ */
3870
+ uri: string;
3871
+ /** The auth header to supply (for example: 'Bearer: mybearertoken') */
3872
+ authHeader: string;
3873
+ /** Jitter buffer delay in milliseconds */
3874
+ bufferDelayMs?: number;
3875
+ }
3876
+
3877
+ /** @public */
3878
+ export declare interface X264Codec {
3879
+ type: "x264";
3880
+ threads?: number;
3881
+ /**
3882
+ * Rate control options - one of abr, cqp or crf
3883
+ */
3884
+ bitrateMode?: BitrateMode;
3885
+ /**
3886
+ * Set the H264 profile of the output
3887
+ */
3888
+ profile?: X264Profile;
3889
+ /**
3890
+ * Sets the level flag in the output
3891
+ */
3892
+ level?: X264Level;
3893
+ /**
3894
+ * Sets the minimum length between IDR frames
3895
+ */
3896
+ keyFrameIntervalMin?: number;
3897
+ /**
3898
+ * Sets the maximum length between IDR frames
3899
+ */
3900
+ keyFrameIntervalMax?: number;
3901
+ /**
3902
+ * Sets the maximum number of concurrent B-frames
3903
+ */
3904
+ bframes?: number;
3905
+ /**
3906
+ * Tune options to further optimize them for your input content. If you
3907
+ * specify a tuning, the changes will be applied after 'preset' but before all
3908
+ * other parameters.
3909
+ */
3910
+ tune?: X264Tune;
3911
+ /**
3912
+ * Change options to trade off compression efficiency against encoding speed.
3913
+ * If you specify a preset, the changes it makes will be applied before all
3914
+ * other parameters are applied.
3915
+ */
3916
+ preset?: X264Preset;
3917
+ /**
3918
+ * Maximum number of reference frames, i.e., the number of previous frames
3919
+ * each P-frame can use as references
3920
+ */
3921
+ frameReference?: number;
3922
+ /**
3923
+ * Enables CABAC (Context Adaptive Binary Arithmetic Coder) stream compression
3924
+ * instead of the less efficient CAVLC (Context Adaptive Variable Length
3925
+ * Coder) system. Significantly improves both the compression efficiency
3926
+ * (10-20% typically) and the decoding requirements, at the expense of encode
3927
+ * CPU requirements
3928
+ */
3929
+ cabac?: boolean;
3930
+ /**
3931
+ * Sets the maximum rate the VBV buffer should be assumed to refill at
3932
+ */
3933
+ vbvMaxRate?: number;
3934
+ /**
3935
+ * Sets the size of the VBV buffer in kilobits
3936
+ */
3937
+ vbvBufferSize?: number;
3938
+ /**
3939
+ * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
3940
+ * disables adaptive I-frame decisioning
3941
+ */
3942
+ sceneCut?: number;
3943
+ /**
3944
+ * Use access unit delimiters in the output
3945
+ */
3946
+ aud?: boolean;
3947
+ /**
3948
+ * Disables the loop filter. Not Recommended.
3949
+ */
3950
+ noDeblock?: boolean;
3951
+ /**
3952
+ * Signal HRD information
3953
+ */
3954
+ nalHrd?: X264NalHrd;
3955
+ }
3956
+
3957
+ /**
3958
+ * @public
3959
+ * See the X264 Docs for a description of this value
3960
+ * */
3961
+ export declare type X264Level = 1 | 1.1 | 1.2 | 1.3 | 2 | 2.1 | 2.2 | 3 | 3.1 | 3.2 | 4 | 4.1 | 4.2 | 5 | 5.1;
3962
+
3963
+ /**
3964
+ * @public
3965
+ * Three possible values:
3966
+ *
3967
+ * - "none": specify no HRD information
3968
+ *
3969
+ * - "vbr": specify HRD information
3970
+ *
3971
+ * - "cbr": specify HRD information and pack the bitstream to the bitrate specified
3972
+ *
3973
+ * See the X264 Docs for a further description of this value
3974
+ */
3975
+ export declare type X264NalHrd = "none" | "vbr" | "cbr";
3976
+
3977
+ /**
3978
+ * @public
3979
+ * See the X264 Docs for a description of this value
3980
+ * */
3981
+ export declare type X264Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";
3982
+
3983
+ /** @public */
3984
+ export declare type X264Profile = "baseline" | "main" | "high" | "high10" | "high422" | "high444";
3985
+
3986
+ /**
3987
+ * @public
3988
+ * See the X264 Docs for a description of this value
3989
+ * */
3990
+ export declare type X264Tune = "film" | "animation" | "grain" | "stillimage" | "psnr" | "ssim" | "fastdecode" | "zerolatency";
3991
+
3992
+ /** @public X265 codec */
3993
+ export declare interface X265Codec {
3994
+ type: "x265";
3995
+ threads?: number;
3996
+ bitrateMode?: BitrateMode;
3997
+ profile?: X265Profile;
3998
+ /**
3999
+ * Sets the level flag in the output
4000
+ */
4001
+ level?: X265Level;
4002
+ /**
4003
+ * Sets the minimum length between IDR frames
4004
+ */
4005
+ keyFrameIntervalMin?: number;
4006
+ /**
4007
+ * Sets the maximum length between IDR frames
4008
+ */
4009
+ keyFrameIntervalMax?: number;
4010
+ /**
4011
+ * Sets the maximum number of concurrent B-frames
4012
+ */
4013
+ bframes?: number;
4014
+ /**
4015
+ * Tune options to further optimize them for your input content. If you
4016
+ * specify a tuning, the changes will be applied after 'preset' but before all
4017
+ * other parameters.
4018
+ */
4019
+ tune?: X265Tune;
4020
+ /**
4021
+ * Change options to trade off compression efficiency against encoding speed.
4022
+ * If you specify a preset, the changes it makes will be applied before all
4023
+ * other parameters are applied.
4024
+ */
4025
+ preset?: X265Preset;
4026
+ /**
4027
+ * Maximum number of reference frames, i.e., the number of previous frames
4028
+ * each P-frame can use as references
4029
+ */
4030
+ frameReference?: number;
4031
+ /**
4032
+ * Sets the maximum rate the VBV buffer should be assumed to refill at
4033
+ */
4034
+ vbvMaxRate?: number;
4035
+ /**
4036
+ * Sets the size of the VBV buffer in kilobits
4037
+ */
4038
+ vbvBufferSize?: number;
4039
+ /**
4040
+ * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
4041
+ * disables adaptive I-frame decisioning
4042
+ */
4043
+ sceneCut?: number;
4044
+ /**
4045
+ * Use access unit delimiters in the output
4046
+ */
4047
+ aud?: boolean;
4048
+ /**
4049
+ * Disables the loop filter. Not Recommended.
4050
+ */
4051
+ noDeblock?: boolean;
4052
+ }
4053
+
4054
+ /**
4055
+ * @public
4056
+ * See the X265 Docs for a description of this value
4057
+ * */
4058
+ export declare type X265Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4059
+
4060
+ /**
4061
+ * @public
4062
+ * See the X265 Docs for a description of this value
4063
+ * */
4064
+ export declare type X265Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";
4065
+
4066
+ /**
4067
+ * @public
4068
+ * See the X265 Docs for a description of this value
4069
+ * */
4070
+ export declare type X265Profile = "main" | "main10" | "main444_8" | "main422_10" | "main444_10";
4071
+
4072
+ /**
4073
+ * @public
4074
+ * See the X265 Docs for a description of this value
4075
+ * */
4076
+ export declare type X265Tune = "psnr" | "ssim" | "grain" | "zerolatency" | "fastdecode" | "animation";
4077
+
4078
+ /**
4079
+ * @public
4080
+ * Settings for a H264 Encode using Netint Xilinx hardware
4081
+ * A detailed description of these params can be found
4082
+ * on the Netint Xilinx Encoder Documentation
4083
+ *
4084
+ * These fields have deliberately been written to maintain the same semantics as the
4085
+ * Xilinx documentation where possible.
4086
+ *
4087
+ * If left undefined, all will default to Xilinx's own defaults
4088
+ * */
4089
+ export declare interface XilinxH264 {
4090
+ type: "xilinx-h264";
4091
+ profile?: XilinxH264Profile;
4092
+ level?: XilinxH264Level;
4093
+ }
4094
+
4095
+ /** @public */
4096
+ export declare type XilinxH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4097
+
4098
+ /** @public */
4099
+ export declare type XilinxH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
4100
+
4101
+ /**
4102
+ * @public
4103
+ * Settings for a HEVC Encode using Netint Xilinx hardware
4104
+ * A detailed description of these params can be found
4105
+ * on the Netint Xilinx Encoder Documentation
4106
+ *
4107
+ * These fields have deliberately been written to maintain the same semantics as the
4108
+ * Xilinx documentation where possible.
4109
+ *
4110
+ * If left undefined, all will default to Xilinx's own defaults
4111
+ * */
4112
+ export declare interface XilinxHevc {
4113
+ type: "xilinx-hevc";
4114
+ profile?: XilinxHevcProfile;
4115
+ level?: XilinxHevcLevel;
4116
+ tier?: XilinxHevcTier;
4117
+ }
4118
+
4119
+ /** @public */
4120
+ export declare type XilinxHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4121
+
4122
+ /** @public */
4123
+ export declare type XilinxHevcProfile = "main" | "main10";
4124
+
4125
+ /** @public */
4126
+ export declare type XilinxHevcTier = "main" | "high";
4127
+
4128
+ export { }