@norskvideo/norsk-sdk 1.0.349 → 1.0.351

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4957 @@
1
+ /// <reference types="node" />
2
+
3
+ import { AudioCodec } from '@norskvideo/norsk-api/lib/media_pb';
4
+ import { CmafAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
5
+ import { CmafMultiVariantMessage } from '@norskvideo/norsk-api/lib/media_pb';
6
+ import { CmafVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
7
+ import { CmafWebVttMessage } from '@norskvideo/norsk-api/lib/media_pb';
8
+ import { CurrentLoad } from '@norskvideo/norsk-api/lib/shared/common_pb';
9
+ import { ExplicitChannel } from '@norskvideo/norsk-api/lib/media_pb';
10
+ import { FileTsInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
11
+ import { FrameRate as FrameRate_2 } from '@norskvideo/norsk-api/lib/media_pb';
12
+ import { GopStructure } from '@norskvideo/norsk-api/lib/media_pb';
13
+ import * as grpc from '@grpc/grpc-js';
14
+ import { HlsOutputEvent } from '@norskvideo/norsk-api/lib/media_pb';
15
+ import { HlsTsAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
16
+ import { HlsTsCombinedPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
17
+ import { HlsTsMultiVariantMessage } from '@norskvideo/norsk-api/lib/media_pb';
18
+ import { HlsTsVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
19
+ import { MediaClient } from '@norskvideo/norsk-api/lib/media_grpc_pb';
20
+ import { Nullable } from 'typescript-nullable';
21
+ import { PlainMessage } from '@bufbuild/protobuf';
22
+ import { Readable } from 'stream';
23
+ import { RtmpError_UnsupportedAudio } from '@norskvideo/norsk-api/lib/media_pb';
24
+ import { RtmpError_UnsupportedVideo } from '@norskvideo/norsk-api/lib/media_pb';
25
+ import { Scte35SpliceInfoSection as Scte35SpliceInfoSection_2 } from '@norskvideo/norsk-api/lib/media_pb';
26
+ import { StreamKey as StreamKey_2 } from '@norskvideo/norsk-api/lib/media_pb';
27
+ import { StreamStatisticsSampling } from '@norskvideo/norsk-api/lib/media_pb';
28
+ import { Subscription } from '@norskvideo/norsk-api/lib/media_pb';
29
+ import { TimestampProgramNudge } from '@norskvideo/norsk-api/lib/media_pb';
30
+ import { TsInputEvent } from '@norskvideo/norsk-api/lib/media_pb';
31
+ import { UdpTsInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
32
+ import { VancPayloadFormat as VancPayloadFormat_2 } from '@norskvideo/norsk-api/lib/media_pb';
33
+ import { Version } from '@norskvideo/norsk-api/lib/shared/common_pb';
34
+ import { Wave } from '@norskvideo/norsk-api/lib/media_pb';
35
+ import { Writable } from 'stream';
36
+
37
+ /** @public */
38
+ export declare type AacProfile = "lc" | "main" | "high";
39
+
40
+ /**
41
+ * @public
42
+ * Settings for an AAC encode
43
+ * see: {@link NorskTransform.audioEncode}
44
+ */
45
+ export declare interface AacSettings {
46
+ kind: "aac";
47
+ /** The output sample rate of this AAC encode */
48
+ sampleRate: SampleRate;
49
+ /** The AAC profile of this AAC encode */
50
+ profile: AacProfile;
51
+ }
52
+
53
+ /** @public */
54
+ export declare interface AdMarker {
55
+ id: string;
56
+ startDate: Date;
57
+ durationSeconds: number;
58
+ scte35: Scte35SpliceInfoSection;
59
+ }
60
+
61
+ export declare class AncillaryNode extends AutoProcessorMediaNode<"ancillary"> {
62
+ sendScte35(key: StreamKey, info: Scte35SpliceInfoSection_2): void;
63
+ }
64
+
65
+ export declare interface AncillarySettings extends ProcessorNodeSettings<AncillaryNode> {
66
+ onScte35?: (stream: StreamKey, message: Scte35SpliceInfoSection_2) => void;
67
+ onSmpte2038?: (stream: StreamKey, message: Smpte2038Message) => void;
68
+ }
69
+
70
+ /**
71
+ * @public
72
+ * Returns the stream keys for ancillary streams in a media context
73
+ * @param streams - The media context from which to return the stream keys
74
+ * @returns The ancillary stream keys in the media context
75
+ */
76
+ export declare function ancillaryStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
77
+
78
+ /** @public */
79
+ export declare interface AncillaryStreamMetadata {
80
+ }
81
+
82
+ /**
83
+ * @public
84
+ * Filters a context to only the ancillary streams within it
85
+ * @param streams - The media context from which to return the streams
86
+ * @returns The ancillary streams in the media context
87
+ */
88
+ export declare function ancillaryStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
89
+
90
+ /**
91
+ * @public
92
+ * see: {@link NorskTransform.audioBuildMultichannel}
93
+ */
94
+ export declare class AudioBuildMultichannelNode extends AutoProcessorMediaNode<"audio"> {
95
+ }
96
+
97
+ /**
98
+ * @public
99
+ * Settings for an Audio Build Multichannel Node
100
+ * see: {@link NorskTransform.audioBuildMultichannel}
101
+ * */
102
+ export declare interface AudioBuildMultichannelSettings extends ProcessorNodeSettings<AudioBuildMultichannelNode> {
103
+ /** The channel layout of the built outgoing stream */
104
+ channelLayout: ChannelLayout;
105
+ sampleRate: SampleRate;
106
+ /**
107
+ * Stream keys specifying the source for each channel, where the order is
108
+ * significant. The streams must all have the same sample format and sample
109
+ * rate.
110
+ */
111
+ channelList: readonly StreamKey[];
112
+ /**
113
+ * Callback invoked when the inbound context changes
114
+ * a new channel list can be returned here that overrides the initial configuration
115
+ * and allows the channel order to be changed at runtime
116
+ */
117
+ onInputChanged?: (keys: StreamKey[]) => StreamKey[] | undefined;
118
+ /** The stream key to use for the outoging stream*/
119
+ outputStreamKey: StreamKey;
120
+ }
121
+
122
+ export { AudioCodec }
123
+
124
+ /**
125
+ * @public
126
+ * see: {@link NorskTransform.audioEncode}
127
+ */
128
+ export declare class AudioEncodeNode extends AutoProcessorMediaNode<"audio"> {
129
+ }
130
+
131
+ /**
132
+ * @public
133
+ * Settings for an audio encode
134
+ * see: {@link NorskTransform.audioEncode}
135
+ * */
136
+ export declare interface AudioEncodeSettings extends ProcessorNodeSettings<AudioEncodeNode> {
137
+ /**
138
+ * The channel layout of this encode
139
+ * Note: If the channel layout doesn't match then it will be automatically converted
140
+ * to gain greater control over this process, see {@link NorskTransform.audioMix} and {@link NorskTransform.audioMixMatrix}
141
+ * */
142
+ channelLayout: ChannelLayout;
143
+ /** The target bitrate of this encode */
144
+ bitrate: number;
145
+ /** The name given to the rendition portion of the stream key assigned to this node's output */
146
+ outputRenditionName: string;
147
+ /** What codec to (re) encode the audio to */
148
+ codec: OpusSettings | AacSettings;
149
+ }
150
+
151
+ /**
152
+ * @public
153
+ * see: {@link NorskTransform.audioGain}
154
+ */
155
+ export declare class AudioGainNode extends AutoProcessorMediaNode<"audio"> {
156
+ /**
157
+ * @public
158
+ * Updates the config of this AudioGain node for all subsequent frames
159
+ * this allows the user to change the gains in the outgoing stream
160
+ * dynamically as the stream progresses
161
+ * @param settings - The updated settings
162
+ */
163
+ updateConfig(settings: AudioGainSettingsUpdate): void;
164
+ }
165
+
166
+ /**
167
+ * @public
168
+ * Settings for an Audio Gain node
169
+ * see: {@link NorskTransform.audioGain}
170
+ * */
171
+ export declare interface AudioGainSettings extends ProcessorNodeSettings<AudioGainNode> {
172
+ /** A vector of gains for this source, one for each channel */
173
+ channelGains: readonly Gain[];
174
+ }
175
+
176
+ /**
177
+ * @public
178
+ * An update operation for an Audio Gain node
179
+ * see: {@link AudioGainNode.updateConfig}
180
+ * */
181
+ export declare interface AudioGainSettingsUpdate {
182
+ /** A vector of gains for this source, one for each channel */
183
+ channelGains?: readonly Gain[];
184
+ }
185
+
186
+ /** @public */
187
+ export declare interface AudioMeasureLevels {
188
+ stream: StreamKey;
189
+ pts: Interval;
190
+ channelLevels: ChannelLevels[];
191
+ }
192
+
193
+ /**
194
+ * @public
195
+ * see: {@link NorskControl.audioMeasureLevels}.
196
+ */
197
+ export declare class AudioMeasureLevelsNode extends AutoProcessorMediaNode<"audio"> {
198
+ }
199
+
200
+ /**
201
+ * @public
202
+ * Settings for an AudioMeasureLevelsNode
203
+ * see: {@link NorskControl.audioMeasureLevels}
204
+ */
205
+ export declare interface AudioMeasureLevelsSettings extends ProcessorNodeSettings<AudioMeasureLevelsNode> {
206
+ /**
207
+ * Called with the audio level data
208
+ * @param levels - The level data for the audio stream
209
+ * @eventProperty
210
+ */
211
+ onData: (levels: AudioMeasureLevels) => void;
212
+ intervalFrames?: number;
213
+ }
214
+
215
+ /**
216
+ * @public
217
+ * see: {@link NorskTransform.audioMixMatrix}
218
+ */
219
+ export declare class AudioMixMatrixNode extends AutoProcessorMediaNode<"audio"> {
220
+ /**
221
+ * @public
222
+ * Updates the config of this AudioMixMatrix node for all subsequent frames
223
+ * this allows the user to change the gains in the outgoing mix
224
+ * dynamically as the stream progresses
225
+ * @param settings - The updated settings
226
+ */
227
+ updateConfig(settings: AudioMixMatrixSettingsUpdate): void;
228
+ }
229
+
230
+ /**
231
+ * @public
232
+ * Settings for the Audio Mix Matrix Node
233
+ * see: {@link NorskTransform.audioMixMatrix}
234
+ * */
235
+ export declare interface AudioMixMatrixSettings extends ProcessorNodeSettings<AudioMixMatrixNode> {
236
+ /** The NxM matrix of gains from N input channels to M output channels */
237
+ channelGains: readonly Gain[][];
238
+ /** The desired output channel layout, such as "5.1" */
239
+ outputChannelLayout: ChannelLayout;
240
+ }
241
+
242
+ /**
243
+ * @public
244
+ * Config update for the {@link AudioMixMatrixNode}.
245
+ * Call {@link AudioMixMatrixNode.updateConfig} for updating the config.
246
+ */
247
+ export declare interface AudioMixMatrixSettingsUpdate {
248
+ /** The NxM updated matrix of gains from N input channels to M output channels */
249
+ channelGains: readonly Gain[][];
250
+ }
251
+
252
+ /**
253
+ * @public
254
+ * see: {@link NorskTransform.audioMix}
255
+ */
256
+ export declare class AudioMixNode<Pins extends string> extends ProcessorMediaNode<Pins> {
257
+ /**
258
+ * @public
259
+ * Updates the config of this AudioMix for all subsequent frames
260
+ * this allows the user to change the levels and sources in the outgoing mix
261
+ * dynamically as the stream progresses
262
+ * @param settings - The updated settings
263
+ */
264
+ updateConfig(settings: AudioMixSettingsUpdate<Pins>): void;
265
+ }
266
+
267
+ /**
268
+ * @public
269
+ * The settings for an AudioMix operation
270
+ * see: {@link NorskTransform.audioMix}
271
+ * */
272
+ export declare interface AudioMixSettings<Pins extends string> extends ProcessorNodeSettings<AudioMixNode<Pins>> {
273
+ /** The audio sources to mix */
274
+ sources: readonly AudioMixSource<Pins>[];
275
+ /** The source name to use for the output stream */
276
+ outputSource: string;
277
+ /** The channel layout that the mixer runs at
278
+ * all audio streams will be normalised to this value and therefore
279
+ * this will be the output channel layout of this node */
280
+ channelLayout: ChannelLayout;
281
+ /** The sample rate that the mixer runs at
282
+ * all audio streams will be normalised to this value and therefore
283
+ * this will be the output sample rate of this node */
284
+ sampleRate?: SampleRate;
285
+ }
286
+
287
+ /**
288
+ * @public
289
+ * An update operation for an AudioMix node
290
+ * see: {@link AudioMixNode.updateConfig}
291
+ * */
292
+ export declare interface AudioMixSettingsUpdate<Pins extends string> {
293
+ /** The audio sources to mix along with their potentially new gain values */
294
+ sources: readonly AudioMixSource<Pins>[];
295
+ }
296
+
297
+ /**
298
+ * @public
299
+ * The settings for a single source within an AudioMix operation
300
+ * see: {@link NorskTransform.audioMix}
301
+ * */
302
+ export declare interface AudioMixSource<Pins> {
303
+ /** The name of the InputPin for this source */
304
+ pin: Pins;
305
+ /** A vector of gains for this source, one for each channel */
306
+ channelGains?: readonly Gain[];
307
+ }
308
+
309
+ /**
310
+ * @public
311
+ * see: {@link NorskInput.audioSignal}
312
+ */
313
+ export declare class AudioSignalGeneratorNode extends SourceMediaNode {
314
+ }
315
+
316
+ /**
317
+ * @public
318
+ * Settings for an Audio Signal Generator
319
+ * see: {@link NorskInput.audioSignal}
320
+ * */
321
+ export declare interface AudioSignalGeneratorSettings extends SourceNodeSettings<AudioSignalGeneratorNode> {
322
+ /** The source name to set in the stream key of the outgoing stream */
323
+ sourceName: string;
324
+ /** The audio channel layout of the generated stream */
325
+ channelLayout: ChannelLayout;
326
+ /** The sample rate of the generated stream */
327
+ sampleRate: SampleRate;
328
+ /** The sample format to use. Default: "fltp" */
329
+ sampleFormat?: SampleFormat;
330
+ /**
331
+ * Waveform - create one with {@link mkSine}
332
+ * */
333
+ wave?: Wave;
334
+ }
335
+
336
+ /**
337
+ * @public
338
+ * see: {@link NorskTransform.audioSplitMultichannel}
339
+ */
340
+ export declare class AudioSplitMultichannelNode extends AutoProcessorMediaNode<"audio"> {
341
+ }
342
+
343
+ /**
344
+ * @public
345
+ * Settings for an Audio Split Multichannel node
346
+ * see: {@link NorskTransform.audioSplitMultichannel}
347
+ * */
348
+ export declare interface AudioSplitMultichannelSettings extends ProcessorNodeSettings<AudioSplitMultichannelNode> {
349
+ /**
350
+ * The output stream key of the first channel
351
+ * subsequent channels will have streamId incremented by N
352
+ */
353
+ outputStreamKey: StreamKey;
354
+ }
355
+
356
+ /**
357
+ * @public
358
+ * Returns the stream keys for audio streams in a media context
359
+ * @param streams - The media context from which to return the stream keys
360
+ * @returns The audio stream keys in the media context
361
+ */
362
+ export declare function audioStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
363
+
364
+ /** @public */
365
+ export declare interface AudioStreamMetadata {
366
+ codec: AudioCodec;
367
+ sampleRate: SampleRate;
368
+ channelLayout?: ChannelLayout;
369
+ }
370
+
371
+ /**
372
+ * @public
373
+ * Filters a context to only the audio streams within it
374
+ * @param streams - The media context from which to return the streams
375
+ * @returns The audio streams in the media context
376
+ */
377
+ export declare function audioStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
378
+
379
+ /** @public */
380
+ export declare function audioToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
381
+
382
+ /**
383
+ * @public
384
+ * see: {@link NorskTransform.audioTranscribeAws}
385
+ */
386
+ export declare class AudioTranscribeAwsNode extends AutoProcessorMediaNode<"audio"> {
387
+ }
388
+
389
+ /**
390
+ * @public
391
+ * Settings for an Audio Transcribe operation using AWS
392
+ * see: {@link NorskTransform.audioTranscribeAws}
393
+ * */
394
+ export declare interface AudioTranscribeAwsSettings extends ProcessorNodeSettings<AudioTranscribeAwsNode> {
395
+ /** Region for the transcribe endpoint */
396
+ awsRegion: string;
397
+ /** the stream id to allocate to the outgoing stream*/
398
+ outputStreamId: number;
399
+ /** the language that we want to transcribe (also put in the outgoing metadata) */
400
+ language: string;
401
+ /** The mode to be used for building sentences */
402
+ sentenceBuildMode: SentenceBuildMode;
403
+ /** The mode to be used for stabilising sentences */
404
+ sentenceStabilizationMode: StabilizationMode;
405
+ /** The AWS credentials to use for this operation
406
+ * If not supplied, the standard environment variables will be used if present
407
+ * */
408
+ awsCredentials?: AwsCredentials;
409
+ }
410
+
411
+ /**
412
+ * @public
413
+ * see: {@link NorskTransform.audioTranscribeAws}
414
+ */
415
+ export declare class AudioTranscribeAzureNode extends AutoProcessorMediaNode<"audio"> {
416
+ }
417
+
418
+ /**
419
+ * @public
420
+ * Settings for an audio transcribe/translate operation using Azure Speech Service
421
+ * see: {@link NorskTransform.audioTranscribeAzure}
422
+ * */
423
+ export declare interface AudioTranscribeAzureSettings extends ProcessorNodeSettings<AudioTranscribeAzureNode> {
424
+ outputStreamId: number;
425
+ sourceLanguage: string;
426
+ targetLanguages?: string[];
427
+ /** Key for the Azure Speech Service endpoint */
428
+ azureKey: string;
429
+ /** Region for the Azure Speech Service endpoint */
430
+ azureRegion: string;
431
+ maximumLineLength: undefined | number;
432
+ }
433
+
434
+ /**
435
+ * @public
436
+ * see: {@link NorskTransform.audioTranscribeAws}
437
+ */
438
+ export declare class AudioTranscribeWhisperNode extends AutoProcessorMediaNode<"audio"> {
439
+ }
440
+
441
+ /**
442
+ * @public
443
+ * Settings for an Audio Transcribe operation using Whisper sdk
444
+ * see: {@link NorskTransform.audioTranscribeWhisper}
445
+ * */
446
+ export declare interface AudioTranscribeWhisperSettings extends ProcessorNodeSettings<AudioTranscribeWhisperNode> {
447
+ outputStreamId: number;
448
+ stepMs?: number;
449
+ lengthMs?: number;
450
+ keepMs?: number;
451
+ maxTokens?: number;
452
+ speedUp?: boolean;
453
+ noFallback?: boolean;
454
+ numThreads?: number;
455
+ useGpu?: boolean;
456
+ language?: string;
457
+ model: string;
458
+ translate?: boolean;
459
+ tinyDiarize?: boolean;
460
+ initialPrompt?: string;
461
+ suppressNonSpeechTokens?: boolean;
462
+ samplingStrategy?: WhisperSamplingStrategy;
463
+ }
464
+
465
+ export declare interface AutoProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
466
+ }
467
+
468
+ export declare class AutoProcessorMediaNode<Pins extends string> {
469
+ constructor(client: MediaClient, _unregisterNode: (node: MediaNodeState) => void, getGrpcStream: () => (Readable | Writable), subscribeFn: (subscription: Subscription) => Promise<boolean>, subscribeErrorFn?: (error: SubscriptionError) => void, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
470
+ }
471
+
472
+ /** @public */
473
+ export declare class AutoSinkMediaNode<Pins extends string> extends SinkMediaNode<Pins | "auto"> {
474
+ /** Subscribe to the given sources.
475
+ *
476
+ * This version of subscribe simply requires a list of stream keys to be
477
+ * returned from each selector, and the server will automatically
478
+ * assign each stream to the appropriate pin on the sink node.
479
+ * This is the appropriate method for most cases.
480
+ *
481
+ * @param done - will be called with no arguments if the subscription succeeds,
482
+ * or an error if it failed. This error indicates the specific reason it
483
+ * failed, so you can take appropriate actions in response. It will be called
484
+ * before the `subscribedStreamsChangedFn` or `subscribeErrorFn` callbacks
485
+ * provided in the config for the node.
486
+ *
487
+ * Errors are also logged to the debug log.
488
+ */
489
+ subscribe(sources: ReceiveFromAddressAuto[], validation?: (context: Context) => SubscriptionValidationResponse, done?: (error?: SubscriptionError) => void): void;
490
+ }
491
+
492
+ /** @public */
493
+ export declare function avToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
494
+
495
+ /** @public */
496
+ export declare interface AwsCredentials {
497
+ accessKey: string;
498
+ secretKey: string;
499
+ sessionToken: string;
500
+ }
501
+
502
+ /**
503
+ * @public
504
+ * Configuration for pushing a segmented media stream directly to AWS S3
505
+ * */
506
+ export declare interface AwsS3PushDestinationSettings {
507
+ type: "s3";
508
+ /** The hostname of the s3 server being pushed to. */
509
+ host: string;
510
+ /** the port of the s3 server being pushed to. */
511
+ port: number;
512
+ /** the path under which segments and playlists will be pushed to */
513
+ pathPrefix: string;
514
+ /**
515
+ * Optionally supply a string that will be inserted into the path structure for segments published in this stream
516
+ *
517
+ * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
518
+ * cacheing directives
519
+ */
520
+ sessionId?: string;
521
+ /**
522
+ * A unique identifier for this destination
523
+ *
524
+ * This can be used for supplying updates to configuration to this destination specifically
525
+ * see: {@link UpdateCredentials}
526
+ */
527
+ id: DestinationId;
528
+ /**
529
+ * The AWS region being pushed to
530
+ */
531
+ awsRegion: string;
532
+ /**
533
+ * AWS credentials to be used for connecting to S3
534
+ * Standard environment variables will be read if these are not provided
535
+ */
536
+ awsCredentials?: AwsCredentials;
537
+ /**
538
+ * Informs the playlist generation how long segments will be retained for on the remote server
539
+ * in order to generate an accurate playlist
540
+ */
541
+ retentionPeriodSeconds: number;
542
+ }
543
+
544
+ /**
545
+ * @public
546
+ * There are three possible modes:
547
+ *
548
+ * - "abr": encode in average bitrate mode, specified in kilobits/sec (note, 1
549
+ * kilobit is 1000 bits). You can make use of the vbv settings to control
550
+ * the bounds on how much the actual bitrate can fluctuate within the bounds
551
+ * of the average
552
+ *
553
+ * - "cqp": encode in constant quantizer mode. In general, crf will give better
554
+ * results, although cqp can be faster to encode
555
+ *
556
+ * - "crf": encode in constant rate factor mode. This will give a constant 'quality'
557
+ * to the encode, but with a variable bitrate
558
+ */
559
+ export declare interface BitrateMode {
560
+ value: number;
561
+ mode: "abr" | "cqp" | "crf";
562
+ }
563
+
564
+ /** @public */
565
+ export declare type BrowserEvent = {
566
+ case: "onLoaded";
567
+ value: BrowserOnLoaded;
568
+ } | {
569
+ case: "onLoadStart";
570
+ value: BrowserOnLoadStart;
571
+ } | {
572
+ case: "onLoadEnd";
573
+ value: BrowserOnLoadEnd;
574
+ } | {
575
+ case: "onLoadError";
576
+ value: BrowserOnLoadError;
577
+ };
578
+
579
+ /**
580
+ * @public
581
+ * see: {@link NorskInput.browser}
582
+ */
583
+ export declare class BrowserInputNode extends SourceMediaNode {
584
+ /**
585
+ * @public
586
+ * Supply new config for an active web browser session
587
+ * */
588
+ updateConfig(settings: BrowserInputSettingsUpdate): void;
589
+ /**
590
+ * @public
591
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
592
+ * */
593
+ nudge(nudge: number): void;
594
+ }
595
+
596
+ /**
597
+ * @public
598
+ * Settings for a Browser Input
599
+ * see: {@link NorskInput.browser}
600
+ * */
601
+ export declare interface BrowserInputSettings extends SourceNodeSettings<BrowserInputNode>, StreamStatisticsMixin {
602
+ /** The url to load in the browser session */
603
+ url: string;
604
+ /** This is the resolution of the window opened to render the page
605
+ * This is therefore also output resolution of the generated video
606
+ */
607
+ resolution: {
608
+ width: number;
609
+ height: number;
610
+ };
611
+ /** The source name to populate the outgoing stream key with */
612
+ sourceName: string;
613
+ /** The frame rate at which to generate video from the web page
614
+ * Note: If the web page is static, this will just mean the initial frame
615
+ * is duplicated at the required frame rate
616
+ * */
617
+ frameRate: FrameRate;
618
+ /** An optional callback for reacting to events from the embedded browser
619
+ * At the very least this is useful for logging events (such as a 404)
620
+ * */
621
+ onBrowserEvent?: (event: BrowserEvent) => void;
622
+ }
623
+
624
+ /**
625
+ * @public
626
+ * A settings update for a running browser
627
+ * see: {@link BrowserInputNode.updateConfig}
628
+ * */
629
+ export declare interface BrowserInputSettingsUpdate {
630
+ /** Optionally, a new URL to load within the active session */
631
+ url?: string;
632
+ /** Optionally, a new resolution to use for outgoing video */
633
+ resolution?: {
634
+ width: number;
635
+ height: number;
636
+ };
637
+ }
638
+
639
+ /** @public */
640
+ export declare interface BrowserOnLoaded {
641
+ url: string;
642
+ }
643
+
644
+ /** @public */
645
+ export declare interface BrowserOnLoadEnd {
646
+ url: string;
647
+ statusCode: number;
648
+ }
649
+
650
+ /** @public */
651
+ export declare interface BrowserOnLoadError {
652
+ url: string;
653
+ errorText: string;
654
+ errorCode: number;
655
+ }
656
+
657
+ /** @public */
658
+ export declare interface BrowserOnLoadStart {
659
+ url: string;
660
+ }
661
+
662
+ /** @public Channel layout for an audio stream */
663
+ export declare type ChannelLayout = "mono" | "stereo" | "surround" | "4.0" | "5.0" | "5.1" | "7.1" | "5.1.4" | "7.1.4" | (ChannelName | ExplicitChannel)[];
664
+
665
+ /** @public */
666
+ export declare interface ChannelLevels {
667
+ rms?: Db;
668
+ peak?: Db;
669
+ }
670
+
671
+ /** @public */
672
+ export declare type ChannelName =
673
+ /** Left front */
674
+ "l"
675
+ /** Right front */
676
+ | "r"
677
+ /** Centre front */
678
+ | "c"
679
+ /** Low frequency enhancement */
680
+ | "lfe"
681
+ /** Left surround */
682
+ | "ls"
683
+ /** Right surround */
684
+ | "rs"
685
+ /** Left front centre */
686
+ | "lc"
687
+ /** Right front centre */
688
+ | "rc"
689
+ /** Rear surround left */
690
+ | "lsr"
691
+ /** Rear Surround Right */
692
+ | "rsr"
693
+ /** Rear centre */
694
+ | "cs"
695
+ /** Left surround direct */
696
+ | "lsd"
697
+ /** Right surround direct */
698
+ | "rsd"
699
+ /** Left side surround */
700
+ | "lss"
701
+ /** Right side surround */
702
+ | "rss"
703
+ /** Left wide front */
704
+ | "lw"
705
+ /** Right wide front */
706
+ | "rw"
707
+ /** Left front vertical height */
708
+ | "lv"
709
+ /** Right front vertical height */
710
+ | "rv"
711
+ /** Centre front vertical height */
712
+ | "cv"
713
+ /** Left surround vertical height rear */
714
+ | "lvr"
715
+ /** Right surround vertical height rear */
716
+ | "rvr"
717
+ /** Centre vertical height rear */
718
+ | "cvr"
719
+ /** Left vertical height side surround */
720
+ | "lvss"
721
+ /** Right vertical height side surround */
722
+ | "rvss"
723
+ /** Top centre surround */
724
+ | "ts"
725
+ /** Low frequency enhancement 2 */
726
+ | "lfe2"
727
+ /** Left front vertical bottom */
728
+ | "lb"
729
+ /** Right front vertical bottom */
730
+ | "rb"
731
+ /** Centre front vertical bottom */
732
+ | "cb"
733
+ /** Left vertical height surround */
734
+ | "lvs"
735
+ /** Right vertical height surround */
736
+ | "rvs"
737
+ /** Low frequency enhancement 3 */
738
+ | "lfe3"
739
+ /** Left edge of screen */
740
+ | "leos"
741
+ /** Right edge of screen */
742
+ | "reos"
743
+ /** Halfway between centre of screen and left edge of screen */
744
+ | "hwbcal"
745
+ /** Halfway between centre of screen and right edge of screen */
746
+ | "hwbcar"
747
+ /** Left back surround */
748
+ | "lbs"
749
+ /** Right back surround */
750
+ | "rbs"
751
+ /** Unknown */
752
+ | "unknown";
753
+
754
+ /** @public */
755
+ export declare function clientHostExternal(): string;
756
+
757
+ /** @public */
758
+ export declare function clientHostInternal(): string;
759
+
760
+ /** @public */
761
+ export declare function clientPortExternal(): string;
762
+
763
+ /** @public */
764
+ export declare function clientPortInternal(): string;
765
+
766
+ /**
767
+ * @public
768
+ * see: {@link NorskOutput.cmafAudio}
769
+ */
770
+ export declare class CmafAudioOutputNode extends CmafNodeWithPlaylist<CmafAudioMessage, "audio", CmafAudioOutputNode> {
771
+ /** @public */
772
+ onPlaylistAddition?: (destinationId: DestinationId, pl: HlsPlaylistAdditions) => HlsPlaylist;
773
+ /**
774
+ * @public
775
+ * Updates the credentials for a specific destination within this output by id
776
+ * see: {@link UpdateCredentials}
777
+ * see: {@link CmafDestinationSettings}
778
+ */
779
+ updateCredentials(settings: UpdateCredentials): void;
780
+ }
781
+
782
+ /**
783
+ * @public
784
+ * Possible destinations for a segmented media stream
785
+ * - {@link HlsPushDestinationSettings}: Push to a generic HTTP server
786
+ * - {@link AwsS3PushDestinationSettings}: Push to Amazon S3
787
+ * - {@link LocalPullDestinationSettings}: Serve directly from the Norsk Web Server
788
+ * */
789
+ export declare type CmafDestinationSettings = HlsPushDestinationSettings | AwsS3PushDestinationSettings | LocalPullDestinationSettings;
790
+
791
+ /**
792
+ * @public
793
+ * see: {@link NorskOutput.cmafMultiVariant}
794
+ */
795
+ export declare class CmafMultiVariantOutputNode extends CmafNodeBase<CmafMultiVariantMessage, "video" | "audio" | "subtitle", CmafMultiVariantOutputNode> {
796
+ /** @public The URL of the file based multi variant playlist */
797
+ url: string;
798
+ /**
799
+ * @public
800
+ * Updates the credentials for a specific destination within this output by id
801
+ * see: {@link UpdateCredentials}
802
+ * see: {@link CmafDestinationSettings}
803
+ */
804
+ updateCredentials(settings: UpdateCredentials): void;
805
+ }
806
+
807
+ /**
808
+ * @public
809
+ * Settings for a CMAF Multi Variant Playlist
810
+ * see {@link NorskOutput.cmafMultiVariant}
811
+ */
812
+ export declare interface CmafMultiVariantOutputSettings extends SinkNodeSettings<CmafMultiVariantOutputNode> {
813
+ /**
814
+ * The name of this multi variant playlist (.m3u8 will be added onto this field to generate a filename)
815
+ */
816
+ playlistName: string;
817
+ /**
818
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
819
+ */
820
+ destinations: CmafDestinationSettings[];
821
+ /**
822
+ * Directives to add to the m3u multi variant playlist
823
+ */
824
+ m3uAdditions?: string;
825
+ /**
826
+ * XML fragment to add to the (top-level) MPD element
827
+ */
828
+ mpdAdditions?: string;
829
+ /**
830
+ * A callback invoked every time a CMAF multi variant playlist is changed
831
+ */
832
+ onPlaylistChange?: (destinationId: DestinationId, playlist: CmafMultiVariantPlaylistData) => CmafMultiVariantPlaylistData;
833
+ }
834
+
835
+ /** @public */
836
+ export declare interface CmafMultiVariantPlaylistData {
837
+ hls: string;
838
+ dash: string;
839
+ }
840
+
841
+ declare class CmafNodeBase<ClientMessage, Pins extends string, T extends MediaNodeState> extends AutoProcessorMediaNode<Pins> {
842
+ playlists: HlsPlaylistDestination;
843
+ destinations: DestinationId[];
844
+ scheduledTags: ScheduledTag[];
845
+ constructor(client: MediaClient, unregisterNode: (node: MediaNodeState) => void, settings: ProcessorNodeSettings<T> & StreamStatisticsMixin, grpcInit: () => grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, subscribeFn: (subscription: Subscription) => Promise<boolean>, onPlaylistAddition: PlaylistOnChangeFn<ClientMessage>, destinations: DestinationId[], subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
846
+ scheduleTag(tag: MediaPlaylistPart, scheduleAt: Date, destinationId?: DestinationId): ScheduledTag;
847
+ removeScheduledTag(tagId: number): void;
848
+ }
849
+
850
+ declare class CmafNodeWithPlaylist<ClientMessage, Pins extends string, T extends MediaNodeState> extends CmafNodeBase<ClientMessage, Pins, T> {
851
+ constructor(client: MediaClient, unregisterNode: (node: MediaNodeState) => void, settings: ProcessorNodeSettings<T> & StreamStatisticsMixin, grpcInit: () => grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, subscribeFn: (subscription: Subscription) => Promise<boolean>, playlistPath: PlaylistPath, segmentDuration: number, onPlaylistAddition: PlaylistOnChangeFn<ClientMessage>, destinations: DestinationId[], localDestination?: LocalPullDestinationSettings, maximumPlaylistSegments?: number);
852
+ /**
853
+ * @public
854
+ * Returns the URL to the HLS playlist entry. Note this can only be evaluated once the stream is active as it
855
+ * varies with the stream subscribed to. Useful during development, but you probably want to
856
+ * use {@link NorskOutput.cmafMultiVariant} for production.
857
+ */
858
+ url(): Promise<string>;
859
+ }
860
+
861
+ /**
862
+ * @public
863
+ * Settings for a CMAF Audio and Video Outputs
864
+ * see {@link NorskOutput.cmafAudio}, {@link NorskOutput.cmafVideo}
865
+ */
866
+ export declare interface CmafOutputSettings extends SinkNodeSettings<CmafAudioOutputNode | CmafVideoOutputNode> {
867
+ /**
868
+ * The target segment duration in seconds. Norsk will make the largest segments it can
869
+ * without going over this target
870
+ */
871
+ segmentDurationSeconds: number;
872
+ /**
873
+ * The target part duration in seconds. Norsk will make the largest parts it can
874
+ * without going over this target
875
+ */
876
+ partDurationSeconds: number;
877
+ /**
878
+ * By default, the program date time or event start time will be based on the
879
+ * timestamp of the first video packet received by Norsk in a stream.
880
+ *
881
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
882
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
883
+ *
884
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
885
+ *
886
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
887
+ */
888
+ delayOutputMs?: number;
889
+ /**
890
+ * Settings for encrypting the content.
891
+ */
892
+ encryption?: EncryptionSettings;
893
+ /**
894
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
895
+ */
896
+ destinations: CmafDestinationSettings[];
897
+ /**
898
+ * Directives to add to the m3u media playlist
899
+ */
900
+ m3uAdditions?: string;
901
+ /**
902
+ * XML fragment to add to the mpd Representation element
903
+ */
904
+ mpdAdditions?: string;
905
+ /**
906
+ * Audio or video bitrate for the {@link NorskOutput.cmafMultiVariant} playlist
907
+ */
908
+ bitrate?: number;
909
+ /**
910
+ * The maximum number of segments to display in a single generated playlist
911
+ */
912
+ maximumPlaylistSegments?: number;
913
+ }
914
+
915
+ /**
916
+ * @public
917
+ * see: {@link NorskOutput.cmafVideo}
918
+ */
919
+ export declare class CmafVideoOutputNode extends CmafNodeWithPlaylist<CmafVideoMessage, "video", CmafVideoOutputNode> {
920
+ /** @public */
921
+ onPlaylistAddition?: (destinationId: DestinationId, pl: HlsPlaylistAdditions) => HlsPlaylist;
922
+ /**
923
+ * @public
924
+ * Updates the credentials for a specific destination within this output by id
925
+ * see: {@link UpdateCredentials}
926
+ * see: {@link CmafDestinationSettings}
927
+ */
928
+ updateCredentials(settings: UpdateCredentials): void;
929
+ }
930
+
931
+ /**
932
+ * @public
933
+ * see: {@link NorskOutput.cmafWebVtt}
934
+ */
935
+ export declare class CmafWebVttOutputNode extends CmafNodeWithPlaylist<CmafWebVttMessage, "subtitle", CmafWebVttOutputNode> {
936
+ /** @public */
937
+ onPlaylistAddition?: (destinationId: DestinationId, pl: HlsPlaylistAdditions) => HlsPlaylist;
938
+ /**
939
+ * @public
940
+ * Updates the credentials for a specific destination within this output by id
941
+ * see: {@link UpdateCredentials}
942
+ * see: {@link CmafDestinationSettings}
943
+ */
944
+ updateCredentials(settings: UpdateCredentials): void;
945
+ }
946
+
947
+ /**
948
+ * @public
949
+ * Settings for a CMAF WebVTT Output
950
+ * see {@link NorskOutput.cmafWebVtt}
951
+ */
952
+ export declare interface CmafWebVttOutputSettings extends SinkNodeSettings<CmafWebVttOutputNode> {
953
+ /**
954
+ * The target segment duration in seconds, Norsk will split subtitles over multiple segments
955
+ * in a compliant manner if necessary
956
+ */
957
+ segmentDurationSeconds: number;
958
+ /**
959
+ * By default, the program date time or event start time will be based on the
960
+ * timestamp of the first video packet received by Norsk in a stream.
961
+ *
962
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
963
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
964
+ *
965
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
966
+ *
967
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
968
+ */
969
+ delayOutputMs?: number;
970
+ /**
971
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
972
+ */
973
+ destinations: CmafDestinationSettings[];
974
+ /**
975
+ * A local directory in which to store the segments while they're available
976
+ */
977
+ hlsCacheDirectory: string;
978
+ /**
979
+ * The maximum number of segments to display in a single generated playlist
980
+ */
981
+ maximumPlaylistSegments?: number;
982
+ }
983
+
984
+ /** @public */
985
+ export declare type ComposeHardwareAcceleration =
986
+ /**
987
+ * Use the quadra overlay functionality to perform the compose
988
+ */
989
+ "quadra"
990
+ /**
991
+ * Use an nvidia CUDA kernel to perform the compose
992
+ */
993
+ | "nvidia";
994
+
995
+ /** @public */
996
+ export declare type ComposeMissingStreamBehaviour =
997
+ /**
998
+ * Produce frames on output by dropping the part(s) of the composition which
999
+ * cannot be fulfilled.
1000
+ */
1001
+ "drop_part"
1002
+ /**
1003
+ * Wait for all streams to be present before producing (further) output
1004
+ */
1005
+ | "wait_for_all";
1006
+
1007
+ /**
1008
+ * @public
1009
+ * A single layer of a video compose operation
1010
+ * see {@link NorskTransform.videoCompose}
1011
+ * */
1012
+ export declare interface ComposePart<Pins> {
1013
+ /** Input pin for this source */
1014
+ pin: Pins;
1015
+ /**
1016
+ * The area within the source picture to include. This may be the full picture
1017
+ * or cropped, and will be rescaled if necessary.
1018
+ * If a referenceResolution is specified, then this is within that coordinate system, otherwise
1019
+ * this is taken to be within the coordinate system of the input image
1020
+ */
1021
+ sourceRect: OffsetRect;
1022
+ /**
1023
+ * The area within the destination picture to place this part of the
1024
+ * composition.
1025
+ * If a referenceResolution is specified, then this is within that coordinate system, otherwise
1026
+ * this is taken to be within the coordinate system of the destination image
1027
+ */
1028
+ destRect: OffsetRect;
1029
+ /**
1030
+ * Z-index to determine ordering by which the sources are overlaid
1031
+ * (higher layers appear on top)
1032
+ */
1033
+ zIndex: number;
1034
+ /**
1035
+ * Opacity multiplier of this overlay (where 0.0 is fully transparent and 1.0
1036
+ * is fully opaque)
1037
+ */
1038
+ opacity: number;
1039
+ /** Optionally identify the part to enable transitions */
1040
+ id?: string;
1041
+ /**
1042
+ * Optionally specify a transition for this part. A transition is applied only
1043
+ * if the part is specified in both the existing and the current/new
1044
+ * configuration, identified by having the same id specified, and a transition
1045
+ * is specified for the new configuration.
1046
+ */
1047
+ transition?: PartTransition;
1048
+ /**
1049
+ * Optionally supply a reference resolution. This allows description of the
1050
+ * composition in a desired coordinate system, e.g. a resolution of 100x100
1051
+ * can be specified to allow the source and destination areas to be described
1052
+ * in percentage terms, or a notional resolution can be used that is
1053
+ * independant of the source resolutions that may be provided.
1054
+ *
1055
+ * If unset, this will be overriden by a global reference resolution if that is present
1056
+ *
1057
+ * This is useful to set if you don't know the input resolution of a part but want to be able to describe
1058
+ * an operation on that part.
1059
+ */
1060
+ referenceResolution?: Resolution;
1061
+ }
1062
+
1063
+ /** @public */
1064
+ export declare interface Context {
1065
+ streams: StreamMetadata[];
1066
+ }
1067
+
1068
+ /** @public */
1069
+ export declare type ContextType = "full" | "singleSource" | "singleProgram" | "singleStream" | "singleRendition";
1070
+
1071
+ /** @public */
1072
+ export declare interface Core {
1073
+ logicalCpuIds: LogicalCpuId[];
1074
+ }
1075
+
1076
+ /** @public */
1077
+ export declare interface CpuTopology {
1078
+ numaNodes: NumaNode[];
1079
+ }
1080
+
1081
+ /** @public A decibel (dB). A null value represents -inf. */
1082
+ export declare type Db = number | null;
1083
+
1084
+ /** @public */
1085
+ export declare function debugUrlPrefix(): string;
1086
+
1087
+ /** @public */
1088
+ export declare interface DeckLinkCard {
1089
+ index: number;
1090
+ displayName: string;
1091
+ inputConnections: DeckLinkVideoConnection[];
1092
+ outputConnections: DeckLinkVideoConnection[];
1093
+ ioSupport: DeckLinkVideoIOSupport[];
1094
+ }
1095
+
1096
+ /** @public */
1097
+ export declare interface DeckLinkDisplayMode {
1098
+ id: DeckLinkDisplayModeId;
1099
+ name: string;
1100
+ width: number;
1101
+ height: number;
1102
+ frameRate: FrameRate;
1103
+ }
1104
+
1105
+ /** @public */
1106
+ export declare type DeckLinkDisplayModeId = "auto" | "sd_ntsc" | "sd_nt23" | "sd_pal" | "sd_ntsp" | "sd_palp" | "hd1080_23ps" | "hd1080_24ps" | "hd1080_p25" | "hd1080_p29" | "hd1080_p30" | "hd1080_p47" | "hd1080_p48" | "hd1080_i50" | "hd1080_i59" | "hd1080_i60" | "hd1080_p95" | "hd1080_p96" | "hd1080_p10" | "hd1080_p11" | "hd1080_p12" | "hd1080_p50" | "hd1080_p59" | "hd1080_p60" | "hd720_p50" | "hd720_p59" | "hd720_p60" | "two_k_23" | "two_k_24" | "two_k_25" | "two_k_dci_23" | "two_k_dci_24" | "two_k_dci_25" | "two_k_dci_29" | "two_k_dci_30" | "two_k_dci_47" | "two_k_dci_48" | "two_k_dci_50" | "two_k_dci_59" | "two_k_dci_60" | "two_k_dci_95" | "two_k_dci_96" | "two_k_dci_10" | "two_k_dci_11" | "two_k_dci_12" | "four_k_23" | "four_k_24" | "four_k_25" | "four_k_29" | "four_k_30" | "four_k_47" | "four_k_48" | "four_k_50" | "four_k_59" | "four_k_60" | "four_k_95" | "four_k_96" | "four_k_10" | "four_k_11" | "four_k_12" | "four_k_dci_23" | "four_k_dci_24" | "four_k_dci_25" | "four_k_dci_29" | "four_k_dci_30" | "four_k_dci_47" | "four_k_dci_48" | "four_k_dci_50" | "four_k_dci_59" | "four_k_dci_60" | "four_k_dci_95" | "four_k_dci_96" | "four_k_dci_10" | "four_k_dci_11" | "four_k_dci_12" | "eight_k_23" | "eight_k_24" | "eight_k_25" | "eight_k_29" | "eight_k_30" | "eight_k_47" | "eight_k_48" | "eight_k_50" | "eight_k_59" | "eight_k_60" | "eight_k_dci_23" | "eight_k_dci_24" | "eight_k_dci_25" | "eight_k_dci_29" | "eight_k_dci_30" | "eight_k_dci_47" | "eight_k_dci_48" | "eight_k_dci_50" | "eight_k_dci_59" | "eight_k_dci_60" | "pc_vga6" | "pc_svg6" | "pc_wxg5" | "pc_wxg6" | "pc_sxg5" | "pc_sxg6" | "pc_uxg5" | "pc_uxg6" | "pc_wux5" | "pc_wux6" | "pc_1945" | "pc_1946" | "pc_wqh5" | "pc_wqh6" | "pc_wqx5" | "pc_wqx6" | "special_iunk";
1107
+
1108
+ /**
1109
+ * @public
1110
+ * SDI capture through a DeckLink card.
1111
+ * see: {@link NorskInput.deckLink}.
1112
+ */
1113
+ export declare class DeckLinkInputNode extends SourceMediaNode {
1114
+ /**
1115
+ * @public
1116
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
1117
+ * */
1118
+ nudge(nudge: number): void;
1119
+ }
1120
+
1121
+ /**
1122
+ * @public
1123
+ * Settings to control SDI capture through a DeckLink card
1124
+ * see: {@link NorskInput.deckLink}
1125
+ */
1126
+ export declare interface DeckLinkInputSettings extends InputSettings<DeckLinkInputNode>, StreamStatisticsMixin {
1127
+ /** Which card to use */
1128
+ cardIndex: number;
1129
+ /** The audio channel layout for the input */
1130
+ channelLayout: ChannelLayout;
1131
+ /** SDI or HDMI capture */
1132
+ videoConnection: DeckLinkVideoConnection;
1133
+ /** Typically left undefined, but can be used to force capture for a specific {@link DeckLinkDisplayModeId}. If
1134
+ * the source is not currently in this mode, then no capture will occur. */
1135
+ displayModeId?: DeckLinkDisplayModeId;
1136
+ pixelFormat?: DeckLinkPixelFormat;
1137
+ }
1138
+
1139
+ /** @public */
1140
+ export declare type DeckLinkPixelFormat = "uyvy" | "argb";
1141
+
1142
+ /** @public */
1143
+ export declare type DeckLinkVideoConnection = "sdi" | "hdmi" | "optical_sdi" | "component" | "composite" | "svideo";
1144
+
1145
+ /** @public */
1146
+ export declare type DeckLinkVideoIOSupport = "capture" | "playback";
1147
+
1148
+ /** @public */
1149
+ export declare type DeferredVideoComposeSettings<Pins extends string> = (streams: StreamMetadata[]) => VideoComposeSettings<Pins> | undefined;
1150
+
1151
+ /** @public */
1152
+ export declare type DestinationId = string;
1153
+
1154
+ /**
1155
+ * @public
1156
+ * Drop every N frames from an incoming video stream
1157
+ * */
1158
+ export declare interface DropEvery {
1159
+ kind: "every";
1160
+ every: number;
1161
+ }
1162
+
1163
+ /**
1164
+ * @public
1165
+ * Randomly drop frames on a stream
1166
+ * - 0.0 means don't drop any frames
1167
+ * - 1.0 means drop every single frame
1168
+ * */
1169
+ export declare interface DropRandom {
1170
+ kind: "random";
1171
+ percentage: number;
1172
+ }
1173
+
1174
+ /**
1175
+ * @public
1176
+ * Drop the first N frames from an incoming video stream
1177
+ * */
1178
+ export declare interface DropStart {
1179
+ kind: "start";
1180
+ start: number;
1181
+ }
1182
+
1183
+ /** @public */
1184
+ export declare interface EncryptionSettings {
1185
+ /**
1186
+ * The 16-byte key ID used to identify the key, hexadecimal or GUID encoded.
1187
+ */
1188
+ encryptionKeyId: string;
1189
+ /**
1190
+ * The 16-byte key used to encrypt the data, hexadecimal encoded.
1191
+ */
1192
+ encryptionKey: string;
1193
+ /**
1194
+ * The PSSH box(es) to include in the MP4, base64 encoded.
1195
+ * This is typically given by the DRM provider.
1196
+ */
1197
+ encryptionPssh: string;
1198
+ /**
1199
+ * The common encryption scheme used to encrypt data, as per ISO/IEC 23001-7:2016.
1200
+ *
1201
+ * - Default: CBCS encryption scheme (AES-CBC 10% pattern encryption).
1202
+ * Full-sample encryption for audio tracks, subsample encryption for video
1203
+ * tracks.
1204
+ *
1205
+ * - CENC encryption scheme (AES-CTR). Full-sample encryption for audio tracks,
1206
+ * subsample encryption for video tracks.
1207
+ */
1208
+ encryptionScheme?: "cbcs" | "cenc";
1209
+ }
1210
+
1211
+ /**
1212
+ * @public
1213
+ * see: {@link NorskInput.fileImage}
1214
+ */
1215
+ export declare class FileImageInputNode extends SourceMediaNode {
1216
+ }
1217
+
1218
+ /**
1219
+ * @public
1220
+ * Settings for an image file source
1221
+ * see: {@link NorskInput.fileImage}
1222
+ * */
1223
+ export declare interface FileImageInputSettings extends SourceNodeSettings<FileImageInputNode>, StreamStatisticsMixin {
1224
+ /** The source name to set in the stream key of the outgoing stream */
1225
+ sourceName: string;
1226
+ /** the filename to read the image from */
1227
+ fileName: string;
1228
+ /** The file format for the image. Will be inferred from the file name if not specified. */
1229
+ imageFormat?: ImageFormat;
1230
+ }
1231
+
1232
+ /**
1233
+ * @public
1234
+ * Information about an Mp4 File
1235
+ * */
1236
+ export declare interface FileMp4Info {
1237
+ /** The duration of the Mp4 file in millseconds (if known) */
1238
+ durationMs?: number;
1239
+ /** The total length of the mp4 file in bytes, if known */
1240
+ byteLength?: number;
1241
+ }
1242
+
1243
+ /**
1244
+ * @public
1245
+ * see: {@link NorskInput.fileMp4}
1246
+ */
1247
+ export declare class FileMp4InputNode extends SourceMediaNode {
1248
+ /**
1249
+ * @public
1250
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
1251
+ * */
1252
+ nudge(nudge: number): void;
1253
+ updateSettings(settings: FileMp4InputSettingsUpdate): void;
1254
+ }
1255
+
1256
+ /**
1257
+ * @public
1258
+ * Settings for an File Based Mp4 Input
1259
+ * see: {@link NorskInput.fileMp4}
1260
+ */
1261
+ export declare interface FileMp4InputSettings extends SourceNodeSettings<FileMp4InputNode>, StreamStatisticsMixin {
1262
+ /** The source name to set in the stream key of the outgoing stream */
1263
+ sourceName: string;
1264
+ /** Path to the MP4 file to read */
1265
+ fileName: string;
1266
+ /** Callback to be notified when the file ends */
1267
+ onEof?: () => void;
1268
+ /** Callback to be notified when the file is initially read */
1269
+ onInfo?: (info: FileMp4Info) => void;
1270
+ /** Whether to loop back to the start of the file after reaching the end (default false) */
1271
+ loop?: boolean;
1272
+ }
1273
+
1274
+ export declare interface FileMp4InputSettingsUpdate {
1275
+ /** Whether to loop back to the start of the file after reaching the end */
1276
+ loop?: boolean;
1277
+ }
1278
+
1279
+ /**
1280
+ * @public
1281
+ * see: {@link NorskOutput.fileMp4}
1282
+ */
1283
+ export declare class FileMp4OutputNode extends AutoSinkMediaNode<"audio" | "video"> {
1284
+ /**
1285
+ * @public
1286
+ * Writes a non-fragmented MP4 file containing the data received so far to the
1287
+ * supplied filename
1288
+ */
1289
+ writeFile(nonfragmentedFileName: string): void;
1290
+ }
1291
+
1292
+ /**
1293
+ * @public
1294
+ * Settings to control MP4 file output
1295
+ * see {@link NorskOutput.fileMp4}
1296
+ */
1297
+ export declare interface FileMp4OutputSettings extends SinkNodeSettings<FileMp4OutputNode>, StreamStatisticsMixin {
1298
+ /**
1299
+ * Required: stream fragmented MP4 to this file.
1300
+ */
1301
+ fragmentedFileName: string;
1302
+ /**
1303
+ * Write non-fragmented MP4 to this file on close, creates a `.tmp` file to
1304
+ * store the frame data.
1305
+ */
1306
+ nonfragmentedFileName?: string;
1307
+ /**
1308
+ * Settings for encrypting the audio track.
1309
+ */
1310
+ audioEncryption?: EncryptionSettings;
1311
+ /**
1312
+ * Settings for encrypting the video track.
1313
+ */
1314
+ videoEncryption?: EncryptionSettings;
1315
+ /**
1316
+ * Callback that will be invoked once data stops being received by the node (determined by an empty context)
1317
+ * at which point it will automatically shut down
1318
+ */
1319
+ onStreamEof?: () => void;
1320
+ }
1321
+
1322
+ /**
1323
+ * @public
1324
+ * see: {@link NorskInput.fileTs}
1325
+ */
1326
+ export declare class FileTsInputNode extends TsCommonInputNode<FileTsInputMessage, FileTsInputNode> {
1327
+ updateSettings(settings: FileTsInputSettingsUpdate): void;
1328
+ }
1329
+
1330
+ /** @public */
1331
+ export declare interface FileTsInputSettings extends LocalFileInputSettings, StreamStatisticsMixin {
1332
+ /** Whether to loop back to the start of the file after reaching the end */
1333
+ loop?: boolean;
1334
+ }
1335
+
1336
+ /** @public */
1337
+ export declare interface FileTsInputSettingsUpdate {
1338
+ /** Whether to loop back to the start of the file after reaching the end */
1339
+ loop?: boolean;
1340
+ }
1341
+
1342
+ /**
1343
+ * @public
1344
+ * see: {@link NorskOutput.fileTs}
1345
+ */
1346
+ export declare class FileTsOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
1347
+ }
1348
+
1349
+ /**
1350
+ * @public
1351
+ * The settings for an output Transport Stream written to file
1352
+ * see: {@link NorskOutput.fileTs}
1353
+ */
1354
+ export declare interface FileTsOutputSettings extends SinkNodeSettings<FileTsOutputNode>, StreamStatisticsMixin {
1355
+ /** The file to write - this will be truncated if it already exist */
1356
+ fileName: string;
1357
+ /** A/V delay in milliseconds - to allow inclusion of subtitles, metadata and other ancillary data. May be set to 0 if these are not present to reduce latency */
1358
+ avDelayMs?: number;
1359
+ }
1360
+
1361
+ /**
1362
+ * @public
1363
+ * see: {@link NorskInput.fileWebVtt}
1364
+ */
1365
+ export declare class FileWebVttInputNode extends SourceMediaNode {
1366
+ }
1367
+
1368
+ /** @public */
1369
+ export declare interface FrameRate {
1370
+ frames: number;
1371
+ seconds: number;
1372
+ }
1373
+
1374
+ /** @public */
1375
+ export declare type FrameStoreCut = {
1376
+ startDateTime: Date;
1377
+ durationMs: number;
1378
+ sessionNum?: number;
1379
+ };
1380
+
1381
+ /** @public */
1382
+ export declare type FrameStoreCutRequest = {
1383
+ fileFormat: "mp4";
1384
+ id: string;
1385
+ streamSelection: FrameStoreStreamSelection;
1386
+ cuts: FrameStoreCut[];
1387
+ trimPartialSegments: boolean;
1388
+ fileName: string;
1389
+ progressCb: (progress: number) => void;
1390
+ completeCb: (size: bigint) => void;
1391
+ };
1392
+
1393
+ /** @public */
1394
+ export declare type FrameStoreExpireBySize = {
1395
+ expire: "bySize";
1396
+ size: number;
1397
+ };
1398
+
1399
+ /** @public */
1400
+ export declare type FrameStoreExpireByTime = {
1401
+ expire: "byTime";
1402
+ durationS: number;
1403
+ };
1404
+
1405
+ /** @public */
1406
+ export declare type FrameStoreExpiry = FrameStoreExpireBySize | FrameStoreExpireByTime;
1407
+
1408
+ /**
1409
+ * @public
1410
+ * see: {@link NorskInput.frameStorePlayer}
1411
+ */
1412
+ export declare class FrameStorePlayerNode extends SourceMediaNode {
1413
+ }
1414
+
1415
+ /**
1416
+ * @public
1417
+ * Settings for Frame Store playback
1418
+ * see: {@link NorskInput.frameStorePlayer}
1419
+ */
1420
+ export declare interface FrameStorePlayerSettings extends InputSettings<FrameStorePlayerNode>, StreamStatisticsMixin {
1421
+ /**
1422
+ * Required: Name of the frame store instance.
1423
+ */
1424
+ name: string;
1425
+ /**
1426
+ * Required: The streams you want to playback from the frame store
1427
+ */
1428
+ streamSelection: FrameStoreStreamSelection;
1429
+ /**
1430
+ * Required: The start times and durations of the playback
1431
+ */
1432
+ cuts: FrameStoreCut[];
1433
+ /**
1434
+ * Required: If a start time or end time falls mid-gop, should the gop be trimmed to be frame accurate?
1435
+ */
1436
+ trimPartialSegments: boolean;
1437
+ /** Callback to be notified when playback ends */
1438
+ onEof?: () => void;
1439
+ }
1440
+
1441
+ /**
1442
+ * @public
1443
+ * see: {@link NorskOutput.frameStoreRecording}
1444
+ */
1445
+ export declare class FrameStoreRecorderNode extends AutoSinkMediaNode<"audio" | "video"> {
1446
+ makeCut(request: FrameStoreCutRequest): void;
1447
+ }
1448
+
1449
+ /**
1450
+ * @public
1451
+ * Settings to configure a frame store recorder
1452
+ * see {@link NorskOutput.frameStoreRecorder}
1453
+ */
1454
+ export declare interface FrameStoreRecorderSettings extends SinkNodeSettings<FrameStoreRecorderNode>, StreamStatisticsMixin {
1455
+ /**
1456
+ * Required: Name of this frame store instance.
1457
+ */
1458
+ name: string;
1459
+ /**
1460
+ * Required: Path for the frame store database.
1461
+ */
1462
+ path: string;
1463
+ /**
1464
+ * Required: Duration of the frame store chunk files
1465
+ */
1466
+ chunkFileDurationSeconds: number;
1467
+ /**
1468
+ * Optional: Expiry settings - if not supplied, then data will *not* get expired so on long-running events you may exhaust available disk space
1469
+ */
1470
+ expiry?: FrameStoreExpiry;
1471
+ }
1472
+
1473
+ /** @public */
1474
+ export declare type FrameStoreStreamSelection = "all" | StreamKey[];
1475
+
1476
+ export declare function fromVancPayloadFormat(format: VancPayloadFormat_2): VancPayloadFormat;
1477
+
1478
+ /**
1479
+ * @public
1480
+ * A relative change in decibels, expressing a power ratio.
1481
+ *
1482
+ * A value of 0dB means no change, positive values mean an increase in power, and negative values mean a decrease in power.
1483
+ */
1484
+ export declare type Gain = Db;
1485
+
1486
+ /** @public */
1487
+ export declare function getAmountOfChannels(layout: ChannelLayout): number;
1488
+
1489
+ /**
1490
+ * @public
1491
+ *
1492
+ */
1493
+ /**
1494
+ * @public
1495
+ */
1496
+ export declare interface HardwareInfo {
1497
+ cpuTopology: CpuTopology;
1498
+ systemMemory: number;
1499
+ deckLinkCards: DeckLinkCard[];
1500
+ }
1501
+
1502
+ /** @public */
1503
+ export declare type HlsPlaylist = {
1504
+ hlsFilePartPlaylist: MediaPlaylistPart[];
1505
+ hlsByteRangePlaylist: MediaPlaylistPart[];
1506
+ hlsStandardPlaylist: MediaPlaylistPart[];
1507
+ programDateTime?: Date;
1508
+ };
1509
+
1510
+ /** @public */
1511
+ export declare type HlsPlaylistAdditions = HlsPlaylist;
1512
+
1513
+ declare type HlsPlaylistDestination = {
1514
+ [destination: DestinationId]: HlsPlaylist;
1515
+ };
1516
+
1517
+ /**
1518
+ * @public
1519
+ * Configuration for pushing a segmented media stream directly to a generic http server
1520
+ * */
1521
+ export declare interface HlsPushDestinationSettings {
1522
+ type: "generic";
1523
+ /** The hostname of the web server being pushed to.
1524
+ * This will be used to re-resolve the IP address on failures
1525
+ * */
1526
+ host: string;
1527
+ /** the port of the web server being pushed to. */
1528
+ port: number;
1529
+ /** the path under which segments and playlists will be pushed to */
1530
+ pathPrefix: string;
1531
+ /**
1532
+ * Optionally supply a string that will be inserted into the path structure for segments published in this stream
1533
+ *
1534
+ * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
1535
+ * cacheing directives
1536
+ */
1537
+ sessionId?: string;
1538
+ /**
1539
+ * A unique identifier for this destination
1540
+ *
1541
+ * This can be used for supplying updates to configuration to this destination specifically
1542
+ * see: {@link UpdateCredentials}
1543
+ */
1544
+ id: DestinationId;
1545
+ /**
1546
+ * Informs the playlist generation how long segments will be retained for on the remote server
1547
+ * in order to generate an accurate playlist
1548
+ */
1549
+ retentionPeriodSeconds: number;
1550
+ }
1551
+
1552
+ /** @public */
1553
+ export declare interface HlsTag {
1554
+ tag: string;
1555
+ }
1556
+
1557
+ /**
1558
+ * @public
1559
+ * see: {@link NorskOutput.hlsTsAudio}
1560
+ */
1561
+ export declare class HlsTsAudioOutputNode extends CmafNodeWithPlaylist<HlsTsAudioMessage, "audio", HlsTsAudioOutputNode> {
1562
+ /** @public */
1563
+ onPlaylistAddition?: (destinationId: DestinationId, pl: TsPlaylistAdditions) => TsPlaylist;
1564
+ get playlist(): HlsTsPlaylistDestination;
1565
+ /**
1566
+ * @public
1567
+ * Updates the credentials for a specific destination within this output by id
1568
+ * see: {@link UpdateCredentials}
1569
+ * see: {@link CmafDestinationSettings}
1570
+ */
1571
+ updateCredentials(settings: UpdateCredentials): void;
1572
+ }
1573
+
1574
+ /**
1575
+ * @public
1576
+ * Settings for a HLS TS Audio Output
1577
+ * see {@link NorskOutput.hlsTsAudio}
1578
+ */
1579
+ export declare interface HlsTsAudioOutputSettings extends SinkNodeSettings<HlsTsAudioOutputNode> {
1580
+ /**
1581
+ * The target segment duration in seconds. Norsk will make the largest segments it can
1582
+ * without going over this target using the durations of the individual audio frames
1583
+ */
1584
+ segmentDurationSeconds: number;
1585
+ /**
1586
+ * By default, the program date time or event start time will be based on the
1587
+ * timestamp of the first video packet received by Norsk in a stream.
1588
+ *
1589
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
1590
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
1591
+ *
1592
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
1593
+ *
1594
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
1595
+ */
1596
+ delayOutputMs?: number;
1597
+ /**
1598
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
1599
+ */
1600
+ destinations: CmafDestinationSettings[];
1601
+ /**
1602
+ * Directives to add to the m3u media playlist
1603
+ */
1604
+ m3uAdditions?: string;
1605
+ /**
1606
+ * XML fragment to add to the mpd Representation element
1607
+ */
1608
+ mpdAdditions?: string;
1609
+ /**
1610
+ * Audio bitrate for the {@link NorskOutput.hlsTsMultiVariant} playlist
1611
+ */
1612
+ bitrate?: number;
1613
+ /**
1614
+ * The maximum number of segments to display in a single generated playlist
1615
+ */
1616
+ maximumPlaylistSegments?: number;
1617
+ }
1618
+
1619
+ /**
1620
+ * @public
1621
+ * see: {@link NorskOutput.hlsTsCombinedPush}
1622
+ */
1623
+ export declare class HlsTsCombinedPushOutputNode extends CmafNodeWithPlaylist<HlsTsCombinedPushMessage, "audio" | "video", HlsTsCombinedPushOutputNode> {
1624
+ }
1625
+
1626
+ /**
1627
+ * @public
1628
+ * Settings for a HLS Transport Stream Combined Push Output
1629
+ * see {@link NorskOutput.hlsTsCombinedPush}
1630
+ */
1631
+ export declare interface HlsTsCombinedPushOutputSettings extends SinkNodeSettings<HlsTsCombinedPushOutputNode> {
1632
+ /**
1633
+ * The target segment duration in seconds. Norsk will use the framerate of the video stream in order
1634
+ * to produce compliant segments that are less than or equal to this in duration, with audio packaged alongside
1635
+ * using timestamps to line them up
1636
+ */
1637
+ segmentDurationSeconds: number;
1638
+ /**
1639
+ * By default, the program date time or event start time will be based on the
1640
+ * timestamp of the first video packet received by Norsk in a stream.
1641
+ *
1642
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
1643
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
1644
+ *
1645
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
1646
+ *
1647
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
1648
+ */
1649
+ delayOutputMs?: number;
1650
+ /**
1651
+ * The destination {@link CmafDestinationSettings} for this stream to be published to
1652
+ */
1653
+ destination: CmafDestinationSettings;
1654
+ /**
1655
+ * The name of this media playlist (.m3u8 will be added onto this field to generate a filename)
1656
+ */
1657
+ playlistName: string;
1658
+ /**
1659
+ * Directives to add to the m3u media playlists
1660
+ */
1661
+ m3uAdditions?: string;
1662
+ }
1663
+
1664
+ /**
1665
+ * @public
1666
+ * see: {@link NorskOutput.hlsTsMultiVariant}
1667
+ */
1668
+ export declare class HlsTsMultiVariantOutputNode extends CmafNodeBase<HlsTsMultiVariantMessage, "video" | "audio" | "subtitle", HlsTsMultiVariantOutputNode> {
1669
+ /** @public The URL of the file based multi variant playlist */
1670
+ url: string;
1671
+ /**
1672
+ * @public
1673
+ * Updates the credentials for a specific destination within this output by id
1674
+ * see: {@link UpdateCredentials}
1675
+ * see: {@link CmafDestinationSettings}
1676
+ */
1677
+ updateCredentials(settings: UpdateCredentials): void;
1678
+ }
1679
+
1680
+ /**
1681
+ * @public
1682
+ * Settings for a Hls Ts Multivariant Playlist
1683
+ * see {@link NorskOutput.hlsTsMultiVariant}
1684
+ */
1685
+ export declare interface HlsTsMultiVariantOutputSettings extends SinkNodeSettings<HlsTsMultiVariantOutputNode> {
1686
+ /**
1687
+ * The name of this multi variant playlist (.m3u8 will be added onto this field to generate a filename)
1688
+ */
1689
+ playlistName: string;
1690
+ /**
1691
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
1692
+ */
1693
+ destinations: CmafDestinationSettings[];
1694
+ /**
1695
+ * Directives to add to the m3u multi variant playlist
1696
+ */
1697
+ m3uAdditions?: string;
1698
+ /**
1699
+ * A callback invoked every time a TS multi variant playlist is changed
1700
+ */
1701
+ onPlaylistChange?: (destinationId: DestinationId, playlist: string) => string;
1702
+ }
1703
+
1704
+ /** A map from {@link DestinationId} to the media playlist parts currently published for that destination (see {@link HlsTsVideoOutputNode.playlist}) */
+ declare type HlsTsPlaylistDestination = {
1705
+ [destination: DestinationId]: MediaPlaylistPart[];
1706
+ };
1707
+
1708
+ /**
1709
+ * @public
1710
+ * see: {@link NorskOutput.hlsTsVideo}
1711
+ */
1712
+ export declare class HlsTsVideoOutputNode extends CmafNodeWithPlaylist<HlsTsVideoMessage, "video", HlsTsVideoOutputNode> {
1713
+ /** @public */
1714
+ onPlaylistAddition?: (destinationId: DestinationId, pl: TsPlaylistAdditions) => TsPlaylist;
1715
+ get playlist(): HlsTsPlaylistDestination;
1716
+ /**
1717
+ * @public
1718
+ * Updates the credentials for a specific destination within this output by id
1719
+ * see: {@link UpdateCredentials}
1720
+ * see: {@link CmafDestinationSettings}
1721
+ */
1722
+ updateCredentials(settings: UpdateCredentials): void;
1723
+ }
1724
+
1725
+ /**
1726
+ * @public
1727
+ * Settings for a HLS TS Video Output
1728
+ * see {@link NorskOutput.hlsTsVideo}
1729
+ */
1730
+ export declare interface HlsTsVideoOutputSettings extends SinkNodeSettings<HlsTsVideoOutputNode> {
1731
+ /**
1732
+ * The target segment duration in seconds. Norsk will use the framerate of the stream in order
1733
+ * to produce compliant segments that are less than or equal to this in duration
1734
+ */
1735
+ segmentDurationSeconds: number;
1736
+ /**
1737
+ * By default, the program date time or event start time will be based on the
1738
+ * timestamp of the first video packet received by Norsk in a stream.
1739
+ *
1740
+ * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
1741
+ * or synchronisation with external streams can then result in players requesting segments that don't exist yet
1742
+ *
1743
+ * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
1744
+ *
1745
+ * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
1746
+ */
1747
+ delayOutputMs?: number;
1748
+ /**
1749
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
1750
+ */
1751
+ destinations: CmafDestinationSettings[];
1752
+ /**
1753
+ * Directives to add to the m3u media playlist
1754
+ */
1755
+ m3uAdditions?: string;
1756
+ /**
1757
+ * XML fragment to add to the mpd Representation element
1758
+ */
1759
+ mpdAdditions?: string;
1760
+ /**
1761
+ * Video bitrate for the {@link NorskOutput.hlsTsMultiVariant} playlist
1762
+ */
1763
+ bitrate?: number;
1764
+ /**
1765
+ * The maximum number of segments to display in a single generated playlist
1766
+ */
1767
+ maximumPlaylistSegments?: number;
1768
+ }
1769
+
1770
+ /** @public */
1771
+ export declare type IceServerSettings = {
1772
+ urls: string[];
1773
+ username?: string;
1774
+ credential?: string;
1775
+ };
1776
+
1777
+ /** @public */
1778
+ export declare type ImageFormat = "png" | "jpeg" | "gif" | "webp" | "pnm" | "tiff" | "tga" | "dds" | "bmp" | "ico" | "hdr" | "openexr" | "farbfeld" | "avif";
1779
+
1780
+ /**
1781
+ * @public
1782
+ * Base settings for most input nodes
1783
+ * */
1784
+ export declare interface InputSettings<T extends MediaNodeState> extends SourceNodeSettings<T> {
1785
+ /** The source name to set on the stream key on the outgoing stream from this node */
1786
+ sourceName: string;
1787
+ }
1788
+
1789
+ /** @public */
1790
+ export declare type InsertComponent = {
1791
+ components: Scte35InsertCommandComponent[];
1792
+ };
1793
+
1794
+ /** @public */
1795
+ export declare type InsertProgram = {
1796
+ spliceTime?: Scte35SpliceTime;
1797
+ };
1798
+
1799
+ /** @public A time interval measured as ticks / (ticks per second) */
1800
+ export declare interface Interval {
1801
+ n: number;
1802
+ d: number;
1803
+ }
1804
+
1805
+ /** @public */
1806
+ export declare interface IntervalTimestamp {
1807
+ n: number;
1808
+ d: number;
1809
+ }
1810
+
1811
+ /** @public */
1812
+ export declare function isAdMarker(seg: MediaPlaylistPart): seg is AdMarker;
1813
+
1814
+ /** @public */
1815
+ export declare function isHlsTag(seg: MediaPlaylistPart): seg is HlsTag;
1816
+
1817
+ /** @public */
1818
+ export declare function isMediaSegment(seg: MediaPlaylistPart): seg is MediaSegment;
1819
+
1820
+ /** @public */
1821
+ export declare function isProgramDateTime(seg: MediaPlaylistPart): seg is ProgramDateTime;
1822
+
1823
+ /** @public */
1824
+ export declare function isScheduledTag(seg: MediaPlaylistPart): seg is ScheduledTag;
1825
+
1826
+ /**
1827
+ * @public
1828
+ * see: {@link NorskTransform.jitterBuffer}
1829
+ */
1830
+ export declare class JitterBufferNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
1831
+ }
1832
+
1833
+ /**
1834
+ * @public
1835
+ * Settings for a Jitter Buffer
1836
+ * see: {@link NorskTransform.jitterBuffer}
1837
+ * */
1838
+ export declare interface JitterBufferSettings extends ProcessorNodeSettings<JitterBufferNode> {
1839
+ /** Buffer delay in milliseconds */
1840
+ delayMs: number;
1841
+ }
1842
+
1843
+ /**
1844
+ * @public
1845
+ * The standard settings for any node reading from a file
1846
+ * */
1847
+ export declare interface LocalFileInputSettings extends InputSettings<SourceMediaNode> {
1848
+ /** The file to be read from */
1849
+ fileName: string;
1850
+ /** An optional callback that will be invoked when file end is reached */
1851
+ onEof?: () => void;
1852
+ }
1853
+
1854
+ /**
1855
+ * @public
1856
+ * Configuration for the serving of segments and playlists directly from the Norsk Web Server
1857
+ * Note: While this is both useful for local testing and for sitting behind a reverse caching proxy / CDN
1858
+ * it is not expected that Norsk serve as the edge server in most scenarios
1859
+ * */
1860
+ export declare interface LocalPullDestinationSettings {
1861
+ type: "local";
1862
+ /**
1863
+ * A unique identifier for this destination
1864
+ */
1865
+ id: DestinationId;
1866
+ /**
1867
+ * Optionally supply a string that will be inserted into the path structure for segments published in this stream
1868
+ *
1869
+ * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
1870
+ * caching directives
1871
+ */
1872
+ sessionId?: string;
1873
+ /**
1874
+ * Informs the playlist generation how long segments will be retained for
1875
+ * and informs the local web server how long to retain those segments
1876
+ */
1877
+ retentionPeriodSeconds: number;
1878
+ }
1879
+
1880
+ /** @public */
1881
+ export declare type Log = {
1882
+ level: "emergency" | "alert" | "critical" | "error" | "warning" | "notice" | "info" | "debug";
1883
+ timestamp: Date;
1884
+ message: string;
1885
+ metadata: string;
1886
+ };
1887
+
1888
+ /**
1889
+ * @public
1890
+ * Settings for a H264 Encode using Netint Logan hardware
1891
+ * A detailed description of these params can be found
1892
+ * on the Netint Logan Encoder Documentation
1893
+ *
1894
+ * These fields have deliberately been written to maintain the same semantics as the
1895
+ * Logan documentation where possible.
1896
+ *
1897
+ * If left undefined, all will default to Logan's own defaults
1898
+ * */
1899
+ export declare interface LoganH264 {
1900
+ type: "logan-h264";
1901
+ /** This (for convenience) takes the xcoder string that Logan's
1902
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
1903
+ * quickly and will override any values set manually in the rest of this interface.
1904
+ *
1905
+ * It is expected that developers will choose to use the typed fields for most things instead
1906
+ * when moving to production, as they offer a degree of validation and type safety
1907
+ * */
1908
+ extraOpts?: string;
1909
+ enableAud?: boolean;
1910
+ gpuIndex?: number;
1911
+ bitrate?: number;
1912
+ flushGop?: boolean;
1913
+ enableVfr?: boolean;
1914
+ crf?: number;
1915
+ cbr?: boolean;
1916
+ gopPresetIndex?: number;
1917
+ intraPeriod?: number;
1918
+ rcEnable?: boolean;
1919
+ intraQp?: number;
1920
+ rcInitDelay?: number;
1921
+ profile?: LoganH264Profile;
1922
+ level?: LoganH264Level;
1923
+ }
1924
+
1925
+ /** @public */
1926
+ export declare type LoganH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1927
+
1928
+ /** @public */
1929
+ export declare type LoganH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
1930
+
1931
+ /**
1932
+ * @public
1933
+ * Settings for a HEVC Encode using Netint Logan hardware
1934
+ * A detailed description of these params can be found
1935
+ * on the Netint Logan Encoder Documentation
1936
+ *
1937
+ * These fields have deliberately been written to maintain the same semantics as the
1938
+ * Logan documentation where possible.
1939
+ *
1940
+ * If left undefined, all will default to Logan's own defaults
1941
+ * */
1942
+ export declare interface LoganHevc {
1943
+ type: "logan-hevc";
1944
+ /** This (for convenience) takes the xcoder string that Logan's
1945
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
1946
+ * quickly and will override any values set manually in the rest of this interface.
1947
+ *
1948
+ * It is expected that developers will choose to use the typed fields for most things instead
1949
+ * when moving to production, as they offer a degree of validation and type safety
1950
+ * */
1951
+ extraOpts?: string;
1952
+ enableAud?: boolean;
1953
+ gpuIndex?: number;
1954
+ bitrate?: number;
1955
+ flushGop?: boolean;
1956
+ enableVfr?: boolean;
1957
+ crf?: number;
1958
+ cbr?: boolean;
1959
+ gopPresetIndex?: number;
1960
+ intraPeriod?: number;
1961
+ rcEnable?: boolean;
1962
+ intraQp?: number;
1963
+ rcInitDelay?: number;
1964
+ profile?: LoganHevcProfile;
1965
+ level?: LoganHevcLevel;
1966
+ tier?: LoganHevcTier;
1967
+ lossless?: boolean;
1968
+ hrdEnable?: boolean;
1969
+ dolbyVisionProfile?: number;
1970
+ }
1971
+
1972
+ /** @public */
1973
+ export declare type LoganHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1974
+
1975
+ /** @public */
1976
+ export declare type LoganHevcProfile = "main" | "main10";
1977
+
1978
+ /** @public */
1979
+ export declare type LoganHevcTier = "main" | "high";
1980
+
1981
+ /** @public */
1982
+ export declare type LogicalCpuId = number;
1983
+
1984
+ /** @public */
1985
+ export declare type MediaNodeId = string;
1986
+
1987
+ /** @public */
1988
+ export declare class MediaNodeState {
1989
+ id: MediaNodeId | undefined;
1990
+ closeAwait?: () => void;
1991
+ closed: boolean;
1992
+ constructor(client: MediaClient);
1993
+ close(): Promise<void>;
1994
+ }
1995
+
1996
+ /** @public */
1997
+ export declare type MediaPlaylistPart = MediaSegment | AdMarker | HlsTag | ProgramDateTime | ScheduledTag;
1998
+
1999
+ /** @public */
2000
+ export declare interface MediaSegment {
2001
+ uri: string;
2002
+ duration: number;
2003
+ title: string;
2004
+ number?: number;
2005
+ }
2006
+
2007
+ /**
+ * Processor node operating on "ancillary" media (see {@link MetadataCombineSettings}).
+ * NOTE(review): undocumented in source; presumably combines ancillary metadata streams -
+ * the "Mode" suffix is inconsistent with sibling classes (e.g. JitterBufferNode) and may be
+ * a typo for "Node" - confirm upstream before relying on the name.
+ */
+ export declare class MetadataCombineMode extends AutoProcessorMediaNode<"ancillary"> {
2008
+ }
2009
+
2010
+ /**
+ * Settings for a {@link MetadataCombineMode} node.
+ * NOTE(review): undocumented in source - field descriptions below are inferred; confirm upstream.
+ */
+ export declare interface MetadataCombineSettings extends ProcessorNodeSettings<MetadataCombineMode> {
2011
+ /** The stream key applied to this node's outgoing combined stream - TODO confirm */
+ outputStreamKey: StreamKey;
2012
+ /** presumably the maximum permitted synchronisation delta between inputs - units not stated (ms?); verify */
+ maxSyncDelta?: number;
2013
+ }
2014
+
2015
+ /**
2016
+ * @public
2017
+ * Generate encryption parameters from an encryption KeyID and Key,
2018
+ * in the form KEYID:KEY, both 16-byte hexadecimal
2019
+ */
2020
+ export declare function mkEncryption(encryption: string | undefined, pssh?: string | undefined): EncryptionSettings | undefined;
2021
+
2022
+ /** @public */
2023
+ export declare function mkSine(freq: number): Wave;
2024
+
2025
+ /**
2026
+ * @public
2027
+ * see: {@link NorskOutput.moqEgest}
2028
+ */
2029
+ export declare class MoqEgestNode extends AutoSinkMediaNode<"audio" | "video"> {
2030
+ }
2031
+
2032
+ /**
2033
+ * @public
2034
+ * Settings to configure a Moq Egest
2035
+ * see {@link NorskOutput.moqEgest}
2036
+ */
2037
+ export declare interface MoqEgestSettings extends SinkNodeSettings<MoqEgestNode>, StreamStatisticsMixin {
2038
+ }
2039
+
2040
+ /** @public */
2041
+ export declare interface MultiStreamStatistics {
2042
+ allStreams: SingleStreamStatistics[];
2043
+ sampleSizeSeconds: number;
2044
+ /**
2045
+ * Either `"default"`, if there is only one direction, or `"input"`/`"output"`
2046
+ * (for duplex nodes, where there are two directions)
2047
+ */
2048
+ label: string;
2049
+ total: StreamStatistics;
2050
+ audio: StreamStatistics;
2051
+ video: StreamStatistics;
2052
+ }
2053
+
2054
+ /** @public */
2055
+ export declare function newSilentMatrix(rows: number, cols: number): Gain[][];
2056
+
2057
+ /**
2058
+ * @public
2059
+ * Settings common to all media nodes
2060
+ */
2061
+ export declare interface NodeSettings<T extends MediaNodeState> {
2062
+ /**
2063
+ * Media Node identifier. If one is not specified, a random identifier will be generated.
2064
+ */
2065
+ id?: string;
2066
+ /**
2067
+ * Called with any errors from the Node.
2068
+ *
2069
+ * This includes both errors which will cause the node to exit, and those that do not
2070
+ * but may e.g. indicate a connection has failed.
2071
+ */
2072
+ onError?: (error: Error) => void;
2073
+ /**
2074
+ * Called when the Node closes.
2075
+ *
2076
+ * This may be by request, because the node naturally exits or an error has occurred. See `onError`
2077
+ * to be notified of errors which may lead to the node closing.
2078
+ */
2079
+ onClose?: (() => Promise<void>) | (() => void);
2080
+ /**
2081
+ * Callback to synchronously perform an action when node creation is complete
2082
+ * (e.g. subscribe a downstream node before the first context/frame might arrive)
2083
+ */
2084
+ onCreate?: (node: T) => void;
2085
+ }
2086
+
2087
+ /**
2088
+ * @public
2089
+ * The entrypoint for all Norsk Media applications
2090
+ *
2091
+ * @example
2092
+ * ```ts
2093
+ * const norsk = new Norsk();
2094
+ * ```
2095
+ */
2096
+ export declare class Norsk {
2097
+ /**
2098
+ * Implements the {@link NorskInput} interface
2099
+ */
2100
+ input: NorskInput;
2101
+ /**
2102
+ * Implements the {@link NorskOutput} interface
2103
+ */
2104
+ output: NorskOutput;
2105
+ /**
2106
+ * Implements the {@link NorskDuplex} interface
2107
+ */
2108
+ duplex: NorskDuplex;
2109
+ /**
2110
+ * Implements the {@link NorskProcessor} interface
2111
+ */
2112
+ processor: NorskProcessor;
2113
+ /**
2114
+ * Implements the {@link NorskDebug} interface
2115
+ */
2116
+ debug: NorskDebug;
2117
+ /**
2118
+ * Implements the {@link NorskSystem} interface
2119
+ */
2120
+ system: NorskSystem;
2121
+ /**
2122
+ * Norsk Runtime version information
2123
+ */
2124
+ version: Version;
2125
+ close(): Promise<void>;
2126
+ /** @public */
2127
+ static connect(settings?: NorskSettings): Promise<Norsk>;
2128
+ }
2129
+
2130
+ /**
2131
+ * @public
2132
+ * Methods that allow you to control and monitor media streams
2133
+ */
2134
+ export declare interface NorskControl {
2135
+ /**
2136
+ * Switch between multiple input sources via a hard cut. May be used to switch between
2137
+ * sources of possibly different configurations or without decoding.
2138
+ * @param settings - Options for the switcher
2139
+ */
2140
+ streamSwitchHard<Pins extends string>(settings: StreamSwitchHardSettings<Pins>): Promise<StreamSwitchHardNode<Pins>>;
2141
+ /**
2142
+ * Switch between multiple input sources without interruption, via a transition.
2143
+ * @param settings - Options for the switcher
2144
+ */
2145
+ streamSwitchSmooth<Pins extends string>(settings: StreamSwitchSmoothSettings<Pins>): Promise<StreamSwitchSmoothNode<Pins>>;
2146
+ /**
2147
+ * Record statistical information about media streams, including bitrate,
2148
+ * frame rate, and number of keyframes, measured over some configurable
2149
+ * sampling windows.
2150
+ *
2151
+ * Corresponding settings are found on many input and output nodes.
2152
+ * @param settings - Callback and sampling intervals
2153
+ */
2154
+ streamStatistics(settings: StreamStatisticsSettings): Promise<StreamStatisticsNode>;
2155
+ /**
2156
+ * Monitor the volume of an audio stream
2157
+ * @param settings - Callback and options for the level data
2158
+ */
2159
+ audioMeasureLevels(settings: AudioMeasureLevelsSettings): Promise<AudioMeasureLevelsNode>;
2160
+ }
2161
+
2162
+ /**
2163
+ * @public
2164
+ * Methods that allow you to inspect a system (typically during development)
2165
+ */
2166
+ export declare interface NorskDebug {
2167
+ /** Creates a node to intercept all the timestamps of any subscribed stream
2168
+ * @param settings - Configuration for the report node
2169
+ */
2170
+ streamTimestampReport(settings: StreamTimestampReportSettings): Promise<StreamTimestampReportNode>;
2171
+ }
2172
+
2173
+ /**
2174
+ * @public
2175
+ * Methods that allow you to both ingest and egest media from your application
2176
+ * at the same time
2177
+ */
2178
+ export declare interface NorskDuplex {
2179
+ /**
2180
+ * Playback audio/video via webrtc to a browser, and accept audio/video input from a browser.
2181
+ * The browser client must conform to a custom protocol as implemented in the hosted test page.
2182
+ * (Available from {@link WebRTCBrowserNode.playerUrl})
2183
+ * For general WebRTC ingest prefer the WHIP input node, and for egest to a downstream media server
2184
+ * use the WHIP output node.
2185
+ * @param settings - Options for the webrtc node
2186
+ */
2187
+ webRtcBrowser(settings: WebRTCBrowserSettings): Promise<WebRTCBrowserNode>;
2188
+ }
2189
+
2190
+ /** @public */
2191
+ export declare function norskHost(): string;
2192
+
2193
+ /**
2194
+ * @public
2195
+ * Methods that allow you to ingest media into your application
2196
+ */
2197
+ export declare interface NorskInput {
2198
+ /** Create an RTMP Server to receive RTMP streams into your application
2199
+ * @param settings - Configuration for the RTMP server
2200
+ */
2201
+ rtmpServer(settings: RtmpServerInputSettings): Promise<RtmpServerInputNode>;
2202
+ /**
2203
+ * Read from a Transport Stream file with realtime playback.
2204
+ * @param settings - Configuration for the file input
2205
+ */
2206
+ fileTs(settings: FileTsInputSettings): Promise<FileTsInputNode>;
2207
+ /**
2208
+ * Stream from a SRT source
2209
+ * @param settings - Configuration for the SRT input
2210
+ */
2211
+ srt(settings: SrtInputSettings): Promise<SrtInputNode>;
2212
+ /**
2213
+ * Receive media via WebRTC via the WHIP standard.
2214
+ *
2215
+ * Here Norsk acts as the Media Server receiving from a remote WHIP client, to act as the
2216
+ * WHIP client sending to a remote media server see {@link NorskOutput.whip}. For a duplex
2217
+ * connection to a browser peer see {@link NorskDuplex.webRtcBrowser}.
2218
+ *
2219
+ * @param settings - Configuration for the WHIP input
2220
+ */
2221
+ whip(settings: WhipInputSettings): Promise<WhipInputNode>;
2222
+ /**
2223
+ * Read from a Transport Stream on the network
2224
+ * This can be multicast/unicast or broadcast
2225
+ * @param settings - Configuration for the UDP input
2226
+ */
2227
+ udpTs(settings: UdpTsInputSettings): Promise<UdpTsInputNode>;
2228
+ fileWebVtt(settings: LocalFileInputSettings): Promise<FileWebVttInputNode>;
2229
+ /**
2230
+ * Read an image from a file. Various image formats are supported, see the
2231
+ * documentation for {@link FileImageInputSettings} for more details.
2232
+ * @param settings - Configuration for the file input
2233
+ *
2234
+ * The image will then be provided into Norsk as a video at 25fps for use
2235
+ * in other operations
2236
+ */
2237
+ fileImage(settings: FileImageInputSettings): Promise<FileImageInputNode>;
2238
+ /**
2239
+ * Read a MP4 (fragmented or not) from a file with realtime playback.
2240
+ * This will not play frames that are written to the file after the node
2241
+ * starts.
2242
+ * @param settings - Configuration for the file input
2243
+ */
2244
+ fileMp4(settings: FileMp4InputSettings): Promise<FileMp4InputNode>;
2245
+ /**
2246
+ * Stream from a remote RTP source
2247
+ *
2248
+ * Note that MPEG-TS sources encapsulated in RTP should use the {@link NorskInput.udpTs} input with appropriate config.
2249
+ * @param settings - Configuration for the RTP input
2250
+ */
2251
+ rtp(settings: RtpInputSettings): Promise<RtpInputNode>;
2252
+ /**
2253
+ * Generate a test video card with a configurable pattern.
2254
+ * @param settings - Configuration for the video test card
2255
+ */
2256
+ videoTestCard(settings: VideoTestcardGeneratorSettings): Promise<VideoTestcardGeneratorNode>;
2257
+ /**
2258
+ * Generate a test audio signal with a configurable waveform.
2259
+ * @param settings - Configuration for the audio signal
2260
+ */
2261
+ audioSignal(settings: AudioSignalGeneratorSettings): Promise<AudioSignalGeneratorNode>;
2262
+ /**
2263
+ * Generates a video source by rendering an HTML page
2264
+ * @param settings - Settings for the web page
2265
+ */
2266
+ browser(settings: BrowserInputSettings): Promise<BrowserInputNode>;
2267
+ /**
2268
+ * SDI/HDMI Input using a BlackMagic DeckLink card.
2269
+ * The available cards on the machine can be enumerated using the {@link NorskSystem.hardwareInfo} API.
2270
+ *
2271
+ * Multiple cards and both SDI and HDMI inputs are supported, with all DeckLink-supported
2272
+ * input resolutions and framerates are supported. The capture format is currently 8-bit only,
2273
+ * but 10-bit captures will be supported soon. All supported audio channels can be captured.
2274
+ * At present, additional data such as closed-captions and HDR metadata is not captured.
2275
+ * @param settings - Settings for the SDI capture
2276
+ */
2277
+ deckLink(settings: DeckLinkInputSettings): Promise<DeckLinkInputNode>;
2278
+ /**
2279
+ * Playback from an existing Frame Store recording
2280
+ */
2281
+ frameStorePlayer(settings: FrameStorePlayerSettings): Promise<FrameStorePlayerNode>;
2282
+ }
2283
+
2284
+ /**
2285
+ * @public
2286
+ * Methods that allow you to egest media from your application
2287
+ */
2288
+ export declare interface NorskOutput {
2289
+ /**
2290
+ * Produces video segments with the supplied settings for use in
2291
+ * HLS or DASH manifests.
2292
+ *
2293
+ * These can optionally be served via the Norsk web server or be pushed
2294
+ * to other locations - see {@link CmafDestinationSettings}
2295
+ *
2296
+ * @param settings - Configuration for the CMAF Video Stream
2297
+ */
2298
+ cmafVideo(settings: CmafOutputSettings): Promise<CmafVideoOutputNode>;
2299
+ /**
2300
+ * Produces audio segments with the supplied settings for use in
2301
+ * HLS or DASH manifests.
2302
+ *
2303
+ * These can optionally be served via the Norsk web server or be pushed
2304
+ * to other locations - see {@link CmafDestinationSettings}
2305
+ *
2306
+ * @param settings - Configuration for the CMAF Audio Stream
2307
+ */
2308
+ cmafAudio(settings: CmafOutputSettings): Promise<CmafAudioOutputNode>;
2309
+ /**
2310
+ * Produces WebVTT segments with the supplied settings for use in
2311
+ * HLS or DASH manifests. These are served via the Norsk web server
2312
+ *
2313
+ * @param settings - Configuration for the CMAF WebVTT Stream
2314
+ */
2315
+ cmafWebVtt(settings: CmafWebVttOutputSettings): Promise<CmafWebVttOutputNode>;
2316
+ /**
2317
+ * Produces a multi variant (used to be known as master) hls and/or dash manifest for a collection of media streams
2318
+ *
2319
+ * This can optionally be served via the Norsk web server or be pushed
2320
+ * to other locations - see {@link CmafDestinationSettings}
2321
+ *
2322
+ * @param settings - Configuration for the CMAF Multi Variant Manifest
2323
+ */
2324
+ cmafMultiVariant(settings: CmafMultiVariantOutputSettings): Promise<CmafMultiVariantOutputNode>;
2325
+ /**
2326
+ * Produces Transport Stream video segments with the supplied settings for use in
2327
+ * HLS manifests and builds a playlist served locally from the Norsk Web Server
2328
+ * or from other locations - see {@link CmafDestinationSettings}
2329
+ *
2330
+ * @param settings - Configuration for the HLS TS Stream
2331
+ */
2332
+ hlsTsVideo(settings: HlsTsVideoOutputSettings): Promise<HlsTsVideoOutputNode>;
2333
+ /**
2334
+ * Produces Transport Stream audio segments with the supplied settings for use in
2335
+ * HLS manifests and builds a playlist served locally from the Norsk Web Server
2336
+ * or from other locations - see {@link CmafDestinationSettings}
2337
+ *
2338
+ * @param settings - Configuration for the HLS TS Stream
2339
+ */
2340
+ hlsTsAudio(settings: HlsTsAudioOutputSettings): Promise<HlsTsAudioOutputNode>;
2341
+ /**
2342
+ * Produces Transport Stream segments containing both video and audio with the supplied settings for use in
2343
+ * HLS manifests and pushes them to the configured location (see {@link CmafDestinationSettings})
2344
+ *
2345
+ * @param settings - Configuration for the HLS TS Stream
2346
+ */
2347
+ hlsTsCombinedPush(settings: HlsTsCombinedPushOutputSettings): Promise<HlsTsCombinedPushOutputNode>;
2348
+ /**
2349
+ * Produces a multi variant HLS TS manifest for a collection of media streams
2350
+ *
2351
+ * This can optionally be served via the Norsk web server or be pushed
2352
+ * to other locations - see {@link CmafDestinationSettings}
2353
+ *
2354
+ * @param settings - Configuration for the Hls Ts Multivariant Playlist
2355
+ */
2356
+ hlsTsMultiVariant(settings: HlsTsMultiVariantOutputSettings): Promise<HlsTsMultiVariantOutputNode>;
2357
+ /**
2358
+ * Produces a Transport Stream optionally containing both video and audio
2359
+ * and sends it out over UDP
2360
+ *
2361
+ * @param settings - Configuration for the TS Stream
2362
+ */
2363
+ udpTs(settings: UdpTsOutputSettings): Promise<UdpTsOutputNode>;
2364
+ /**
2365
+ * Produces a Transport Stream, and allows Norsk to either connect to an existing
2366
+ * SRT server or act as an SRT server itself
2367
+ *
2368
+ * @param settings - Configuration for the SRT Stream
2369
+ */
2370
+ srt(settings: SrtOutputSettings): Promise<SrtOutputNode>;
2371
+ /**
2372
+ * Connects and sends media to a remote server via WebRTC using the WHIP standard.
2373
+ *
2374
+ * Here Norsk acts as the WHIP client sending to a remote Media Server; to
2375
+ * have Norsk act as the Media Server ingesting from some other WHIP client, see
2376
+ * {@link NorskInput.whip}
2377
+ *
2378
+ * @param settings - Configuration for the WebRTC Stream
2379
+ */
2380
+ whip(settings: WhipOutputSettings): Promise<WhipOutputNode>;
2381
+ /**
2382
+ * Hosts media for clients connecting via WebRTC using the WHEP standard.
2383
+ *
2384
+ * To send media to a remote Media Server via WebRTC see {@link NorskOutput.whip}.
2385
+ * See also {@link NorskInput.whip}, {@link NorskDuplex.webRtcBrowser}.
2386
+ *
2387
+ * @param settings - Configuration for the WebRTC Stream
2388
+ */
2389
+ whep(settings: WhepOutputSettings): Promise<WhepOutputNode>;
2390
+ /**
2391
+ * Connects and sends media to a remote RTMP server
2392
+ *
2393
+ * @param settings - Configuration for the RTMP Stream
2394
+ */
2395
+ rtmp(settings: RtmpOutputSettings): Promise<RtmpOutputNode>;
2396
+ /**
2397
+ * Stream to a Transport Stream file.
2398
+ *
2399
+ * @param settings - Configuration for the Transport Stream output
2400
+ */
2401
+ fileTs(settings: FileTsOutputSettings): Promise<FileTsOutputNode>;
2402
+ /**
2403
+ * Output MP4 files to disk, both fragmented and non-fragmented.
2404
+ *
2405
+ * The fragmented output is required.
2406
+ *
2407
+ * The optional non-fragmented filename will be written when calling
2408
+ * {@link FileMp4OutputNode.close} and will be fully written by the time
2409
+ * {@link NodeSettings.onClose} is called. This sets up a temp file to
2410
+ * store the frame data by appending the extension `.tmp`.
2411
+ *
2412
+ * A non-fragmented MP4 file can be written on request with
2413
+ * {@link FileMp4OutputNode.writeFile}, which uses the frame data store if
2414
+ * {@link FileMp4OutputSettings.nonfragmentedFileName} was given or reads
2415
+ * back the fragmented mp4 if there is no non-fragmented file.
2416
+ *
2417
+ * @param settings - Configuration for the MP4 output.
2418
+ */
2419
+ fileMp4(settings: FileMp4OutputSettings): Promise<FileMp4OutputNode>;
2420
+ /**
2421
+ * Create a frame store recording
2422
+ *
2423
+ * @param settings - Configuration for the frame store recorder
2424
+ */
2425
+ frameStoreRecording(settings: FrameStoreRecorderSettings): Promise<FrameStoreRecorderNode>;
2426
+ /**
2427
+ * Create a Moq Egest
2428
+ *
2429
+ * @param settings - Configuration for the egest
2430
+ */
2431
+ moqEgest(settings: MoqEgestSettings): Promise<MoqEgestNode>;
2432
+ }
2433
+
2434
+ /** @public */
2435
+ export declare function norskPort(): string;
2436
+
2437
+ /** @public */
2438
+ export declare class NorskProcessor {
2439
+ /**
2440
+ * Implements the {@link NorskControl} interface
2441
+ */
2442
+ control: NorskControl;
2443
+ /**
2444
+ * Implements the {@link NorskTransform} interface
2445
+ */
2446
+ transform: NorskTransform;
2447
+ close(): Promise<void>;
2448
+ constructor(client: MediaClient);
2449
+ }
2450
+
2451
+ /**
2452
+ * @public
2453
+ * Top level Norsk configuration
2454
+ */
2455
+ export declare interface NorskSettings {
2456
+ /**
2457
+ * Callback URL to listen on for gRPC session with Norsk Media
2458
+ * Defaults to $NORSK_HOST:$NORSK_PORT if the environment variables are set
2459
+ * where NORSK_HOST defaults to "127.0.0.1" and NORSK_PORT to "6790"
2460
+ * (so "127.0.0.1:6790" if neither variable is set)
2461
+ */
2462
+ url?: string;
2463
+ onAttemptingToConnect?: () => void;
2464
+ onConnecting?: () => void;
2465
+ onReady?: () => void;
2466
+ onFailedToConnect?: () => void;
2467
+ /** Code to execute if the Norsk node is shutdown - by default it logs and nothing else */
2468
+ onShutdown?: () => void;
2469
+ onCurrentLoad?: (load: CurrentLoad) => void;
2470
+ onHello?: (version: Version) => void;
2471
+ onLogEvent?: (log: Log) => void;
2472
+ /**
2473
+ * Manually handle license events, such as missing/invalid licenses and
2474
+ * sandbox timeout. (Logs messages to console by default.)
2475
+ */
2476
+ onLicenseEvent?: (message: string) => void;
2477
+ }
2478
+
2479
+ /**
2480
+ * @public
2481
+ * Methods that allow you to query the features of the system that Norsk is running in
2482
+ */
2483
+ export declare interface NorskSystem {
2484
+ hardwareInfo(): Promise<HardwareInfo>;
2485
+ }
2486
+
2487
/**
 * @public
 * Methods that allow you to manipulate your media streams
 */
export declare interface NorskTransform {
    /**
     * Encode a video stream to one or more renditions
     * using either software or appropriate hardware if available
     * @param settings - Encode ladder settings
     */
    videoEncode(settings: VideoEncodeSettings): Promise<VideoEncodeNode>;
    /**
     * Transform a single video stream (rescale, frame rate, etc)
     * @param settings - Transform settings
     */
    videoTransform(settings: VideoTransformSettings): Promise<VideoTransformNode>;
    /**
     * Interferes with a stream by dropping frames.
     * Why would you want this? Stick one of these after a decoder and before
     * anything else in order to simulate what the world is going to look like if you
     * have network problems (packet drops for example) in your ingest.
     *
     * *Just don't forget to remove it again when you've finished testing!*
     * @param settings - Chaos monkey settings
     */
    streamChaosMonkey(settings: StreamChaosMonkeySettings): Promise<StreamChaosMonkeyNode>;
    /**
     * Compose multiple video streams together into a single output
     * @param settings - Composition settings
     */
    videoCompose<Pins extends string>(settings: VideoComposeSettings<Pins> | DeferredVideoComposeSettings<Pins>): Promise<VideoComposeNode<Pins>>;
    /**
     * Create a Media Node performing transcription into subtitles using the
     * Amazon Transcribe AWS service.
     * @param settings - Settings and credentials for AWS transcribe
     */
    audioTranscribeAws(settings: AudioTranscribeAwsSettings): Promise<AudioTranscribeAwsNode>;
    /**
     * Create a Media Node performing transcription into subtitles using the
     * Azure Speech service.
     * @param settings - Settings and credentials for Azure transcribe
     */
    audioTranscribeAzure(settings: AudioTranscribeAzureSettings): Promise<AudioTranscribeAzureNode>;
    /**
     * Create a Media Node performing transcription into subtitles using the
     * Whisper speech recognition model.
     * @param settings - Settings and credentials for Whisper transcribe
     */
    audioTranscribeWhisper(settings: AudioTranscribeWhisperSettings): Promise<AudioTranscribeWhisperNode>;
    /**
     * Mix multiple audio streams together into a single output,
     * with optional gain control on each input.
     * @param settings - Settings for the mixer, including the gain vectors
     */
    audioMix<Pins extends string>(settings: AudioMixSettings<Pins>): Promise<AudioMixNode<Pins>>;
    /**
     * Given an audio stream of N channels, mix it down to M channels through a matrix of NxM gains.
     * @param settings - Settings for the mixer, including the gain matrix
     */
    audioMixMatrix(settings: AudioMixMatrixSettings): Promise<AudioMixMatrixNode>;
    /**
     * Apply gain to an audio stream
     * @param settings - Settings for the gain node
     */
    audioGain(settings: AudioGainSettings): Promise<AudioGainNode>;
    /**
     * Aggregate many single-channel audio streams into a stream with the
     * specified channel layout. The streams must all have the same sample format
     * and sample rate. The order of the streams provided for the channels is
     * important.
     * @param settings - Settings for the builder, including the channel layout
     * and stream keys specifying the sources for each channel.
     */
    audioBuildMultichannel(settings: AudioBuildMultichannelSettings): Promise<AudioBuildMultichannelNode>;
    /**
     * Split a multichannel audio stream into its individual channels. The first
     * channel receives the specified stream key, and each subsequent channel
     * increments the stream id on the stream key.
     * @param settings - Settings for the splitter
     */
    audioSplitMultichannel(settings: AudioSplitMultichannelSettings): Promise<AudioSplitMultichannelNode>;
    /**
     * Encode an audio stream.
     * @param settings - Settings for the encoder, including channel layout and
     * bitrate.
     */
    audioEncode(settings: AudioEncodeSettings): Promise<AudioEncodeNode>;
    /**
     * A node to nudge the timestamps on a stream, which affects how it syncs
     * with other streams. Useful for correcting for drift between different
     * sources.
     *
     * Subsequent nudges, via the `nudge` method, are applied gradually.
     *
     * This functionality is also provided by a `nudge` method on many sources.
     * @param settings - Initial nudge plus general node settings.
     */
    streamTimestampNudge(settings: StreamTimestampNudgeSettings): Promise<StreamTimestampNudgeNode>;
    /**
     * Provide a new stream key for a single stream. Cannot be subscribed to
     * multiple streams at once.
     *
     * The stream key is used for identifying streams within multiplexed sources
     * and also is translated into URIs for HLS playlists and other resources.
     *
     * This can be useful if changing sources and wanting to maintain a consistent
     * stream key going into an output.
     * @param settings - New stream key plus general node settings.
     */
    streamKeyOverride(settings: StreamKeyOverrideSettings): Promise<StreamKeyOverrideNode>;
    /**
     * Override bitrate and language metadata on streams.
     *
     * Audio and video bitrate metadata is required for playlists for the
     * {@link NorskOutput.cmafMultiVariant} node.
     * It is automatically configured for some sources (like RTMP) and in
     * cases where re-encoding is done, but is unset for other sources (like SRT).
     * @param settings - Bitrate and language metadata plus general node settings.
     */
    streamMetadataOverride(settings: StreamMetadataOverrideSettings): Promise<StreamMetadataOverrideNode>;
    /**
     * Buffer a stream for the specified number of milliseconds. This can be used
     * to reduce or eliminate jitter.
     * @param settings - Buffer delay time.
     */
    jitterBuffer(settings: JitterBufferSettings): Promise<JitterBufferNode>;
    /**
     * Sync multiple streams together by timestamps, queuing frames from streams
     * that are behind the others. This is already included in most nodes,
     * especially outputs.
     */
    streamSync(settings: StreamSyncSettings): Promise<StreamSyncNode>;
    /**
     * This processor does multiple things:
     * - joins together multiple streams from multiple sources
     * - rebases their timestamps so that they all start at the same point
     * - sets the program id to a common value
     *
     * It is useful for syncing multiple incoming streams that on paper are already synchronised but because
     * of the time taken to set up connections and subscriptions across various protocols, are off by a few
     * hundred milliseconds.
     */
    streamAlign(settings: StreamAlignSettings): Promise<StreamAlignNode>;
    /**
     * Observe, modify or inject ancillary data such as SCTE-35
     */
    ancillary(settings: AncillarySettings): Promise<AncillaryNode>;
    /**
     * Combine compatible streams of metadata. (This refers to ancillary streams of metadata messages, such as that
     * carried in an MPEG-TS PES metadata stream (e.g. KLV), and is unrelated to operations on the metadata of audio/video/etc streams.)
     */
    // NOTE(review): the return type `MetadataCombineMode` differs from the `*Node` pattern of the
    // sibling methods — confirm the declared type name against the published API before changing.
    metadataCombine(settings: MetadataCombineSettings): Promise<MetadataCombineMode>;
}
2640
+
2641
/** @public A NUMA node in the host topology, containing one or more processors. */
export declare interface NumaNode {
    processors: Processor[];
}
2645
+
2646
/**
 * @public
 * Settings for an H264 encode using Nvidia hardware.
 * A detailed description of these params can be found
 * in the Nvidia Encoder Documentation.
 *
 * If left undefined, all will default to Nvidia's own defaults.
 * If a preset is configured, then all will default to the values provided
 * by that preset.
 */
export declare interface NvidiaH264 {
    type: "nv-h264";
    /** The preset to use for this encode */
    preset?: NvidiaPreset;
    /** The IDR period */
    idrPeriod?: number;
    /** The gopInterval to use for this encode.
     * Note: This is different from the idrPeriod but usually you want
     * them set to the same value regardless.
     */
    gopInterval?: number;
    /** This relates to the gop structure to be used, and again it's best to look this up
     * in the Nvidia documentation.
     */
    frameIntervalP?: number;
    /** This is somewhat related to the gop structure and again, care should be taken when overriding this from
     * the preset */
    maxNumRefFrames?: number;
    /** The target level of this H264 encode.
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings;
     * this differs from other codecs which just silently change the outgoing level/profile for example. */
    level?: NvidiaH264Level;
    /** The target profile of this H264 encode.
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings;
     * this differs from other codecs which just silently change the outgoing level/profile for example. */
    profile?: NvidiaH264Profile;
    /** Output Access Unit Delimiters */
    outputAud?: boolean;
    /** Rate Control Settings */
    rateControl?: NvidiaRateControl;
}

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value
 */
export declare type NvidiaH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value
 */
export declare type NvidiaH264Profile = "baseline" | "main" | "high" | "high444";
2699
+
2700
/**
 * @public
 * Settings for a HEVC encode using Nvidia hardware.
 * A detailed description of these params can be found
 * in the Nvidia Encoder Documentation.
 *
 * If left undefined, all will default to Nvidia's own defaults.
 * If a preset is configured, then all will default to the values provided
 * by that preset.
 */
export declare interface NvidiaHevc {
    type: "nv-hevc";
    /** The preset to use for this encode */
    preset?: NvidiaPreset;
    /** The IDR period */
    idrPeriod?: number;
    /** The gopInterval to use for this encode.
     * Note: This is different from the idrPeriod but usually you want
     * them set to the same value regardless.
     */
    gopInterval?: number;
    /** This relates to the gop structure to be used, and again it's best to look this up
     * in the Nvidia documentation.
     */
    frameIntervalP?: number;
    /** The target level of this HEVC encode.
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings;
     * this differs from other codecs which just silently change the outgoing level/profile for example. */
    level?: NvidiaHevcLevel;
    /** The target profile of this HEVC encode.
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings;
     * this differs from other codecs which just silently change the outgoing level/profile for example. */
    profile?: NvidiaHevcProfile;
    /** Output Access Unit Delimiters */
    outputAud?: boolean;
    /** The target tier of this HEVC encode.
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings;
     * this differs from other codecs which just silently change the outgoing level/profile for example. */
    tier?: NvidiaHevcTier;
    /** Rate Control Settings */
    rateControl?: NvidiaRateControl;
}

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value
 */
export declare type NvidiaHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value
 */
export declare type NvidiaHevcProfile = "main" | "main10";

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value
 */
export declare type NvidiaHevcTier = "main" | "high";

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value
 */
export declare type NvidiaPreset = "p1" | "p2" | "p3" | "p4" | "p5" | "p6" | "p7";
2766
+
2767
/**
 * @public
 * The rate control options for an Nvidia encode.
 * For further info, consult the Nvidia Encoder docs.
 */
export declare interface NvidiaRateControl {
    /** The mode to use for this rate control operation */
    mode: NvidiaRateControlMode;
    /** The average bitrate of this encode */
    averageBitrate: number;
    /** The max bitrate of this encode */
    maxBitrate?: number;
    /** The vbv buffer size used for this encode */
    vbvBufferSize?: number;
    /** The vbv initial delay used for this encode */
    vbvInitialDelay?: number;
    /** Enable lookahead or not.
     * Note: enabling lookahead will introduce scene cuts unless this is specifically disabled
     * elsewhere in the codec settings.
     */
    enableLookahead?: boolean;
    strictGopTarget?: boolean;
    lookaheadDepth?: number;
}

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value.
 */
// NOTE(review): "con_stqp" presumably corresponds to NVENC's constant-QP mode ("constqp") —
// the literal is kept exactly as published since it is part of the API surface.
export declare type NvidiaRateControlMode = "con_stqp" | "vbr" | "cbr";
2797
+
2798
/**
 * @public
 * A rectangle used for describing a subset of an image
 */
export declare interface OffsetRect {
    /** The leftmost coordinate of the rect, where 0,0 is top left */
    x: number;
    /** The topmost coordinate of the rect, where 0,0 is top left */
    y: number;
    /** The width of this rectangle */
    width: number;
    /** The height of this rectangle */
    height: number;
}
2812
+
2813
/**
 * @public
 * Return type to enable control of an RTMP stream once media arrives on it
 */
export declare type OnStreamResult =
/** Accept the stream, assigning stream keys to its audio and video */
{
    accept: true;
    videoStreamKey: StreamKey_2 | StreamKeySettings;
    audioStreamKey: StreamKey_2 | StreamKeySettings;
}
/** Reject the stream, giving a reason */
| {
    accept: false;
    reason: string;
};
2829
+
2830
/**
 * @public
 * Settings for an Opus encode
 * see: {@link NorskTransform.audioEncode}
 */
export declare interface OpusSettings {
    kind: "opus";
}
2838
+
2839
/** @public
 * A transition for a video composition part.
 *
 * A transition interpolates the source_rect, dest_rect, and opacity properties
 * over the specified duration according to the specified easing function.
 *
 * As a special case, if a transition is specified and the input pin of the part
 * changes, an opacity fade from one to the other will occur.
 */
export declare interface PartTransition {
    /** Duration for the transition */
    durationMs: number;
    /**
     * Easing function to apply to the transition. If not specified will be
     * linear.
     */
    easing?: SimpleEasing;
}

/** Test-pattern variants for generated video sources. */
declare type Pattern = "black" | "smpte75" | "smpte100";

/** @public Maps subscription pins to the stream keys routed to them. */
export declare type PinToKey<Pins extends string> = Nullable<Partial<Record<Pins, StreamKey[]>>>;

/** @public */
export declare type PixelFormat = "bgra" | "rgba" | "yuv420p" | "yuv422p" | "yuv444p" | "yuva420p" | "yuva422p" | "yuva444p";

/** @public Callbacks invoked when HLS playlists of the various flavours change. */
export declare type PlaylistOnChangeFn<ClientMessage> = {
    cmafMediaPlaylist?: (grpcStream: grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, destinationId: DestinationId, additions: HlsPlaylistAdditions, playlistWithAdditions: HlsPlaylist) => void;
    tsMediaPlaylist?: (grpcStream: grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, destinationId: DestinationId, additions: TsPlaylistAdditions, playlistWithAdditions: MediaPlaylistPart[]) => void;
    cmafMultiVariantPlaylist?: (grpcStream: grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, destinationId: DestinationId, playlist: CmafMultiVariantPlaylistData) => void;
    hlsTsMultiVariantPlaylist?: (grpcStream: grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, destinationId: DestinationId, playlist: string) => void;
};

/** @public */
declare enum PlaylistPath {
    Cmaf = 0,
    Ts = 1
}
2879
+
2880
/**
 * @public
 * Returns the stream keys for playlist streams in a media context
 * @param streams - The media context from which to return the stream keys
 * @returns The playlist stream keys in the media context
 */
export declare function playlistStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];

/** @public Marker metadata type for playlist streams (no members). */
export declare interface PlaylistStreamMetadata {
}

/**
 * @public
 * Filters a context to only the playlist streams within it
 * @param streams - The media context from which to return the streams
 * @returns The playlist streams in the media context
 */
export declare function playlistStreams(streams: readonly StreamMetadata[]): StreamMetadata[];

/** @public A physical processor in the host topology, containing one or more cores. */
export declare interface Processor {
    cores: Core[];
}

/** A media node that both produces streams and accepts pin-based subscriptions. */
export declare interface ProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
}

export declare class ProcessorMediaNode<Pins extends string> {
    constructor(client: MediaClient, unregisterNode: (node: MediaNodeState) => void, getGrpcStream: () => (Readable | Writable), subscribeFn: (subscription: Subscription) => Promise<boolean>, subscribeErrorFn?: (error: SubscriptionError) => void, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
}

/** @public */
export declare interface ProcessorNodeSettings<T extends MediaNodeState> extends SinkNodeSettings<T>, SourceNodeSettings<T> {
}

/** @public */
export declare interface ProgramDateTime {
    programDateTime: Date;
}

/** @public */
export declare function publicUrlPrefix(): string;
2923
+
2924
/**
 * @public
 * Settings for an H264 encode using Netint Quadra hardware.
 * A detailed description of these params can be found
 * in the Netint Quadra Encoder Documentation.
 *
 * These fields have deliberately been written to maintain the same semantics as the
 * Quadra documentation where possible.
 *
 * If left undefined, all will default to Quadra's own defaults.
 */
export declare interface QuadraH264 {
    type: "quadra-h264";
    /** This (for convenience) takes the xcoder string that Quadra's
     * Ffmpeg integration accepts; this is to aid developers in getting up and running
     * quickly and will override any values set manually in the rest of this interface.
     *
     * It is expected that developers will choose to use the typed fields for most things instead
     * when moving to production, as they offer a degree of validation and type safety.
     */
    extraOpts?: string;
    enableAud?: boolean;
    gpuIndex?: number;
    bitrate?: number;
    enableVfr?: boolean;
    crf?: number;
    gopPresetIndex?: number;
    intraPeriod?: number;
    rcEnable?: boolean;
    intraQp?: number;
    rcInitDelay?: number;
    profile?: QuadraH264Profile;
    level?: QuadraH264Level;
    fillerEnable?: boolean;
    minQp?: number;
    maxQp?: number;
    maxDeltaQp?: number;
    cuLevelRCEnable?: boolean;
    lookAheadDepth?: number;
    vbvBufferSize?: number;
    vbvMaxRate?: number;
}

/** @public */
export declare type QuadraH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;

/** @public */
export declare type QuadraH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
2972
+
2973
/**
 * @public
 * Settings for a HEVC encode using Netint Quadra hardware.
 * A detailed description of these params can be found
 * in the Netint Quadra Encoder Documentation.
 *
 * These fields have deliberately been written to maintain the same semantics as the
 * Quadra documentation where possible.
 *
 * If left undefined, all will default to Quadra's own defaults.
 */
export declare interface QuadraHevc {
    type: "quadra-hevc";
    /** This (for convenience) takes the xcoder string that Quadra's
     * Ffmpeg integration accepts; this is to aid developers in getting up and running
     * quickly and will override any values set manually in the rest of this interface.
     *
     * It is expected that developers will choose to use the typed fields for most things instead
     * when moving to production, as they offer a degree of validation and type safety.
     */
    extraOpts?: string;
    enableAud?: boolean;
    gpuIndex?: number;
    bitrate?: number;
    enableVfr?: boolean;
    crf?: number;
    gopPresetIndex?: number;
    intraPeriod?: number;
    rcEnable?: boolean;
    intraQp?: number;
    rcInitDelay?: number;
    profile?: QuadraHevcProfile;
    level?: QuadraHevcLevel;
    tier?: QuadraHevcTier;
    lossless?: boolean;
    hrdEnable?: boolean;
    dolbyVisionProfile?: number;
    fillerEnable?: boolean;
    minQp?: number;
    maxQp?: number;
    maxDeltaQp?: number;
    cuLevelRCEnable?: boolean;
    lookAheadDepth?: number;
    vbvBufferSize?: number;
    vbvMaxRate?: number;
}

/** @public */
export declare type QuadraHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;

/** @public */
export declare type QuadraHevcProfile = "main" | "main10";

/** @public */
export declare type QuadraHevcTier = "main" | "high";
3028
+
3029
/** @public A source node plus a selector mapping its streams onto named pins. */
export declare type ReceiveFromAddress<Pins extends string> = {
    source: SourceMediaNode;
    sourceSelector: (streams: StreamMetadata[]) => PinToKey<Pins>;
};

/** @public A source node plus a selector choosing which of its stream keys to receive. */
export declare type ReceiveFromAddressAuto = {
    source: SourceMediaNode;
    sourceSelector: (streams: StreamMetadata[]) => StreamKey[];
};

/**
 * @public
 * Base settings for any input node requiring access to a host:port pair
 */
export declare interface RemoteInputSettings<T extends MediaNodeState> extends InputSettings<T> {
    /** The IP of the remote server */
    ip: string;
    /** The port the remote server is listening on */
    port: number;
}
3051
+
3052
/**
 * @public
 * Validation function to require at least one audio and at least one video stream. Often the default validation
 * will happen to ensure this, as audio and video are subscribed from separate media nodes, but when one media node
 * will produce both audio and video, default validation cannot know that both are required.
 */
export declare function requireAV(ctx: Context): boolean;

/**
 * @public
 * Validation function to require exactly N audio and exactly M video streams. Often the default validation
 * will happen to ensure this, as audio and video are subscribed from separate media nodes, but when one media node
 * will produce both audio and video, default validation cannot know that both are required.
 */
export declare function requireExactAV({ audio, video }: {
    audio: number;
    video: number;
}): (ctx: Context) => boolean;

/**
 * @public
 * The resolution of a video within Norsk
 */
export declare interface Resolution {
    width: number;
    height: number;
}
3079
+
3080
/** @public */
export declare enum RtmpConnectionFailureReason {
    RtmpConnectionFailedRetry = "RtmpConnectionFailedRetry"
}

/**
 * @public
 * see: {@link NorskOutput.rtmp}
 */
export declare class RtmpOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
}

/**
 * @public
 * The settings for an RTMP output
 * see: {@link NorskOutput.rtmp}
 */
export declare interface RtmpOutputSettings extends SinkNodeSettings<RtmpOutputNode>, StreamStatisticsMixin {
    /**
     * The URL of the remote RTMP server to connect to, including the full stream path and credentials
     */
    url: string;
    /** Jitter buffer delay in milliseconds */
    bufferDelayMs?: number;
    /** A/V delay in milliseconds (to allow embedded captions to be added) */
    avDelayMs?: number;
    /** Called when the RTMP output successfully connects to a server and starts publishing data */
    onPublishStart?: () => void;
    /** Called when the connection to the RTMP server fails */
    onConnectionFailure?: (failureReason: RtmpConnectionFailureReason) => void;
    /**
     * Number of seconds to wait until a retry is attempted to the RTMP server.
     * Defaults to five seconds.
     */
    retryConnectionTimeout?: number;
    sslOptions?: {
        verifyPeerCert?: boolean;
    };
}
3119
+
3120
/**
 * @public
 * see: {@link NorskInput.rtmpServer}
 */
export declare class RtmpServerInputNode extends SourceMediaNode {
    /**
     * @public
     * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
     */
    nudge(sourceName: string, programNumber: number, nudge: number): void;
}
3131
+
3132
/**
 * @public
 * Settings to control how RTMP streams can be included as sources in your media workflow
 * see: {@link NorskInput.rtmpServer}
 */
export declare interface RtmpServerInputSettings extends SourceNodeSettings<RtmpServerInputNode>, StreamStatisticsMixin {
    /** The port the RTMP server should listen on */
    port?: number;
    ssl?: boolean;
    sslOptions?: {
        certFile?: string;
        keyFile?: string;
    };
    /**
     * On connect callback, use to accept/reject connections given app/url in use
     * @eventProperty
     */
    onConnection?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The RTMP "app" field from the connection string */
    app: string,
    /** The full URL of the RTMP connection string */
    url: string) => {
        accept: true;
    } | {
        accept: false;
        reason?: string;
    };
    /**
     * On stream callback, set up the stream keys for a given stream or reject the stream
     * @eventProperty
     */
    onStream?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The RTMP "app" field from the connection string */
    app: string,
    /** The full URL of the RTMP connection string */
    url: string,
    /** The Norsk streamId of this media stream */
    streamId: number,
    /** The RTMP publishing name of this stream — NOTE(review): original doc was "TODO - publishingName"; confirm exact semantics */
    publishingName: string) => OnStreamResult;
    /**
     * Called when the connection status has changed (e.g. when the RTMP connection drops)
     * @eventProperty
     */
    onConnectionStatusChange?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The new connection state */
    status: RtmpServerInputStatus,
    /** The audio and video stream keys that were present in the stream at the time of the status change */
    streamKeys: {
        audioStreamKey: StreamKey_2;
        videoStreamKey: StreamKey_2;
    }[]) => void;
    onConnectionError?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The error */
    error: RtmpError_UnsupportedVideo | RtmpError_UnsupportedAudio) => void;
    onConnectionBytesRead?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The number of bytes read, as reported by the peer */
    bytesRead: bigint) => void;
}
3201
+
3202
/** @public */
export declare type RtmpServerInputStatus = "disconnected";

/**
 * @public
 * The stream keys in an RTMP input stream
 */
export declare type RtmpServerStreamKeys = {
    audioStreamKey: StreamKey_2;
    videoStreamKey: StreamKey_2;
}[];
3213
+
3214
/**
 * @public
 * A description of an Eac3 stream being delivered via RTP
 */
export declare interface RtpEac3 {
    kind: "eac3";
    /** The clock rate of the stream */
    clockRate: number;
    /** The language code (this will end up in outgoing metadata). RFC 5646 language tag. */
    languageCode?: string;
    ec3Extension: boolean;
    complexityIndex: number;
}

/**
 * @public
 * A description of an H264 stream delivered over RTP
 */
export declare interface RtpH264 {
    kind: "h264";
    /** The clock rate of the stream */
    clockRate: number;
}

/**
 * @public
 * A description of an HEVC stream delivered over RTP
 */
export declare interface RtpHEVC {
    kind: "hevc";
    /** The clock rate of the stream */
    clockRate: number;
}

/**
 * @public
 * see: {@link NorskInput.rtp}
 */
export declare class RtpInputNode extends SourceMediaNode {
    /**
     * @public
     * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
     */
    nudge(nudge: number): void;
}

/**
 * @public
 * Settings for an RTP input
 * see: {@link NorskInput.rtp}
 */
export declare interface RtpInputSettings extends SourceNodeSettings<RtpInputNode>, StreamStatisticsMixin {
    sourceName: string;
    streams: readonly RtpStreamSettings[];
}
3269
+
3270
/**
 * @public
 * A description of a LinearPCM stream being delivered via RTP
 */
export declare interface RtpLinearPcm {
    kind: "linearpcm";
    /** The sample rate of the stream */
    sampleRate: SampleRate;
    /** The channel layout of the stream */
    channelLayout: ChannelLayout;
    /** The bit depth of the stream */
    bitDepth: RtpLinearPcmBitDepth;
}

/** @public */
export declare type RtpLinearPcmBitDepth = 16 | 24;

/**
 * @public
 * A description of a Mpeg4 Generic Aac stream
 */
export declare interface RtpMpeg4GenericAacHbr {
    kind: "mpeg4-generic-aac-hbr";
    config: string;
}
3295
+
3296
/**
 * @public
 * A description of an incoming RTP stream
 */
export declare interface RtpStreamSettings {
    /** A streamId to assign to the outgoing stream key */
    streamId: number;
    /** The IP Address to join the RTP stream on */
    ip: string;
    /** The interface to bind to; "loopback" and "any" are special cases
     * and anything else will be interpreted as the name of a network interface */
    interface: string;
    /** The port to connect to for the RTP stream itself */
    rtpPort: number;
    /** The port to connect to for the associated RTCP stream */
    rtcpPort: number;
    /** A description of the stream being joined */
    streamType: RtpLinearPcm | RtpEac3 | RtpMpeg4GenericAacHbr | RtpH264 | RtpHEVC;
}
3315
+
3316
/**
 * @public
 * This is the SAR/PAR for a video stream and is an expression of what shape each pixel has within a video stream;
 * x:1, y:1 being a square and the most common value for this.
 */
export declare interface SampleAspectRatio {
    x: number;
    y: number;
}

/** @public */
export declare type SampleFormat =
/** Signed 16 bits, non-planar */
"s16"
/** Signed 16 bits, planar */
| "s16p"
/** 32bit floating point, non-planar */
| "flt"
/** 32bit floating point, planar */
| "fltp";

/** @public Audio sample rate, in Hz */
export declare type SampleRate = 8000 | 11025 | 12000 | 16000 | 22050 | 24000 | 32000 | 44100 | 48000 | 64000 | 88200 | 96000;
3339
+
3340
+ /** @public */
3341
+ export declare type ScheduleComponent = {
3342
+ components: Scte35ScheduleCommandComponent[];
3343
+ };
3344
+
3345
+ /** @public */
3346
+ export declare type ScheduledTag = [number, MediaPlaylistPart, Date, DestinationId?];
3347
+
3348
+ /** @public */
3349
+ export declare type ScheduleProgram = {
3350
+ utcSpliceTime: number;
3351
+ };
3352
+
3353
+ /** @public */
3354
+ export declare type Scte35AudioComponent = {
3355
+ componentTag: number;
3356
+ isoCode: number;
3357
+ bitStreamMode: number;
3358
+ numChannels: number;
3359
+ fullSrvcAudio: boolean;
3360
+ };
3361
+
3362
+ /** @public */
3363
+ export declare type Scte35AudioDescriptor = {
3364
+ components: Scte35AudioComponent[];
3365
+ };
3366
+
3367
+ /** @public */
3368
+ export declare type Scte35AvailDescriptor = {
3369
+ providerAvailId: number;
3370
+ };
3371
+
3372
+ /** @public */
3373
+ export declare type Scte35BreakDuration = {
3374
+ autoReturn: boolean;
3375
+ duration: bigint;
3376
+ };
3377
+
3378
+ /** @public */
3379
+ export declare type Scte35DtmfDescriptor = {
3380
+ preroll: number;
3381
+ dtmfChars: Uint8Array;
3382
+ };
3383
+
3384
+ /** @public */
3385
+ export declare type Scte35GenericDescriptor = {
3386
+ identifier: number;
3387
+ spliceDescriptorTag: number;
3388
+ privateBytes: Uint8Array;
3389
+ };
3390
+
3391
+ /** @public */
3392
+ export declare type Scte35InsertCommand = {
3393
+ spliceEventId: number;
3394
+ spliceEventCancelIndicator: boolean;
3395
+ outOfNetworkIndicator: boolean;
3396
+ spliceImmediateFlag: boolean;
3397
+ mode: Scte35InsertCommandMode;
3398
+ breakDuration: Scte35BreakDuration;
3399
+ uniqueProgramId: number;
3400
+ availNum: number;
3401
+ availsExpected: number;
3402
+ };
3403
+
3404
+ /** @public */
3405
+ export declare type Scte35InsertCommandComponent = {
3406
+ componentTag: number;
3407
+ spliceTime: Scte35SpliceTime;
3408
+ };
3409
+
3410
+ /** @public */
3411
+ export declare type Scte35InsertCommandMode = InsertProgram | InsertComponent;
3412
+
3413
+ /** @public */
3414
+ export declare type Scte35PrivateCommand = {
3415
+ identifier: number;
3416
+ privateBytes: Uint8Array;
3417
+ };
3418
+
3419
+ /** @public */
3420
+ export declare type Scte35ReservedCommand = {
3421
+ reserved: number;
3422
+ };
3423
+
3424
+ /** @public */
3425
+ export declare type Scte35ScheduleCommand = {
3426
+ items: Scte35ScheduleItem[];
3427
+ };
3428
+
3429
+ /** @public */
3430
+ export declare type Scte35ScheduleCommandComponent = {
3431
+ componentTag: number;
3432
+ utcSpliceTime: number;
3433
+ };
3434
+
3435
+ /** @public */
3436
+ export declare type Scte35ScheduleCommandMode = ScheduleProgram | ScheduleComponent;
3437
+
3438
+ /** @public */
3439
+ export declare type Scte35ScheduleItem = {
3440
+ spliceEventId: number;
3441
+ spliceEventCancelIndicator: boolean;
3442
+ outOfNetworkIndicator: boolean;
3443
+ mode: Scte35ScheduleCommandMode;
3444
+ breakDuration: Scte35BreakDuration;
3445
+ uniqueProgramId: number;
3446
+ availNum: number;
3447
+ availsExpected: number;
3448
+ };
3449
+
3450
+ /** @public */
3451
+ export declare type Scte35SegmentationComponent = {
3452
+ componentTag: number;
3453
+ ptsOffset: bigint;
3454
+ };
3455
+
3456
+ /** @public */
3457
+ export declare type Scte35SegmentationDescriptor = {
3458
+ segmentationEventId: number;
3459
+ segmentationEventCancelIndicator: boolean;
3460
+ programSegmentationFlag: boolean;
3461
+ deliveryRestrictions?: Scte35SegmentDeliveryRestrictions;
3462
+ components: Scte35SegmentationComponent[];
3463
+ segmentationDuration?: bigint;
3464
+ segmentationUpidType: number;
3465
+ segmentationUpid: Uint8Array;
3466
+ segmentationTypeId: number;
3467
+ segmentNum: number;
3468
+ segmentsExpected: number;
3469
+ subSegmentNum?: number;
3470
+ subSegmentsExpected?: number;
3471
+ };
3472
+
3473
+ /** @public */
3474
+ export declare type Scte35SegmentDeliveryRestrictions = {
3475
+ webDeliveryAllowedFlag: boolean;
3476
+ noRegionalBlackoutFlag: boolean;
3477
+ archiveAllowedFlag: boolean;
3478
+ deviceRestrictions: number;
3479
+ };
3480
+
3481
+ /** @public */
3482
+ export declare type Scte35SpliceCommand = {
3483
+ type: Scte35SpliceCommandType;
3484
+ value?: Scte35InsertCommand | Scte35ScheduleCommand | Scte35TimeSignalCommand | Scte35PrivateCommand | Scte35ReservedCommand;
3485
+ };
3486
+
3487
+ /** @public */
3488
+ export declare type Scte35SpliceCommandType = "null" | "schedule" | "insert" | "timeSignal" | "bandwidthReservation" | "privateCommand" | "reserved";
3489
+
3490
+ /** @public */
3491
+ export declare type Scte35SpliceDescriptor = {
3492
+ type: Scte35SpliceDescriptorType;
3493
+ value: Scte35AvailDescriptor | Scte35DtmfDescriptor | Scte35SegmentationDescriptor | Scte35TimeDescriptor | Scte35AudioDescriptor | Scte35GenericDescriptor;
3494
+ };
3495
+
3496
+ /** @public */
3498
+ export declare type Scte35SpliceDescriptorType = "avail" | "dtmf" | "segmentation" | "time" | "audio" | "generic";
3499
+
3500
+ /** @public */
3501
+ export declare type Scte35SpliceInfoSection = {
3502
+ sapType: number;
3503
+ protocolVersion: number;
3504
+ encryptedPacket: boolean;
3505
+ encryptionAlgorithm: number;
3506
+ ptsAdjustment: bigint;
3507
+ cwIndex: number;
3508
+ tier: number;
3509
+ spliceCommand: Scte35SpliceCommand;
3510
+ descriptors: Scte35SpliceDescriptor[];
3511
+ };
3512
+
3513
+ /** @public */
3514
+ export declare type Scte35SpliceTime = {
3515
+ ptsTime?: number;
3516
+ };
3517
+
3518
+ /** @public */
3519
+ export declare type Scte35TimeDescriptor = {
3520
+ taiSeconds: bigint;
3521
+ taiNs: number;
3522
+ utcOffset: number;
3523
+ };
3524
+
3525
+ /** @public */
3526
+ export declare type Scte35TimeSignalCommand = {
3527
+ spliceTime: Scte35SpliceTime;
3528
+ };
3529
+
3530
+ /**
3531
+ * @public
3532
+ * Select all the streams from the input
3533
+ * @param streams - The streams from the inbound Context
3534
+ * @returns Array of selected StreamKeys
3535
+ */
3536
+ export declare function selectAll(streams: readonly StreamMetadata[]): StreamKey[];
3537
+
3538
+ /**
3539
+ * @public
3540
+ * Select all the ancillary data streams from the input
3541
+ * @param streams - The streams from the inbound Context
3542
+ * @returns Array of selected StreamKeys
3543
+ */
3544
+ export declare function selectAncillary(streams: readonly StreamMetadata[]): StreamKey[];
3545
+
3546
+ /**
3547
+ * @public
3548
+ * Select all the audio streams from the input
3549
+ * @param streams - The streams from the inbound Context
3550
+ * @returns Array of selected StreamKeys
3551
+ */
3552
+ export declare function selectAudio(streams: readonly StreamMetadata[]): StreamKey[];
3553
+
3554
+ /**
3555
+ * @public
3556
+ * Select all the audio and video streams from the input
3557
+ * @param streams - The streams from the inbound Context
3558
+ * @returns Array of selected StreamKeys
3559
+ */
3560
+ export declare function selectAV(streams: readonly StreamMetadata[]): StreamKey[];
3561
+
3562
+ /** @public */
3563
+ export declare function selectExactKey(key: StreamKey): (streams: readonly StreamMetadata[]) => StreamKey[];
3564
+
3565
+ /** @public */
3566
+ export declare function selectPlaylist(streams: readonly StreamMetadata[]): StreamKey[];
3567
+
3568
+ /**
3569
+ * @public
3570
+ * Select all the subtitle streams from the input
3571
+ * @param streams - The streams from the inbound Context
3572
+ * @returns Array of selected StreamKeys
3573
+ */
3574
+ export declare function selectSubtitles(streams: readonly StreamMetadata[]): StreamKey[];
3575
+
3576
+ /**
3577
+ * @public
3578
+ * Select all the video streams from the input
3579
+ * @param streams - The streams from the inbound Context
3580
+ * @returns Array of selected StreamKeys
3581
+ */
3582
+ export declare function selectVideo(streams: readonly StreamMetadata[]): StreamKey[];
3583
+
3584
+ /**
3585
+ * @public
3586
+ * Create a selector selecting all the video streams from the input with the specified rendition name
3587
+ * @param renditionName - The rendition name to match against the video streams in the inbound Context
3588
+ * @returns Array of selected StreamKeys
3589
+ */
3590
+ export declare function selectVideoRendition(renditionName: string): (streams: readonly StreamMetadata[]) => StreamKey[];
3591
+
3592
+ /** @public */
3593
+ export declare type SentenceBuildMode = "raw" | "stable" | "partial" | "complete";
3594
+
3595
+ /** @public */
3596
+ export declare type SimpleEasing = "linear" | "ease_in" | "ease_in_out" | "ease_out";
3597
+
3598
+ /** @public */
3599
+ export declare interface SingleStreamStatistics extends StreamStatistics {
3600
+ streamKey: StreamKey;
3601
+ metadata: StreamMetadataMessage;
3602
+ }
3603
+
3604
+ /** @public */
3605
+ export declare class SinkMediaNode<Pins extends string> extends MediaNodeState implements SubscribeDestination {
3606
+ permissiveSubscriptionValidation(_context: Context): SubscriptionValidationResponse;
3607
+ restrictiveSubscriptionValidation(context: Context): SubscriptionValidationResponse;
3608
+ /** Subscribe to the given sources.
3609
+ *
3610
+ * This version of the function call accepts the target pins of an output
3611
+ * and is suitable for advanced use where a node is capable of subscribing to
3612
+ * multiple video streams and provides a means of distinguishing them via pins
3613
+ * discarding any existing subscriptions.
3614
+ *
3615
+ * @param done - will be called with no arguments if the subscription succeeds,
3616
+ * or an error if it failed. This error indicates the specific reason it
3617
+ * failed, so you can take appropriate actions in response. It will be called
3618
+ * before the `subscribedStreamsChangedFn` or `subscribeErrorFn` callbacks
3619
+ * provided in the config for the node.
3620
+ *
3621
+ * Errors are also logged to the debug log.
3622
+ */
3623
+ subscribeToPins(sources: ReceiveFromAddress<Pins>[], validation?: (context: Context) => SubscriptionValidationResponse, done?: (error?: SubscriptionError) => void): void;
3624
+ sourceContextChange(responseCallback: (error?: SubscriptionError) => void): Promise<boolean>;
3625
+ finalise(): void;
3626
+ }
3627
+
3628
+ /** @public */
3629
+ export declare interface SinkNodeSettings<T extends MediaNodeState> extends NodeSettings<T> {
3630
+ onSubscriptionError?: (error: SubscriptionError) => void;
3631
+ }
3632
+
3633
+ export declare interface Smpte2038Message {
3634
+ cNotYChannelFlag: boolean;
3635
+ lineNumber: number;
3636
+ horizontalOffset: number;
3637
+ payloadFormat: VancPayloadFormat;
3638
+ ancillaryId: VancType2AncillaryId;
3639
+ userData: Uint8Array;
3640
+ }
3641
+
3642
+ /** @public */
3643
+ export declare class SourceMediaNode extends MediaNodeState {
3644
+ outputStreams: StreamMetadata[];
3645
+ registerForContextChange(subscriber: SubscribeDestination): void;
3646
+ unregisterForContextChange(subscriber: SubscribeDestination): void;
3647
+ }
3648
+
3649
+ /** @public */
3650
+ export declare interface SourceNodeSettings<T extends MediaNodeState> extends NodeSettings<T> {
3651
+ onOutboundContextChange?: (streams: StreamMetadata[]) => Promise<void>;
3652
+ }
3653
+
3654
+ /**
3655
+ * @public
3656
+ * Errors found while subscribing to a particular source, separated out by reason:
3657
+ *
3658
+ * - `internal`: An opaque internal error
3659
+ *
3660
+ * - `unknownSourceId`: The media node does not exist (maybe it crashed)
3661
+ *
3662
+ * - `unknownSourceStream`: The media node exists, but does not have the stream key
3663
+ *
3664
+ * - `noSubscriberPin`: The media node is not set up to receive data on this pin (which may be auto-detected)
3665
+ *
3666
+ * - `unsupportedConversion`: Norsk does not support conversion from the media types of the source to the media types accepted by the subscriber
3667
+ */
3668
+ export declare type SourceSubscriptionError = {
3669
+ info: string;
3670
+ reason: "internal";
3671
+ } | {
3672
+ mediaNodeId: MediaNodeId;
3673
+ reason: "unknownSourceId";
3674
+ } | {
3675
+ mediaNodeId: MediaNodeId;
3676
+ streamKey: StreamKey;
3677
+ reason: "unknownSourceStream";
3678
+ } | {
3679
+ mediaNodeId: MediaNodeId;
3680
+ streamKey: StreamKey;
3681
+ pin: string;
3682
+ subscriberPins: string[];
3683
+ reason: "noSubscriberPin";
3684
+ } | {
3685
+ mediaNodeId: MediaNodeId;
3686
+ streamKey: StreamKey;
3687
+ sourceTypes: string[];
3688
+ subscriberTypes: string[];
3689
+ reason: "unsupportedConversion";
3690
+ };
3691
+
3692
+ /** @public */
3693
+ export declare function sourceToPin<Pins extends string>(source: string, pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
3694
+
3695
+ /**
3696
+ * @public
3697
+ * The return value for the {@link SrtInputSettings.onConnection} callback
3698
+ * determining what to do with an incoming stream
3699
+ */
3700
+ export declare type SrtConnectionResult =
3701
+ /** Accept the stream */
3702
+ {
3703
+ accept: true;
3704
+ /** The source name to assign to the connection */
3705
+ sourceName: string;
3706
+ }
3707
+ /** Reject the stream */
3708
+ | {
3709
+ accept: false;
3710
+ };
3711
+
3712
+ /**
3713
+ * @public
3714
+ * see: {@link NorskInput.srt}
3715
+ */
3716
+ export declare class SrtInputNode extends SourceMediaNode {
3717
+ /**
3718
+ * @public
3719
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
3720
+ * */
3721
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
3722
+ /**
3723
+ * @public
3724
+ * Closes a connected stream as specified by 'streamIndex'
3725
+ * @param streamIndex - the index of the stream to be terminated
3726
+ * */
3727
+ closeStream(streamIndex: number): void;
3728
+ }
3729
+
3730
+ /**
3731
+ * @public
3732
+ * Settings for an SRT Input node
3733
+ * see: {@link NorskInput.srt}
3734
+ */
3735
+ export declare interface SrtInputSettings extends RemoteInputSettings<SrtInputNode>, StreamStatisticsMixin {
3736
+ /**
3737
+ * The mode to act in (caller or listener)
3738
+ */
3739
+ mode: SrtMode;
3740
+ /**
3741
+ * Passphrase for encryption
3742
+ */
3743
+ passphrase?: string;
3744
+ /**
3745
+ * Stream ID to set on the socket when acting in caller mode
3746
+ */
3747
+ streamId?: string;
3748
+ /**
3749
+ * On connect callback, notifying that a new caller has connected (in listener mode), allowing the source name to be set accordingly
3750
+ * @eventProperty
3751
+ */
3752
+ onConnection?: (
3753
+ /** The stream_id sent on the SRT socket (or empty if none was set) */
3754
+ streamId: string,
3755
+ /**
3756
+ * Identifier indicating which connection this message refers to (for a
3757
+ * listener which may have multiple connections)
3758
+ */
3759
+ index: number,
3760
+ /** The address of the remote host */
3761
+ remoteHost: string) => SrtConnectionResult;
3762
+ /**
3763
+ * Called when the connection status has changed (e.g. when the SRT socket is closed)
3764
+ * @eventProperty
3765
+ */
3766
+ onConnectionStatusChange?: (
3767
+ /** The new connection state */
3768
+ status: SrtInputStatus,
3769
+ /** The source name assigned to the connection which changed status */
3770
+ sourceName: string | undefined) => void;
3771
+ }
3772
+
3773
+ /** @public */
3774
+ export declare type SrtInputStatus = "disconnected";
3775
+
3776
+ /** @public */
3777
+ export declare type SrtMode = "listener" | "caller";
3778
+
3779
+ /**
3780
+ * @public
3781
+ * see: {@link NorskOutput.srt}
3782
+ */
3783
+ export declare class SrtOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
3784
+ }
3785
+
3786
+ /**
3787
+ * @public
3788
+ * The settings for an SRT output
3789
+ * see: {@link NorskOutput.srt}
3790
+ * */
3791
+ export declare interface SrtOutputSettings extends SinkNodeSettings<SrtOutputNode>, StreamStatisticsMixin {
3792
+ /**
3793
+ * Passphrase for encryption
3794
+ */
3795
+ passphrase?: string;
3796
+ /**
3797
+ * Stream ID to set on the socket when acting in caller mode
3798
+ */
3799
+ streamId?: string;
3800
+ /**
3801
+ * The mode to act in (see {@link SrtMode})
3802
+ */
3803
+ mode: SrtMode;
3804
+ /**
3805
+ * The IP address to listen on in listener mode, or to connect to in caller mode
3806
+ */
3807
+ ip: string;
3808
+ /**
3809
+ * The port to listen on in listener mode, or to connect to in caller mode
3810
+ */
3811
+ port: number;
3812
+ /** Jitter buffer delay in milliseconds */
3813
+ bufferDelayMs?: number;
3814
+ /** A/V delay in milliseconds - to allow inclusion of subtitles, metadata and other ancillary data. May be set to 0 if these are not present to reduce latency */
3815
+ avDelayMs?: number;
3816
+ /**
3817
+ * On connect callback, notifying that a new caller has connected (in listener mode) and providing the stream_id that was set on the socket
3818
+ * @eventProperty
3819
+ */
3820
+ onConnection?: (
3821
+ /** The stream_id sent on the SRT socket (or empty if none was set) */
3822
+ streamId: string,
3823
+ /** The stream index (count of connections that have been made) */
3824
+ streamIndex: number,
3825
+ /** The remote host address */
3826
+ remoteHost: string) => void;
3827
+ }
3828
+
3829
+ /** @public */
3830
+ export declare type StabilizationMode = "low" | "medium" | "high";
3831
+
3832
+ /**
3833
+ * @public
3834
+ * see: {@link NorskTransform.streamAlign}
3835
+ */
3836
+ export declare class StreamAlignNode extends AutoProcessorMediaNode<"audio" | "video"> {
3837
+ }
3838
+
3839
+ /**
3840
+ * @public
3841
+ * Settings for a StreamAlign node
3842
+ * This will reset all streams to the same framerates/sample rates
3843
+ * and align their timestamps so that they completely line up for downstream operations
3844
+ * see {@link NorskTransform.streamAlign}
3845
+ * */
3846
+ export declare interface StreamAlignSettings extends ProcessorNodeSettings<StreamAlignNode> {
3847
+ sampleRate: SampleRate;
3848
+ frameRate: FrameRate;
3849
+ }
3850
+
3851
+ /**
3852
+ * @public
3853
+ * see: {@link NorskTransform.streamChaosMonkey}
3854
+ */
3855
+ export declare class StreamChaosMonkeyNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3856
+ }
3857
+
3858
+ /**
3859
+ * @public
3860
+ * The settings for a Chaos Monkey
3861
+ * see: {@link NorskTransform.streamChaosMonkey}
3862
+ * */
3863
+ export declare interface StreamChaosMonkeySettings extends ProcessorNodeSettings<StreamChaosMonkeyNode> {
3864
+ /** Optional configuration to drop frames from a stream
3865
+ * leaving this undefined means don't drop any frames
3866
+ * */
3867
+ frameDrop?: DropRandom | DropEvery | DropStart;
3868
+ /**
3869
+ * Introduce random jitter
3870
+ */
3871
+ jitterMs?: number;
3872
+ }
3873
+
3874
+ /** @public */
3875
+ export declare interface StreamKey {
3876
+ streamId: number;
3877
+ programNumber: number;
3878
+ sourceName: string;
3879
+ renditionName: string;
3880
+ }
3881
+
3882
+ /**
3883
+ * @public
3884
+ * see: {@link NorskTransform.streamKeyOverride}
3885
+ */
3886
+ export declare class StreamKeyOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3887
+ }
3888
+
3889
+ /**
3890
+ * @public
3891
+ * Settings for a Stream Key Override
3892
+ * see: {@link NorskTransform.streamKeyOverride}
3893
+ * */
3894
+ export declare interface StreamKeyOverrideSettings extends ProcessorNodeSettings<StreamKeyOverrideNode> {
3895
+ /** The stream key that all frames passing through this node will be assigned */
3896
+ streamKey: StreamKey;
3897
+ }
3898
+
3899
+ /**
3900
+ * Compares two stream keys by value, returning a truthy value if the stream keys refer to the same stream
3901
+ */
3902
+ export declare function streamKeysAreEqual(l: StreamKey, r: StreamKey): unknown;
3903
+
3904
+ /** @public */
3905
+ export declare interface StreamKeySettings {
3906
+ /** Source name. Default: the rtmp app */
3907
+ sourceName?: string;
3908
+ /** Program number. Default: 1 */
3909
+ programNumber?: number;
3910
+ /** Stream Id. Default: 1 for audio, 2 for video */
3911
+ streamId?: number;
3912
+ /** Rendition name. Default: the stream publishing name */
3913
+ renditionName?: string;
3914
+ }
3915
+
3916
+ /** @public */
3917
+ export declare interface StreamMetadata {
3918
+ streamKey: StreamKey;
3919
+ message: StreamMetadataMessage;
3920
+ }
3921
+
3922
+ /** @public */
3923
+ export declare type StreamMetadataMessage = {
3924
+ case: "audio";
3925
+ value: AudioStreamMetadata;
3926
+ } | {
3927
+ case: "video";
3928
+ value: VideoStreamMetadata;
3929
+ } | {
3930
+ case: "subtitle";
3931
+ value: SubtitleStreamMetadata;
3932
+ } | {
3933
+ case: "playlist";
3934
+ value: PlaylistStreamMetadata;
3935
+ } | {
3936
+ case: "ancillary";
3937
+ value: AncillaryStreamMetadata;
3938
+ } | {
3939
+ case: undefined;
3940
+ value?: undefined;
3941
+ };
3942
+
3943
+ /**
3944
+ * @public
3945
+ * see: {@link NorskTransform.streamMetadataOverride}
3946
+ */
3947
+ export declare class StreamMetadataOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3948
+ /**
3949
+ * @public
3950
+ * Updates the config used by this metadata override node for all subsequent frames
3951
+ * @param settings - The new settings
3952
+ */
3953
+ updateConfig(settings: StreamMetadataOverrideSettingsUpdate): void;
3954
+ }
3955
+
3956
+ /**
3957
+ * @public
3958
+ * Settings for a Stream Metadata Override Node
3959
+ * see: {@link NorskTransform.streamMetadataOverride}
3960
+ * */
3961
+ export declare interface StreamMetadataOverrideSettings extends ProcessorNodeSettings<StreamMetadataOverrideNode>, StreamMetadataOverrideSettingsUpdate {
3962
+ }
3963
+
3964
+ /** @public */
3965
+ export declare interface StreamMetadataOverrideSettingsUpdate {
3966
+ video?: {
3967
+ /** Override the bitrate metadata of a compressed video stream, or `0` to clear */
3968
+ bitrate?: number;
3969
+ };
3970
+ audio?: {
3971
+ /** Override the bitrate metadata of a compressed audio stream, or `0` to clear */
3972
+ bitrate?: number;
3973
+ /** Override the language metadata of an audio stream, or `""` to clear. RFC 5646 language tag. */
3974
+ language?: string;
3975
+ };
3976
+ subtitles?: {
3977
+ /** Override the language metadata of a subtitles stream, or `""` to clear. RFC 5646 language tag. */
3978
+ language?: string;
3979
+ };
3980
+ }
3981
+
3982
+ /** @public */
3983
+ export declare interface StreamStatistics {
3984
+ /** The size of the sample window in seconds */
3985
+ sampleSizeSeconds: number;
3986
+ /** The number of bits over the sample window */
3987
+ bitsForSample: number;
3988
+ /** The bitrate, in bits per second */
3989
+ bitrate: number;
3990
+ /** The number of frames over the sample window */
3991
+ framesForSample: number;
3992
+ /** The frame rate, in frames per second */
3993
+ framerate: number;
3994
+ /** The number of key frames over the sample window */
3995
+ keyFramesForSample: number;
3996
+ }
3997
+
3998
+ /** @public */
3999
+ export declare interface StreamStatisticsMixin {
4000
+ /**
4001
+ * Sampling rates for stream stats, in seconds
4002
+ */
4003
+ statsSampling?: PlainMessage<StreamStatisticsSampling>;
4004
+ /**
4005
+ * Called at periodic intervals when stream statistics are ready.
4006
+ * @eventProperty
4007
+ */
4008
+ onStreamStatistics?: (
4009
+ /** The stats */
4010
+ stats: MultiStreamStatistics) => void;
4011
+ onGopStructure?: (structure: GopStructure) => void;
4012
+ }
4013
+
4014
+ /**
4015
+ * @public
4016
+ * see {@link NorskControl.streamStatistics}.
4017
+ */
4018
+ export declare class StreamStatisticsNode extends AutoProcessorMediaNode<"audio" | "video"> {
4019
+ }
4020
+
4021
+ /**
4022
+ * @public
4023
+ * Settings for a Stream Statistics Node
4024
+ * see: {@link NorskControl.streamStatistics}
4025
+ */
4026
+ export declare interface StreamStatisticsSettings extends ProcessorNodeSettings<StreamStatisticsNode>, StreamStatisticsMixin {
4027
+ /**
4028
+ * Called periodically with the stream stats
4029
+ * @param stats - The statistics for the stream
4030
+ * @eventProperty
4031
+ */
4032
+ onStreamStatistics: (stats: MultiStreamStatistics) => void;
4033
+ /**
4034
+ * Sampling rates for stream stats, in seconds
4035
+ */
4036
+ statsSampling?: PlainMessage<StreamStatisticsSampling>;
4037
+ }
4038
+
4039
+ /**
4040
+ * @public
4041
+ * see: {@link NorskControl.streamSwitchHard}
4042
+ */
4043
+ export declare class StreamSwitchHardNode<Pins extends string> extends ProcessorMediaNode<Pins> {
4044
+ switchSource(newSource: Pins): void;
4045
+ }
4046
+
4047
+ /**
4048
+ * @public
4049
+ * Settings for the Hard Stream Switch
4050
+ * see: {@link NorskControl.streamSwitchHard}
4051
+ * */
4052
+ export declare interface StreamSwitchHardSettings<Pins extends string> extends ProcessorNodeSettings<StreamSwitchHardNode<Pins>> {
4053
+ /** The currently active source to display on the output */
4054
+ activeSource: Pins;
4055
+ /** the source name to give the output of this switch operation */
4056
+ outputSource: string;
4057
+ }
4058
+
4059
+ /** @public */
4060
+ export declare type StreamSwitchSmoothHardwareAcceleration =
4061
+ /**
4062
+ * Use the quadra overlay functionality to perform the compose
4063
+ */
4064
+ "quadra"
4065
+ /**
4066
+ * Use an nvidia CUDA kernel to perform the compose
4067
+ */
4068
+ | "nvidia";
4069
+
4070
+ /**
4071
+ * @public
4072
+ * see: {@link NorskControl.streamSwitchSmooth}
4073
+ */
4074
+ export declare class StreamSwitchSmoothNode<Pins extends string> extends ProcessorMediaNode<Pins> {
4075
+ /**
4076
+ * @public
4077
+ * Switches the source used for the current output of this node
4078
+ */
4079
+ switchSource(newSource: Pins): void;
4080
+ }
4081
+
4082
+ /**
4083
+ * @public
4084
+ * Settings for the Smooth Source Switch
4085
+ * see {@link NorskControl.streamSwitchSmooth}
4086
+ * */
4087
+ export declare interface StreamSwitchSmoothSettings<Pins extends string> extends ProcessorNodeSettings<StreamSwitchSmoothNode<Pins>> {
4088
+ /** The presently active source being used to generate output for this node */
4089
+ activeSource?: Pins;
4090
+ /** The source name given to the output of this node */
4091
+ outputSource: string;
4092
+ /** How many milliseconds to use for the fade operation between two sources */
4093
+ transitionDurationMs?: number;
4094
+ /** The constant resolution that all output video will be scaled to */
4095
+ outputResolution: Resolution;
4096
+ /** The constant framerate that all output video will be sampled to */
4097
+ frameRate: FrameRate;
4098
+ /** The constant samplerate that all output audio will be resampled to */
4099
+ sampleRate: SampleRate;
4100
+ /** The constant channel layout that all output audio will be resampled to */
4101
+ channelLayout: ChannelLayout;
4102
+ /** Alignment behaviour of the component
4103
+ * whether to rebase all incoming streams to a common timeline
4104
+ * Note: This will modify the timestamps, meaning that merging with streams not involved in this
4105
+ * operation may result in sync issues. To avoid this, you can use {@link NorskProcessor.streamAlign} instead of relying
4106
+ * on this component for this behaviour
4107
+ * Note: This behaviour may be removed in a future release and replaced with something similar
4108
+ * */
4109
+ alignment?: "aligned" | "not_aligned";
4110
+ /** Callback which will be called if a switch request cannot be fulfilled */
4111
+ onSwitchError?: (message: string, inputPin?: Pins) => void;
4112
+ /** Callback which will be called when a transition has successfully completed for a requested switch, i.e. the new source
4113
+ * is now showing.
4114
+ *
4115
+ * Note that if additional transitions are triggered when a transition is already in progress, a notification may only be
4116
+ * given for the last transition to finish.
4117
+ **/
4118
+ onTransitionComplete?: (inputPin: Pins) => void;
4119
+ /**
4120
+ * Callback to be called when inbound context changes on some input; presence of an
4121
+ * input means that media has arrived and is ready to switch
4122
+ * immediately
4123
+ * @param allStreams - The collection of input contexts received over all input pins
4124
+ */
4125
+ onInboundContextChange?: (allStreams: Map<Pins, StreamMetadata[]>) => Promise<void>;
4126
+ /**
4127
+ * Optionally attempt to perform the compose operation on hardware
4128
+ */
4129
+ hardwareAcceleration?: ComposeHardwareAcceleration;
4130
+ }
4131
+
4132
+ /**
4133
+ * @public
4134
+ * see: {@link NorskTransform.streamSync}
4135
+ */
4136
+ export declare class StreamSyncNode extends AutoProcessorMediaNode<"audio" | "video"> {
4137
+ }
4138
+
4139
+ /**
4140
+ * @public
4141
+ * Settings for a StreamSync node
4142
+ * see {@link NorskTransform.streamSync}
4143
+ * */
4144
+ export declare interface StreamSyncSettings extends ProcessorNodeSettings<StreamSyncNode> {
4145
+ }
4146
+
4147
+ /**
4148
+ * @public
4149
+ * see: {@link NorskTransform.streamTimestampNudge}
4150
+ */
4151
+ export declare class StreamTimestampNudgeNode extends AutoProcessorMediaNode<"audio" | "video"> {
4152
+ /**
4153
+ * @public
4154
+ * Applies a gradual nudge to the stream timestamps by the specified number of milliseconds
4155
+ * */
4156
+ nudge(nudge: number): void;
4157
+ }
4158
+
4159
+ /**
4160
+ * @public
4161
+ * Settings for a Stream Timestamp Nudge
4162
+ * see: {@link NorskTransform.streamTimestampNudge}
4163
+ * */
4164
+ export declare interface StreamTimestampNudgeSettings extends ProcessorNodeSettings<StreamTimestampNudgeNode> {
4165
+ nudge?: number;
4166
+ }
4167
+
4168
+ /**
4169
+ * @public
4170
+ * see: {@link NorskDebug.streamTimestampReport}
4171
+ */
4172
+ export declare class StreamTimestampReportNode extends AutoSinkMediaNode<string> {
4173
+ }
4174
+
4175
+ /**
4176
+ * @public
4177
+ * Settings for a Stream Timestamp Report
4178
+ * see: {@link NorskDebug.streamTimestampReport}
4179
+ */
4180
+ export declare interface StreamTimestampReportSettings extends SinkNodeSettings<StreamTimestampReportNode> {
4181
+ onTimestamp?: (streamKey: StreamKey, timestamp: IntervalTimestamp) => Promise<void>;
4182
+ }
4183
+
4184
+ export declare interface SubscribeDestination {
4185
+ id?: string;
4186
+ sourceContextChange(responseCallback: (error?: SubscriptionError) => void): Promise<boolean>;
4187
+ }
4188
+
4189
+ /**
4190
+ * @public
4191
+ * Errors found while setting up subscriptions, separated out by reason:
4192
+ *
4193
+ * - `internal`: An opaque internal error
4194
+ *
4195
+ * - `unknownSubscriber`: The media node requesting the subscription does not exist
4196
+ *
4197
+ * - `multipleStreams`: Multiple stream keys found for the context type
4198
+ *
4199
+ * - `sourceSubscriptionError`: Per-source errors
4200
+ */
4201
+ export declare type SubscriptionError = {
4202
+ info: string;
4203
+ reason: "internal";
4204
+ } | {
4205
+ mediaNodeId: MediaNodeId;
4206
+ reason: "unknownSubscriber";
4207
+ } | {
4208
+ contextType: ContextType;
4209
+ streamKeys: StreamKey[];
4210
+ reason: "multipleStreams";
4211
+ } | {
4212
+ sourceErrors: SourceSubscriptionError[];
4213
+ reason: "sourceSubscriptionError";
4214
+ };
4215
+
4216
+ /**
4217
+ * @public
4218
+ * Determines what to do with an incoming context
4219
+ *
4220
+ * - true/accept: Allow the incoming context through, and any subsequent/queued data that belongs to it
4221
+ * - false/deny: Deny the incoming context, if no context has been accepted, then queue data until one is
4222
+ * - accept_and_terminate: Allow the incoming context, then deny further data, flush and shut down the node
4223
+ * this is useful for cleanly terminating outputs when the context is empty
4224
+ * - deny_and_queue: Deny the incoming context, and revert to the original queueing behaviour as if no context has been accepted
4225
+ * this is useful when switching from one full context to another, avoiding any "in-between"
4226
+ * */
4227
+ export declare type SubscriptionValidationResponse = true | false | "accept" | "deny" | "accept_and_terminate" | "deny_and_queue";
4228
+
4229
+ /** @public */
4230
+ export declare function subtitlesToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
4231
+
4232
+ /**
4233
+ * @public
4234
+ * Returns the stream keys for subtitle streams in a media context
4235
+ * @param streams - The media context from which to return the stream keys
4236
+ * @returns The subtitle stream keys in the media context
4237
+ */
4238
+ export declare function subtitleStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
4239
+
4240
+ /** @public */
4241
+ export declare interface SubtitleStreamMetadata {
4242
+ }
4243
+
4244
+ /**
4245
+ * @public
4246
+ * Filters a context to only the subtitle streams within it
4247
+ * @param streams - The media context from which to return the streams
4248
+ * @returns The subtitle streams in the media context
4249
+ */
4250
+ export declare function subtitleStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
4251
+
4252
+ declare class TsCommonInputNode<SourceMessage, T extends SourceMediaNode> extends SourceMediaNode {
4253
+ constructor(tsType: TsInputType, client: MediaClient, unregisterNode: (node: MediaNodeState) => void, settings: SourceNodeSettings<T> & StreamStatisticsMixin, nudgeFn: (nudge: TimestampProgramNudge) => SourceMessage, onEof: (() => void) | undefined, grpcStartFn: () => grpc.ClientDuplexStream<SourceMessage, TsInputEvent>);
4254
+ /**
4255
+ * @public
4256
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
4257
+ * */
4258
+ nudge(programNumber: number, nudge: number): void;
4259
+ }
4260
+
4261
+ declare enum TsInputType {
4262
+ TsFile = 0,
4263
+ Srt = 1,
4264
+ Udp = 2,
4265
+ M3u8 = 3
4266
+ }
4267
+
4268
+ /** @public */
4269
+ export declare type TsPlaylist = MediaPlaylistPart[];
4270
+
4271
+ /** @public */
4272
+ export declare type TsPlaylistAdditions = [MediaPlaylistPart[], Date];
4273
+
4274
+ /**
4275
+ * @public
4276
+ * see: {@link NorskInput.udpTs}
4277
+ */
4278
+ export declare class UdpTsInputNode extends TsCommonInputNode<UdpTsInputMessage, UdpTsInputNode> {
4279
+ }
4280
+
4281
+ /**
4282
+ * @public
4283
+ * Settings for a UDP Transport Stream input
4284
+ * see: {@link NorskInput.udpTs}
4285
+ * */
4286
+ export declare interface UdpTsInputSettings extends RemoteInputSettings<UdpTsInputNode> {
4287
+ interface?: string;
4288
+ timeout?: number;
4289
+ /** Whether to expect the input TS to be encapsulated in RTP via RFC 2250 (default: false) */
4290
+ rtpDecapsulate?: boolean;
4291
+ }
4292
+
4293
+ /**
4294
+ * @public
4295
+ * see: {@link NorskOutput.udpTs}
4296
+ */
4297
+ export declare class UdpTsOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
4298
+ }
4299
+
4300
+ /**
4301
+ * @public
4302
+ * The settings for an output Transport Stream over UDP
4303
+ * see: {@link NorskOutput.udpTs}
4304
+ */
4305
+ export declare interface UdpTsOutputSettings extends SinkNodeSettings<UdpTsOutputNode>, StreamStatisticsMixin {
4306
+ /**
4307
+ * The IP address to publish to
4308
+ * This can be multicast, unicast or broadcast
4309
+ */
4310
+ destinationIp: string;
4311
+ /**
4312
+ * The interface to bind to for publishing
4313
+ * This can be 'any', 'loopback' or any named interface on the machine
4314
+ * Note: If running inside docker this may be different to expected
4315
+ */
4316
+ interface: string;
4317
+ /** The port to send to */
4318
+ port: number;
4319
+ /** Jitter buffer delay in milliseconds */
4320
+ bufferDelayMs?: number;
4321
+ /** A/V delay in milliseconds - to allow inclusion of subtitles, metadata and other ancillary data. May be set to 0 if these are not present to reduce latency */
4322
+ avDelayMs?: number;
4323
+ /** Whether to encapsulate in RTP via RFC 2250 (default: false) */
4324
+ rtpEncapsulate?: boolean;
4325
+ }
4326
+
4327
+ /**
4328
+ * @public
4329
+ * An update request for credentials on a CMAF output
4330
+ */
4331
+ export declare interface UpdateCredentials {
4332
+ /**
4333
+ * The id of the destination that is to be updated (see {@link HlsPushDestinationSettings.id})
4334
+ */
4335
+ destinationId: string;
4336
+ /**
4337
+ * the new credentials to be used by the destination
4338
+ */
4339
+ awsCredentials: AwsCredentials;
4340
+ }
4341
+
4342
+ /** @public */
4343
+ export declare type VancPayloadFormat = "other" | "afd_bar" | "pan_scan" | "scte104" | "dvb_scte_vbi" | "op47_sdp" | "op47_vanc_multipacket" | "ancillary_time_code" | "eia_708" | "eia_608";
4344
+
4345
+ /** @public */
4346
+ export declare interface VancType2AncillaryId {
4347
+ did: number;
4348
+ sdid: number;
4349
+ }
4350
+
4351
+ export { Version }
4352
+
4353
+ /**
4354
+ * @public
4355
+ * see: {@link NorskTransform.videoCompose}
4356
+ */
4357
+ export declare class VideoComposeNode<Pins extends string> extends ProcessorMediaNode<Pins> {
4358
+ /**
4359
+ * @public
4360
+ * Updates the config used for a video compose operation
4361
+ * If transitions are specified, animations will be provided, otherwise
4362
+ * the change will be immediate
4363
+ *
4364
+ * Note: This is not a 'cheap' operation and care should be taken not to
4365
+ * do this too often (more than once a second for example!)
4366
+ */
4367
+ updateConfig(settings: VideoComposeSettingsUpdate<Pins>): void;
4368
+ }
4369
+
4370
+ /** @public */
4371
+ export declare interface VideoComposeSettings<Pins extends string> extends ProcessorNodeSettings<VideoComposeNode<Pins>> {
4372
+ /**
4373
+ * Required. Stream key of the reference stream. This is the video stream
4374
+ * which defines the output frame timing, which will typically be part of the
4375
+ * composition, e.g. the main picture in the case of a simple
4376
+ * overlay/picture-in-picture, or the top left quadrant of a 4-way split
4377
+ * screen.
4378
+ */
4379
+ referenceStream: Pins;
4380
+ /** The parts (images/overlays) to include in the composition */
4381
+ parts: readonly ComposePart<Pins>[];
4382
+ /**
4383
+ * Optionally supply a fallback reference resolution. This allows description of the
4384
+ * composition in a desired coordinate system, e.g. a resolution of 100x100
4385
+ * can be specified to allow the source and destination areas to be described
4386
+ * in percentage terms, or a notional resolution can be used that is
4387
+ * independant of the source resolutions that may be provided.
4388
+ *
4389
+ * if set here, this reference resolution will be applied to
4390
+ * any parts that do not have their own reference resolution specified
4391
+ *
4392
+ * If not provided the source and destination rectangles are in terms of the
4393
+ * source and output resolutions respectively.
4394
+ */
4395
+ referenceResolution?: Resolution;
4396
+ /** The resolution of the output video */
4397
+ outputResolution: Resolution;
4398
+ /**
4399
+ * Output pixel format to use. If not specified, this will be chosen
4400
+ * automatically based on the sources present in the initial composition
4401
+ */
4402
+ outputPixelFormat?: PixelFormat;
4403
+ /**
4404
+ * Behaviour in the case of a missing stream used in an active composition
4405
+ * part. Note that this does not apply to the reference stream, but to every
4406
+ * part which does not use the reference stream, whether at startup or on
4407
+ * context change.
4408
+ *
4409
+ * Missing means not present in the context or never having sent a frame.
4410
+ */
4411
+ missingStreamBehaviour?: ComposeMissingStreamBehaviour;
4412
+ /**
4413
+ * Optionally attempt to perform the compose operation on hardware
4414
+ */
4415
+ hardwareAcceleration?: ComposeHardwareAcceleration;
4416
+ /**
4417
+ * Called when the transitions specified in the last config update have
4418
+ * completed (in the case of multiple parts with specified transitions of
4419
+ * different duration, this means that the last remaining transitions have
4420
+ * completed
4421
+ */
4422
+ onTransitionComplete?: () => void;
4423
+ }
4424
+
4425
+ /**
4426
+ * @public
4427
+ * An update operation for a VideoCompose operation
4428
+ * see: {@link VideoComposeNode.updateConfig}
4429
+ * */
4430
+ export declare interface VideoComposeSettingsUpdate<Pins extends string> {
4431
+ /** Update the parts (images/overlays) to include in the composition */
4432
+ parts: readonly ComposePart<Pins>[];
4433
+ }
4434
+
4435
+ /**
4436
+ * @public
4437
+ * see: {@link NorskTransform.videoEncode}
4438
+ */
4439
+ export declare class VideoEncodeNode extends AutoProcessorMediaNode<"video"> {
4440
+ }
4441
+
4442
+ /**
4443
+ * @public
4444
+ * A single rung in a video encode ladder
4445
+ * see: {@link NorskTransform.videoEncode}
4446
+ * */
4447
+ export declare interface VideoEncodeRung {
4448
+ /** The name of this rung, this should be unique across the ladder
4449
+ * and will end up in the renditionName of the outgoing StreamKey
4450
+ */
4451
+ name: string;
4452
+ /** The width of the outgoing video resolution */
4453
+ width: number;
4454
+ /** The height of the outgoing video resolution */
4455
+ height: number;
4456
+ /**
4457
+ * Optionally change the frameRate for this rendition
4458
+ * This can be useful if the input is 50FPS for example and some
4459
+ * lower rungs need to be 25fps
4460
+ *
4461
+ * Note: If you wish to apply the same frame rate across all rungs, it is
4462
+ * more efficient to use a single {@link VideoTransformNode} before the ladder
4463
+ * created with {@link NorskTransform.videoTransform} and leave this value undefined
4464
+ * */
4465
+ frameRate?: FrameRate;
4466
+ /**
4467
+ * Specifies the input video's Sample Aspect Ratio (SAR) to be used by the
4468
+ * encoder in width:height
4469
+ */
4470
+ sar?: SampleAspectRatio;
4471
+ /**
4472
+ * The codec (and detailed configuration) to use for the encoding operation.
4473
+ *
4474
+ * Note: Nvidia, Logan/Quadra, and Xilinx require Nvidia, Logan/Quadra and Xilinx hardware to be set up and
4475
+ * made available to Norsk
4476
+ *
4477
+ * A ladder can use several different codecs across its various rungs and the
4478
+ * VideoEncode node will attempt to build a pipeline that uses the hardware efficiently
4479
+ */
4480
+ codec: X264Codec | X265Codec | NvidiaH264 | NvidiaHevc | LoganH264 | LoganHevc | QuadraH264 | QuadraHevc | XilinxH264 | XilinxHevc;
4481
+ }
4482
+
4483
+ /**
4484
+ * @public
4485
+ * Settings for a VideoEncode operation
4486
+ * see: {@link NorskTransform.videoEncode}
4487
+ * */
4488
+ export declare interface VideoEncodeSettings extends ProcessorNodeSettings<VideoEncodeNode> {
4489
+ rungs: readonly VideoEncodeRung[];
4490
+ }
4491
+
4492
+ /**
4493
+ * @public
4494
+ * Returns the stream keys for video streams in a media context
4495
+ * @param streams - The media context from which to return the stream keys
4496
+ * @returns The video stream keys in the media context
4497
+ */
4498
+ export declare function videoStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
4499
+
4500
+ /** @public */
4501
+ export declare interface VideoStreamMetadata {
4502
+ codec: string;
4503
+ width: number;
4504
+ height: number;
4505
+ frameRate?: FrameRate_2;
4506
+ }
4507
+
4508
+ /**
4509
+ * @public
4510
+ * Filters a context to only the video streams within it
4511
+ * @param streams - The media context from which to return the streams
4512
+ * @returns The video streams in the media context
4513
+ */
4514
+ export declare function videoStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
4515
+
4516
+ /**
4517
+ * @public
4518
+ * see: {@link NorskInput.videoTestcard}
4519
+ */
4520
+ export declare class VideoTestcardGeneratorNode extends SourceMediaNode {
4521
+ }
4522
+
4523
+ /**
4524
+ * @public
4525
+ * Settings for a Video Testcard Generator
4526
+ * see: {@link NorskInput.videoTestcard}
4527
+ * */
4528
+ export declare interface VideoTestcardGeneratorSettings extends SourceNodeSettings<VideoTestcardGeneratorNode> {
4529
+ /** The source name to set in the stream key of the outgoing stream */
4530
+ sourceName: string;
4531
+ /** The number of frames to send before shutting down */
4532
+ numberOfFrames?: number;
4533
+ /** Resolution of the test card stream **/
4534
+ resolution: {
4535
+ width: number;
4536
+ height: number;
4537
+ };
4538
+ /** Framerate of the produced video stream **/
4539
+ frameRate: {
4540
+ frames: number;
4541
+ seconds: number;
4542
+ };
4543
+ pattern: Pattern;
4544
+ }
4545
+
4546
+ /** @public */
4547
+ export declare function videoToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
4548
+
4549
+ /**
4550
+ * @public
4551
+ * see: {@link NorskTransform.videoTransform}
4552
+ */
4553
+ export declare class VideoTransformNode extends AutoProcessorMediaNode<"video"> {
4554
+ }
4555
+
4556
+ /**
4557
+ * @public
4558
+ * Settings for a Video Transform node
4559
+ * see: {@link NorskTransform.videoTransform}
4560
+ * */
4561
+ export declare interface VideoTransformSettings extends ProcessorNodeSettings<VideoTransformNode> {
4562
+ /** An optional resolution to rescale this single stream to */
4563
+ resolution?: Resolution;
4564
+ /** An optional framerate to resample this single stream to */
4565
+ frameRate?: FrameRate;
4566
+ /** An optional SAR to set on the outgoing stream
4567
+ * Note: You can set this and only this if the SAR on your incoming stream is incorrect
4568
+ * for example (An often-seen problem with sources)
4569
+ * */
4570
+ sar?: SampleAspectRatio;
4571
+ }
4572
+
4573
+ /**
4574
+ * @public
4575
+ * see: {@link NorskDuplex.webRtcBrowser}
4576
+ */
4577
+ export declare class WebRTCBrowserNode extends AutoProcessorMediaNode<"audio" | "video"> {
4578
+ /** @public The URL of the local player */
4579
+ playerUrl: string;
4580
+ }
4581
+
4582
+ /**
4583
+ * @public
4584
+ * Settings for a WebRTC browser session
4585
+ * see: {@link NorskDuplex.webRtcBrowser}
4586
+ * */
4587
+ export declare interface WebRTCBrowserSettings extends ProcessorNodeSettings<WebRTCBrowserNode>, StreamStatisticsMixin {
4588
+ iceServers?: IceServerSettings[];
4589
+ reportedIceServers?: IceServerSettings[];
4590
+ hostIps?: string[];
4591
+ serverReflexiveIps?: string[];
4592
+ /** Jitter buffer delay in milliseconds */
4593
+ bufferDelayMs?: number;
4594
+ }
4595
+
4596
+ /**
4597
+ * @public
4598
+ * see: {@link NorskOutput.whep}
4599
+ */
4600
+ export declare class WhepOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
4601
+ /** @public The URL of the local player */
4602
+ playerUrl: string;
4603
+ /** @public The URL of the WHEP endpoint */
4604
+ endpointUrl: string;
4605
+ }
4606
+
4607
+ /**
4608
+ * @public
4609
+ * The settings for a WebRTC WHEP Output
4610
+ * see {@link NorskOutput.whep}
4611
+ */
4612
+ export declare interface WhepOutputSettings extends SinkNodeSettings<WhepOutputNode>, StreamStatisticsMixin {
4613
+ iceServers?: IceServerSettings[];
4614
+ reportedIceServers?: IceServerSettings[];
4615
+ hostIps?: string[];
4616
+ serverReflexiveIps?: string[];
4617
+ /** Jitter buffer delay in milliseconds */
4618
+ bufferDelayMs?: number;
4619
+ }
4620
+
4621
+ /**
4622
+ * @public
4623
+ * see: {@link NorskInput.whip}
4624
+ */
4625
+ export declare class WhipInputNode extends SourceMediaNode {
4626
+ /** @public The URL of the local test client */
4627
+ clientUrl: string;
4628
+ /** @public The URL of the WHIP endpoint */
4629
+ endpointUrl: string;
4630
+ /**
4631
+ * @public
4632
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
4633
+ * */
4634
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
4635
+ }
4636
+
4637
+ /** @public */
4638
+ export declare interface WhipInputSettings extends InputSettings<WhipInputNode>, StreamStatisticsMixin {
4639
+ /** List of ice servers to use as part of session negotiation */
4640
+ iceServers?: IceServerSettings[];
4641
+ /** Internal addresses for the ice servers (defaults to iceServers) */
4642
+ reportedIceServers?: IceServerSettings[];
4643
+ /**
4644
+ * List of IPs to advertise as your host address - useful e.g. when on a cloud server
4645
+ * so that the public rather than private IP is used.
4646
+ */
4647
+ hostIps?: string[];
4648
+ /**
4649
+ * Similar to hostIps, but a list of server reflexive candidates so that ICE negotiations can be
4650
+ * sped up
4651
+ */
4652
+ serverReflexiveIps?: string[];
4653
+ }
4654
+
4655
+ /**
4656
+ * @public
4657
+ * see: {@link NorskOutput.whip}
4658
+ */
4659
+ export declare class WhipOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
4660
+ }
4661
+
4662
+ /**
4663
+ * @public
4664
+ * The settings for a WebRTC Whip Output
4665
+ * see {@link NorskOutput.whip}
4666
+ */
4667
+ export declare interface WhipOutputSettings extends SinkNodeSettings<WhipOutputNode>, StreamStatisticsMixin {
4668
+ /**
4669
+ * The URI to make the initial publish request to (as per the WHIP protocol)
4670
+ */
4671
+ uri: string;
4672
+ /** The auth header to supply (for example: 'Bearer: mybearertoken') */
4673
+ authHeader: string;
4674
+ /** Jitter buffer delay in milliseconds */
4675
+ bufferDelayMs?: number;
4676
+ }
4677
+
4678
+ declare type WhisperSamplingStrategy = {
4679
+ strategy: 'greedy';
4680
+ bestOf?: number;
4681
+ } | {
4682
+ strategy: "beam_search";
4683
+ beam_size?: number;
4684
+ };
4685
+
4686
+ /** @public */
4687
+ export declare interface X264Codec {
4688
+ type: "x264";
4689
+ threads?: number;
4690
+ /**
4691
+ * Rate control options - one of abr, cqp or crf
4692
+ */
4693
+ bitrateMode?: BitrateMode;
4694
+ /**
4695
+ * Set the H264 profile of the output
4696
+ */
4697
+ profile?: X264Profile;
4698
+ /**
4699
+ * Sets the level flag in the output
4700
+ */
4701
+ level?: X264Level;
4702
+ /**
4703
+ * Sets the minimum length between IDR frames
4704
+ */
4705
+ keyFrameIntervalMin?: number;
4706
+ /**
4707
+ * Sets the maximum length between IDR frames
4708
+ */
4709
+ keyFrameIntervalMax?: number;
4710
+ /**
4711
+ * Sets the maximum number of concurrent B-frames
4712
+ */
4713
+ bframes?: number;
4714
+ /**
4715
+ * Tune options to further optimize them for your input content. If you
4716
+ * specify a tuning, the changes will be applied after 'preset' but before all
4717
+ * other parameters.
4718
+ */
4719
+ tune?: X264Tune;
4720
+ /**
4721
+ * Change options to trade off compression efficiency against encoding speed.
4722
+ * If you specify a preset, the changes it makes will be applied before all
4723
+ * other parameters are applied.
4724
+ */
4725
+ preset?: X264Preset;
4726
+ /**
4727
+ * Maximum number of reference frames, i.e., the number of previous frames
4728
+ * each P-frame can use as references
4729
+ */
4730
+ frameReference?: number;
4731
+ /**
4732
+ * Enables CABAC (Context Adaptive Binary Arithmetic Coder) stream compression
4733
+ * instead of the less efficient CAVLC (Context Adaptive Variable Length
4734
+ * Coder) system. Significantly improves both the compression efficiency
4735
+ * (10-20% typically) and the decoding requirements, at the expense of encode
4736
+ * CPU requirements
4737
+ */
4738
+ cabac?: boolean;
4739
+ /**
4740
+ * Sets the maximum rate the VBV buffer should be assumed to refill at
4741
+ */
4742
+ vbvMaxRate?: number;
4743
+ /**
4744
+ * Sets the size of the VBV buffer in kilobits
4745
+ */
4746
+ vbvBufferSize?: number;
4747
+ /**
4748
+ * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
4749
+ * disables adaptive I-frame decisioning
4750
+ */
4751
+ sceneCut?: number;
4752
+ /**
4753
+ * Use access unit delimiters in the output
4754
+ */
4755
+ aud?: boolean;
4756
+ /**
4757
+ * Disables the loop filter. Not Recommended.
4758
+ */
4759
+ noDeblock?: boolean;
4760
+ /**
4761
+ * Signal HRD information
4762
+ */
4763
+ nalHrd?: X264NalHrd;
4764
+ }
4765
+
4766
+ /**
4767
+ * @public
4768
+ * See the X264 Docs for a description of this value
4769
+ * */
4770
+ export declare type X264Level = 1 | 1.1 | 1.2 | 1.3 | 2 | 2.1 | 2.2 | 3 | 3.1 | 3.2 | 4 | 4.1 | 4.2 | 5 | 5.1;
4771
+
4772
+ /**
4773
+ * @public
4774
+ * Three possible values:
4775
+ *
4776
+ * - "none": specify no HRD information
4777
+ *
4778
+ * - "vbr": specify HRD information
4779
+ *
4780
+ * - "cbr": specify HRD information and pack the bitstream to the bitrate specified
4781
+ *
4782
+ * See the X264 Docs for a further description of this value
4783
+ */
4784
+ export declare type X264NalHrd = "none" | "vbr" | "cbr";
4785
+
4786
+ /**
4787
+ * @public
4788
+ * See the X264 Docs for a description of this value
4789
+ * */
4790
+ export declare type X264Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";
4791
+
4792
+ /** @public */
4793
+ export declare type X264Profile = "baseline" | "main" | "high" | "high10" | "high422" | "high444";
4794
+
4795
+ /**
4796
+ * @public
4797
+ * See the X264 Docs for a description of this value
4798
+ * */
4799
+ export declare type X264Tune = "film" | "animation" | "grain" | "stillimage" | "psnr" | "ssim" | "fastdecode" | "zerolatency";
4800
+
4801
+ /** @public X265 codec */
4802
+ export declare interface X265Codec {
4803
+ type: "x265";
4804
+ threads?: number;
4805
+ bitrateMode?: BitrateMode;
4806
+ profile?: X265Profile;
4807
+ /**
4808
+ * Sets the level flag in the output
4809
+ */
4810
+ level?: X265Level;
4811
+ tier?: X265Tier;
4812
+ /**
4813
+ * Sets the minimum length between IDR frames
4814
+ */
4815
+ keyFrameIntervalMin?: number;
4816
+ /**
4817
+ * Sets the maximum length between IDR frames
4818
+ */
4819
+ keyFrameIntervalMax?: number;
4820
+ /**
4821
+ * Sets the maximum number of concurrent B-frames
4822
+ */
4823
+ bframes?: number;
4824
+ /**
4825
+ * Tune options to further optimize them for your input content. If you
4826
+ * specify a tuning, the changes will be applied after 'preset' but before all
4827
+ * other parameters.
4828
+ */
4829
+ tune?: X265Tune;
4830
+ /**
4831
+ * Change options to trade off compression efficiency against encoding speed.
4832
+ * If you specify a preset, the changes it makes will be applied before all
4833
+ * other parameters are applied.
4834
+ */
4835
+ preset?: X265Preset;
4836
+ /**
4837
+ * Maximum number of reference frames, i.e., the number of previous frames
4838
+ * each P-frame can use as references
4839
+ */
4840
+ frameReference?: number;
4841
+ /**
4842
+ * Sets the maximum rate the VBV buffer should be assumed to refill at
4843
+ */
4844
+ vbvMaxRate?: number;
4845
+ /**
4846
+ * Sets the size of the VBV buffer in kilobits
4847
+ */
4848
+ vbvBufferSize?: number;
4849
+ /**
4850
+ * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
4851
+ * disables adaptive I-frame decisioning
4852
+ */
4853
+ sceneCut?: number;
4854
+ /**
4855
+ * Use access unit delimiters in the output
4856
+ */
4857
+ aud?: boolean;
4858
+ /**
4859
+ * Disables the loop filter. Not Recommended.
4860
+ */
4861
+ noDeblock?: boolean;
4862
+ }
4863
+
4864
+ /**
4865
+ * @public
4866
+ * See the X265 Docs for a description of this value
4867
+ * */
4868
+ export declare type X265Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4869
+
4870
+ /**
4871
+ * @public
4872
+ * See the X265 Docs for a description of this value
4873
+ * */
4874
+ export declare type X265Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";
4875
+
4876
+ /**
4877
+ * @public
4878
+ * See the X265 Docs for a description of this value
4879
+ * */
4880
+ export declare type X265Profile = "main" | "main10" | "main444_8" | "main422_10" | "main444_10";
4881
+
4882
+ /**
4883
+ * @public
4884
+ * See the X265 Docs for a description of this value
4885
+ * */
4886
+ export declare type X265Tier = "main" | "high";
4887
+
4888
+ /**
4889
+ * @public
4890
+ * See the X265 Docs for a description of this value
4891
+ * */
4892
+ export declare type X265Tune = "psnr" | "ssim" | "grain" | "zerolatency" | "fastdecode" | "animation";
4893
+
4894
+ /**
4895
+ * @public
4896
+ * Settings for a H264 Encode using Xilinx hardware
4897
+ * A detailed description of these params can be found
4898
+ * on the Xilinx Encoder Documentation
4899
+ *
4900
+ * These fields have deliberately been written to maintain the same semantics as the
4901
+ * Xilinx documentation where possible.
4902
+ *
4903
+ * If left undefined, all will default to Xilinx's own defaults
4904
+ * */
4905
+ export declare interface XilinxH264 {
4906
+ type: "xilinx-h264";
4907
+ profile?: XilinxH264Profile;
4908
+ level?: XilinxH264Level;
4909
+ rateControl?: XilinxRateControl;
4910
+ lookaheadDepth?: number;
4911
+ idrPeriod?: number;
4912
+ bframes?: number;
4913
+ gopSize?: number;
4914
+ minQp?: number;
4915
+ maxQp?: number;
4916
+ }
4917
+
4918
+ /** @public */
4919
+ export declare type XilinxH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4920
+
4921
+ /** @public */
4922
+ export declare type XilinxH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
4923
+
4924
+ /**
4925
+ * @public
4926
+ * Settings for a HEVC Encode using Xilinx hardware
4927
+ * A detailed description of these params can be found
4928
+ * on the Xilinx Encoder Documentation
4929
+ *
4930
+ * These fields have deliberately been written to maintain the same semantics as the
4931
+ * Xilinx documentation where possible.
4932
+ *
4933
+ * If left undefined, all will default to Xilinx's own defaults
4934
+ * */
4935
+ export declare interface XilinxHevc {
4936
+ type: "xilinx-hevc";
4937
+ profile?: XilinxHevcProfile;
4938
+ level?: XilinxHevcLevel;
4939
+ tier?: XilinxHevcTier;
4940
+ }
4941
+
4942
+ /** @public */
4943
+ export declare type XilinxHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4944
+
4945
+ /** @public */
4946
+ export declare type XilinxHevcProfile = "main" | "main10";
4947
+
4948
+ /** @public */
4949
+ export declare type XilinxHevcTier = "main" | "high";
4950
+
4951
+ /** @public */
4952
+ export declare interface XilinxRateControl {
4953
+ value: number;
4954
+ mode: "constQp" | "cbr" | "vbr" | "lowLatency";
4955
+ }
4956
+
4957
+ export { }