@norskvideo/norsk-sdk 1.0.346 → 1.0.348

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4369 @@
1
+ /// <reference types="node" />
2
+
3
+ import { AudioCodec } from '@norskvideo/norsk-api/lib/media_pb';
4
+ import { CmafAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
5
+ import { CmafMultiVariantMessage } from '@norskvideo/norsk-api/lib/media_pb';
6
+ import { CmafVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
7
+ import { CmafWebVttMessage } from '@norskvideo/norsk-api/lib/media_pb';
8
+ import { CurrentLoad } from '@norskvideo/norsk-api/lib/shared/common_pb';
9
+ import { ExplicitChannel } from '@norskvideo/norsk-api/lib/media_pb';
10
+ import { FileTsInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
11
+ import { FrameRate as FrameRate_2 } from '@norskvideo/norsk-api/lib/media_pb';
12
+ import { GopStructure } from '@norskvideo/norsk-api/lib/media_pb';
13
+ import * as grpc from '@grpc/grpc-js';
14
+ import { HlsOutputEvent } from '@norskvideo/norsk-api/lib/media_pb';
15
+ import { HlsTsAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
16
+ import { HlsTsCombinedPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
17
+ import { HlsTsMultiVariantMessage } from '@norskvideo/norsk-api/lib/media_pb';
18
+ import { HlsTsVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
19
+ import { MediaClient } from '@norskvideo/norsk-api/lib/media_grpc_pb';
20
+ import { Nullable } from 'typescript-nullable';
21
+ import { OptionalBool } from '@norskvideo/norsk-api/lib/shared/common_pb';
22
+ import { OptionalInt } from '@norskvideo/norsk-api/lib/shared/common_pb';
23
+ import { PlainMessage } from '@bufbuild/protobuf';
24
+ import { Readable } from 'stream';
25
+ import { RtmpError_UnsupportedAudio } from '@norskvideo/norsk-api/lib/media_pb';
26
+ import { RtmpError_UnsupportedVideo } from '@norskvideo/norsk-api/lib/media_pb';
27
+ import { Scte35SpliceInfoSection } from '@norskvideo/norsk-api/lib/media_pb';
28
+ import { StreamKey as StreamKey_2 } from '@norskvideo/norsk-api/lib/media_pb';
29
+ import { StreamStatisticsSampling } from '@norskvideo/norsk-api/lib/media_pb';
30
+ import { Subscription } from '@norskvideo/norsk-api/lib/media_pb';
31
+ import { TimestampProgramNudge } from '@norskvideo/norsk-api/lib/media_pb';
32
+ import { TsInputEvent } from '@norskvideo/norsk-api/lib/media_pb';
33
+ import { UdpTsInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
34
+ import { VancPayloadFormat as VancPayloadFormat_2 } from '@norskvideo/norsk-api/lib/media_pb';
35
+ import { Version } from '@norskvideo/norsk-api/lib/shared/common_pb';
36
+ import { Wave } from '@norskvideo/norsk-api/lib/media_pb';
37
+ import { Writable } from 'stream';
38
+
39
+ /** @public */
40
+ export declare type AacProfile = "lc" | "main" | "high";
41
+
42
+ /**
43
+ * @public
44
+ * Settings for an AAC encode
45
+ * see: {@link NorskTransform.audioEncode}
46
+ */
47
+ export declare interface AacSettings {
48
+ kind: "aac";
49
+ /** The output sample rate of this AAC encode */
50
+ sampleRate: SampleRate;
51
+ /** The AAC profile of this AAC encode */
52
+ profile: AacProfile;
53
+ }
54
+
55
+ export declare class AncillaryNode extends AutoProcessorMediaNode<"ancillary"> {
56
+ sendScte35(key: StreamKey, info: Scte35SpliceInfoSection): void;
57
+ }
58
+
59
+ export declare interface AncillarySettings extends ProcessorNodeSettings<AncillaryNode> {
60
+ onScte35?: (stream: StreamKey, message: Scte35SpliceInfoSection) => void;
61
+ onSmpte2038?: (stream: StreamKey, message: Smpte2038Message) => void;
62
+ }
63
+
64
+ /**
65
+ * @public
66
+ * Returns the stream keys for ancillary streams in a media context
67
+ * @param streams - The media context from which to return the stream keys
68
+ * @returns The ancillary stream keys in the media context
69
+ */
70
+ export declare function ancillaryStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
71
+
72
+ /** @public */
73
+ export declare interface AncillaryStreamMetadata {
74
+ }
75
+
76
+ /**
77
+ * @public
78
+ * Filters a context to only the ancillary streams within it
79
+ * @param streams - The media context from which to return the streams
80
+ * @returns The ancillary streams in the media context
81
+ */
82
+ export declare function ancillaryStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
83
+
84
+ /**
85
+ * @public
86
+ * see: {@link NorskTransform.audioBuildMultichannel}
87
+ */
88
+ export declare class AudioBuildMultichannelNode extends AutoProcessorMediaNode<"audio"> {
89
+ }
90
+
91
+ /**
92
+ * @public
93
+ * Settings for an Audio Build Multichannel Node
94
+ * see: {@link NorskTransform.audioBuildMultichannel}
95
+ * */
96
+ export declare interface AudioBuildMultichannelSettings extends ProcessorNodeSettings<AudioBuildMultichannelNode> {
97
+ /** The channel layout of the built outgoing stream */
98
+ channelLayout: ChannelLayout;
99
+ /**
100
+ * Stream keys specifying the source for each channel, where the order is
101
+ * significant. The streams must all have the same sample format and sample
102
+ * rate.
103
+ */
104
+ channelList: readonly StreamKey[];
105
+ /**
106
+ * Callback invoked when the inbound context changes
107
+ * a new channel list can be returned here that overrides the initial configuration
108
+ * and allows the channel order to be changed at runtime
109
+ */
110
+ onInputChanged?: (keys: StreamKey[]) => StreamKey[] | undefined;
111
+ /** The stream key to use for the outgoing stream */
112
+ outputStreamKey: StreamKey;
113
+ }
114
+
115
+ export { AudioCodec }
116
+
117
+ /**
118
+ * @public
119
+ * see: {@link NorskTransform.audioEncode}
120
+ */
121
+ export declare class AudioEncodeNode extends AutoProcessorMediaNode<"audio"> {
122
+ }
123
+
124
+ /**
125
+ * @public
126
+ * Settings for an audio encode
127
+ * see: {@link NorskTransform.audioEncode}
128
+ * */
129
+ export declare interface AudioEncodeSettings extends ProcessorNodeSettings<AudioEncodeNode> {
130
+ /**
131
+ * The channel layout of this encode
132
+ * Note: If the channel layout doesn't match then it will be automatically converted
133
+ * to gain greater control over this process, see {@link NorskTransform.audioMix} and {@link NorskTransform.audioMixMatrix}
134
+ * */
135
+ channelLayout: ChannelLayout;
136
+ /** The target bitrate of this encode */
137
+ bitrate: number;
138
+ /** The name given to the rendition portion of the stream key assigned to this node's output */
139
+ outputRenditionName: string;
140
+ /** What codec to (re) encode the audio to */
141
+ codec: OpusSettings | AacSettings;
142
+ }
143
+
144
+ /**
145
+ * @public
146
+ * see: {@link NorskTransform.audioGain}
147
+ */
148
+ export declare class AudioGainNode extends AutoProcessorMediaNode<"audio"> {
149
+ /**
150
+ * @public
151
+ * Updates the config of this AudioGain node for all subsequent frames
152
+ * this allows the user to change the gains in the outgoing stream
153
+ * dynamically as the stream progresses
154
+ * @param settings - The updated settings
155
+ */
156
+ updateConfig(settings: AudioGainSettingsUpdate): void;
157
+ }
158
+
159
+ /**
160
+ * @public
161
+ * Settings for an Audio Gain node
162
+ * see: {@link NorskTransform.audioGain}
163
+ * */
164
+ export declare interface AudioGainSettings extends ProcessorNodeSettings<AudioGainNode> {
165
+ /** A vector of gains for this source, one for each channel */
166
+ channelGains: readonly Gain[];
167
+ }
168
+
169
+ /**
170
+ * @public
171
+ * An update operation for an Audio Gain node
172
+ * see: {@link AudioGainNode.updateConfig}
173
+ * */
174
+ export declare interface AudioGainSettingsUpdate {
175
+ /** A vector of gains for this source, one for each channel */
176
+ channelGains?: readonly Gain[];
177
+ }
178
+
179
+ /** @public */
180
+ export declare interface AudioMeasureLevels {
181
+ stream: StreamKey;
182
+ pts: Interval;
183
+ channelLevels: ChannelLevels[];
184
+ }
185
+
186
+ /**
187
+ * @public
188
+ * see: {@link NorskControl.audioMeasureLevels}.
189
+ */
190
+ export declare class AudioMeasureLevelsNode extends AutoProcessorMediaNode<"audio"> {
191
+ }
192
+
193
+ /**
194
+ * @public
195
+ * Settings for an AudioMeasureLevelsNode
196
+ * see: {@link NorskControl.audioMeasureLevels}
197
+ */
198
+ export declare interface AudioMeasureLevelsSettings extends ProcessorNodeSettings<AudioMeasureLevelsNode> {
199
+ /**
200
+ * Called with the audio level data
201
+ * @param levels - The level data for the audio stream
202
+ * @eventProperty
203
+ */
204
+ onData: (levels: AudioMeasureLevels) => void;
205
+ intervalFrames?: number;
206
+ }
207
+
208
+ /**
209
+ * @public
210
+ * see: {@link NorskTransform.audioMixMatrix}
211
+ */
212
+ export declare class AudioMixMatrixNode extends AutoProcessorMediaNode<"audio"> {
213
+ /**
214
+ * @public
215
+ * Updates the config of this AudioMixMatrix node for all subsequent frames
216
+ * this allows the user to change the gains in the outgoing mix
217
+ * dynamically as the stream progresses
218
+ * @param settings - The updated settings
219
+ */
220
+ updateConfig(settings: AudioMixMatrixSettingsUpdate): void;
221
+ }
222
+
223
+ /**
224
+ * @public
225
+ * Settings for the Audio Mix Matrix Node
226
+ * see: {@link NorskTransform.audioMixMatrix}
227
+ * */
228
+ export declare interface AudioMixMatrixSettings extends ProcessorNodeSettings<AudioMixMatrixNode> {
229
+ /** The NxM matrix of gains from N input channels to M output channels */
230
+ channelGains: readonly Gain[][];
231
+ /** The desired output channel layout, such as "5.1" */
232
+ outputChannelLayout: ChannelLayout;
233
+ }
234
+
235
+ /**
236
+ * @public
237
+ * Config update for the {@link AudioMixMatrixNode}.
238
+ * Call {@link AudioMixMatrixNode.updateConfig} for updating the config.
239
+ */
240
+ export declare interface AudioMixMatrixSettingsUpdate {
241
+ /** The NxM updated matrix of gains from N input channels to M output channels */
242
+ channelGains: readonly Gain[][];
243
+ }
244
+
245
+ /**
246
+ * @public
247
+ * see: {@link NorskTransform.audioMix}
248
+ */
249
+ export declare class AudioMixNode<Pins extends string> extends ProcessorMediaNode<Pins> {
250
+ /**
251
+ * @public
252
+ * Updates the config of this AudioMix for all subsequent frames
253
+ * this allows the user to change the levels and sources in the outgoing mix
254
+ * dynamically as the stream progresses
255
+ * @param settings - The updated settings
256
+ */
257
+ updateConfig(settings: AudioMixSettingsUpdate<Pins>): void;
258
+ }
259
+
260
+ /**
261
+ * @public
262
+ * The settings for an AudioMix operation
263
+ * see: {@link NorskTransform.audioMix}
264
+ * */
265
+ export declare interface AudioMixSettings<Pins extends string> extends ProcessorNodeSettings<AudioMixNode<Pins>> {
266
+ /** The audio sources to mix */
267
+ sources: readonly AudioMixSource<Pins>[];
268
+ /** The source name to use for the output stream */
269
+ outputSource: string;
270
+ /** The channel layout that the mixer runs at
271
+ * all audio streams will be normalised to this value and therefore
272
+ * this will be the output channel layout of this node */
273
+ channelLayout: ChannelLayout;
274
+ /** The sample rate that the mixer runs at
275
+ * all audio streams will be normalised to this value and therefore
276
+ * this will be the output sample rate of this node */
277
+ sampleRate?: SampleRate;
278
+ }
279
+
280
+ /**
281
+ * @public
282
+ * An update operation for an AudioMix node
283
+ * see: {@link AudioMixNode.updateConfig}
284
+ * */
285
+ export declare interface AudioMixSettingsUpdate<Pins extends string> {
286
+ /** The audio sources to mix along with their potentially new gain values */
287
+ sources: readonly AudioMixSource<Pins>[];
288
+ }
289
+
290
+ /**
291
+ * @public
292
+ * The settings for a single source within an AudioMix operation
293
+ * see: {@link NorskTransform.audioMix}
294
+ * */
295
+ export declare interface AudioMixSource<Pins> {
296
+ /** The name of the InputPin for this source */
297
+ pin: Pins;
298
+ /** A vector of gains for this source, one for each channel */
299
+ channelGains?: readonly Gain[];
300
+ }
301
+
302
+ /**
303
+ * @public
304
+ * see: {@link NorskInput.audioSignal}
305
+ */
306
+ export declare class AudioSignalGeneratorNode extends SourceMediaNode {
307
+ }
308
+
309
+ /**
310
+ * @public
311
+ * Settings for an Audio Signal Generator
312
+ * see: {@link NorskInput.audioSignal}
313
+ * */
314
+ export declare interface AudioSignalGeneratorSettings extends SourceNodeSettings<AudioSignalGeneratorNode> {
315
+ /** The source name to set in the stream key of the outgoing stream */
316
+ sourceName: string;
317
+ /** The audio channel layout of the generated stream */
318
+ channelLayout: ChannelLayout;
319
+ /** The sample rate of the generated stream */
320
+ sampleRate: SampleRate;
321
+ /** The sample format to use. Default: "fltp" */
322
+ sampleFormat?: SampleFormat;
323
+ /**
324
+ * Waveform - create one with {@link mkSine}
325
+ * */
326
+ wave?: Wave;
327
+ }
328
+
329
+ /**
330
+ * @public
331
+ * see: {@link NorskTransform.audioSplitMultichannel}
332
+ */
333
+ export declare class AudioSplitMultichannelNode extends AutoProcessorMediaNode<"audio"> {
334
+ }
335
+
336
+ /**
337
+ * @public
338
+ * Settings for an Audio Split Multichannel node
339
+ * see: {@link NorskTransform.audioSplitMultichannel}
340
+ * */
341
+ export declare interface AudioSplitMultichannelSettings extends ProcessorNodeSettings<AudioSplitMultichannelNode> {
342
+ /**
343
+ * The output stream key of the first channel
344
+ * subsequent channels will have streamId incremented by N
345
+ */
346
+ outputStreamKey: StreamKey;
347
+ }
348
+
349
+ /**
350
+ * @public
351
+ * Returns the stream keys for audio streams in a media context
352
+ * @param streams - The media context from which to return the stream keys
353
+ * @returns The audio stream keys in the media context
354
+ */
355
+ export declare function audioStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
356
+
357
+ /** @public */
358
+ export declare interface AudioStreamMetadata {
359
+ codec: AudioCodec;
360
+ sampleRate: SampleRate;
361
+ channelLayout?: ChannelLayout;
362
+ }
363
+
364
+ /**
365
+ * @public
366
+ * Filters a context to only the audio streams within it
367
+ * @param streams - The media context from which to return the streams
368
+ * @returns The audio streams in the media context
369
+ */
370
+ export declare function audioStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
371
+
372
+ /** @public */
373
+ export declare function audioToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
374
+
375
+ /**
376
+ * @public
377
+ * see: {@link NorskTransform.audioTranscribeAws}
378
+ */
379
+ export declare class AudioTranscribeAwsNode extends AutoProcessorMediaNode<"audio"> {
380
+ }
381
+
382
+ /**
383
+ * @public
384
+ * Settings for an Audio Transcribe operation using AWS
385
+ * see: {@link NorskTransform.audioTranscribeAws}
386
+ * */
387
+ export declare interface AudioTranscribeAwsSettings extends ProcessorNodeSettings<AudioTranscribeAwsNode> {
388
+ /** Region for the transcribe endpoint */
389
+ awsRegion: string;
390
+ /** The stream id to allocate to the outgoing stream */
391
+ outputStreamId: number;
392
+ /** the language that we want to transcribe (also put in the outgoing metadata) */
393
+ language: string;
394
+ /** The mode to be used for building sentences */
395
+ sentenceBuildMode: SentenceBuildMode;
396
+ /** The mode to be used for stabilising sentences */
397
+ sentenceStabilizationMode: StabilizationMode;
398
+ /** The AWS credentials to use for this operation
399
+ * If not supplied, the standard environment variables will be used if present
400
+ * */
401
+ awsCredentials?: AwsCredentials;
402
+ }
403
+
404
+ /**
405
+ * @public
406
+ * see: {@link NorskTransform.audioTranscribeAzure}
407
+ */
408
+ export declare class AudioTranscribeAzureNode extends AutoProcessorMediaNode<"audio"> {
409
+ }
410
+
411
+ /**
412
+ * @public
413
+ * Settings for an audio transcribe/translate operation using Azure Speech Service
414
+ * see: {@link NorskTransform.audioTranscribeAzure}
415
+ * */
416
+ export declare interface AudioTranscribeAzureSettings extends ProcessorNodeSettings<AudioTranscribeAzureNode> {
417
+ outputStreamId: number;
418
+ sourceLanguage: string;
419
+ targetLanguages?: string[];
420
+ /** Key for the Azure Speech Service endpoint */
421
+ azureKey: string;
422
+ /** Region for the Azure Speech Service endpoint */
423
+ azureRegion: string;
424
+ maximumLineLength: undefined | number;
425
+ }
426
+
427
+ /**
428
+ * @public
429
+ * see: {@link NorskTransform.audioTranscribeWhisper}
430
+ */
431
+ export declare class AudioTranscribeWhisperNode extends AutoProcessorMediaNode<"audio"> {
432
+ }
433
+
434
+ /**
435
+ * @public
436
+ * Settings for an Audio Transcribe operation using Whisper sdk
437
+ * see: {@link NorskTransform.audioTranscribeWhisper}
438
+ * */
439
+ export declare interface AudioTranscribeWhisperSettings extends ProcessorNodeSettings<AudioTranscribeWhisperNode> {
440
+ outputStreamId: number;
441
+ stepMs?: number;
442
+ lengthMs?: number;
443
+ keepMs?: number;
444
+ maxTokens?: number;
445
+ speedUp?: boolean;
446
+ noFallback?: boolean;
447
+ numThreads?: number;
448
+ useGpu?: boolean;
449
+ language: string;
450
+ model: string;
451
+ }
452
+
453
+ export declare interface AutoProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
454
+ }
455
+
456
+ export declare class AutoProcessorMediaNode<Pins extends string> {
457
+ constructor(client: MediaClient, _unregisterNode: (node: MediaNodeState) => void, getGrpcStream: () => (Readable | Writable), subscribeFn: (subscription: Subscription) => Promise<boolean>, subscribeErrorFn?: (error: SubscriptionError) => void, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
458
+ }
459
+
460
+ /** @public */
461
+ export declare class AutoSinkMediaNode<Pins extends string> extends SinkMediaNode<Pins | "auto"> {
462
+ /** Subscribe to the given sources.
463
+ *
464
+ * This version of subscribe simply requires a list of stream keys to be
465
+ * returned from each selector, and the server will automatically
466
+ * assign each stream to the appropriate pin on the sink node.
467
+ * This is the appropriate method for most cases.
468
+ *
469
+ * @param done - will be called with no arguments if the subscription succeeds,
470
+ * or an error if it failed. This error indicates the specific reason it
471
+ * failed, so you can take appropriate actions in response. It will be called
472
+ * before the `subscribedStreamsChangedFn` or `subscribeErrorFn` callbacks
473
+ * provided in the config for the node.
474
+ *
475
+ * Errors are also logged to the debug log.
476
+ */
477
+ subscribe(sources: ReceiveFromAddressAuto[], validation?: (context: Context) => SubscriptionValidationResponse, done?: (error?: SubscriptionError) => void): void;
478
+ }
479
+
480
+ /** @public */
481
+ export declare function avToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
482
+
483
+ /** @public */
484
+ export declare interface AwsCredentials {
485
+ accessKey: string;
486
+ secretKey: string;
487
+ sessionToken: string;
488
+ }
489
+
490
+ /**
491
+ * @public
492
+ * Configuration for pushing a segmented media stream directly to AWS S3
493
+ * */
494
+ export declare interface AwsS3PushDestinationSettings {
495
+ type: "s3";
496
+ /** The hostname of the s3 server being pushed to. */
497
+ host: string;
498
+ /** the port of the s3 server being pushed to. */
499
+ port: number;
500
+ /** the path under which segments and playlists will be pushed to */
501
+ pathPrefix: string;
502
+ /**
503
+ * Optionally supply a string that will be inserted into the path structure for segments published in this stream
504
+ *
505
+ * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
506
+ * caching directives
507
+ */
508
+ sessionId?: string;
509
+ /**
510
+ * A unique identifier for this destination
511
+ *
512
+ * This can be used for supplying updates to configuration to this destination specifically
513
+ * see: {@link UpdateCredentials}
514
+ */
515
+ id: string;
516
+ /**
517
+ * The AWS region being pushed to
518
+ */
519
+ awsRegion: string;
520
+ /**
521
+ * AWS credentials to be used for connecting to S3
522
+ * Standard environment variables will be read if these are not provided
523
+ */
524
+ awsCredentials?: AwsCredentials;
525
+ /**
526
+ * Informs the playlist generation how long segments will be retained for on the remote server
527
+ * in order to generate an accurate playlist
528
+ */
529
+ retentionPeriodSeconds: number;
530
+ }
531
+
532
+ /**
533
+ * @public
534
+ * There are three possible modes:
535
+ *
536
+ * - "abr": encode in average bitrate mode, specified in kilobits/sec (note, 1
537
+ * kilobit is 1000 bits). You can make use of the vbv settings to control
538
+ * the bounds on how much the actual bitrate can fluctuate within the bounds
539
+ * of the average
540
+ *
541
+ * - "cqp": encode in constant quantizer mode. In general, crf will give better
542
+ * results, although cqp can be faster to encode
543
+ *
544
+ * - "crf": encode in constant rate factor mode. This will give a constant 'quality'
545
+ * to the encode, but with a variable bitrate
546
+ */
547
+ export declare interface BitrateMode {
548
+ value: number;
549
+ mode: "abr" | "cqp" | "crf";
550
+ }
551
+
552
+ /** @public */
553
+ export declare type BrowserEvent = {
554
+ case: "onLoaded";
555
+ value: BrowserOnLoaded;
556
+ } | {
557
+ case: "onLoadStart";
558
+ value: BrowserOnLoadStart;
559
+ } | {
560
+ case: "onLoadEnd";
561
+ value: BrowserOnLoadEnd;
562
+ } | {
563
+ case: "onLoadError";
564
+ value: BrowserOnLoadError;
565
+ };
566
+
567
+ /**
568
+ * @public
569
+ * see: {@link NorskInput.browser}
570
+ */
571
+ export declare class BrowserInputNode extends SourceMediaNode {
572
+ /**
573
+ * @public
574
+ * Supply new config for an active web browser session
575
+ * */
576
+ updateConfig(settings: BrowserInputSettingsUpdate): void;
577
+ /**
578
+ * @public
579
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
580
+ * */
581
+ nudge(nudge: number): void;
582
+ }
583
+
584
+ /**
585
+ * @public
586
+ * Settings for a Browser Input
587
+ * see: {@link NorskInput.browser}
588
+ * */
589
+ export declare interface BrowserInputSettings extends SourceNodeSettings<BrowserInputNode>, StreamStatisticsMixin {
590
+ /** The url to load in the browser session */
591
+ url: string;
592
+ /** This is the resolution of the window opened to render the page
593
+ * This is therefore also the output resolution of the generated video
594
+ */
595
+ resolution: {
596
+ width: number;
597
+ height: number;
598
+ };
599
+ /** The source name to populate the outgoing stream key with */
600
+ sourceName: string;
601
+ /** The frame rate at which to generate video from the web page
602
+ * Note: If the web page is static, this will just mean the initial frame
603
+ * is duplicated at the required frame rate
604
+ * */
605
+ frameRate: FrameRate;
606
+ /** An optional callback for reacting to events from the embedded browser
607
+ * At the very least this is useful for logging events (such as a 404)
608
+ * */
609
+ onBrowserEvent?: (event: BrowserEvent) => void;
610
+ }
611
+
612
+ /**
613
+ * @public
614
+ * A settings update for a running browser
615
+ * see: {@link BrowserInputNode.updateConfig}
616
+ * */
617
+ export declare interface BrowserInputSettingsUpdate {
618
+ /** Optionally, a new URL to load within the active session */
619
+ url?: string;
620
+ /** Optionally, a new resolution to use for outgoing video */
621
+ resolution?: {
622
+ width: number;
623
+ height: number;
624
+ };
625
+ }
626
+
627
+ /** @public */
628
+ export declare interface BrowserOnLoaded {
629
+ url: string;
630
+ }
631
+
632
+ /** @public */
633
+ export declare interface BrowserOnLoadEnd {
634
+ url: string;
635
+ statusCode: number;
636
+ }
637
+
638
+ /** @public */
639
+ export declare interface BrowserOnLoadError {
640
+ url: string;
641
+ errorText: string;
642
+ errorCode: number;
643
+ }
644
+
645
+ /** @public */
646
+ export declare interface BrowserOnLoadStart {
647
+ url: string;
648
+ }
649
+
650
+ /** @public Channel layout for an audio stream */
651
+ export declare type ChannelLayout = "mono" | "stereo" | "surround" | "4.0" | "5.0" | "5.1" | "7.1" | "5.1.4" | "7.1.4" | (ChannelName | ExplicitChannel)[];
652
+
653
+ /** @public */
654
+ export declare interface ChannelLevels {
655
+ rms?: Db;
656
+ peak?: Db;
657
+ }
658
+
659
+ /** @public */
660
+ export declare type ChannelName =
661
+ /** Left front */
662
+ "l"
663
+ /** Right front */
664
+ | "r"
665
+ /** Centre front */
666
+ | "c"
667
+ /** Low frequency enhancement */
668
+ | "lfe"
669
+ /** Left surround */
670
+ | "ls"
671
+ /** Right surround */
672
+ | "rs"
673
+ /** Left front centre */
674
+ | "lc"
675
+ /** Right front centre */
676
+ | "rc"
677
+ /** Rear surround left */
678
+ | "lsr"
679
+ /** Rear Surround Right */
680
+ | "rsr"
681
+ /** Rear centre */
682
+ | "cs"
683
+ /** Left surround direct */
684
+ | "lsd"
685
+ /** Right surround direct */
686
+ | "rsd"
687
+ /** Left side surround */
688
+ | "lss"
689
+ /** Right side surround */
690
+ | "rss"
691
+ /** Left wide front */
692
+ | "lw"
693
+ /** Right wide front */
694
+ | "rw"
695
+ /** Left front vertical height */
696
+ | "lv"
697
+ /** Right front vertical height */
698
+ | "rv"
699
+ /** Centre front vertical height */
700
+ | "cv"
701
+ /** Left surround vertical height rear */
702
+ | "lvr"
703
+ /** Right surround vertical height rear */
704
+ | "rvr"
705
+ /** Centre vertical height rear */
706
+ | "cvr"
707
+ /** Left vertical height side surround */
708
+ | "lvss"
709
+ /** Right vertical height side surround */
710
+ | "rvss"
711
+ /** Top centre surround */
712
+ | "ts"
713
+ /** Low frequency enhancement 2 */
714
+ | "lfe2"
715
+ /** Left front vertical bottom */
716
+ | "lb"
717
+ /** Right front vertical bottom */
718
+ | "rb"
719
+ /** Centre front vertical bottom */
720
+ | "cb"
721
+ /** Left vertical height surround */
722
+ | "lvs"
723
+ /** Right vertical height surround */
724
+ | "rvs"
725
+ /** Low frequency enhancement 3 */
726
+ | "lfe3"
727
+ /** Left edge of screen */
728
+ | "leos"
729
+ /** Right edge of screen */
730
+ | "reos"
731
+ /** Halfway between centre of screen and left edge of screen */
732
+ | "hwbcal"
733
+ /** Halfway between centre of screen and right edge of screen */
734
+ | "hwbcar"
735
+ /** Left back surround */
736
+ | "lbs"
737
+ /** Right back surround */
738
+ | "rbs"
739
+ /** Unknown */
740
+ | "unknown";
741
+
742
+ /** @public */
743
+ export declare function clientHostExternal(): string;
744
+
745
+ /** @public */
746
+ export declare function clientHostInternal(): string;
747
+
748
+ /** @public */
749
+ export declare function clientPortExternal(): string;
750
+
751
+ /** @public */
752
+ export declare function clientPortInternal(): string;
753
+
754
+ /**
755
+ * @public
756
+ * see: {@link NorskOutput.cmafAudio}
757
+ */
758
+ export declare class CmafAudioOutputNode extends CmafNodeWithPlaylist<CmafAudioMessage, "audio", CmafAudioOutputNode> {
759
+ /**
760
+ * @public
761
+ * Updates the credentials for a specific destination within this output by id
762
+ * see: {@link UpdateCredentials}
763
+ * see: {@link CmafDestinationSettings}
764
+ */
765
+ updateCredentials(settings: UpdateCredentials): void;
766
+ }
767
+
768
+ /**
769
+ * @public
770
+ * Possible destinations for a segmented media stream
771
+ * - {@link HlsPushDestinationSettings}: Push to a generic HTTP server
772
+ * - {@link AwsS3PushDestinationSettings}: Push to Amazon S3
773
+ * - {@link LocalPullDestinationSettings}: Serve directly from the Norsk Web Server
774
+ * */
775
+ export declare type CmafDestinationSettings = HlsPushDestinationSettings | AwsS3PushDestinationSettings | LocalPullDestinationSettings;
776
+
777
+ /**
778
+ * @public
779
+ * see: {@link NorskOutput.cmafMultiVariant}
780
+ */
781
+ export declare class CmafMultiVariantOutputNode extends CmafNodeBase<CmafMultiVariantMessage, "video" | "audio" | "subtitle", CmafMultiVariantOutputNode> {
782
+ /** @public The URL of the file based multi variant playlist */
783
+ url: string;
784
+ /**
785
+ * @public
786
+ * Updates the credentials for a specific destination within this output by id
787
+ * see: {@link UpdateCredentials}
788
+ * see: {@link CmafDestinationSettings}
789
+ */
790
+ updateCredentials(settings: UpdateCredentials): void;
791
+ }
792
+
793
+ /**
794
+ * @public
795
+ * Settings for a CMAF Multi Variant Playlist
796
+ * see {@link NorskOutput.cmafMultiVariant}
797
+ */
798
+ export declare interface CmafMultiVariantOutputSettings extends SinkNodeSettings<CmafMultiVariantOutputNode> {
799
+ /**
800
+ * The name of this multi variant playlist (.m3u8 will be added onto this field to generate a filename)
801
+ */
802
+ playlistName: string;
803
+ /**
804
+ * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
805
+ */
806
+ destinations: CmafDestinationSettings[];
807
+ /**
808
+ * Directives to add to the m3u multi variant playlist
809
+ */
810
+ m3uAdditions?: string;
811
+ /**
812
+ * XML fragment to add to the (top-level) MPD element
813
+ */
814
+ mpdAdditions?: string;
815
+ }
816
+
817
+ declare class CmafNodeBase<ClientMessage, Pins extends string, T extends MediaNodeState> extends AutoProcessorMediaNode<Pins> {
818
+ constructor(client: MediaClient, unregisterNode: (node: MediaNodeState) => void, settings: ProcessorNodeSettings<T> & StreamStatisticsMixin, grpcInit: () => grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, subscribeFn: (subscription: Subscription) => Promise<boolean>, _playlistPath: PlaylistPath, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
819
+ }
820
+
821
+ declare class CmafNodeWithPlaylist<ClientMessage, Pins extends string, T extends MediaNodeState> extends CmafNodeBase<ClientMessage, Pins, T> {
822
+ constructor(client: MediaClient, unregisterNode: (node: MediaNodeState) => void, settings: ProcessorNodeSettings<T> & StreamStatisticsMixin, grpcInit: () => grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, subscribeFn: (subscription: Subscription) => Promise<boolean>, playlistPath: PlaylistPath, sessionId: string);
823
+ /**
824
+ * @public
825
+ * Returns the URL to the HLS playlist entry. Note this can only be evaluated once the stream is active as it
826
+ * varies with the stream subscribed to. Useful during development, but you probably want to
827
+ * use {@link NorskOutput.cmafMultiVariant} for production.
828
+ */
829
+ url(): Promise<string>;
830
+ }
831
+
832
/**
 * @public
 * Settings for a CMAF Audio and Video Outputs
 * see {@link NorskOutput.cmafAudio}, {@link NorskOutput.cmafVideo}
 */
export declare interface CmafOutputSettings extends SinkNodeSettings<CmafAudioOutputNode | CmafVideoOutputNode> {
    /**
     * The target segment duration in seconds. Norsk will make the largest segments it can
     * without going over this target
     */
    segmentDurationSeconds: number;
    /**
     * The target part duration in seconds. Norsk will make the largest parts it can
     * without going over this target
     */
    partDurationSeconds: number;
    /**
     * By default, the program date time or event start time will be based on the
     * timestamp of the first video packet received by Norsk in a stream.
     *
     * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
     * or synchronisation with external streams can then result in players requesting segments that don't exist yet
     *
     * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
     *
     * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
     */
    delayOutputMs?: number;
    /**
     * Settings for encrypting the content.
     */
    encryption?: EncryptionSettings;
    /**
     * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
     */
    destinations: CmafDestinationSettings[];
    /**
     * Directives to add to the m3u media playlist
     */
    m3uAdditions?: string;
    /**
     * XML fragment to add to the mpd Representation element
     */
    mpdAdditions?: string;
    /**
     * Audio or video bitrate for the {@link NorskOutput.cmafMultiVariant} playlist
     */
    bitrate?: number;
}
881
+
882
/**
 * @public
 * see: {@link NorskOutput.cmafVideo}
 */
export declare class CmafVideoOutputNode extends CmafNodeWithPlaylist<CmafVideoMessage, "video", CmafVideoOutputNode> {
    /**
     * @public
     * Updates the credentials for a specific destination within this output by id
     * see: {@link UpdateCredentials}
     * see: {@link CmafDestinationSettings}
     */
    updateCredentials(settings: UpdateCredentials): void;
}

/**
 * @public
 * see: {@link NorskOutput.cmafWebVtt}
 */
export declare class CmafWebVttOutputNode extends CmafNodeWithPlaylist<CmafWebVttMessage, "subtitle", CmafWebVttOutputNode> {
    /**
     * @public
     * Updates the credentials for a specific destination within this output by id
     * see: {@link UpdateCredentials}
     * see: {@link CmafDestinationSettings}
     */
    updateCredentials(settings: UpdateCredentials): void;
}
909
+
910
/**
 * @public
 * Settings for a CMAF WebVTT Output
 * see {@link NorskOutput.cmafWebVtt}
 */
export declare interface CmafWebVttOutputSettings extends SinkNodeSettings<CmafWebVttOutputNode> {
    /**
     * The target segment duration in seconds, Norsk will split subtitles over multiple segments
     * in a compliant manner if necessary
     */
    segmentDurationSeconds: number;
    /**
     * By default, the program date time or event start time will be based on the
     * timestamp of the first video packet received by Norsk in a stream.
     *
     * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
     * or synchronisation with external streams can then result in players requesting segments that don't exist yet
     *
     * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
     *
     * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
     */
    delayOutputMs?: number;
    /**
     * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
     */
    destinations: CmafDestinationSettings[];
    /**
     * A local directory in which to store the segments while they're available
     */
    hlsCacheDirectory: string;
    /**
     * The maximum number of segments to display in a single generated playlist
     */
    maximumPlaylistSegments: number;
}
946
+
947
/** @public Hardware back-end used to perform a video compose operation. */
export declare type ComposeHardwareAcceleration =
/**
 * Use the quadra overlay functionality to perform the compose
 */
"quadra"
/**
 * Use an nvidia CUDA kernel to perform the compose
 */
| "nvidia";

/** @public Behaviour of a video compose when one or more of its input streams is absent. */
export declare type ComposeMissingStreamBehaviour =
/**
 * Produce frames on output by dropping the part(s) of the composition which
 * cannot be fulfilled.
 */
"drop_part"
/**
 * Wait for all streams to be present before producing (further) output
 */
| "wait_for_all";
969
+
970
/**
 * @public
 * A single layer of a video compose operation
 * see {@link NorskTransform.videoCompose}
 * */
export declare interface ComposePart<Pins> {
    /** Input pin for this source */
    pin: Pins;
    /**
     * The area within the source picture to include. This may be the full picture
     * or cropped, and will be rescaled if necessary.
     * If a referenceResolution is specified, then this is within that coordinate system, otherwise
     * this is taken to be within the coordinate system of the input image
     */
    sourceRect: OffsetRect;
    /**
     * The area within the destination picture to place this part of the
     * composition.
     * If a referenceResolution is specified, then this is within that coordinate system, otherwise
     * this is taken to be within the coordinate system of the destination image
     */
    destRect: OffsetRect;
    /**
     * Z-index to determine ordering by which the sources are overlaid
     * (higher layers appear on top)
     */
    zIndex: number;
    /**
     * Opacity multiplier of this overlay (where 0.0 is fully transparent and 1.0
     * is fully opaque)
     */
    opacity: number;
    /** Optionally identify the part to enable transitions */
    id?: string;
    /**
     * Optionally specify a transition for this part. A transition is applied only
     * if the part is specified in both the existing and the current/new
     * configuration, identified by having the same id specified, and a transition
     * is specified for the new configuration.
     */
    transition?: PartTransition;
    /**
     * Optionally supply a reference resolution. This allows description of the
     * composition in a desired coordinate system, e.g. a resolution of 100x100
     * can be specified to allow the source and destination areas to be described
     * in percentage terms, or a notional resolution can be used that is
     * independent of the source resolutions that may be provided.
     *
     * If unset, this will be overridden by a global reference resolution if that is present
     *
     * This is useful to set if you don't know the input resolution of a part but want to be able to describe
     * an operation on that part.
     */
    referenceResolution?: Resolution;
}
1025
+
1026
/** @public The set of stream metadata visible to a node's subscription callbacks. */
export declare interface Context {
    streams: StreamMetadata[];
}

/** @public */
export declare type ContextType = "full" | "singleSource" | "singleProgram" | "singleStream" | "singleRendition";

/** @public A physical CPU core and the logical CPU ids it hosts. */
export declare interface Core {
    logicalCpuIds: LogicalCpuId[];
}

/** @public CPU topology of the host, grouped by NUMA node. */
export declare interface CpuTopology {
    numaNodes: NumaNode[];
}

/** @public A decibel (dB). A null value represents -inf. */
export declare type Db = number | null;
1046
+
1047
/** @public Returns the URL prefix under which debug endpoints are served. */
export declare function debugUrlPrefix(): string;

/** @public Description of an installed DeckLink capture/playback card. */
export declare interface DeckLinkCard {
    index: number;
    displayName: string;
    inputConnections: DeckLinkVideoConnection[];
    outputConnections: DeckLinkVideoConnection[];
    ioSupport: DeckLinkVideoIOSupport[];
}

/** @public A display mode supported by a DeckLink card. */
export declare interface DeckLinkDisplayMode {
    id: DeckLinkDisplayModeId;
    name: string;
    width: number;
    height: number;
    frameRate: FrameRate;
}

/** @public */
export declare type DeckLinkDisplayModeId = "auto" | "sd_ntsc" | "sd_nt23" | "sd_pal" | "sd_ntsp" | "sd_palp" | "hd1080_23ps" | "hd1080_24ps" | "hd1080_p25" | "hd1080_p29" | "hd1080_p30" | "hd1080_p47" | "hd1080_p48" | "hd1080_i50" | "hd1080_i59" | "hd1080_i60" | "hd1080_p95" | "hd1080_p96" | "hd1080_p10" | "hd1080_p11" | "hd1080_p12" | "hd1080_p50" | "hd1080_p59" | "hd1080_p60" | "hd720_p50" | "hd720_p59" | "hd720_p60" | "two_k_23" | "two_k_24" | "two_k_25" | "two_k_dci_23" | "two_k_dci_24" | "two_k_dci_25" | "two_k_dci_29" | "two_k_dci_30" | "two_k_dci_47" | "two_k_dci_48" | "two_k_dci_50" | "two_k_dci_59" | "two_k_dci_60" | "two_k_dci_95" | "two_k_dci_96" | "two_k_dci_10" | "two_k_dci_11" | "two_k_dci_12" | "four_k_23" | "four_k_24" | "four_k_25" | "four_k_29" | "four_k_30" | "four_k_47" | "four_k_48" | "four_k_50" | "four_k_59" | "four_k_60" | "four_k_95" | "four_k_96" | "four_k_10" | "four_k_11" | "four_k_12" | "four_k_dci_23" | "four_k_dci_24" | "four_k_dci_25" | "four_k_dci_29" | "four_k_dci_30" | "four_k_dci_47" | "four_k_dci_48" | "four_k_dci_50" | "four_k_dci_59" | "four_k_dci_60" | "four_k_dci_95" | "four_k_dci_96" | "four_k_dci_10" | "four_k_dci_11" | "four_k_dci_12" | "eight_k_23" | "eight_k_24" | "eight_k_25" | "eight_k_29" | "eight_k_30" | "eight_k_47" | "eight_k_48" | "eight_k_50" | "eight_k_59" | "eight_k_60" | "eight_k_dci_23" | "eight_k_dci_24" | "eight_k_dci_25" | "eight_k_dci_29" | "eight_k_dci_30" | "eight_k_dci_47" | "eight_k_dci_48" | "eight_k_dci_50" | "eight_k_dci_59" | "eight_k_dci_60" | "pc_vga6" | "pc_svg6" | "pc_wxg5" | "pc_wxg6" | "pc_sxg5" | "pc_sxg6" | "pc_uxg5" | "pc_uxg6" | "pc_wux5" | "pc_wux6" | "pc_1945" | "pc_1946" | "pc_wqh5" | "pc_wqh6" | "pc_wqx5" | "pc_wqx6" | "special_iunk";
1070
+
1071
/**
 * @public
 * SDI capture through a DeckLink card.
 * see: {@link NorskInput.deckLink}.
 */
export declare class DeckLinkInputNode extends SourceMediaNode {
    /**
     * @public
     * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
     * */
    nudge(nudge: number): void;
}

/**
 * @public
 * Settings to control SDI capture through a DeckLink card
 * see: {@link NorskInput.deckLink}
 */
export declare interface DeckLinkInputSettings extends InputSettings<DeckLinkInputNode>, StreamStatisticsMixin {
    /** Which card to use */
    cardIndex: number;
    /** The audio channel layout for the input */
    channelLayout: ChannelLayout;
    /** SDI or HDMI capture */
    videoConnection: DeckLinkVideoConnection;
    /** Typically left undefined, but can be used to force capture for a specific {@link DeckLinkDisplayModeId}. If
     * the source is not currently in this mode, then no capture will occur. */
    displayModeId?: DeckLinkDisplayModeId;
    /** Pixel format to capture in. NOTE(review): default when undefined is not stated here — confirm with the card driver behaviour. */
    pixelFormat?: DeckLinkPixelFormat;
}

/** @public */
export declare type DeckLinkPixelFormat = "uyvy" | "argb";

/** @public */
export declare type DeckLinkVideoConnection = "sdi" | "hdmi" | "optical_sdi" | "component" | "composite" | "svideo";

/** @public */
export declare type DeckLinkVideoIOSupport = "capture" | "playback";

/** @public A compose configuration computed lazily from the currently available streams; returning undefined leaves the configuration unchanged — TODO confirm. */
export declare type DeferredVideoComposeSettings<Pins extends string> = (streams: StreamMetadata[]) => VideoComposeSettings<Pins> | undefined;
1113
+
1114
/**
 * @public
 * Drop every N frames from an incoming video stream
 * */
export declare interface DropEvery {
    kind: "every";
    every: number;
}

/**
 * @public
 * Randomly drop frames on a stream
 * - 0.0 means don't drop any frames
 * - 1.0 means drop every single frame
 * */
export declare interface DropRandom {
    kind: "random";
    percentage: number;
}

/**
 * @public
 * Drop the first N frames from an incoming video stream
 * */
export declare interface DropStart {
    kind: "start";
    start: number;
}
1142
+
1143
/** @public Common-encryption (DRM) parameters for packaged media. */
export declare interface EncryptionSettings {
    /**
     * The 16-byte key ID used to identify the key, hexadecimal or GUID encoded.
     */
    encryptionKeyId: string;
    /**
     * The 16-byte key used to encrypt the data, hexadecimal encoded.
     */
    encryptionKey: string;
    /**
     * The PSSH box(es) to include in the MP4, base64 encoded.
     * This is typically given by the DRM provider.
     */
    encryptionPssh: string;
    /**
     * The common encryption scheme used to encrypt data, as per ISO/IEC 23001-7:2016.
     *
     * - Default: CBCS encryption scheme (AES-CBC 10% pattern encryption).
     * Full-sample encryption for audio tracks, subsample encryption for video
     * tracks.
     *
     * - CENC encryption scheme (AES-CTR). Full-sample encryption for audio tracks,
     * subsample encryption for video tracks.
     */
    encryptionScheme?: "cbcs" | "cenc";
}
1170
+
1171
/**
 * @public
 * see: {@link NorskInput.fileImage}
 */
export declare class FileImageInputNode extends SourceMediaNode {
}

/**
 * @public
 * Settings for an image file source
 * see: {@link NorskInput.fileImage}
 * */
export declare interface FileImageInputSettings extends SourceNodeSettings<FileImageInputNode>, StreamStatisticsMixin {
    /** The source name to set in the stream key of the outgoing stream */
    sourceName: string;
    /** the filename to read the image from */
    fileName: string;
    /** The file format for the image. Will be inferred from the file name if not specified. */
    imageFormat?: ImageFormat;
}

/**
 * @public
 * Information about an Mp4 File
 * */
export declare interface FileMp4Info {
    /** The duration of the Mp4 file in milliseconds (if known) */
    durationMs?: number;
    /** The total length of the mp4 file in bytes, if known */
    byteLength?: number;
}
1202
+
1203
/**
 * @public
 * see: {@link NorskInput.fileMp4}
 */
export declare class FileMp4InputNode extends SourceMediaNode {
    /**
     * @public
     * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
     * */
    nudge(nudge: number): void;
    /** Applies runtime updates to this node's settings, see {@link FileMp4InputSettingsUpdate} */
    updateSettings(settings: FileMp4InputSettingsUpdate): void;
}

/**
 * @public
 * Settings for an File Based Mp4 Input
 * see: {@link NorskInput.fileMp4}
 */
export declare interface FileMp4InputSettings extends SourceNodeSettings<FileMp4InputNode>, StreamStatisticsMixin {
    /** The source name to set in the stream key of the outgoing stream */
    sourceName: string;
    /** Path to the MP4 file to read */
    fileName: string;
    /** Callback to be notified when the file ends */
    onEof?: () => void;
    /** Callback to be notified when the file is initially read */
    onInfo?: (info: FileMp4Info) => void;
    /** Whether to loop back to the start of the file after reaching the end (default false) */
    loop?: boolean;
}

/**
 * @public
 * Runtime-updatable settings for a File Based Mp4 Input
 * see: {@link FileMp4InputNode.updateSettings}
 */
export declare interface FileMp4InputSettingsUpdate {
    /** Whether to loop back to the start of the file after reaching the end */
    loop?: boolean;
}
1238
+
1239
/**
 * @public
 * see: {@link NorskOutput.fileMp4}
 */
export declare class FileMp4OutputNode extends AutoSinkMediaNode<"audio" | "video"> {
    /**
     * @public
     * Writes a non-fragmented MP4 file containing the data received so far to the
     * supplied filename
     */
    writeFile(nonfragmentedFileName: string): void;
}

/**
 * @public
 * Settings to control MP4 file output
 * see {@link NorskOutput.fileMp4}
 */
export declare interface FileMp4OutputSettings extends SinkNodeSettings<FileMp4OutputNode>, StreamStatisticsMixin {
    /**
     * Required: stream fragmented MP4 to this file.
     */
    fragmentedFileName: string;
    /**
     * Write non-fragmented MP4 to this file on close, creates a `.tmp` file to
     * store the frame data.
     */
    nonfragmentedFileName?: string;
    /**
     * Settings for encrypting the audio track.
     */
    audioEncryption?: EncryptionSettings;
    /**
     * Settings for encrypting the video track.
     */
    videoEncryption?: EncryptionSettings;
    /**
     * Callback that will be invoked once data stops being received by the node (determined by an empty context)
     * at which point it will automatically shut down
     */
    onStreamEof?: () => void;
}
1281
+
1282
/**
 * @public
 * see: {@link NorskInput.fileTs}
 */
export declare class FileTsInputNode extends TsCommonInputNode<FileTsInputMessage, FileTsInputNode> {
    /** Applies runtime updates to this node's settings, see {@link FileTsInputSettingsUpdate} */
    updateSettings(settings: FileTsInputSettingsUpdate): void;
}

/** @public Settings for a Transport Stream read from file, see: {@link NorskInput.fileTs} */
export declare interface FileTsInputSettings extends LocalFileInputSettings, StreamStatisticsMixin {
    /** Whether to loop back to the start of the file after reaching the end */
    loop?: boolean;
}

/** @public */
export declare interface FileTsInputSettingsUpdate {
    /** Whether to loop back to the start of the file after reaching the end */
    loop?: boolean;
}
1301
+
1302
/**
 * @public
 * see: {@link NorskOutput.fileTs}
 */
export declare class FileTsOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
}

/**
 * @public
 * The settings for an output Transport Stream written to file
 * see: {@link NorskOutput.fileTs}
 */
export declare interface FileTsOutputSettings extends SinkNodeSettings<FileTsOutputNode>, StreamStatisticsMixin {
    /** The file to write - this will be truncated if it already exists */
    fileName: string;
}

/**
 * @public
 * see: {@link NorskInput.fileWebVtt}
 */
export declare class FileWebVttInputNode extends SourceMediaNode {
}

/** @public A frame rate expressed as frames per a number of seconds (e.g. 30000/1001). */
export declare interface FrameRate {
    frames: number;
    seconds: number;
}
1331
+
1332
/**
 * Converts a wire-level VANC payload format into the public {@link VancPayloadFormat} type.
 * NOTE(review): exported without an `@public` tag — presumably internal plumbing; confirm intent.
 */
export declare function fromVancPayloadFormat(format: VancPayloadFormat_2): VancPayloadFormat;

/**
 * @public
 * A relative change in decibels, expressing a power ratio.
 *
 * A value of 0dB means no change, positive values mean an increase in power, and negative values mean a decrease in power.
 */
export declare type Gain = Db;

/** @public Returns the number of audio channels implied by the given {@link ChannelLayout}. */
export declare function getAmountOfChannels(layout: ChannelLayout): number;
1344
+
1345
/**
 * @public
 * Hardware details of the host machine: CPU topology, total system memory
 * and any installed DeckLink cards.
 */
export declare interface HardwareInfo {
    cpuTopology: CpuTopology;
    systemMemory: number;
    deckLinkCards: DeckLinkCard[];
}
1357
+
1358
/**
 * @public
 * Configuration for pushing a segmented media stream directly to a generic http server
 * */
export declare interface HlsPushDestinationSettings {
    type: "generic";
    /** The hostname of the web server being pushed to.
     * This will be used to re-resolve the IP address on failures
     * */
    host: string;
    /** the port of the web server being pushed to. */
    port: number;
    /** the path under which segments and playlists will be pushed to */
    pathPrefix: string;
    /**
     * Optionally supply a string that will be inserted into the path structure for segments published in this stream
     *
     * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
     * caching directives
     */
    sessionId?: string;
    /**
     * A unique identifier for this destination
     *
     * This can be used for supplying updates to configuration to this destination specifically
     * see: {@link UpdateCredentials}
     */
    id: string;
    /**
     * Informs the playlist generation how long segments will be retained for on the remote server
     * in order to generate an accurate playlist
     */
    retentionPeriodSeconds: number;
}
1392
+
1393
/**
 * @public
 * see: {@link NorskOutput.hlsTsAudio}
 */
export declare class HlsTsAudioOutputNode extends CmafNodeWithPlaylist<HlsTsAudioMessage, "audio", HlsTsAudioOutputNode> {
    /**
     * @public
     * Updates the credentials for a specific destination within this output by id
     * see: {@link UpdateCredentials}
     * see: {@link CmafDestinationSettings}
     */
    updateCredentials(settings: UpdateCredentials): void;
}

/**
 * @public
 * Settings for a HLS TS Audio Output
 * see {@link NorskOutput.hlsTsAudio}
 */
export declare interface HlsTsAudioOutputSettings extends SinkNodeSettings<HlsTsAudioOutputNode> {
    /**
     * The target segment duration in seconds. Norsk will make the largest segments it can
     * without going over this target using the durations of the individual audio frames
     */
    segmentDurationSeconds: number;
    /**
     * By default, the program date time or event start time will be based on the
     * timestamp of the first video packet received by Norsk in a stream.
     *
     * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
     * or synchronisation with external streams can then result in players requesting segments that don't exist yet
     *
     * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
     *
     * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
     */
    delayOutputMs?: number;
    /**
     * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
     */
    destinations: CmafDestinationSettings[];
    /**
     * Directives to add to the m3u media playlist
     */
    m3uAdditions?: string;
    /**
     * XML fragment to add to the mpd Representation element
     */
    mpdAdditions?: string;
    /**
     * Audio bitrate for the {@link NorskOutput.hlsTsMultiVariant} playlist
     */
    bitrate?: number;
}
1447
+
1448
/**
 * @public
 * see: {@link NorskOutput.hlsTsCombinedPush}
 */
export declare class HlsTsCombinedPushOutputNode extends CmafNodeWithPlaylist<HlsTsCombinedPushMessage, "audio" | "video", HlsTsCombinedPushOutputNode> {
}

/**
 * @public
 * Settings for a HLS Transport Stream Combined Push Output
 * see {@link NorskOutput.hlsTsCombinedPush}
 */
export declare interface HlsTsCombinedPushOutputSettings extends SinkNodeSettings<HlsTsCombinedPushOutputNode> {
    /**
     * The target segment duration in seconds. Norsk will use the framerate of the video stream in order
     * to produce compliant segments that are less than or equal to this in duration, with audio packaged alongside
     * using timestamps to line them up
     */
    segmentDurationSeconds: number;
    /**
     * By default, the program date time or event start time will be based on the
     * timestamp of the first video packet received by Norsk in a stream.
     *
     * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
     * or synchronisation with external streams can then result in players requesting segments that don't exist yet
     *
     * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
     *
     * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
     */
    delayOutputMs?: number;
    /**
     * The destination {@link CmafDestinationSettings} for this stream to be published to
     */
    destination: CmafDestinationSettings;
    /**
     * The name of this media playlist (.m3u8 will be added onto this field to generate a filename)
     */
    playlistName: string;
    /**
     * Directives to add to the m3u media playlists
     */
    m3uAdditions?: string;
}
1492
+
1493
/**
 * @public
 * see: {@link NorskOutput.hlsTsMultiVariant}
 */
export declare class HlsTsMultiVariantOutputNode extends CmafNodeBase<HlsTsMultiVariantMessage, "video" | "audio" | "subtitle", HlsTsMultiVariantOutputNode> {
    /** @public The URL of the file based multi variant playlist */
    url: string;
    /**
     * @public
     * Updates the credentials for a specific destination within this output by id
     * see: {@link UpdateCredentials}
     * see: {@link CmafDestinationSettings}
     */
    updateCredentials(settings: UpdateCredentials): void;
}

/**
 * @public
 * Settings for a Hls Ts Multivariant Playlist
 * see {@link NorskOutput.hlsTsMultiVariant}
 */
export declare interface HlsTsMultiVariantOutputSettings extends SinkNodeSettings<HlsTsMultiVariantOutputNode> {
    /**
     * The name of this multi variant playlist (.m3u8 will be added onto this field to generate a filename)
     */
    playlistName: string;
    /**
     * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
     */
    destinations: CmafDestinationSettings[];
    /**
     * Directives to add to the m3u multi variant playlist
     */
    m3uAdditions?: string;
}
1528
+
1529
/**
 * @public
 * see: {@link NorskOutput.hlsTsVideo}
 */
export declare class HlsTsVideoOutputNode extends CmafNodeWithPlaylist<HlsTsVideoMessage, "video", HlsTsVideoOutputNode> {
    /**
     * @public
     * Updates the credentials for a specific destination within this output by id
     * see: {@link UpdateCredentials}
     * see: {@link CmafDestinationSettings}
     */
    updateCredentials(settings: UpdateCredentials): void;
}

/**
 * @public
 * Settings for a HLS TS Video Output
 * see {@link NorskOutput.hlsTsVideo}
 */
export declare interface HlsTsVideoOutputSettings extends SinkNodeSettings<HlsTsVideoOutputNode> {
    /**
     * The target segment duration in seconds. Norsk will use the framerate of the stream in order
     * to produce compliant segments that are less than or equal to this in duration
     */
    segmentDurationSeconds: number;
    /**
     * By default, the program date time or event start time will be based on the
     * timestamp of the first video packet received by Norsk in a stream.
     *
     * Assuming minimal latency in Norsk itself, this behaviour is fine - but encodes and composition
     * or synchronisation with external streams can then result in players requesting segments that don't exist yet
     *
     * delayOutputMs can be used to push the timestamp forwards so that players can calculate the edge of the stream accurately.
     *
     * This number should match the delayOutputMs of other streams which will be served within the same multi variant playlist
     */
    delayOutputMs?: number;
    /**
     * A list of destinations {@link CmafDestinationSettings} for this stream to be published to
     */
    destinations: CmafDestinationSettings[];
    /**
     * Directives to add to the m3u media playlist
     */
    m3uAdditions?: string;
    /**
     * XML fragment to add to the mpd Representation element
     */
    mpdAdditions?: string;
    /**
     * Video bitrate for the {@link NorskOutput.hlsTsMultiVariant} playlist
     */
    bitrate?: number;
}
1583
+
1584
/** @public STUN/TURN server configuration for WebRTC ICE negotiation. */
export declare type IceServerSettings = {
    urls: string[];
    username?: string;
    credential?: string;
};

/** @public */
export declare type ImageFormat = "png" | "jpeg" | "gif" | "webp" | "pnm" | "tiff" | "tga" | "dds" | "bmp" | "ico" | "hdr" | "openexr" | "farbfeld" | "avif";

/**
 * @public
 * Base settings for most input nodes
 * */
export declare interface InputSettings<T extends MediaNodeState> extends SourceNodeSettings<T> {
    /** The source name to set on the stream key on the outgoing stream from this node */
    sourceName: string;
}

/** @public A time interval measured as ticks / (ticks per second) */
export declare interface Interval {
    n: number;
    d: number;
}

/** @public A timestamp expressed as a rational: n ticks at d ticks per second. */
export declare interface IntervalTimestamp {
    n: number;
    d: number;
}
1614
+
1615
/**
 * @public
 * see: {@link NorskTransform.jitterBuffer}
 */
export declare class JitterBufferNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
}

/**
 * @public
 * Settings for a Jitter Buffer
 * see: {@link NorskTransform.jitterBuffer}
 * */
export declare interface JitterBufferSettings extends ProcessorNodeSettings<JitterBufferNode> {
    /** Buffer delay in milliseconds */
    delayMs: number;
}
1631
+
1632
/**
 * @public
 * The standard settings for any node reading from a file
 * */
export declare interface LocalFileInputSettings extends InputSettings<SourceMediaNode> {
    /** The file to be read from */
    fileName: string;
    /** An optional callback that will be invoked when file end is reached */
    onEof?: () => void;
}

/**
 * @public
 * Configuration for the serving of segments and playlists directly from the Norsk Web Server
 * Note: While this is both useful for local testing and for sitting behind a reverse caching proxy / CDN
 * it is not expected that Norsk serve as the edge server in most scenarios
 * */
export declare interface LocalPullDestinationSettings {
    type: "local";
    /**
     * Optionally supply a string that will be inserted into the path structure for segments published in this stream
     *
     * This is useful for stream restarts or republishing when duplicate segment IDs would be generated causing problems with
     * caching directives
     */
    sessionId?: string;
    /**
     * Informs the playlist generation how long segments will be retained for
     * and informs the local web server how long to retain those segments
     */
    retentionPeriodSeconds: number;
}
1664
+
1665
/** @public A single log entry emitted by Norsk (syslog-style severity levels). */
export declare type Log = {
    level: "emergency" | "alert" | "critical" | "error" | "warning" | "notice" | "info" | "debug";
    timestamp: Date;
    message: string;
    metadata: string;
};
1672
+
1673
/**
 * @public
 * Settings for a H264 Encode using Netint Logan hardware
 * A detailed description of these params can be found
 * on the Netint Logan Encoder Documentation
 *
 * These fields have deliberately been written to maintain the same semantics as the
 * Logan documentation where possible.
 *
 * If left undefined, all will default to Logan's own defaults
 * */
export declare interface LoganH264 {
    type: "logan-h264";
    /** This (for convenience) takes the xcoder string that Logan's
     * Ffmpeg integration accepts, this is to aid developers in getting up and running
     * quickly and will override any values set manually in the rest of this interface.
     *
     * It is expected that developers will choose to use the typed fields for most things instead
     * when moving to production, as they offer a degree of validation and type safety
     * */
    extraOpts?: string;
    // NOTE(review): the fields below mirror Logan encoder parameters of the same
    // names — consult the Netint Logan documentation for their exact semantics.
    enableAud?: boolean;
    gpuIndex?: number;
    bitrate?: number;
    flushGop?: boolean;
    enableVfr?: boolean;
    crf?: number;
    cbr?: boolean;
    gopPresetIndex?: number;
    intraPeriod?: number;
    rcEnable?: boolean;
    intraQp?: number;
    rcInitDelay?: number;
    profile?: LoganH264Profile;
    level?: LoganH264Level;
}

/** @public */
export declare type LoganH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;

/** @public */
export declare type LoganH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
1715
+
1716
+ /**
1717
+ * @public
1718
+ * Settings for a HEVC Encode using Netint Logan hardware
1719
+ * A detailed description of these params can be found
1720
+ * on the Netint Logan Encoder Documentation
1721
+ *
1722
+ * These fields have deliberately been written to maintain the same semantics as the
1723
+ * Logan documentation where possible.
1724
+ *
1725
+ * If left undefined, all will default to Logan's own defaults
1726
+ * */
1727
+ export declare interface LoganHevc {
1728
+ type: "logan-hevc";
1729
+ /** This (for convenience) takes the xcoder string that Logan's
1730
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
1731
+ * quickly and will override any values set manually in the rest of this interface.
1732
+ *
1733
+ * It is expected that developers will choose to use the typed fields for most things instead
1734
+ * when moving to production, as they offer a degree of validation and type safety
1735
+ * */
1736
+ extraOpts?: string;
1737
+ enableAud?: boolean;
1738
+ gpuIndex?: number;
1739
+ bitrate?: number;
1740
+ flushGop?: boolean;
1741
+ enableVfr?: boolean;
1742
+ crf?: number;
1743
+ cbr?: boolean;
1744
+ gopPresetIndex?: number;
1745
+ intraPeriod?: number;
1746
+ rcEnable?: boolean;
1747
+ intraQp?: number;
1748
+ rcInitDelay?: number;
1749
+ profile?: LoganHevcProfile;
1750
+ level?: LoganHevcLevel;
1751
+ tier?: LoganHevcTier;
1752
+ lossless?: boolean;
1753
+ hrdEnable?: boolean;
1754
+ dolbyVisionProfile?: number;
1755
+ }
1756
+
1757
+ /** @public */
1758
+ export declare type LoganHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1759
+
1760
+ /** @public */
1761
+ export declare type LoganHevcProfile = "main" | "main10";
1762
+
1763
+ /** @public */
1764
+ export declare type LoganHevcTier = "main" | "high";
1765
+
1766
+ /** @public */
1767
+ export declare type LogicalCpuId = number;
1768
+
1769
+ /** @public */
1770
+ export declare type MediaNodeId = string;
1771
+
1772
+ /** @public */
1773
+ export declare class MediaNodeState {
1774
+ id: MediaNodeId | undefined;
1775
+ constructor(client: MediaClient);
1776
+ close(): Promise<void>;
1777
+ }
1778
+
1779
+ /**
1780
+ * @public
1781
+ * Generate encryption parameters from an encryption KeyID and Key,
1782
+ * in the form KEYID:KEY, both 16-byte hexadecimal
1783
+ */
1784
+ export declare function mkEncryption(encryption: string | undefined, pssh?: string | undefined): EncryptionSettings | undefined;
1785
+
1786
+ /** @public */
1787
+ export declare function mkSine(freq: number): Wave;
1788
+
1789
+ /** @public */
1790
+ export declare interface MultiStreamStatistics {
1791
+ allStreams: SingleStreamStatistics[];
1792
+ sampleSizeSeconds: number;
1793
+ /**
1794
+ * Either `"default"`, if there is only one direction, or `"input"`/`"output"`
1795
+ * (for duplex nodes, where there are two directions)
1796
+ */
1797
+ label: string;
1798
+ total: StreamStatistics;
1799
+ audio: StreamStatistics;
1800
+ video: StreamStatistics;
1801
+ }
1802
+
1803
+ /** @public */
1804
+ export declare function newSilentMatrix(rows: number, cols: number): Gain[][];
1805
+
1806
+ /**
1807
+ * @public
1808
+ * Settings common to all media nodes
1809
+ */
1810
+ export declare interface NodeSettings<T extends MediaNodeState> {
1811
+ /**
1812
+ * Media Node identifier. If one is not specified, a random identifier will be generated.
1813
+ */
1814
+ id?: string;
1815
+ /**
1816
+ * Called with any errors from the Node.
1817
+ *
1818
+ * This includes both errors which will cause the node to exit, and those that do not
1819
+ * but may e.g. indicate a connection has failed.
1820
+ */
1821
+ onError?: (error: Error) => void;
1822
+ /**
1823
+ * Called when the Node closes.
1824
+ *
1825
+ * This may be by request, because the node naturally exits or an error has occurred. See `onError`
1826
+ * to be notified of errors which may lead to the node closing.
1827
+ */
1828
+ onClose?: () => void;
1829
+ /**
1830
+ * Callback to synchronously perform an action when node creation is complete
1831
+ * (e.g. subscribe a downstream node before the first context/frame might arrive)
1832
+ */
1833
+ onCreate?: (node: T) => void;
1834
+ }
1835
+
1836
+ /**
1837
+ * @public
1838
+ * The entrypoint for all Norsk Media applications
1839
+ *
1840
+ * @example
1841
+ * ```ts
1842
+ * const norsk = new Norsk();
1843
+ * ```
1844
+ */
1845
+ export declare class Norsk {
1846
+ /**
1847
+ * Implements the {@link NorskInput} interface
1848
+ */
1849
+ input: NorskInput;
1850
+ /**
1851
+ * Implements the {@link NorskOutput} interface
1852
+ */
1853
+ output: NorskOutput;
1854
+ /**
1855
+ * Implements the {@link NorskDuplex} interface
1856
+ */
1857
+ duplex: NorskDuplex;
1858
+ /**
1859
+ * Implements the {@link NorskProcessor} interface
1860
+ */
1861
+ processor: NorskProcessor;
1862
+ /**
1863
+ * Implements the {@link NorskDebug} interface
1864
+ */
1865
+ debug: NorskDebug;
1866
+ /**
1867
+ * Implements the {@link NorskSystem} interface
1868
+ */
1869
+ system: NorskSystem;
1870
+ /**
1871
+ * Norsk Runtime version information
1872
+ */
1873
+ version: Version;
1874
+ close(): Promise<void>;
1875
+ /** @public */
1876
+ static connect(settings?: NorskSettings): Promise<Norsk>;
1877
+ }
1878
+
1879
+ /**
1880
+ * @public
1881
+ * Methods that allow you to control and monitor media streams
1882
+ */
1883
+ export declare interface NorskControl {
1884
+ /**
1885
+ * Switch between multiple input sources via a hard cut. May be used to switch between
1886
+ * sources of possibly different configurations or without decoding.
1887
+ * @param settings - Options for the switcher
1888
+ */
1889
+ streamSwitchHard<Pins extends string>(settings: StreamSwitchHardSettings<Pins>): Promise<StreamSwitchHardNode<Pins>>;
1890
+ /**
1891
+ * Switch between multiple input sources without interruption, via a transition.
1892
+ * @param settings - Options for the switcher
1893
+ */
1894
+ streamSwitchSmooth<Pins extends string>(settings: StreamSwitchSmoothSettings<Pins>): Promise<StreamSwitchSmoothNode<Pins>>;
1895
+ /**
1896
+ * Record statistical information about media streams, including bitrate,
1897
+ * frame rate, and number of keyframes, measured over some configurable
1898
+ * sampling windows.
1899
+ *
1900
+ * Corresponding settings are found on many input and output nodes.
1901
+ * @param settings - Callback and sampling intervals
1902
+ */
1903
+ streamStatistics(settings: StreamStatisticsSettings): Promise<StreamStatisticsNode>;
1904
+ /**
1905
+ * Monitor the volume of an audio stream
1906
+ * @param settings - Callback and options for the level data
1907
+ */
1908
+ audioMeasureLevels(settings: AudioMeasureLevelsSettings): Promise<AudioMeasureLevelsNode>;
1909
+ }
1910
+
1911
+ /**
1912
+ * @public
1913
+ * Methods that allow you to inspect a system (typically during development)
1914
+ */
1915
+ export declare interface NorskDebug {
1916
+ /** Creates a node to intercept all the timestamps of any subscribed stream
1917
+ * @param settings - Configuration for the report node
1918
+ */
1919
+ streamTimestampReport(settings: StreamTimestampReportSettings): Promise<StreamTimestampReportNode>;
1920
+ }
1921
+
1922
+ /**
1923
+ * @public
1924
+ * Methods that allow you to both ingest and egest media from your application
1925
+ * at the same time
1926
+ */
1927
+ export declare interface NorskDuplex {
1928
+ /**
1929
+ * Playback audio/video via webrtc to a browser, and accept audio/video input from a browser.
1930
+ * The browser client must conform to a custom protocol as implemented in the hosted test page.
1931
+ * (Available from {@link WebRTCBrowserNode.playerUrl}.)
1932
+ * For general WebRTC ingest prefer the WHIP input node, and for egest to a downstream media server
1933
+ * use the WHIP output node.
1934
+ * @param settings - Options for the webrtc node
1935
+ */
1936
+ webRtcBrowser(settings: WebRTCBrowserSettings): Promise<WebRTCBrowserNode>;
1937
+ }
1938
+
1939
+ /** @public */
1940
+ export declare function norskHost(): string;
1941
+
1942
+ /**
1943
+ * @public
1944
+ * Methods that allow you to ingest media into your application
1945
+ */
1946
+ export declare interface NorskInput {
1947
+ /** Create an RTMP Server to receive RTMP streams into your application
1948
+ * @param settings - Configuration for the RTMP server
1949
+ */
1950
+ rtmpServer(settings: RtmpServerInputSettings): Promise<RtmpServerInputNode>;
1951
+ /**
1952
+ * Read from a Transport Stream file with realtime playback.
1953
+ * @param settings - Configuration for the file input
1954
+ */
1955
+ fileTs(settings: FileTsInputSettings): Promise<FileTsInputNode>;
1956
+ /**
1957
+ * Stream from a SRT source
1958
+ * @param settings - Configuration for the SRT input
1959
+ */
1960
+ srt(settings: SrtInputSettings): Promise<SrtInputNode>;
1961
+ /**
1962
+ * Receive media via WebRTC via the WHIP standard.
1963
+ *
1964
+ * Here Norsk acts as the Media Server receiving from a remote WHIP client, to act as the
1965
+ * WHIP client sending to a remote media server see {@link NorskOutput.whip}. For a duplex
1966
+ * connection to a browser peer see {@link NorskDuplex.webRtcBrowser}.
1967
+ *
1968
+ * @param settings - Configuration for the WHIP input
1969
+ */
1970
+ whip(settings: WhipInputSettings): Promise<WhipInputNode>;
1971
+ /**
1972
+ * Read from a Transport Stream on the network
1973
+ * This can be multicast/unicast or broadcast
1974
+ * @param settings - Configuration for the UDP input
1975
+ */
1976
+ udpTs(settings: RemoteInputSettings<UdpTsInputNode>): Promise<UdpTsInputNode>;
1977
+ fileWebVtt(settings: LocalFileInputSettings): Promise<FileWebVttInputNode>;
1978
+ /**
1979
+ * Read an image from a file. Various image formats are supported, see the
1980
+ * documentation for {@link FileImageInputSettings} for more details.
1981
+ * @param settings - Configuration for the file input
1982
+ *
1983
+ * The image will then be provided into Norsk as a video at 25fps for use
1984
+ * in other operations
1985
+ */
1986
+ fileImage(settings: FileImageInputSettings): Promise<FileImageInputNode>;
1987
+ /**
1988
+ * Read a MP4 (fragmented or not) from a file with realtime playback.
1989
+ * This will not play frames that are written to the file after the node
1990
+ * starts.
1991
+ * @param settings - Configuration for the file input
1992
+ */
1993
+ fileMp4(settings: FileMp4InputSettings): Promise<FileMp4InputNode>;
1994
+ /**
1995
+ * Stream from a remote RTP source
1996
+ * @param settings - Configuration for the RTP input
1997
+ */
1998
+ rtp(settings: RtpInputSettings): Promise<RtpInputNode>;
1999
+ /**
2000
+ * Generate a test video card with a configurable pattern.
2001
+ * @param settings - Configuration for the video test card
2002
+ */
2003
+ videoTestCard(settings: VideoTestcardGeneratorSettings): Promise<VideoTestcardGeneratorNode>;
2004
+ /**
2005
+ * Generate a test audio signal with a configurable waveform.
2006
+ * @param settings - Configuration for the audio signal
2007
+ */
2008
+ audioSignal(settings: AudioSignalGeneratorSettings): Promise<AudioSignalGeneratorNode>;
2009
+ /**
2010
+ * Generates a video source by rendering an HTML page
2011
+ * @param settings - Settings for the web page
2012
+ */
2013
+ browser(settings: BrowserInputSettings): Promise<BrowserInputNode>;
2014
+ /**
2015
+ * SDI/HDMI Input using a BlackMagic DeckLink card.
2016
+ * The available cards on the machine can be enumerated using the {@link NorskSystem.hardwareInfo} API.
2017
+ *
2018
+ * Multiple cards and both SDI and HDMI inputs are supported, with all DeckLink-supported
2019
+ * input resolutions and framerates are supported. The capture format is currently 8-bit only,
2020
+ * but 10-bit captures will be supported soon. All supported audio channels can be captured.
2021
+ * At present, additional data such as closed-captions and HDR metadata is not captured.
2022
+ * @param settings - Settings for the SDI capture
2023
+ */
2024
+ deckLink(settings: DeckLinkInputSettings): Promise<DeckLinkInputNode>;
2025
+ }
2026
+
2027
+ /**
2028
+ * @public
2029
+ * Methods that allow you to egest media from your application
2030
+ */
2031
+ export declare interface NorskOutput {
2032
+ /**
2033
+ * Produces video segments with the supplied settings for use in
2034
+ * HLS or DASH manifests.
2035
+ *
2036
+ * These can optionally be served via the Norsk web server or be pushed
2037
+ * to other locations - see {@link CmafDestinationSettings}
2038
+ *
2039
+ * @param settings - Configuration for the CMAF Video Stream
2040
+ */
2041
+ cmafVideo(settings: CmafOutputSettings): Promise<CmafVideoOutputNode>;
2042
+ /**
2043
+ * Produces audio segments with the supplied settings for use in
2044
+ * HLS or DASH manifests.
2045
+ *
2046
+ * These can optionally be served via the Norsk web server or be pushed
2047
+ * to other locations - see {@link CmafDestinationSettings}
2048
+ *
2049
+ * @param settings - Configuration for the CMAF Audio Stream
2050
+ */
2051
+ cmafAudio(settings: CmafOutputSettings): Promise<CmafAudioOutputNode>;
2052
+ /**
2053
+ * Produces WebVTT segments with the supplied settings for use in
2054
+ * HLS or DASH manifests. These are served via the Norsk web server
2055
+ *
2056
+ * @param settings - Configuration for the CMAF WebVTT Stream
2057
+ */
2058
+ cmafWebVtt(settings: CmafWebVttOutputSettings): Promise<CmafWebVttOutputNode>;
2059
+ /**
2060
+ * Produces a multi variant (used to be known as master) hls and/or dash manifest for a collection of media streams
2061
+ *
2062
+ * This can optionally be served via the Norsk web server or be pushed
2063
+ * to other locations - see {@link CmafDestinationSettings}
2064
+ *
2065
+ * @param settings - Configuration for the CMAF Multi Variant Manifest
2066
+ */
2067
+ cmafMultiVariant(settings: CmafMultiVariantOutputSettings): Promise<CmafMultiVariantOutputNode>;
2068
+ /**
2069
+ * Produces Transport Stream video segments with the supplied settings for use in
2070
+ * HLS manifests and builds a playlist served locally from the Norsk Web Server
2071
+ * or from other locations - see {@link CmafDestinationSettings}
2072
+ *
2073
+ * @param settings - Configuration for the HLS TS Stream
2074
+ */
2075
+ hlsTsVideo(settings: HlsTsVideoOutputSettings): Promise<HlsTsVideoOutputNode>;
2076
+ /**
2077
+ * Produces Transport Stream audio segments with the supplied settings for use in
2078
+ * HLS manifests and builds a playlist served locally from the Norsk Web Server
2079
+ * or from other locations - see {@link CmafDestinationSettings}
2080
+ *
2081
+ * @param settings - Configuration for the HLS TS Stream
2082
+ */
2083
+ hlsTsAudio(settings: HlsTsAudioOutputSettings): Promise<HlsTsAudioOutputNode>;
2084
+ /**
2085
+ * Produces Transport Stream segments containing both video and audio with the supplied settings for use in
2086
+ * HLS manifests and pushes them to the configured location (see {@link CmafDestinationSettings})
2087
+ *
2088
+ * @param settings - Configuration for the HLS TS Stream
2089
+ */
2090
+ hlsTsCombinedPush(settings: HlsTsCombinedPushOutputSettings): Promise<HlsTsCombinedPushOutputNode>;
2091
+ /**
2092
+ * Produces a multi variant HLS TS manifest for a collection of media streams
2093
+ *
2094
+ * This can optionally be served via the Norsk web server or be pushed
2095
+ * to other locations - see {@link CmafDestinationSettings}
2096
+ *
2097
+ * @param settings - Configuration for the Hls Ts Multivariant Playlist
2098
+ */
2099
+ hlsTsMultiVariant(settings: HlsTsMultiVariantOutputSettings): Promise<HlsTsMultiVariantOutputNode>;
2100
+ /**
2101
+ * Produces a Transport Stream optionally containing both video and audio
2102
+ * and sends it out over UDP
2103
+ *
2104
+ * @param settings - Configuration for the TS Stream
2105
+ */
2106
+ udpTs(settings: UdpTsOutputSettings): Promise<UdpTsOutputNode>;
2107
+ /**
2108
+ * Produces a Transport Stream, and allows Norsk to either connect to an existing
2109
+ * SRT server or act as an SRT server itself
2110
+ *
2111
+ * @param settings - Configuration for the SRT Stream
2112
+ */
2113
+ srt(settings: SrtOutputSettings): Promise<SrtOutputNode>;
2114
+ /**
2115
+ * Connects and sends media to a remote server via WebRTC using the WHIP standard.
2116
+ *
2117
+ * Here Norsk acts as the WHIP client sending to a remote Media Server; to
2118
+ * have Norsk act as the Media Server ingesting from some other WHIP client, see
2119
+ * {@link NorskInput.whip}
2120
+ *
2121
+ * @param settings - Configuration for the WebRTC Stream
2122
+ */
2123
+ whip(settings: WhipOutputSettings): Promise<WhipOutputNode>;
2124
+ /**
2125
+ * Hosts media for clients connecting via WebRTC using the WHEP standard.
2126
+ *
2127
+ * To send media to a remote Media Server via WebRTC see {@link NorskOutput.whip}.
2128
+ * See also {@link NorskInput.whip}, {@link NorskDuplex.webRtcBrowser}.
2129
+ *
2130
+ * @param settings - Configuration for the WebRTC Stream
2131
+ */
2132
+ whep(settings: WhepOutputSettings): Promise<WhepOutputNode>;
2133
+ /**
2134
+ * Connects and sends media to a remote RTMP server
2135
+ *
2136
+ * @param settings - Configuration for the WebRTC Stream
2137
+ */
2138
+ rtmp(settings: RtmpOutputSettings): Promise<RtmpOutputNode>;
2139
+ /**
2140
+ * Stream to a Transport Stream file.
2141
+ *
2142
+ * @param settings - Configuration for the Transport Stream output
2143
+ */
2144
+ fileTs(settings: FileTsOutputSettings): Promise<FileTsOutputNode>;
2145
+ /**
2146
+ * Output MP4 files to disk, both fragmented and non-fragmented.
2147
+ *
2148
+ * The fragmented output is required.
2149
+ *
2150
+ * The optional non-fragmented filename will be written when calling
2151
+ * {@link FileMp4OutputNode.close} and will be fully written by the time
2152
+ * {@link NodeSettings.onClose} is called. This sets up a temp file to
2153
+ * store the frame data by appending the extension `.tmp`.
2154
+ *
2155
+ * A non-fragmented MP4 file can be written on request with
2156
+ * {@link FileMp4OutputNode.writeFile}, which uses the frame data store if
2157
+ * {@link FileMp4OutputSettings.nonfragmentedFileName} was given or reads
2158
+ * back the fragmented mp4 if there is no non-fragmented file.
2159
+ *
2160
+ * @param settings - Configuration for the MP4 output.
2161
+ */
2162
+ fileMp4(settings: FileMp4OutputSettings): Promise<FileMp4OutputNode>;
2163
+ }
2164
+
2165
+ /** @public */
2166
+ export declare function norskPort(): string;
2167
+
2168
+ /** @public */
2169
+ export declare class NorskProcessor {
2170
+ /**
2171
+ * Implements the {@link NorskControl} interface
2172
+ */
2173
+ control: NorskControl;
2174
+ /**
2175
+ * Implements the {@link NorskTransform} interface
2176
+ */
2177
+ transform: NorskTransform;
2178
+ close(): Promise<void>;
2179
+ constructor(client: MediaClient);
2180
+ }
2181
+
2182
+ /**
2183
+ * @public
2184
+ * Top level Norsk configuration
2185
+ */
2186
+ export declare interface NorskSettings {
2187
+ /**
2188
+ * Callback URL to listen on for gRPC session with Norsk Media
2189
+ * Defaults to $NORSK_HOST:$NORSK_PORT if the environment variables are set
2190
+ * where NORSK_HOST defaults to "127.0.0.1" and NORSK_PORT to "6790"
2191
+ * (so "127.0.0.1:6790" if neither variable is set)
2192
+ */
2193
+ url?: string;
2194
+ onAttemptingToConnect?: () => void;
2195
+ onConnecting?: () => void;
2196
+ onReady?: () => void;
2197
+ onFailedToConnect?: () => void;
2198
+ /** Code to execute if the Norsk node is shutdown - by default it logs and nothing else */
2199
+ onShutdown?: () => void;
2200
+ onCurrentLoad?: (load: CurrentLoad) => void;
2201
+ onHello?: (version: Version) => void;
2202
+ onLogEvent?: (log: Log) => void;
2203
+ /**
2204
+ * Manually handle license events, such as missing/invalid licenses and
2205
+ * sandbox timeout. (Logs messages to console by default.)
2206
+ */
2207
+ onLicenseEvent?: (message: string) => void;
2208
+ }
2209
+
2210
+ /**
2211
+ * @public
2212
+ * Methods that allow you query the features of the system that Norsk is running in
2213
+ */
2214
+ export declare interface NorskSystem {
2215
+ hardwareInfo(): Promise<HardwareInfo>;
2216
+ }
2217
+
2218
+ /**
2219
+ * @public
2220
+ * Methods that allow you to manipulate your media streams
2221
+ */
2222
+ export declare interface NorskTransform {
2223
+ /**
2224
+ * Encode a video stream to one or more renditions
2225
+ * using either software or appropriate hardware if available
2226
+ * @param settings - Encode ladder settings
2227
+ */
2228
+ videoEncode(settings: VideoEncodeSettings): Promise<VideoEncodeNode>;
2229
+ /**
2230
+ * Transform a single video stream (rescale, frame rate, etc)
2231
+ * @param settings - Transform settings
2232
+ */
2233
+ videoTransform(settings: VideoTransformSettings): Promise<VideoTransformNode>;
2234
+ /**
2235
+ * Interferes with a stream by dropping frames
2236
+ * Why would you want this? Stick one of these after a decoder and before
2237
+ * anything else in order to simulate what the world is going to look like if you
2238
+ * have network problems (packet drops for example) in your ingest
2239
+ *
2240
+ * *Just don't forget to remove it again when you've finished testing!*
2241
+ * @param settings - Chaos monkey settings
2242
+ */
2243
+ streamChaosMonkey(settings: StreamChaosMonkeySettings): Promise<StreamChaosMonkeyNode>;
2244
+ /**
2245
+ * Compose multiple video streams together into a single output
2246
+ * @param settings - Composition settings
2247
+ */
2248
+ videoCompose<Pins extends string>(settings: VideoComposeSettings<Pins> | DeferredVideoComposeSettings<Pins>): Promise<VideoComposeNode<Pins>>;
2249
+ /**
2250
+ * Create a Media Node performing transcription into subtitles using the
2251
+ * Amazon Transcribe AWS service.
2252
+ * @param settings - Settings and credentials for AWS transcribe
2253
+ */
2254
+ audioTranscribeAws(settings: AudioTranscribeAwsSettings): Promise<AudioTranscribeAwsNode>;
2255
+ /**
2256
+ * Create a Media Node performing transcription into subtitles using the
2257
+ * Azure Speech service.
2258
+ * @param settings - Settings and credentials for Azure transcribe
2259
+ */
2260
+ audioTranscribeAzure(settings: AudioTranscribeAzureSettings): Promise<AudioTranscribeAzureNode>;
2261
+ /**
2262
+ * Create a Media Node performing transcription into subtitles using the
2263
+ * Whisper speech recognition model.
2264
+ * @param settings - Settings and credentials for Whisper transcribe
2265
+ */
2266
+ audioTranscribeWhisper(settings: AudioTranscribeWhisperSettings): Promise<AudioTranscribeWhisperNode>;
2267
+ /**
2268
+ * Mix multiple audio streams together into a single output,
2269
+ * with optional gain control on each input.
2270
+ * @param settings - Settings for the mixer, including the gain vectors
2271
+ */
2272
+ audioMix<Pins extends string>(settings: AudioMixSettings<Pins>): Promise<AudioMixNode<Pins>>;
2273
+ /**
2274
+ * Given an audio stream of N channels, mix it down to M channels through a matrix of NxM gains.
2275
+ * @param settings - Settings for the mixer, including the gain matrix
2276
+ */
2277
+ audioMixMatrix(settings: AudioMixMatrixSettings): Promise<AudioMixMatrixNode>;
2278
+ /**
2279
+ * Apply gain to an audio stream
2280
+ * @param settings - Settings for the gain node
2281
+ */
2282
+ audioGain(settings: AudioGainSettings): Promise<AudioGainNode>;
2283
+ /**
2284
+ * Aggregate many single-channel audio streams into a stream with the
2285
+ * specified channel layout. The streams must all have the same sample format
2286
+ * and sample rate. The order of the streams provided for the channels is
2287
+ * important.
2288
+ * @param settings - Settings for the builder, including the channel layout
2289
+ * and stream keys specifying the sources for each channel.
2290
+ */
2291
+ audioBuildMultichannel(settings: AudioBuildMultichannelSettings): Promise<AudioBuildMultichannelNode>;
2292
+ /**
2293
+ * Split a multichannel audio stream into its individual channels. The first
2294
+ * channel receives the specified stream key, and each subsequent channel
2295
+ * increments the stream id on the stream key.
2296
+ * @param settings - Settings for the splitter
2297
+ */
2298
+ audioSplitMultichannel(settings: AudioSplitMultichannelSettings): Promise<AudioSplitMultichannelNode>;
2299
+ /**
2300
+ * Encode an audio stream.
2301
+ * @param settings - Settings for the encoder, including channel layout and
2302
+ * bitrate.
2303
+ */
2304
+ audioEncode(settings: AudioEncodeSettings): Promise<AudioEncodeNode>;
2305
+ /**
2306
+ * A node to nudge the timestamps on a stream, which affects how it syncs
2307
+ * with other streams. Useful for correcting for drift between different
2308
+ * sources.
2309
+ *
2310
+ * Subsequent nudges, via the `nudge` method, are applied gradually.
2311
+ *
2312
+ * This functionality is also provided by a `nudge` method on many sources.
2313
+ * @param settings - Initial nudge plus general node settings.
2314
+ */
2315
+ streamTimestampNudge(settings: StreamTimestampNudgeSettings): Promise<StreamTimestampNudgeNode>;
2316
+ /**
2317
+ * Provide a new stream key for a single stream. Cannot be subscribed to
2318
+ * multiple streams at once.
2319
+ *
2320
+ * The stream key is used for identifying streams within multiplexed sources
2321
+ * and also is translated into URIs for HLS playlists and other resources.
2322
+ *
2323
+ * This can be useful if changing sources and wanting to maintain a consistent
2324
+ * streamkey going into an output
2325
+ * @param settings - New stream key plus general node settings.
2326
+ */
2327
+ streamKeyOverride(settings: StreamKeyOverrideSettings): Promise<StreamKeyOverrideNode>;
2328
+ /**
2329
+ * Override bitrate and language metadata on streams.
2330
+ *
2331
+ * Audio and video bitrate metadata is required for playlists for the
2332
+ * {@link NorskOutput.cmafMultiVariant} node.
2333
+ * It is automatically configured for some sources (like RTMP) and in
2334
+ * cases where re-encoding is done, but is unset for other sources (like SRT).
2335
+ * @param settings - Bitrate and language metadata plus general node settings.
2336
+ */
2337
+ streamMetadataOverride(settings: StreamMetadataOverrideSettings): Promise<StreamMetadataOverrideNode>;
2338
+ /**
2339
+ * Buffer a stream for the specified number of milliseconds. This can be used
2340
+ * to reduce or eliminate jitter.
2341
+ * @param settings - Buffer delay time.
2342
+ */
2343
+ jitterBuffer(settings: JitterBufferSettings): Promise<JitterBufferNode>;
2344
+ /**
2345
+ * Sync multiple streams together by timestamps, queuing frames from streams
2346
+ * that are behind the others. This is already included in most nodes,
2347
+ * especially outputs.
2348
+ */
2349
+ streamSync(settings: StreamSyncSettings): Promise<StreamSyncNode>;
2350
+ /**
2351
+ * This processor does multiple things
2352
+ * - joins together multiple streams from multiple sources
2353
+ * - rebases their timestamps so that they all start at the same point
2354
+ * - sets the program id to a common value
2355
+ *
2356
+ * It is useful for syncing multiple incoming streams that on paper are already synchronised but because
2357
+ * of the time taken to set up connections and subscriptions across various protocols, are off by a few
2358
+ * hundred milliseconds
2359
+ */
2360
+ streamAlign(settings: StreamAlignSettings): Promise<StreamAlignNode>;
2361
+ ancillary(settings: AncillarySettings): Promise<AncillaryNode>;
2362
+ }
2363
+
2364
+ /** @public */
2365
+ export declare interface NumaNode {
2366
+ processors: Processor[];
2367
+ }
2368
+
2369
+ /**
2370
+ * @public
2371
+ * Settings for a H264 Encode using Nvidia hardware
2372
+ * A detailed description of these params can be found
2373
+ * on the Nvidia Encoder Documentation
2374
+ *
2375
+ * If left undefined, all will default to Nvidia's own defaults
2376
+ * If a preset is configured, then all will default to the values provided
2377
+ * by that preset
2378
+ * */
2379
+ export declare interface NvidiaH264 {
2380
+ type: "nv-h264";
2381
+ /** The preset to use for this encode */
2382
+ preset?: NvidiaPreset;
2383
+ /** The IDR period */
2384
+ idrPeriod?: number;
2385
+ /** The gopInterval to use for this encode
2386
+ * Note: This is different from the idrPeriod but usually you want
2387
+ * them set to the same value regardless
2388
+ * */
2389
+ gopInterval?: number;
2390
+ /** This is the gop structure to be used, and again it's best to look this up
2391
+ * in the Nvidia documentation
2392
+ * */
2393
+ frameIntervalP?: number;
2394
+ /** This is somewhat related to the gop structure and again, care should be taken when overriding this from
2395
+ * the preset */
2396
+ maxNumRefFrames?: number;
2397
+ /** The target level of this H264 encode
2398
+ * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
2399
+ * this differs from other codecs which just silently change the outgoing level/profile for example */
2400
+ level?: NvidiaH264Level;
2401
+ /** The target profile of this H264 encode
2402
+ * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
2403
+ * this differs from other codecs which just silently change the outgoing level/profile for example */
2404
+ profile?: NvidiaH264Profile;
2405
+ /** Output Access Unit Delimiters */
2406
+ outputAud?: boolean;
2407
+ /** Rate Control Settings */
2408
+ rateControl?: NvidiaRateControl;
2409
+ }
2410
+
2411
+ /**
2412
+ * @public
2413
+ * See the Nvidia Encoder Docs for a description of this value
2414
+ * */
2415
+ export declare type NvidiaH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2416
+
2417
+ /**
2418
+ * @public
2419
+ * See the Nvidia Encoder Docs for a description of this value
2420
+ * */
2421
+ export declare type NvidiaH264Profile = "baseline" | "main" | "high" | "high444";
2422
+
2423
/**
 * @public
 * Settings for a HEVC Encode using Nvidia hardware
 * A detailed description of these params can be found
 * on the Nvidia Encoder Documentation
 *
 * If left undefined, all will default to Nvidia's own defaults
 * If a preset is configured, then all will default to the values provided
 * by that preset
 * */
export declare interface NvidiaHevc {
    type: "nv-hevc";
    /** The preset to use for this encode */
    preset?: NvidiaPreset;
    /** The IDR period */
    idrPeriod?: number;
    /** The gopInterval to use for this encode
     * Note: This is different from the idrPeriod but usually you want
     * them set to the same value regardless
     * */
    gopInterval?: number;
    /** This is the gop structure to be used, and again it's best to look this up
     * in the Nvidia documentation
     * */
    frameIntervalP?: number;
    /** The target level of this HEVC encode
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
     * this differs from other codecs which just silently change the outgoing level/profile for example */
    level?: NvidiaHevcLevel;
    /** The target profile of this HEVC encode
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
     * this differs from other codecs which just silently change the outgoing level/profile for example */
    profile?: NvidiaHevcProfile;
    /** Output Access Unit Delimiters */
    outputAud?: boolean;
    /** The target tier of this HEVC encode
     * Note: The behaviour of Nvidia is to error out if this is incompatible with the other settings
     * this differs from other codecs which just silently change the outgoing level/profile for example */
    tier?: NvidiaHevcTier;
    /** Rate Control Settings */
    rateControl?: NvidiaRateControl;
}
2465
+
2466
+ /**
2467
+ * @public
2468
+ * See the Nvidia Encoder Docs for a description of this value
2469
+ * */
2470
+ export declare type NvidiaHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2471
+
2472
+ /**
2473
+ * @public
2474
+ * See the Nvidia Encoder Docs for a description of this value
2475
+ * */
2476
+ export declare type NvidiaHevcProfile = "main" | "main10";
2477
+
2478
+ /**
2479
+ * @public
2480
+ * See the Nvidia Encoder Docs for a description of this value
2481
+ * */
2482
+ export declare type NvidiaHevcTier = "main" | "high";
2483
+
2484
+ /**
2485
+ * @public
2486
+ * See the Nvidia Encoder Docs for a description of this value
2487
+ * */
2488
+ export declare type NvidiaPreset = "p1" | "p2" | "p3" | "p4" | "p5" | "p6" | "p7";
2489
+
2490
/**
 * @public
 * The rate control options for an nvidia encode
 * For further info, consult the Nvidia Encoder docs
 * */
export declare interface NvidiaRateControl {
    /** the mode to use for this rate control operation */
    mode: NvidiaRateControlMode;
    /** The average bitrate of this encode */
    averageBitrate: number;
    /** The max bitrate of this encode */
    maxBitrate?: number;
    /** The vbv buffer size used for this encode */
    vbvBufferSize?: number;
    /** The vbv initial delay used for this encode */
    vbvInitialDelay?: number;
    /** Enable lookahead or not:
     * Note: enabling lookahead will introduce scene cuts unless this is specifically disabled
     * elsewhere in the codec settings
     * */
    enableLookahead?: boolean;
    /** Aim to hit the configured GOP structure exactly — assumed from the name;
     * TODO confirm against the Nvidia Encoder docs */
    strictGopTarget?: boolean;
    /** Number of frames of lookahead — presumably only meaningful when
     * enableLookahead is set; confirm against the Nvidia Encoder docs */
    lookaheadDepth?: number;
}

/**
 * @public
 * See the Nvidia Encoder Docs for a description of this value
 * NOTE(review): "con_stqp" looks like a transposition of NVENC's constant-QP
 * mode ("const_qp"); the literal here is what the SDK accepts so it is left
 * as-is — confirm against the implementation.
 * */
export declare type NvidiaRateControlMode = "con_stqp" | "vbr" | "cbr";
2520
+
2521
+ /**
2522
+ * @public
2523
+ * A rectangle used for describing a subset of an image
2524
+ * */
2525
+ export declare interface OffsetRect {
2526
+ /** The leftmost coordinate of the rect, where 0,0 is top left */
2527
+ x: number;
2528
+ /** The topmost coordinate of the rect, where 0,0 is top left */
2529
+ y: number;
2530
+ /** the width of this rectangle */
2531
+ width: number;
2532
+ /** the height of this rectangle */
2533
+ height: number;
2534
+ }
2535
+
2536
+ /**
2537
+ * @public
2538
+ * Return type to enable control of an RTMP stream once media arrives on it
2539
+ */
2540
+ export declare type OnStreamResult =
2541
+ /** Accept the stream */
2542
+ {
2543
+ accept: true;
2544
+ videoStreamKey: StreamKey_2 | StreamKeySettings;
2545
+ audioStreamKey: StreamKey_2 | StreamKeySettings;
2546
+ }
2547
+ /** Reject the stream */
2548
+ | {
2549
+ accept: false;
2550
+ reason: string;
2551
+ };
2552
+
2553
+ /**
2554
+ * @public
2555
+ * Settings for an Opus encode
2556
+ * see: {@link NorskTransform.audioEncode}
2557
+ * */
2558
+ export declare interface OpusSettings {
2559
+ kind: "opus";
2560
+ }
2561
+
2562
+ /** @public
2563
+ * A transition for a video composition part.
2564
+ *
2565
+ * A transition interpolates the source_rect, dest_rect, and opacity properties
2566
+ * over the specified duration according to the specified easing function.
2567
+ *
2568
+ * As a special case, if a transition is specified and the input pin of the part
2569
+ * changes, an opacity fade from one to the other will occur.
2570
+ */
2571
+ export declare interface PartTransition {
2572
+ /** Duration for the transition */
2573
+ durationMs: number;
2574
+ /**
2575
+ * Easing function to apply to the transition. If not specified will be
2576
+ * linear.
2577
+ */
2578
+ easing?: SimpleEasing;
2579
+ }
2580
+
2581
/** Test-card pattern: solid black or SMPTE colour bars at 75%/100% intensity.
 * (Internal, not exported — presumably consumed by a test-source settings type
 * elsewhere in this file; confirm against the full rollup.) */
declare type Pattern = "black" | "smpte75" | "smpte100";
2582
+
2583
+ /** @public */
2584
+ export declare type PinToKey<Pins extends string> = Nullable<Partial<Record<Pins, StreamKey[]>>>;
2585
+
2586
+ /** @public */
2587
+ export declare type PixelFormat = "bgra" | "rgba" | "yuv420p" | "yuv422p" | "yuv444p" | "yuva420p" | "yuva422p" | "yuva444p";
2588
+
2589
+ /** @public */
2590
+ declare enum PlaylistPath {
2591
+ Cmaf = 0,
2592
+ Ts = 1
2593
+ }
2594
+
2595
+ /**
2596
+ * @public
2597
+ * Returns the stream keys for playlist streams in a media context
2598
+ * @param streams - The media context from which to return the stream keys
2599
+ * @returns The playlist stream keys in the media context
2600
+ */
2601
+ export declare function playlistStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
2602
+
2603
+ /** @public */
2604
+ export declare interface PlaylistStreamMetadata {
2605
+ }
2606
+
2607
+ /**
2608
+ * @public
2609
+ * Filters a context to only the playlist streams within it
2610
+ * @param streams - The media context from which to return the streams
2611
+ * @returns The playlist streams in the media context
2612
+ */
2613
+ export declare function playlistStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
2614
+
2615
+ /** @public */
2616
+ export declare interface Processor {
2617
+ cores: Core[];
2618
+ }
2619
+
2620
+ export declare interface ProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
2621
+ }
2622
+
2623
+ export declare class ProcessorMediaNode<Pins extends string> {
2624
+ constructor(client: MediaClient, unregisterNode: (node: MediaNodeState) => void, getGrpcStream: () => (Readable | Writable), subscribeFn: (subscription: Subscription) => Promise<boolean>, subscribeErrorFn?: (error: SubscriptionError) => void, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
2625
+ }
2626
+
2627
+ /** @public */
2628
+ export declare interface ProcessorNodeSettings<T extends MediaNodeState> extends SinkNodeSettings<T>, SourceNodeSettings<T> {
2629
+ }
2630
+
2631
+ /** @public */
2632
+ export declare function publicUrlPrefix(): string;
2633
+
2634
+ /**
2635
+ * @public
2636
+ * Settings for a H264 Encode using Netint Quadra hardware
2637
+ * A detailed description of these params can be found
2638
+ * on the Netint Quadra Encoder Documentation
2639
+ *
2640
+ * These fields have deliberately been written to maintain the same semantics as the
2641
+ * Quadra documentation where possible.
2642
+ *
2643
+ * If left undefined, all will default to Quadra's own defaults
2644
+ * */
2645
+ export declare interface QuadraH264 {
2646
+ type: "quadra-h264";
2647
+ /** This (for convenience) takes the xcoder string that Quadra's
2648
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
2649
+ * quickly and will override any values set manually in the rest of this interface.
2650
+ *
2651
+ * It is expected that developers will choose to use the typed fields for most things instead
2652
+ * when moving to production, as they offer a degree of validation and type safety
2653
+ * */
2654
+ extraOpts?: string;
2655
+ enableAud?: boolean;
2656
+ gpuIndex?: number;
2657
+ bitrate?: number;
2658
+ enableVfr?: boolean;
2659
+ crf?: number;
2660
+ gopPresetIndex?: number;
2661
+ intraPeriod?: number;
2662
+ rcEnable?: boolean;
2663
+ intraQp?: number;
2664
+ rcInitDelay?: number;
2665
+ profile?: QuadraH264Profile;
2666
+ level?: QuadraH264Level;
2667
+ fillerEnable?: boolean;
2668
+ minQp?: number;
2669
+ maxQp?: number;
2670
+ maxDeltaQp?: number;
2671
+ cuLevelRCEnable?: boolean;
2672
+ lookAheadDepth?: number;
2673
+ vbvBufferSize?: number;
2674
+ vbvMaxRate?: number;
2675
+ }
2676
+
2677
+ /** @public */
2678
+ export declare type QuadraH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2679
+
2680
+ /** @public */
2681
+ export declare type QuadraH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
2682
+
2683
+ /**
2684
+ * @public
2685
+ * Settings for a HEVC Encode using Netint Quadra hardware
2686
+ * A detailed description of these params can be found
2687
+ * on the Netint Quadra Encoder Documentation
2688
+ *
2689
+ * These fields have deliberately been written to maintain the same semantics as the
2690
+ * Quadra documentation where possible.
2691
+ *
2692
+ * If left undefined, all will default to Quadra's own defaults
2693
+ * */
2694
+ export declare interface QuadraHevc {
2695
+ type: "quadra-hevc";
2696
+ /** This (for convenience) takes the xcoder string that Quadra's
2697
+ * Ffmpeg integration accepts, this is to aid developers in getting up and running
2698
+ * quickly and will override any values set manually in the rest of this interface.
2699
+ *
2700
+ * It is expected that developers will choose to use the typed fields for most things instead
2701
+ * when moving to production, as they offer a degree of validation and type safety
2702
+ * */
2703
+ extraOpts?: string;
2704
+ enableAud?: boolean;
2705
+ gpuIndex?: number;
2706
+ bitrate?: number;
2707
+ enableVfr?: boolean;
2708
+ crf?: number;
2709
+ gopPresetIndex?: number;
2710
+ intraPeriod?: number;
2711
+ rcEnable?: boolean;
2712
+ intraQp?: number;
2713
+ rcInitDelay?: number;
2714
+ profile?: QuadraHevcProfile;
2715
+ level?: QuadraHevcLevel;
2716
+ tier?: QuadraHevcTier;
2717
+ lossless?: boolean;
2718
+ hrdEnable?: boolean;
2719
+ dolbyVisionProfile?: number;
2720
+ fillerEnable?: boolean;
2721
+ minQp?: number;
2722
+ maxQp?: number;
2723
+ maxDeltaQp?: number;
2724
+ cuLevelRCEnable?: boolean;
2725
+ lookAheadDepth?: number;
2726
+ vbvBufferSize?: number;
2727
+ vbvMaxRate?: number;
2728
+ }
2729
+
2730
+ /** @public */
2731
+ export declare type QuadraHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
2732
+
2733
+ /** @public */
2734
+ export declare type QuadraHevcProfile = "main" | "main10";
2735
+
2736
+ /** @public */
2737
+ export declare type QuadraHevcTier = "main" | "high";
2738
+
2739
+ /** @public */
2740
+ export declare type ReceiveFromAddress<Pins extends string> = {
2741
+ source: SourceMediaNode;
2742
+ sourceSelector: (streams: StreamMetadata[]) => PinToKey<Pins>;
2743
+ };
2744
+
2745
+ /** @public */
2746
+ export declare type ReceiveFromAddressAuto = {
2747
+ source: SourceMediaNode;
2748
+ sourceSelector: (streams: StreamMetadata[]) => StreamKey[];
2749
+ };
2750
+
2751
+ /**
2752
+ * @public
2753
+ * Base settings for any input node requiring access to a host:port pair
2754
+ * */
2755
+ export declare interface RemoteInputSettings<T extends MediaNodeState> extends InputSettings<T> {
2756
+ /** The IP of the remote server*/
2757
+ ip: string;
2758
+ /** The port the remote server is listening on*/
2759
+ port: number;
2760
+ }
2761
+
2762
+ /**
2763
+ * @public
2764
+ * Validation function to require at least one audio and at least one video stream. Often the default validation
2765
+ * will happen to ensure this, as audio and video are subscribed from separate media nodes, but when one media node
2766
+ * will produce both audio and video, default validation cannot know that both are required.
2767
+ */
2768
+ export declare function requireAV(ctx: Context): boolean;
2769
+
2770
+ /**
2771
+ * @public
2772
+ * Validation function to require exactly N audio and exactly M video streams. Often the default validation
2773
+ * will happen to ensure this, as audio and video are subscribed from separate media nodes, but when one media node
2774
+ * will produce both audio and video, default validation cannot know that both are required.
2775
+ */
2776
+ export declare function requireExactAV({ audio, video }: {
2777
+ audio: number;
2778
+ video: number;
2779
+ }): (ctx: Context) => boolean;
2780
+
2781
+ /**
2782
+ * @public
2783
+ * The resolution of a video within Norsk
2784
+ * */
2785
+ export declare interface Resolution {
2786
+ width: number;
2787
+ height: number;
2788
+ }
2789
+
2790
+ /**
2791
+ * @public
2792
+ * see: {@link NorskOutput.rtmp}
2793
+ */
2794
+ export declare class RtmpOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
2795
+ }
2796
+
2797
/**
 * @public
 * The settings for an RTMP output
 * see: {@link NorskOutput.rtmp}
 * */
export declare interface RtmpOutputSettings extends SinkNodeSettings<RtmpOutputNode>, StreamStatisticsMixin {
    /**
     * The URL of the remote RTMP server to connect to, including the full stream path and credentials
     */
    url: string;
    /** Jitter buffer delay in milliseconds */
    bufferDelayMs?: number;
    /** Called when the RTMP output successfully connects to a server and starts publishing data */
    onPublishStart?: () => void;
}
2812
+
2813
+ /**
2814
+ * @public
2815
+ * see: {@link NorskInput.rtmpServer}
2816
+ */
2817
+ export declare class RtmpServerInputNode extends SourceMediaNode {
2818
+ /**
2819
+ * @public
2820
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
2821
+ * */
2822
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
2823
+ }
2824
+
2825
/**
 * @public
 * Settings to control how RTMP streams can be included as sources in your media workflow
 * see: {@link NorskInput.rtmpServer}
 */
export declare interface RtmpServerInputSettings extends SourceNodeSettings<RtmpServerInputNode>, StreamStatisticsMixin {
    /** The port the RTMP server should listen on */
    port?: number;
    /**
     * On connect callback, use to accept/reject connections given app/url in use
     * @eventProperty
     */
    onConnection?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The RTMP "app" field from the connection string */
    app: string,
    /** The full URL of the RTMP connection string */
    url: string) => {
        accept: true;
    } | {
        accept: false;
        reason?: string;
    };
    /**
     * On stream callback, set up the stream keys for a given stream or reject the stream
     * @eventProperty
     */
    onStream?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The RTMP "app" field from the connection string */
    app: string,
    /** The full URL of the RTMP connection string */
    url: string,
    /** The Norsk streamId of this media stream */
    streamId: number,
    /** The RTMP publishing name of the stream.
     * NOTE(review): upstream comment was "TODO - publishingName" — presumably the
     * stream-key portion of the publish URL; confirm against the implementation */
    publishingName: string) => OnStreamResult;
    /**
     * Called when the connection status has changed (e.g. when the RTMP connection drops)
     * @eventProperty
     */
    onConnectionStatusChange?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The new connection state */
    status: RtmpServerInputStatus,
    /** The audio and video stream keys that were present in the stream at the time of the status change */
    streamKeys: {
        audioStreamKey: StreamKey_2;
        videoStreamKey: StreamKey_2;
    }[]) => void;
    /**
     * Called when media on a connection cannot be ingested (unsupported audio or
     * video, per the error union below)
     * @eventProperty
     */
    onConnectionError?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The error */
    error: RtmpError_UnsupportedVideo | RtmpError_UnsupportedAudio) => void;
    /**
     * Reports the peer's bytes-read acknowledgement for a connection
     * @eventProperty
     */
    onConnectionBytesRead?: (
    /** The connection ID, unique to this RtmpServer node */
    connectionId: string,
    /** The number of bytes read, as reported by the peer */
    bytesRead: bigint) => void;
}
2889
+
2890
+ /** @public */
2891
+ export declare type RtmpServerInputStatus = "disconnected";
2892
+
2893
+ /**
2894
+ * @public
2895
+ * The stream keys in an RTMP input stream
2896
+ */
2897
+ export declare type RtmpServerStreamKeys = {
2898
+ audioStreamKey: StreamKey_2;
2899
+ videoStreamKey: StreamKey_2;
2900
+ }[];
2901
+
2902
/**
 * @public
 * A description of an Eac3 stream being delivered via RTP
 * */
export declare interface RtpEac3 {
    kind: "eac3";
    /** The clock rate of the stream */
    clockRate: number;
    /** The language code (this will end up in outgoing metadata). RFC 5646 language tag. */
    languageCode?: string;
    /** Whether the E-AC-3 extension substream is present — assumed from the name;
     * TODO confirm against the E-AC-3 RTP payload parameters */
    ec3Extension: boolean;
    /** Decoder complexity index — presumably the SDP complexity parameter of the
     * E-AC-3 RTP payload format; confirm */
    complexityIndex: number;
}
2915
+
2916
+ /**
2917
+ * @public
2918
+ * A description of an H264 stream delivered over RTP
2919
+ * */
2920
+ export declare interface RtpH264 {
2921
+ kind: "h264";
2922
+ /** The clock rate of the stream */
2923
+ clockRate: number;
2924
+ }
2925
+
2926
+ /**
2927
+ * @public
2928
+ * A description of an HEVC stream delivered over RTP
2929
+ * */
2930
+ export declare interface RtpHEVC {
2931
+ kind: "hevc";
2932
+ /** The clock rate of the stream */
2933
+ clockRate: number;
2934
+ }
2935
+
2936
+ /**
2937
+ * @public
2938
+ * see: {@link NorskInput.rtp}
2939
+ */
2940
+ export declare class RtpInputNode extends SourceMediaNode {
2941
+ /**
2942
+ * @public
2943
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
2944
+ * */
2945
+ nudge(nudge: number): void;
2946
+ }
2947
+
2948
+ /**
2949
+ * @public
2950
+ * Settings for an RTP input
2951
+ * see: {@link NorskInput.rtp}
2952
+ * */
2953
+ export declare interface RtpInputSettings extends SourceNodeSettings<RtpInputNode>, StreamStatisticsMixin {
2954
+ sourceName: string;
2955
+ streams: readonly RtpStreamSettings[];
2956
+ }
2957
+
2958
/**
 * @public
 * A description of a LinearPCM stream being delivered via RTP
 * */
export declare interface RtpLinearPcm {
    kind: "linearpcm";
    /** The sample rate of the stream */
    sampleRate: SampleRate;
    /** The channel layout of the stream */
    channelLayout: ChannelLayout;
    /** The bit depth of the stream */
    bitDepth: RtpLinearPcmBitDepth;
}
2971
+
2972
+ /** @public */
2973
+ export declare type RtpLinearPcmBitDepth = 16 | 24;
2974
+
2975
+ /**
2976
+ * @public
2977
+ * A description of a Mpeg4 Generic Aac stream
2978
+ * */
2979
+ export declare interface RtpMpeg4GenericAacHbr {
2980
+ kind: "mpeg4-generic-aac-hbr";
2981
+ config: string;
2982
+ }
2983
+
2984
+ /**
2985
+ * @public
2986
+ * A description of an incoming RTP stream
2987
+ * */
2988
+ export declare interface RtpStreamSettings {
2989
+ /** A streamID to assign to the outgoing stream key */
2990
+ streamId: number;
2991
+ /** The IP Address to join the RTP stream on */
2992
+ ip: string;
2993
+ /** The interface to bind to, "loopback" and "any" are special cases
2994
+ * and anything else will be interpreted as the name of a network interface */
2995
+ iface: string;
2996
+ /** The port to connect to for the RTP stream itself */
2997
+ rtpPort: number;
2998
+ /** The port to connect to for the associated RTCP stream */
2999
+ rtcpPort: number;
3000
+ /** A description of the stream being joined */
3001
+ streamType: RtpLinearPcm | RtpEac3 | RtpMpeg4GenericAacHbr | RtpH264 | RtpHEVC;
3002
+ }
3003
+
3004
+ /**
3005
+ * @public
3006
+ * This is the SAR/PAR for a video stream and is an expression of what shape each pixel has within a video stream
3007
+ * x:1, y:1 being a square and the most common value for this
3008
+ * */
3009
+ export declare interface SampleAspectRatio {
3010
+ x: number;
3011
+ y: number;
3012
+ }
3013
+
3014
+ /** @public */
3015
+ export declare type SampleFormat =
3016
+ /** Signed 16 bits, non-planar */
3017
+ "s16"
3018
+ /** Signed 16 bits, planar */
3019
+ | "s16p"
3020
+ /** 32bit floating point, non-planar */
3021
+ | "flt"
3022
+ /** 32bit floating point, planar */
3023
+ | "fltp";
3024
+
3025
+ /** @public Audio sample rate, in Hz */
3026
+ export declare type SampleRate = 8000 | 11025 | 12000 | 16000 | 22050 | 24000 | 32000 | 44100 | 48000 | 64000 | 88200 | 96000;
3027
+
3028
+ /** @public */
3029
+ export declare function selectAudio(streams: readonly StreamMetadata[]): StreamKey[];
3030
+
3031
+ /**
3032
+ * @public
3033
+ * Select all the audio and video streams from the input
3034
+ * @param streams - The streams from the inbound Context
3035
+ * @returns Array of selected StreamKeys
3036
+ */
3037
+ export declare function selectAV(streams: readonly StreamMetadata[]): StreamKey[];
3038
+
3039
+ /** @public */
3040
+ export declare function selectExactKey(key: StreamKey): (streams: readonly StreamMetadata[]) => StreamKey[];
3041
+
3042
+ /** @public */
3043
+ export declare function selectPlaylist(streams: readonly StreamMetadata[]): StreamKey[];
3044
+
3045
+ /** @public */
3046
+ export declare function selectSubtitles(streams: readonly StreamMetadata[]): StreamKey[];
3047
+
3048
+ /** @public */
3049
+ export declare function selectVideo(streams: readonly StreamMetadata[]): StreamKey[];
3050
+
3051
+ /** @public */
3052
+ export declare function selectVideoRendition(renditionName: string): (streams: readonly StreamMetadata[]) => StreamKey[];
3053
+
3054
+ /** @public */
3055
+ export declare type SentenceBuildMode = "raw" | "stable" | "partial" | "complete";
3056
+
3057
+ /** @public */
3058
+ export declare type SimpleEasing = "linear" | "ease_in" | "ease_in_out" | "ease_out";
3059
+
3060
+ /** @public */
3061
+ export declare interface SingleStreamStatistics extends StreamStatistics {
3062
+ streamKey: StreamKey;
3063
+ metadata: StreamMetadataMessage;
3064
+ }
3065
+
3066
/** @public */
export declare class SinkMediaNode<Pins extends string> extends MediaNodeState implements SubscribeDestination {
    permissiveSubscriptionValidation(_context: Context): SubscriptionValidationResponse;
    restrictiveSubscriptionValidation(context: Context): SubscriptionValidationResponse;
    /** Subscribe to the given sources, discarding any existing subscriptions.
     *
     * This version of the function call accepts the target pins of an output
     * and is suitable for advanced use where a node is capable of subscribing to
     * multiple video streams and provides a means of distinguishing them via pins.
     *
     * @param done - will be called with no arguments if the subscription succeeds,
     * or an error if it failed. This error indicates the specific reason it
     * failed, so you can take appropriate actions in response. It will be called
     * before the `subscribedStreamsChangedFn` or `subscribeErrorFn` callbacks
     * provided in the config for the node.
     *
     * Errors are also logged to the debug log.
     */
    subscribeToPins(sources: ReceiveFromAddress<Pins>[], validation?: (context: Context) => SubscriptionValidationResponse, done?: (error?: SubscriptionError) => void): void;
    sourceContextChange(responseCallback: (error?: SubscriptionError) => void): Promise<boolean>;
    finalise(): void;
}
3089
+
3090
+ /** @public */
3091
+ export declare interface SinkNodeSettings<T extends MediaNodeState> extends NodeSettings<T> {
3092
+ onSubscriptionError?: (error: SubscriptionError) => void;
3093
+ }
3094
+
3095
/** A single SMPTE ST 2038 message: a vertical ancillary (VANC) data packet
 * carried in a transport stream. Field semantics below are assumed from the
 * ST 2038 packet layout — confirm against the specification. */
export declare interface Smpte2038Message {
    /** True when the ANC data is carried in the chroma (C) channel rather than luma (Y) */
    cNotYChannelFlag: boolean;
    /** The video line number the ANC packet is associated with */
    lineNumber: number;
    /** Horizontal offset of the ANC packet within the line */
    horizontalOffset: number;
    payloadFormat: VancPayloadFormat;
    ancillaryId: VancType2AncillaryId;
    /** The raw user-data words of the ANC packet */
    userData: Uint8Array;
}
3103
+
3104
+ /** @public */
3105
+ export declare class SourceMediaNode extends MediaNodeState {
3106
+ outputStreams: StreamMetadata[];
3107
+ registerForContextChange(subscriber: SubscribeDestination): void;
3108
+ unregisterForContextChange(subscriber: SubscribeDestination): void;
3109
+ }
3110
+
3111
+ /** @public */
3112
+ export declare interface SourceNodeSettings<T extends MediaNodeState> extends NodeSettings<T> {
3113
+ onOutboundContextChange?: (streams: StreamMetadata[]) => Promise<void>;
3114
+ }
3115
+
3116
+ /**
3117
+ * @public
3118
+ * Errors found while subscribing to a particular source, separated out by reason:
3119
+ *
3120
+ * - `internal`: An opaque internal error
3121
+ *
3122
+ * - `unknownSourceId`: The media node does not exist (maybe it crashed)
3123
+ *
3124
+ * - `unknownSourceStream`: The media node exists, but does not have the stream key
3125
+ *
3126
+ * - `noSubscriberPin`: The media node is not set up to receive data on this pin (which may be auto-detected)
3127
+ *
3128
+ * - `unsupportedConversion`: Norsk does not support conversion from the media types of the source to the media types accepted by the subscriber
3129
+ */
3130
+ export declare type SourceSubscriptionError = {
3131
+ info: string;
3132
+ reason: "internal";
3133
+ } | {
3134
+ mediaNodeId: MediaNodeId;
3135
+ reason: "unknownSourceId";
3136
+ } | {
3137
+ mediaNodeId: MediaNodeId;
3138
+ streamKey: StreamKey;
3139
+ reason: "unknownSourceStream";
3140
+ } | {
3141
+ mediaNodeId: MediaNodeId;
3142
+ streamKey: StreamKey;
3143
+ pin: string;
3144
+ subscriberPins: string[];
3145
+ reason: "noSubscriberPin";
3146
+ } | {
3147
+ mediaNodeId: MediaNodeId;
3148
+ streamKey: StreamKey;
3149
+ sourceTypes: string[];
3150
+ subscriberTypes: string[];
3151
+ reason: "unsupportedConversion";
3152
+ };
3153
+
3154
+ /** @public */
3155
+ export declare function sourceToPin<Pins extends string>(source: string, pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
3156
+
3157
+ /**
3158
+ * @public
3159
+ * The return value for the {@link SrtInputSettings.onConnection} callback
3160
+ * determining what to do with an incoming stream
3161
+ */
3162
+ export declare type SrtConnectionResult =
3163
+ /** Accept the stream */
3164
+ {
3165
+ accept: true;
3166
+ /** The source name to assign to the connection */
3167
+ sourceName: string;
3168
+ }
3169
+ /** Reject the stream */
3170
+ | {
3171
+ accept: false;
3172
+ };
3173
+
3174
+ /**
3175
+ * @public
3176
+ * see: {@link NorskInput.srt}
3177
+ */
3178
+ export declare class SrtInputNode extends SourceMediaNode {
3179
+ /**
3180
+ * @public
3181
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
3182
+ * */
3183
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
3184
+ /**
3185
+ * @public
3186
+ * Closes a connected stream as specified by 'streamIndex'
3187
+ * @param streamIndex - the index of the stream to be terminated
3188
+ * */
3189
+ closeStream(streamIndex: number): void;
3190
+ }
3191
+
3192
+ /**
3193
+ * @public
3194
+ * Settings for an SRT Input node
3195
+ * see: {@link NorskInput.srt}
3196
+ */
3197
+ export declare interface SrtInputSettings extends RemoteInputSettings<SrtInputNode>, StreamStatisticsMixin {
3198
+ /**
3199
+ * The mode to act in (caller or listener)
3200
+ */
3201
+ mode: SrtMode;
3202
+ /**
3203
+ * Passphrase for encryption
3204
+ */
3205
+ passphrase?: string;
3206
+ /**
3207
+ * Stream ID to set on the socket when acting in caller mode
3208
+ */
3209
+ streamId?: string;
3210
+ /**
3211
+ * On connect callback, notifying that a new caller has connected (in listener mode) and set the source name accordingly
3212
+ * @eventProperty
3213
+ */
3214
+ onConnection?: (
3215
+ /** The stream_id sent on the SRT socket (or empty if none was set) */
3216
+ streamId: string,
3217
+ /**
3218
+ * Identifier indicating which connection this message refers to (for a
3219
+ * listener which may have multiple connections)
3220
+ */
3221
+ index: number,
3222
+ /** The address of the remote host */
3223
+ remoteHost: string) => SrtConnectionResult;
3224
+ /**
3225
+ * Called when the connection status has changed (e.g. when the SRT socket is closed)
3226
+ * @eventProperty
3227
+ */
3228
+ onConnectionStatusChange?: (
3229
+ /** The new connection state */
3230
+ status: SrtInputStatus,
3231
+ /** The source name assigned to the connection which changed status */
3232
+ sourceName: string | undefined) => void;
3233
+ }
3234
+
3235
+ /** @public */
3236
+ export declare type SrtInputStatus = "disconnected";
3237
+
3238
+ /** @public */
3239
+ export declare type SrtMode = "listener" | "caller";
3240
+
3241
+ /**
3242
+ * @public
3243
+ * see: {@link NorskOutput.srt}
3244
+ */
3245
+ export declare class SrtOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
3246
+ }
3247
+
3248
+ /**
3249
+ * @public
3250
+ * The settings for an SRT output
3251
+ * see: {@link NorskOutput.srt}
3252
+ * */
3253
+ export declare interface SrtOutputSettings extends SinkNodeSettings<SrtOutputNode>, StreamStatisticsMixin {
3254
+ /**
3255
+ * Passphrase for encryption
3256
+ */
3257
+ passphrase?: string;
3258
+ /**
3259
+ * Stream ID to set on the socket when acting in caller mode
3260
+ */
3261
+ streamId?: string;
3262
+ /**
3263
+ * The mode to act in (see {@link SrtMode})
3264
+ */
3265
+ mode: SrtMode;
3266
+ /**
3267
+ * The IP address to listen on in listener mode, or to connect to in caller mode
3268
+ */
3269
+ ip: string;
3270
+ /**
3271
+ * The port to listen on in listener mode, or to connect to in caller mode
3272
+ */
3273
+ port: number;
3274
+ /** Jitter buffer delay in milliseconds */
3275
+ bufferDelayMs?: number;
3276
+ /**
3277
+ * On connect callback, notifying that a new caller has connected (in listener mode) and providing the stream_id that was set on the socket
3278
+ * @eventProperty
3279
+ */
3280
+ onConnection?: (
3281
+ /** The stream_id sent on the SRT socket (or empty if none was set) */
3282
+ streamId: string,
3283
+ /** The stream index (count of connections that have been made) */
3284
+ streamIndex: number,
3285
+ /** The remote host address */
3286
+ remoteHost: string) => void;
3287
+ }
3288
+
3289
+ /** @public */
3290
+ export declare type StabilizationMode = "low" | "medium" | "high";
3291
+
3292
+ /**
3293
+ * @public
3294
+ * see: {@link NorskTransform.streamSync}
3295
+ */
3296
+ export declare class StreamAlignNode extends AutoProcessorMediaNode<"audio" | "video"> {
3297
+ }
3298
+
3299
+ /**
3300
+ * @public
3301
+ * Settings for a StreamAlign node
3302
+ * This will reset all streams to the same framerates/sample rates
3303
+ * and align their timestamps so that they completely line up for downstream operations
3304
+ * see {@link NorskTransform.streamAlign}
3305
+ * */
3306
+ export declare interface StreamAlignSettings extends ProcessorNodeSettings<StreamAlignNode> {
3307
+ sampleRate: SampleRate;
3308
+ frameRate: FrameRate;
3309
+ }
3310
+
3311
+ /**
3312
+ * @public
3313
+ * see: {@link NorskTransform.streamChaosMonkey}
3314
+ */
3315
+ export declare class StreamChaosMonkeyNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3316
+ }
3317
+
3318
+ /**
3319
+ * @public
3320
+ * The settings for a Chaos Monkey
3321
+ * see: {@link NorskTransform.streamChaosMonkey}
3322
+ * */
3323
+ export declare interface StreamChaosMonkeySettings extends ProcessorNodeSettings<StreamChaosMonkeyNode> {
3324
+ /** Optional configuration to drop frames from a stream
3325
+ * leaving this undefined means don't drop any frames
3326
+ * */
3327
+ frameDrop?: DropRandom | DropEvery | DropStart;
3328
+ /**
3329
+ * Introduce random jitter
3330
+ */
3331
+ jitterMs?: number;
3332
+ }
3333
+
3334
+ /** @public */
3335
+ export declare interface StreamKey {
3336
+ streamId: number;
3337
+ programNumber: number;
3338
+ sourceName: string;
3339
+ renditionName: string;
3340
+ }
3341
+
3342
+ /**
3343
+ * @public
3344
+ * see: {@link NorskTransform.streamKeyOverride}
3345
+ */
3346
+ export declare class StreamKeyOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3347
+ }
3348
+
3349
+ /**
3350
+ * @public
3351
+ * Settings for a Stream Key Override
3352
+ * see: {@link NorskTransform.streamKeyOverride}
3353
+ * */
3354
+ export declare interface StreamKeyOverrideSettings extends ProcessorNodeSettings<StreamKeyOverrideNode> {
3355
+ /** The stream key that all frames passing through this node will be assigned */
3356
+ streamKey: StreamKey;
3357
+ }
3358
+
3359
+ /**
3360
+ * Compares two stream keys by value, returning true if the stream keys refer to the same stream
3361
+ */
3362
+ export declare function streamKeysAreEqual(l: StreamKey, r: StreamKey): unknown;
3363
+
3364
+ /** @public */
3365
+ export declare interface StreamKeySettings {
3366
+ /** Source name. Default: the rtmp app */
3367
+ sourceName?: string;
3368
+ /** Program number. Default: 1 */
3369
+ programNumber?: number;
3370
+ /** Stream Id. Default: 1 for audio, 2 for video */
3371
+ streamId?: number;
3372
+ /** Rendition name. Default: the stream publishing name */
3373
+ renditionName?: string;
3374
+ }
3375
+
3376
+ /** @public */
3377
+ export declare interface StreamMetadata {
3378
+ streamKey?: StreamKey;
3379
+ message: StreamMetadataMessage;
3380
+ }
3381
+
3382
+ /** @public */
3383
+ export declare type StreamMetadataMessage = {
3384
+ case: "audio";
3385
+ value: AudioStreamMetadata;
3386
+ } | {
3387
+ case: "video";
3388
+ value: VideoStreamMetadata;
3389
+ } | {
3390
+ case: "subtitle";
3391
+ value: SubtitleStreamMetadata;
3392
+ } | {
3393
+ case: "playlist";
3394
+ value: PlaylistStreamMetadata;
3395
+ } | {
3396
+ case: "ancillary";
3397
+ value: AncillaryStreamMetadata;
3398
+ } | {
3399
+ case: undefined;
3400
+ value?: undefined;
3401
+ };
3402
+
3403
+ /**
3404
+ * @public
3405
+ * see: {@link NorskTransform.streamMetadataOverride}
3406
+ */
3407
+ export declare class StreamMetadataOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
3408
+ /**
3409
+ * @public
3410
+ * Updates the config used by this metadata override node for all subsequent frames
3411
+ * @param settings - The new settings
3412
+ */
3413
+ updateConfig(settings: StreamMetadataOverrideSettingsUpdate): void;
3414
+ }
3415
+
3416
+ /**
3417
+ * @public
3418
+ * Settings for a Stream Key Metadata Override Node
3419
+ * see: {@link NorskTransform.streamMetadataOverride}
3420
+ * */
3421
+ export declare interface StreamMetadataOverrideSettings extends ProcessorNodeSettings<StreamMetadataOverrideNode>, StreamMetadataOverrideSettingsUpdate {
3422
+ }
3423
+
3424
+ /** @public */
3425
+ export declare interface StreamMetadataOverrideSettingsUpdate {
3426
+ video?: {
3427
+ /** Override the bitrate metadata of a compressed video stream, or `0` to clear */
3428
+ bitrate?: number;
3429
+ };
3430
+ audio?: {
3431
+ /** Override the bitrate metadata of a compressed audio stream, or `0` to clear */
3432
+ bitrate?: number;
3433
+ /** Override the language metadata of an audio stream, or `""` to clear. RFC 5646 language tag. */
3434
+ language?: string;
3435
+ };
3436
+ subtitles?: {
3437
+ /** Override the language metadata of a subtitles stream, or `""` to clear. RFC 5646 language tag. */
3438
+ language?: string;
3439
+ };
3440
+ }
3441
+
3442
+ /** @public */
3443
+ export declare interface StreamStatistics {
3444
+ /** The size of the sample window in seconds */
3445
+ sampleSizeSeconds: number;
3446
+ /** The number of bits over the sample window */
3447
+ bitsForSample: number;
3448
+ /** The bitrate, in bits per second */
3449
+ bitrate: number;
3450
+ /** The number of frames over the sample window */
3451
+ framesForSample: number;
3452
+ /** The frame rate, in frames per second */
3453
+ framerate: number;
3454
+ /** The number of key frames over the sample window */
3455
+ keyFramesForSample: number;
3456
+ }
3457
+
3458
+ /** @public */
3459
+ export declare interface StreamStatisticsMixin {
3460
+ /**
3461
+ * Sampling rates for stream stats, in seconds
3462
+ */
3463
+ statsSampling?: PlainMessage<StreamStatisticsSampling>;
3464
+ /**
3465
+ * Called at periodic intervals when stream statistics are ready.
3466
+ * @eventProperty
3467
+ */
3468
+ onStreamStatistics?: (
3469
+ /** The stats */
3470
+ stats: MultiStreamStatistics) => void;
3471
+ onGopStructure?: (structure: GopStructure) => void;
3472
+ }
3473
+
3474
+ /**
3475
+ * @public
3476
+ * see {@link NorskControl.streamStatistics}.
3477
+ */
3478
+ export declare class StreamStatisticsNode extends AutoProcessorMediaNode<"audio" | "video"> {
3479
+ }
3480
+
3481
+ /**
3482
+ * @public
3483
+ * Settings for a Stream Statistics Node
3484
+ * see: {@link NorskControl.streamStatistics}
3485
+ */
3486
+ export declare interface StreamStatisticsSettings extends ProcessorNodeSettings<StreamStatisticsNode>, StreamStatisticsMixin {
3487
+ /**
3488
+ * Called periodically with the stream stats
3489
+ * @param stats - The statistics for the stream
3490
+ * @eventProperty
3491
+ */
3492
+ onStreamStatistics: (stats: MultiStreamStatistics) => void;
3493
+ /**
3494
+ * Sampling rates for stream stats, in seconds
3495
+ */
3496
+ statsSampling?: PlainMessage<StreamStatisticsSampling>;
3497
+ }
3498
+
3499
+ /**
3500
+ * @public
3501
+ * see: {@link NorskControl.streamSwitchHard}
3502
+ */
3503
+ export declare class StreamSwitchHardNode<Pins extends string> extends ProcessorMediaNode<Pins> {
3504
+ switchSource(newSource: Pins): void;
3505
+ }
3506
+
3507
+ /**
3508
+ * @public
3509
+ * Settings for the Hard Stream Switch
3510
+ * see: {@link NorskControl.streamSwitchHard}
3511
+ * */
3512
+ export declare interface StreamSwitchHardSettings<Pins extends string> extends ProcessorNodeSettings<StreamSwitchHardNode<Pins>> {
3513
+ /** The currently active source to display on the output */
3514
+ activeSource: Pins;
3515
+ /** the source name to give the output of this switch operation */
3516
+ outputSource: string;
3517
+ }
3518
+
3519
+ /**
3520
+ * @public
3521
+ * see: {@link NorskControl.streamSwitchSmooth}
3522
+ */
3523
+ export declare class StreamSwitchSmoothNode<Pins extends string> extends ProcessorMediaNode<Pins> {
3524
+ /**
3525
+ * @public
3526
+ * Switches the source used for the current output of this node
3527
+ */
3528
+ switchSource(newSource: Pins): void;
3529
+ }
3530
+
3531
+ /**
3532
+ * @public
3533
+ * Settings for the Smooth Source Switch
3534
+ * see {@link NorskControl.streamSwitchSmooth}
3535
+ * */
3536
+ export declare interface StreamSwitchSmoothSettings<Pins extends string> extends ProcessorNodeSettings<StreamSwitchSmoothNode<Pins>> {
3537
+ /** The presently active source being used to generate output for this node */
3538
+ activeSource?: Pins;
3539
+ /** The source name given to the output of this node */
3540
+ outputSource: string;
3541
+ /** How many milliseconds to use for the fade operation between two sources */
3542
+ transitionDurationMs?: number;
3543
+ /** The constant resolution that all output video will be scaled to */
3544
+ outputResolution: Resolution;
3545
+ /** The constant framerate that all output video will be sampled to */
3546
+ frameRate: FrameRate;
3547
+ /** The constant samplerate that all output audio will be resampled to */
3548
+ sampleRate: SampleRate;
3549
+ /** The constant channel layout that all output audio will be resampled to */
3550
+ channelLayout: ChannelLayout;
3551
+ /** Alignment behaviour of the component
3552
+ * whether to rebase all incoming streams to a common timeline
3553
+ * Note: This will modify the timestamps, meaning that merging with streams not involved in this
3554
+ * operation may result in sync issues. To avoid this, you can use {@link NorskProcessor.streamAlign} instead of relying
3555
+ * on this component for this behaviour
3556
+ * Note: This behaviour may be removed in a future release and replaced with something similar
3557
+ * */
3558
+ alignment?: "aligned" | "not_aligned";
3559
+ /** Callback which will be called if a switch request cannot be fulfilled */
3560
+ onSwitchError?: (message: string, inputPin?: Pins) => void;
3561
+ * Callback which will be called when a transition has successfully completed for a requested switch, i.e. the new source
3562
+ * is now showing.
3563
+ *
3564
+ * Note that if additional transitions are triggered when a transition is already in progress, a notification may only be
3565
+ * given for the last transition to finish.
3566
+ **/
3567
+ onTransitionComplete?: (inputPin: Pins) => void;
3568
+ /**
3569
+ * Callback to be called when inbound context changes on some input; presence of an
3570
+ * input means that media has arrived and is ready to switch
3571
+ * immediately
3572
+ * @param allStreams - The collection of input contexts received over all input pins
3573
+ */
3574
+ onInboundContextChange?: (allStreams: Map<Pins, StreamMetadata[]>) => Promise<void>;
3575
+ }
3576
+
3577
+ /**
3578
+ * @public
3579
+ * see: {@link NorskTransform.streamSync}
3580
+ */
3581
+ export declare class StreamSyncNode extends AutoProcessorMediaNode<"audio" | "video"> {
3582
+ }
3583
+
3584
+ /**
3585
+ * @public
3586
+ * Settings for a StreamSync node
3587
+ * see {@link NorskTransform.streamSync}
3588
+ * */
3589
+ export declare interface StreamSyncSettings extends ProcessorNodeSettings<StreamSyncNode> {
3590
+ }
3591
+
3592
+ /**
3593
+ * @public
3594
+ * see: {@link NorskTransform.streamTimestampNudge}
3595
+ */
3596
+ export declare class StreamTimestampNudgeNode extends AutoProcessorMediaNode<"audio" | "video"> {
3597
+ /**
3598
+ * @public
3599
+ * Applies a gradual nudge to the stream timestamps by the specified number of milliseconds
3600
+ * */
3601
+ nudge(nudge: number): void;
3602
+ }
3603
+
3604
+ /**
3605
+ * @public
3606
+ * Settings for a Stream Timestamp Nudge
3607
+ * see: {@link NorskTransform.streamTimestampNudge}
3608
+ * */
3609
+ export declare interface StreamTimestampNudgeSettings extends ProcessorNodeSettings<StreamTimestampNudgeNode> {
3610
+ nudge?: number;
3611
+ }
3612
+
3613
+ /**
3614
+ * @public
3615
+ * see: {@link NorskDebug.streamTimestampReport}
3616
+ */
3617
+ export declare class StreamTimestampReportNode extends AutoSinkMediaNode<string> {
3618
+ }
3619
+
3620
+ /**
3621
+ * @public
3622
+ * Settings for a Stream Timestamp Report
3623
+ * see {@link NorskDebug.streamTimestampReport}
3624
+ */
3625
+ export declare interface StreamTimestampReportSettings extends SinkNodeSettings<StreamTimestampReportNode> {
3626
+ onTimestamp?: (streamKey: StreamKey, timestamp: IntervalTimestamp) => Promise<void>;
3627
+ }
3628
+
3629
+ /**
+ * A destination that can take part in subscription setup and be notified when its
+ * source context changes.
+ * NOTE(review): not tagged @public unlike neighbouring declarations — confirm whether
+ * this is intentionally internal.
+ */
+ export declare interface SubscribeDestination {
3630
+ /** Optional identifier for this destination */
+ id?: string;
3631
+ /**
+ * Invoked when the source context changes; the supplied callback reports an optional
+ * {@link SubscriptionError} back to the caller. Presumably the returned promise resolves
+ * to whether the change was accepted — TODO confirm against implementation.
+ */
+ sourceContextChange(responseCallback: (error?: SubscriptionError) => void): Promise<boolean>;
3632
+ }
3633
+
3634
+ /**
3635
+ * @public
3636
+ * Errors found while setting up subscriptions, separated out by reason:
3637
+ *
3638
+ * - `internal`: An opaque internal error
3639
+ *
3640
+ * - `unknownSubscriber`: The media node requesting the subscription does not exist
3641
+ *
3642
+ * - `multipleStreams`: Multiple stream keys found for the context type
3643
+ *
3644
+ * - `sourceSubscriptionError`: Per-source errors
3645
+ */
3646
+ export declare type SubscriptionError = {
3647
+ info: string;
3648
+ reason: "internal";
3649
+ } | {
3650
+ mediaNodeId: MediaNodeId;
3651
+ reason: "unknownSubscriber";
3652
+ } | {
3653
+ contextType: ContextType;
3654
+ streamKeys: StreamKey[];
3655
+ reason: "multipleStreams";
3656
+ } | {
3657
+ sourceErrors: SourceSubscriptionError[];
3658
+ reason: "sourceSubscriptionError";
3659
+ };
3660
+
3661
+ /**
3662
+ * @public
3663
+ * Determines what to do with an incoming context
3664
+ *
3665
+ * - true/accept: Allow the incoming context through, and any subsequent/queued data that belongs to it
3666
+ * - false/deny: Deny the incoming context, if no context has been accepted, then queue data until one is
3667
+ * - accept_and_terminate: Allow the incoming context, then deny further data, flush and shut down the node
3668
+ * this is useful for cleanly terminating outputs when the context is empty
3669
+ * */
3670
+ export declare type SubscriptionValidationResponse = true | false | "accept" | "deny" | "accept_and_terminate";
3671
+
3672
+ /** @public */
3673
+ export declare function subtitlesToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
3674
+
3675
+ /**
3676
+ * @public
3677
+ * Returns the stream keys for subtitle streams in a media context
3678
+ * @param streams - The media context from which to return the stream keys
3679
+ * @returns The subtitle stream keys in the media context
3680
+ */
3681
+ export declare function subtitleStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
3682
+
3683
+ /** @public */
3684
+ export declare interface SubtitleStreamMetadata {
3685
+ }
3686
+
3687
+ /**
3688
+ * @public
3689
+ * Filters a context to only the subtitle streams within it
3690
+ * @param streams - The media context from which to return the streams
3691
+ * @returns The subtitle streams in the media context
3692
+ */
3693
+ export declare function subtitleStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
3694
+
3695
+ export declare function toOptBool(b?: boolean): OptionalBool | undefined;
3696
+
3697
+ export declare function toOptInt(i?: number): OptionalInt | undefined;
3698
+
3699
+ declare class TsCommonInputNode<SourceMessage, T extends SourceMediaNode> extends SourceMediaNode {
3700
+ constructor(tsType: TsInputType, client: MediaClient, unregisterNode: (node: MediaNodeState) => void, settings: SourceNodeSettings<T> & StreamStatisticsMixin, nudgeFn: (nudge: TimestampProgramNudge) => SourceMessage, onEof: (() => void) | undefined, grpcStartFn: () => grpc.ClientDuplexStream<SourceMessage, TsInputEvent>);
3701
+ /**
3702
+ * @public
3703
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
3704
+ * */
3705
+ nudge(programNumber: number, nudge: number): void;
3706
+ }
3707
+
3708
+ declare enum TsInputType {
3709
+ TsFile = 0,
3710
+ Srt = 1,
3711
+ Udp = 2,
3712
+ M3u8 = 3
3713
+ }
3714
+
3715
+ /**
3716
+ * @public
3717
+ * see: {@link NorskInput.udpTs}
3718
+ */
3719
+ export declare class UdpTsInputNode extends TsCommonInputNode<UdpTsInputMessage, UdpTsInputNode> {
3720
+ }
3721
+
3722
+ /**
3723
+ * @public
3724
+ * Settings for a UDP Transport Stream input
3725
+ * see: {@link NorskInput.udpTs}
3726
+ * */
3727
+ export declare interface UdpTsInputSettings extends RemoteInputSettings<UdpTsInputNode> {
3728
+ }
3729
+
3730
+ /**
3731
+ * @public
3732
+ * see: {@link NorskOutput.udpTs}
3733
+ */
3734
+ export declare class UdpTsOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
3735
+ }
3736
+
3737
+ /**
3738
+ * @public
3739
+ * The settings for an output Transport Stream over UDP
3740
+ * see: {@link NorskOutput.udpTs}
3741
+ */
3742
+ export declare interface UdpTsOutputSettings extends SinkNodeSettings<UdpTsOutputNode>, StreamStatisticsMixin {
3743
+ /**
3744
+ * The IP address to publish to
3745
+ * This can be multicast, unicast or broadcast
3746
+ */
3747
+ destinationIp: string;
3748
+ /**
3749
+ * The interface to bind to for publishing
3750
+ * This can be 'any', 'loopback' or any named interface on the machine
3751
+ * Note: If running inside docker this may be different to expected
3752
+ */
3753
+ iface: string;
3754
+ /** The port to send to */
3755
+ port: number;
3756
+ /** Jitter buffer delay in milliseconds */
3757
+ bufferDelayMs?: number;
3758
+ }
3759
+
3760
+ /**
3761
+ * @public
3762
+ * An update request for credentials on a CMAF output
3763
+ */
3764
+ export declare interface UpdateCredentials {
3765
+ /**
3766
+ * The id of the destination that is to be updated (see {@link HlsPushDestinationSettings.id})
3767
+ */
3768
+ destinationId: string;
3769
+ /**
3770
+ * the new credentials to be used by the destination
3771
+ */
3772
+ awsCredentials: AwsCredentials;
3773
+ }
3774
+
3775
+ /** @public */
3776
+ export declare type VancPayloadFormat = "other" | "afd_bar" | "pan_scan" | "scte104" | "dvb_scte_vbi" | "op47_sdp" | "op47_vanc_multipacket" | "ancillary_time_code" | "eia_708" | "eia_608";
3777
+
3778
+ /** @public */
3779
+ export declare interface VancType2AncillaryId {
3780
+ did: number;
3781
+ sdid: number;
3782
+ }
3783
+
3784
+ export { Version }
3785
+
3786
+ /**
3787
+ * @public
3788
+ * see: {@link NorskTransform.videoCompose}
3789
+ */
3790
+ export declare class VideoComposeNode<Pins extends string> extends ProcessorMediaNode<Pins> {
3791
+ /**
3792
+ * @public
3793
+ * Updates the config used for a video compose operation
3794
+ * If transitions are specified, animations will be provided, otherwise
3795
+ * the change will be immediate
3796
+ *
3797
+ * Note: This is not a 'cheap' operation and care should be taken not to
3798
+ * do this too often (more than once a second for example!)
3799
+ */
3800
+ updateConfig(settings: VideoComposeSettingsUpdate<Pins>): void;
3801
+ }
3802
+
3803
+ /** @public */
3804
+ export declare interface VideoComposeSettings<Pins extends string> extends ProcessorNodeSettings<VideoComposeNode<Pins>> {
3805
+ /**
3806
+ * Required. Stream key of the reference stream. This is the video stream
3807
+ * which defines the output frame timing, which will typically be part of the
3808
+ * composition, e.g. the main picture in the case of a simple
3809
+ * overlay/picture-in-picture, or the top left quadrant of a 4-way split
3810
+ * screen.
3811
+ */
3812
+ referenceStream: Pins;
3813
+ /** The parts (images/overlays) to include in the composition */
3814
+ parts: readonly ComposePart<Pins>[];
3815
+ /**
3816
+ * Optionally supply a fallback reference resolution. This allows description of the
3817
+ * composition in a desired coordinate system, e.g. a resolution of 100x100
3818
+ * can be specified to allow the source and destination areas to be described
3819
+ * in percentage terms, or a notional resolution can be used that is
3820
+ * independent of the source resolutions that may be provided.
3821
+ *
3822
+ * if set here, this reference resolution will be applied to
3823
+ * any parts that do not have their own reference resolution specified
3824
+ *
3825
+ * If not provided the source and destination rectangles are in terms of the
3826
+ * source and output resolutions respectively.
3827
+ */
3828
+ referenceResolution?: Resolution;
3829
+ /** The resolution of the output video */
3830
+ outputResolution: Resolution;
3831
+ /**
3832
+ * Output pixel format to use. If not specified, this will be chosen
3833
+ * automatically based on the sources present in the initial composition
3834
+ */
3835
+ outputPixelFormat?: PixelFormat;
3836
+ /**
3837
+ * Behaviour in the case of a missing stream used in an active composition
3838
+ * part. Note that this does not apply to the reference stream, but to every
3839
+ * part which does not use the reference stream, whether at startup or on
3840
+ * context change.
3841
+ *
3842
+ * Missing means not present in the context or never having sent a frame.
3843
+ */
3844
+ missingStreamBehaviour?: ComposeMissingStreamBehaviour;
3845
+ /**
3846
+ * Optionally attempt to perform the compose operation on hardware
3847
+ */
3848
+ hardwareAcceleration?: ComposeHardwareAcceleration;
3849
+ /**
3850
+ * Called when the transitions specified in the last config update have
3851
+ * completed (in the case of multiple parts with specified transitions of
3852
+ * different duration, this means that the last remaining transitions have
3853
+ * completed
3854
+ */
3855
+ onTransitionComplete?: () => void;
3856
+ }
3857
+
3858
+ /**
3859
+ * @public
3860
+ * An update operation for a VideoCompose operation
3861
+ * see: {@link VideoComposeNode.updateConfig}
3862
+ * */
3863
+ export declare interface VideoComposeSettingsUpdate<Pins extends string> {
3864
+ /** Update the parts (images/overlays) to include in the composition */
3865
+ parts: readonly ComposePart<Pins>[];
3866
+ }
3867
+
3868
+ /**
3869
+ * @public
3870
+ * see: {@link NorskTransform.videoEncode}
3871
+ */
3872
+ export declare class VideoEncodeNode extends AutoProcessorMediaNode<"video"> {
3873
+ }
3874
+
3875
+ /**
3876
+ * @public
3877
+ * A single rung in a video encode ladder
3878
+ * see: {@link NorskTransform.videoEncode}
3879
+ * */
3880
+ export declare interface VideoEncodeRung {
3881
+ /** The name of this rung, this should be unique across the ladder
3882
+ * and will end up in the renditionName of the outgoing StreamKey
3883
+ */
3884
+ name: string;
3885
+ /** The width of the outgoing video resolution */
3886
+ width: number;
3887
+ /** The height of the outgoing video resolution */
3888
+ height: number;
3889
+ /**
3890
+ * Optionally change the frameRate for this rendition
3891
+ * This can be useful if the input is 50FPS for example and some
3892
+ * lower rungs need to be 25fps
3893
+ *
3894
+ * Note: If you wish to apply the same frame rate across all rungs, it is
3895
+ * more efficient to use a single {@link VideoTransformNode} before the ladder
3896
+ * created with {@link NorskTransform.videoTransform} and leave this value undefined
3897
+ * */
3898
+ frameRate?: FrameRate;
3899
+ /**
3900
+ * Specifies the input video's Sample Aspect Ratio (SAR) to be used by the
3901
+ * encoder in width:height
3902
+ */
3903
+ sar?: SampleAspectRatio;
3904
+ /**
3905
+ * The codec (and detailed configuration) to use for the encoding operation.
3906
+ *
3907
+ * Note: The Nvidia, Logan/Quadra, and Xilinx codecs require the corresponding hardware to be set up and
3908
+ * made available to Norsk
3909
+ *
3910
+ * A ladder can use several different codecs across its various rungs and the
3911
+ * VideoEncode node will attempt to build a pipeline that uses the hardware efficiently
3912
+ */
3913
+ codec: X264Codec | X265Codec | NvidiaH264 | NvidiaHevc | LoganH264 | LoganHevc | QuadraH264 | QuadraHevc | XilinxH264 | XilinxHevc;
3914
+ }
3915
+
3916
+ /**
3917
+ * @public
3918
+ * Settings for a VideoEncode operation
3919
+ * see: {@link NorskTransform.videoEncode}
3920
+ * */
3921
+ export declare interface VideoEncodeSettings extends ProcessorNodeSettings<VideoEncodeNode> {
3922
+ rungs: readonly VideoEncodeRung[];
3923
+ }
3924
+
3925
+ /**
3926
+ * @public
3927
+ * Returns the stream keys for video streams in a media context
3928
+ * @param streams - The media context from which to return the stream keys
3929
+ * @returns The video stream keys in the media context
3930
+ */
3931
+ export declare function videoStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
3932
+
3933
+ /** @public */
3934
+ export declare interface VideoStreamMetadata {
3935
+ codec: string;
3936
+ width: number;
3937
+ height: number;
3938
+ frameRate?: FrameRate_2;
3939
+ }
3940
+
3941
+ /**
3942
+ * @public
3943
+ * Filters a context to only the video streams within it
3944
+ * @param streams - The media context from which to return the streams
3945
+ * @returns The video streams in the media context
3946
+ */
3947
+ export declare function videoStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
3948
+
3949
+ /**
3950
+ * @public
3951
+ * see: {@link NorskInput.videoTestcard}
3952
+ */
3953
+ export declare class VideoTestcardGeneratorNode extends SourceMediaNode {
3954
+ }
3955
+
3956
+ /**
3957
+ * @public
3958
+ * Settings for an Video Testcard Generator
3959
+ * see: {@link NorskInput.videoTestcard}
3960
+ * */
3961
+ export declare interface VideoTestcardGeneratorSettings extends SourceNodeSettings<VideoTestcardGeneratorNode> {
3962
+ /** The source name to set in the stream key of the outgoing stream */
3963
+ sourceName: string;
3964
+ /** The number of frames to send before shutting down */
3965
+ numberOfFrames?: number;
3966
+ /** Resolution of the test card stream **/
3967
+ resolution: {
3968
+ width: number;
3969
+ height: number;
3970
+ };
3971
+ /** Framerate of the produced video stream **/
3972
+ frameRate: {
3973
+ frames: number;
3974
+ seconds: number;
3975
+ };
3976
+ pattern: Pattern;
3977
+ }
3978
+
3979
+ /** @public */
3980
+ export declare function videoToPin<Pins extends string>(pin: Pins): (streams: StreamMetadata[]) => PinToKey<Pins>;
3981
+
3982
+ /**
3983
+ * @public
3984
+ * see: {@link NorskTransform.videoTransform}
3985
+ */
3986
+ export declare class VideoTransformNode extends AutoProcessorMediaNode<"video"> {
3987
+ }
3988
+
3989
+ /**
3990
+ * @public
3991
+ * Settings for a Video Transform node
3992
+ * see: {@link NorskTransform.videoTransform}
3993
+ * */
3994
+ export declare interface VideoTransformSettings extends ProcessorNodeSettings<VideoTransformNode> {
3995
+ /** An optional resolution to rescale this single stream to */
3996
+ resolution?: Resolution;
3997
+ /** An optional framerate to resample this single stream to */
3998
+ frameRate?: FrameRate;
3999
+ /** An optional SAR to set on the outgoing stream
4000
+ * Note: You can set this and only this if the SAR on your incoming stream is incorrect
4001
+ * for example (An often-seen problem with sources)
4002
+ * */
4003
+ sar?: SampleAspectRatio;
4004
+ }
4005
+
4006
+ /**
4007
+ * @public
4008
+ * see: {@link NorskDuplex.webRtcBrowser}
4009
+ */
4010
+ export declare class WebRTCBrowserNode extends AutoProcessorMediaNode<"audio" | "video"> {
4011
+ /** @public The URL of the local player */
4012
+ playerUrl: string;
4013
+ }
4014
+
4015
+ /**
4016
+ * @public
4017
+ * Settings for a WebRTC browser session
4018
+ * see: {@link NorskDuplex.webRtcBrowser}
4019
+ * */
4020
+ export declare interface WebRTCBrowserSettings extends ProcessorNodeSettings<WebRTCBrowserNode>, StreamStatisticsMixin {
4021
+ iceServers?: IceServerSettings[];
4022
+ reportedIceServers?: IceServerSettings[];
4023
+ hostIps?: string[];
4024
+ serverReflexiveIps?: string[];
4025
+ /** Jitter buffer delay in milliseconds */
4026
+ bufferDelayMs?: number;
4027
+ }
4028
+
4029
+ /**
4030
+ * @public
4031
+ * see: {@link NorskOutput.whep}
4032
+ */
4033
+ export declare class WhepOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
4034
+ /** @public The URL of the local player */
4035
+ playerUrl: string;
4036
+ /** @public The URL of the WHEP endpoint */
4037
+ endpointUrl: string;
4038
+ }
4039
+
4040
+ /**
4041
+ * @public
4042
+ * The settings for a WebRTC WHEP Output
4043
+ * see {@link NorskOutput.whep}
4044
+ */
4045
+ export declare interface WhepOutputSettings extends SinkNodeSettings<WhepOutputNode>, StreamStatisticsMixin {
4046
+ iceServers?: IceServerSettings[];
4047
+ reportedIceServers?: IceServerSettings[];
4048
+ hostIps?: string[];
4049
+ serverReflexiveIps?: string[];
4050
+ /** Jitter buffer delay in milliseconds */
4051
+ bufferDelayMs?: number;
4052
+ }
4053
+
4054
+ /**
4055
+ * @public
4056
+ * see: {@link NorskInput.whip}
4057
+ */
4058
+ export declare class WhipInputNode extends SourceMediaNode {
4059
+ /** @public The URL of the local test client */
4060
+ clientUrl: string;
4061
+ /** @public The URL of the WHIP endpoint */
4062
+ endpointUrl: string;
4063
+ /**
4064
+ * @public
4065
+ * Applies a gradual nudge to the outgoing stream timestamps by the specified number of milliseconds
4066
+ * */
4067
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
4068
+ }
4069
+
4070
+ /** @public */
4071
+ export declare interface WhipInputSettings extends InputSettings<WhipInputNode>, StreamStatisticsMixin {
4072
+ /** List of ice servers to use as part of session negotiation */
4073
+ iceServers?: IceServerSettings[];
4074
+ /** Internal addresses for the ice servers (defaults to iceServers) */
4075
+ reportedIceServers?: IceServerSettings[];
4076
+ /**
4077
+ * List of IPs to advertise as your host address - useful e.g. when on a cloud server
4078
+ * so that the public rather than private IP is used.
4079
+ */
4080
+ hostIps?: string[];
4081
+ /**
4082
+ * Similar to hostIps, but a list of server reflexive candidates so that ICE negotiations can be
4083
+ * sped up
4084
+ */
4085
+ serverReflexiveIps?: string[];
4086
+ }
4087
+
4088
+ /**
4089
+ * @public
4090
+ * see: {@link NorskOutput.whip}
4091
+ */
4092
+ export declare class WhipOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
4093
+ }
4094
+
4095
+ /**
4096
+ * @public
4097
+ * The settings for a WebRTC Whip Output
4098
+ * see {@link NorskOutput.whip}
4099
+ */
4100
+ export declare interface WhipOutputSettings extends SinkNodeSettings<WhipOutputNode>, StreamStatisticsMixin {
4101
+ /**
4102
+ * The URI to make the initial publish request to (as per the WHIP protocol)
4103
+ */
4104
+ uri: string;
4105
+ /** The auth header to supply (for example: 'Bearer: mybearertoken') */
4106
+ authHeader: string;
4107
+ /** Jitter buffer delay in milliseconds */
4108
+ bufferDelayMs?: number;
4109
+ }
4110
+
4111
+ /** @public */
4112
+ export declare interface X264Codec {
4113
+ type: "x264";
4114
+ threads?: number;
4115
+ /**
4116
+ * Rate control options - one of abr, cqp or crf
4117
+ */
4118
+ bitrateMode?: BitrateMode;
4119
+ /**
4120
+ * Set the H264 profile of the output
4121
+ */
4122
+ profile?: X264Profile;
4123
+ /**
4124
+ * Sets the level flag in the output
4125
+ */
4126
+ level?: X264Level;
4127
+ /**
4128
+ * Sets the minimum length between IDR frames
4129
+ */
4130
+ keyFrameIntervalMin?: number;
4131
+ /**
4132
+ * Sets the maximum length between IDR frames
4133
+ */
4134
+ keyFrameIntervalMax?: number;
4135
+ /**
4136
+ * Sets the maximum number of concurrent B-frames
4137
+ */
4138
+ bframes?: number;
4139
+ /**
4140
+ * Tune options to further optimize them for your input content. If you
4141
+ * specify a tuning, the changes will be applied after 'preset' but before all
4142
+ * other parameters.
4143
+ */
4144
+ tune?: X264Tune;
4145
+ /**
4146
+ * Change options to trade off compression efficiency against encoding speed.
4147
+ * If you specify a preset, the changes it makes will be applied before all
4148
+ * other parameters are applied.
4149
+ */
4150
+ preset?: X264Preset;
4151
+ /**
4152
+ * Maximum number of reference frames, i.e., the number of previous frames
4153
+ * each P-frame can use as references
4154
+ */
4155
+ frameReference?: number;
4156
+ /**
4157
+ * Enables CABAC (Context Adaptive Binary Arithmetic Coder) stream compression
4158
+ * instead of the less efficient CAVLC (Context Adaptive Variable Length
4159
+ * Coder) system. Significantly improves both the compression efficiency
4160
+ * (10-20% typically) and the decoding requirements, at the expense of encode
4161
+ * CPU requirements
4162
+ */
4163
+ cabac?: boolean;
4164
+ /**
4165
+ * Sets the maximum rate the VBV buffer should be assumed to refill at
4166
+ */
4167
+ vbvMaxRate?: number;
4168
+ /**
4169
+ * Sets the size of the VBV buffer in kilobits
4170
+ */
4171
+ vbvBufferSize?: number;
4172
+ /**
4173
+ * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
4174
+ * disables adaptive I-frame decisioning
4175
+ */
4176
+ sceneCut?: number;
4177
+ /**
4178
+ * Use access unit delimiters in the output
4179
+ */
4180
+ aud?: boolean;
4181
+ /**
4182
+ * Disables the loop filter. Not Recommended.
4183
+ */
4184
+ noDeblock?: boolean;
4185
+ /**
4186
+ * Signal HRD information
4187
+ */
4188
+ nalHrd?: X264NalHrd;
4189
+ }
4190
+
4191
/**
 * @public
 * H.264 level to signal via the level flag in the encoded output
 * (see {@link X264Codec.level}).
 * See the X264 Docs for a full description of each value.
 */
export declare type X264Level = 1 | 1.1 | 1.2 | 1.3 | 2 | 2.1 | 2.2 | 3 | 3.1 | 3.2 | 4 | 4.1 | 4.2 | 5 | 5.1;
4196
+
4197
/**
 * @public
 * Controls how HRD (Hypothetical Reference Decoder) information is signalled
 * in the output bitstream.
 *
 * Three possible values:
 *
 * - "none": specify no HRD information
 *
 * - "vbr": specify HRD information
 *
 * - "cbr": specify HRD information and pack the bitstream to the bitrate specified
 *
 * See the X264 Docs for a further description of this value
 */
export declare type X264NalHrd = "none" | "vbr" | "cbr";
4210
+
4211
/**
 * @public
 * Encoder preset trading compression efficiency against encoding speed,
 * from fastest ("ultrafast") to slowest ("placebo"). Applied before all
 * other encoder parameters (see {@link X264Codec.preset}).
 * See the X264 Docs for a description of this value.
 */
export declare type X264Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";
4216
+
4217
/**
 * @public
 * H.264 profile of the encoded output (see {@link X264Codec.profile}).
 */
export declare type X264Profile = "baseline" | "main" | "high" | "high10" | "high422" | "high444";
4219
+
4220
/**
 * @public
 * Tuning option to further optimise the encoder for particular input
 * content; applied after 'preset' but before all other parameters
 * (see {@link X264Codec.tune}).
 * See the X264 Docs for a description of this value.
 */
export declare type X264Tune = "film" | "animation" | "grain" | "stillimage" | "psnr" | "ssim" | "fastdecode" | "zerolatency";
4225
+
4226
/** @public X265 (HEVC) software codec settings */
export declare interface X265Codec {
    /** Discriminant selecting the x265 encoder */
    type: "x265";
    /** Number of encoder threads — presumably falls back to the encoder's own default when omitted; TODO confirm */
    threads?: number;
    /** Controls how the output bitrate is managed */
    bitrateMode?: BitrateMode;
    /** Sets the HEVC profile of the output */
    profile?: X265Profile;
    /**
     * Sets the level flag in the output
     */
    level?: X265Level;
    /** Sets the tier flag ("main" or "high") in the output */
    tier?: X265Tier;
    /**
     * Sets the minimum length between IDR frames
     */
    keyFrameIntervalMin?: number;
    /**
     * Sets the maximum length between IDR frames
     */
    keyFrameIntervalMax?: number;
    /**
     * Sets the maximum number of concurrent B-frames
     */
    bframes?: number;
    /**
     * Tune options to further optimize them for your input content. If you
     * specify a tuning, the changes will be applied after 'preset' but before all
     * other parameters.
     */
    tune?: X265Tune;
    /**
     * Change options to trade off compression efficiency against encoding speed.
     * If you specify a preset, the changes it makes will be applied before all
     * other parameters are applied.
     */
    preset?: X265Preset;
    /**
     * Maximum number of reference frames, i.e., the number of previous frames
     * each P-frame can use as references
     */
    frameReference?: number;
    /**
     * Sets the maximum rate the VBV buffer should be assumed to refill at
     */
    vbvMaxRate?: number;
    /**
     * Sets the size of the VBV buffer in kilobits
     */
    vbvBufferSize?: number;
    /**
     * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
     * disables adaptive I-frame decisioning
     */
    sceneCut?: number;
    /**
     * Use access unit delimiters in the output
     */
    aud?: boolean;
    /**
     * Disables the loop filter. Not Recommended.
     */
    noDeblock?: boolean;
}
4288
+
4289
/**
 * @public
 * HEVC level to signal via the level flag in the encoded output
 * (see {@link X265Codec.level}).
 * See the X265 Docs for a full description of each value.
 */
export declare type X265Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4294
+
4295
/**
 * @public
 * Encoder preset trading compression efficiency against encoding speed,
 * from fastest ("ultrafast") to slowest ("placebo"). Applied before all
 * other encoder parameters (see {@link X265Codec.preset}).
 * See the X265 Docs for a description of this value.
 */
export declare type X265Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";
4300
+
4301
/**
 * @public
 * HEVC profile of the encoded output (see {@link X265Codec.profile}).
 * See the X265 Docs for a description of this value.
 */
export declare type X265Profile = "main" | "main10" | "main444_8" | "main422_10" | "main444_10";
4306
+
4307
/**
 * @public
 * HEVC tier flag of the encoded output (see {@link X265Codec.tier}).
 * See the X265 Docs for a description of this value.
 */
export declare type X265Tier = "main" | "high";
4312
+
4313
/**
 * @public
 * Tuning option to further optimise the encoder for particular input
 * content; applied after 'preset' but before all other parameters
 * (see {@link X265Codec.tune}).
 * See the X265 Docs for a description of this value.
 */
export declare type X265Tune = "psnr" | "ssim" | "grain" | "zerolatency" | "fastdecode" | "animation";
4318
+
4319
/**
 * @public
 * Settings for a H264 Encode using Netint Xilinx hardware.
 * A detailed description of these params can be found
 * on the Netint Xilinx Encoder Documentation.
 *
 * These fields have deliberately been written to maintain the same semantics as the
 * Xilinx documentation where possible.
 *
 * If left undefined, all will default to Xilinx's own defaults.
 */
export declare interface XilinxH264 {
    /** Discriminant selecting the Xilinx hardware H.264 encoder */
    type: "xilinx-h264";
    /** H.264 profile of the encoded output */
    profile?: XilinxH264Profile;
    /** H.264 level flag of the encoded output */
    level?: XilinxH264Level;
}
4335
+
4336
/**
 * @public
 * H.264 levels accepted by the Xilinx hardware encoder
 * (see {@link XilinxH264.level}).
 */
export declare type XilinxH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4338
+
4339
/**
 * @public
 * H.264 profiles accepted by the Xilinx hardware encoder
 * (see {@link XilinxH264.profile}).
 */
export declare type XilinxH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
4341
+
4342
/**
 * @public
 * Settings for a HEVC Encode using Netint Xilinx hardware.
 * A detailed description of these params can be found
 * on the Netint Xilinx Encoder Documentation.
 *
 * These fields have deliberately been written to maintain the same semantics as the
 * Xilinx documentation where possible.
 *
 * If left undefined, all will default to Xilinx's own defaults.
 */
export declare interface XilinxHevc {
    /** Discriminant selecting the Xilinx hardware HEVC encoder */
    type: "xilinx-hevc";
    /** HEVC profile of the encoded output */
    profile?: XilinxHevcProfile;
    /** HEVC level flag of the encoded output */
    level?: XilinxHevcLevel;
    /** HEVC tier ("main" or "high") of the encoded output */
    tier?: XilinxHevcTier;
}
4359
+
4360
/**
 * @public
 * HEVC levels accepted by the Xilinx hardware encoder
 * (see {@link XilinxHevc.level}).
 */
export declare type XilinxHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
4362
+
4363
/**
 * @public
 * HEVC profiles accepted by the Xilinx hardware encoder
 * (see {@link XilinxHevc.profile}).
 */
export declare type XilinxHevcProfile = "main" | "main10";
4365
+
4366
/**
 * @public
 * HEVC tiers accepted by the Xilinx hardware encoder
 * (see {@link XilinxHevc.tier}).
 */
export declare type XilinxHevcTier = "main" | "high";
4368
+
4369
+ export { }