@norskvideo/norsk-sdk 0.0.322

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2412 @@
1
+ import { AudioCodec } from '@norskvideo/norsk-api/lib/media_pb';
2
+ import { Context } from '@norskvideo/norsk-api/lib/media_pb';
3
+ import { CurrentLoad } from '@norskvideo/norsk-api/lib/shared/common_pb';
4
+ import { ExplicitChannel } from '@norskvideo/norsk-api/lib/media_pb';
5
+ import * as grpc from '@grpc/grpc-js';
6
+ import { HlsAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
7
+ import { HlsMasterMessage } from '@norskvideo/norsk-api/lib/media_pb';
8
+ import { HlsMasterPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
9
+ import { HlsOutputEvent } from '@norskvideo/norsk-api/lib/media_pb';
10
+ import { HlsTsAudioMessage } from '@norskvideo/norsk-api/lib/media_pb';
11
+ import { HlsTsAudioPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
12
+ import { HlsTsCombinedPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
13
+ import { HlsTsVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
14
+ import { HlsTsVideoPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
15
+ import { HlsVideoMessage } from '@norskvideo/norsk-api/lib/media_pb';
16
+ import { HlsWebVttMessage } from '@norskvideo/norsk-api/lib/media_pb';
17
+ import { HlsWebVttPushMessage } from '@norskvideo/norsk-api/lib/media_pb';
18
+ import { MediaClient } from '@norskvideo/norsk-api/lib/media_grpc_pb';
19
+ import { Nullable } from 'typescript-nullable';
20
+ import { PlainMessage } from '@bufbuild/protobuf';
21
+ import { StreamKey as StreamKey_2 } from '@norskvideo/norsk-api/lib/media_pb';
22
+ import { StreamStatsSampling } from '@norskvideo/norsk-api/lib/media_pb';
23
+ import { Subscription } from '@norskvideo/norsk-api/lib/media_pb';
24
+ import { TimestampProgramNudge } from '@norskvideo/norsk-api/lib/media_pb';
25
+ import { TsFileInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
26
+ import { TsInputEvent } from '@norskvideo/norsk-api/lib/media_pb';
27
+ import { UdpTsInputMessage } from '@norskvideo/norsk-api/lib/media_pb';
28
+ import { Version } from '@norskvideo/norsk-api/lib/shared/common_pb';
29
+ import { Wave } from '@norskvideo/norsk-api/lib/media_pb';
30
+
31
+ /** @public */
32
+ export declare type AacProfile = "lc" | "main" | "high";
33
+
34
+ /** @public */
35
+ export declare interface AacSettings {
36
+ kind: "aac";
37
+ sampleRate: SampleRate;
38
+ profile: AacProfile;
39
+ }
40
+
41
+ /**
42
+ * @public
43
+ * {@link NorskTransform.audioBuildMultichannel}
44
+ */
45
+ export declare class AudioBuildMultichannelNode extends AutoProcessorMediaNode<"audio"> {
46
+ close(): void;
47
+ }
48
+
49
+ /** @public */
50
+ export declare interface AudioBuildMultichannelSettings extends ProcessorNodeSettings<AudioBuildMultichannelNode> {
51
+ channelLayout: ChannelLayout;
52
+ channelList: readonly StreamKey[];
53
+ outputStreamKey: StreamKey;
54
+ }
55
+
56
+ export { AudioCodec }
57
+
58
+ /**
59
+ * @public
60
+ * {@link NorskTransform.audioEncoder}
61
+ */
62
+ export declare class AudioEncoderNode extends AutoProcessorMediaNode<"audio"> {
63
+ close(): void;
64
+ }
65
+
66
+ /** @public */
67
+ export declare interface AudioEncoderSettings extends ProcessorNodeSettings<AudioEncoderNode> {
68
+ channelLayout: ChannelLayout;
69
+ bitrate: number;
70
+ outputRenditionName: string;
71
+ codec: OpusSettings | AacSettings;
72
+ }
73
+
74
+ /**
75
+ * @public
76
+ * {@link NorskTransform.audioGain}
77
+ */
78
+ export declare class AudioGainNode extends AutoProcessorMediaNode<"audio"> {
79
+ updateConfig(settings: AudioGainSettingsUpdate): void;
80
+ close(): void;
81
+ }
82
+
83
+ /** @public */
84
+ export declare interface AudioGainSettings extends ProcessorNodeSettings<AudioGainNode> {
85
+ /** A vector of gains for this source, one for each channel */
86
+ channelGains: readonly Gain[];
87
+ }
88
+
89
+ /** @public */
90
+ export declare interface AudioGainSettingsUpdate {
91
+ /** A vector of gains for this source, one for each channel */
92
+ channelGains?: readonly Gain[];
93
+ }
94
+
95
+ /** @public */
96
+ export declare interface AudioLevels {
97
+ stream: StreamKey;
98
+ pts: Interval;
99
+ channelLevels: ChannelLevels[];
100
+ }
101
+
102
+ /**
103
+ * @public
104
+ * Monitor audio levels. Create a new instance of this node with {@link NorskControl.audioLevels}.
105
+ */
106
+ export declare class AudioLevelsNode extends AutoProcessorMediaNode<"audio"> {
107
+ close(): void;
108
+ }
109
+
110
+ /**
111
+ * @public
112
+ * Settings for an {@link AudioLevelsNode}
113
+ */
114
+ export declare interface AudioLevelsSettings extends ProcessorNodeSettings<AudioLevelsNode> {
115
+ /**
116
+ * Called with the audio level data
117
+ * @param levels - The level data for the audio stream
118
+ * @eventProperty
119
+ */
120
+ onData: (levels: AudioLevels) => void;
121
+ intervalFrames?: number;
122
+ }
123
+
124
+ /**
125
+ * @public Mix N audio channels to M audio channels with a matrix of gains.
126
+ * Use {@link NorskTransform.audioMatrixMixer} to create one.
127
+ */
128
+ export declare class AudioMatrixMixerNode extends AutoProcessorMediaNode<"audio"> {
129
+ /** @public */
130
+ updateConfig(settings: AudioMatrixMixerSettingsUpdate): void;
131
+ close(): void;
132
+ }
133
+
134
+ /** @public Config for the {@link AudioMatrixMixerNode} */
135
+ export declare interface AudioMatrixMixerSettings extends ProcessorNodeSettings<AudioMatrixMixerNode> {
136
+ /** The NxM matrix of gains from N input channels to M output channels */
137
+ channelGains: readonly Gain[][];
138
+ /** The desired output channel layout, such as "5.1" */
139
+ outputChannelLayout: ChannelLayout;
140
+ }
141
+
142
+ /** @public Config update for the {@link AudioMatrixMixerNode}.
143
+ * Call {@link AudioMatrixMixerNode.updateConfig} for updating the config.
144
+ */
145
+ export declare interface AudioMatrixMixerSettingsUpdate {
146
+ /** The NxM updated matrix of gains from N input channels to M output channels */
147
+ channelGains: readonly Gain[][];
148
+ }
149
+
150
+ /**
151
+ * @public
152
+ * {@link NorskTransform.audioMixer}
153
+ */
154
+ export declare class AudioMixerNode<Pins extends string> extends ProcessorMediaNode<Pins> {
155
+ updateConfig(settings: AudioMixerSettingsUpdate<Pins>): void;
156
+ close(): void;
157
+ }
158
+
159
+ /** @public */
160
+ export declare interface AudioMixerSettings<Pins extends string> extends ProcessorNodeSettings<AudioMixerNode<Pins>> {
161
+ /** The sources to mix */
162
+ sources: readonly AudioMixerSource<Pins>[];
163
+ /** The name of the output source */
164
+ outputSource: string;
165
+ /** The sample rate of the output */
166
+ sampleRate?: SampleRate;
167
+ }
168
+
169
+ /** @public */
170
+ export declare interface AudioMixerSettingsUpdate<Pins extends string> {
171
+ sources: readonly AudioMixerSource<Pins>[];
172
+ }
173
+
174
+ /** @public */
175
+ export declare interface AudioMixerSource<Pins> {
176
+ /** The name of the InputPin for this source */
177
+ pin: Pins;
178
+ /** A vector of gains for this source, one for each channel */
179
+ channelGains?: readonly Gain[];
180
+ }
181
+
182
+ /**
183
+ * @public
184
+ * {@link NorskInput.audioSignal}
185
+ */
186
+ export declare class AudioSignalGeneratorNode extends SourceMediaNode {
187
+ close(): void;
188
+ }
189
+
190
+ /** @public */
191
+ export declare interface AudioSignalGeneratorSettings extends SourceNodeSettings<AudioSignalGeneratorNode> {
192
+ sourceName: string;
193
+ channelLayout: ChannelLayout;
194
+ sampleRate: SampleRate;
195
+ /** The sample format to use. Default: "fltp" */
196
+ sampleFormat?: SampleFormat;
197
+ /** Waveform - create one with {@link mkSine} */
198
+ wave?: Wave;
199
+ }
200
+
201
+ /**
202
+ * @public
203
+ * {@link NorskTransform.audioSplitMultichannel}
204
+ */
205
+ export declare class AudioSplitMultichannelNode extends AutoProcessorMediaNode<"audio"> {
206
+ close(): void;
207
+ }
208
+
209
+ /** @public */
210
+ export declare interface AudioSplitMultichannelSettings extends ProcessorNodeSettings<AudioSplitMultichannelNode> {
211
+ outputStreamKey: StreamKey;
212
+ }
213
+
214
+ /**
215
+ * @public
216
+ * Returns the stream keys for audio streams in a media context
217
+ * @param streams - The media context from which to return the stream keys
218
+ * @returns The audio stream keys in the media context
219
+ */
220
+ export declare function audioStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];
221
+
222
+ /** @public */
223
+ export declare interface AudioStreamMetadata {
224
+ codec: AudioCodec;
225
+ sampleRate: SampleRate;
226
+ channelLayout?: ChannelLayout;
227
+ }
228
+
229
+ /**
230
+ * @public
231
+ * Filters a context to only the audio streams within it
232
+ * @param streams - The media context from which to return the streams
233
+ * @returns The audio streams in the media context
234
+ */
235
+ export declare function audioStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
236
+
237
+ /** @public */
238
+ export declare const audioToPin: <Pins extends string>(pin: Pins) => (streams: StreamMetadata[]) => PinToKey<Pins>;
239
+
240
+ declare interface AutoProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
241
+ }
242
+
243
+ declare class AutoProcessorMediaNode<Pins extends string> {
244
+ constructor(client: MediaClient, subscribeFn: (subscription: Subscription) => void);
245
+ }
246
+
247
+ /** @public */
248
+ export declare class AutoSinkMediaNode<Pins extends string> extends SinkMediaNode<Pins | "auto"> {
249
+ /** Subscribe to the given sources
250
+ * This version of subscribe simply requires a list of stream keys to be
251
+ * returned from each selector, and the server will automatically
252
+ * assign each stream to the appropriate pin on the sink node
253
+ * This is the appropriate method for most cases
254
+ */
255
+ subscribe(sources: ReceiveFromAddressAuto[], validation?: (context: Context) => boolean): void;
256
+ }
257
+
258
+ /** @public */
259
+ export declare const avToPin: <Pins extends string>(pin: Pins) => (streams: StreamMetadata[]) => PinToKey<Pins>;
260
+
261
+ /** @public */
262
+ export declare interface AwsCredentials {
263
+ accessKey: string;
264
+ secretKey: string;
265
+ sessionToken: string;
266
+ region: string;
267
+ }
268
+
269
+ /** @public */
270
+ export declare interface AwsS3PushDestinationSettings {
271
+ type: "s3";
272
+ host: string;
273
+ port: number;
274
+ pathPrefix: string;
275
+ sessionId?: string;
276
+ id: string;
277
+ awsRegion: string;
278
+ awsCredentials?: AwsCredentials;
279
+ retentionPeriodSeconds: number;
280
+ }
281
+
282
+ /**
283
+ * @public
284
+ * {@link NorskTransform.awsTranscribe}
285
+ */
286
+ export declare class AwsTranscribeNode extends AutoProcessorMediaNode<"audio"> {
287
+ close(): void;
288
+ }
289
+
290
+ /** @public */
291
+ export declare interface AwsTranscribeSettings extends ProcessorNodeSettings<AwsTranscribeNode> {
292
+ awsRegion: string;
293
+ outputStreamId: number;
294
+ language: string;
295
+ sentenceBuildMode: SentenceBuildMode;
296
+ sentenceStabilizationMode: StabilizationMode;
297
+ }
298
+
299
+ /**
300
+ * @public
301
+ * There are three possible modes:
302
+ * - "abr": encode in average bitrate mode, specified in kilobits/sec (note, 1
303
+ * kilobit is 1000 bits). You can make use of the vbv settings to control
304
+ * the bounds on how much the actual bitrate can fluctuate within the bounds
305
+ * of the average
306
+ * - "cqp": encode in constant quantizer mode. In general, crf will give better
307
+ * results, although cqp can be faster to encode
308
+ * - "crf": encode in constant rate factor mode. This will give a constant 'quality'
309
+ * to the encode, but with a variable bitrate
310
+ */
311
+ export declare interface BitrateMode {
312
+ value: number;
313
+ mode: "abr" | "cqp" | "crf";
314
+ }
315
+
316
+ /** @public */
317
+ export declare type BrowserEvent = {
318
+ case: "onLoaded";
319
+ value: BrowserOnLoaded;
320
+ } | {
321
+ case: "onLoadStart";
322
+ value: BrowserOnLoadStart;
323
+ } | {
324
+ case: "onLoadEnd";
325
+ value: BrowserOnLoadEnd;
326
+ } | {
327
+ case: "onLoadError";
328
+ value: BrowserOnLoadError;
329
+ };
330
+
331
+ /**
332
+ * @public
333
+ * {@link NorskInput.browser}
334
+ */
335
+ export declare class BrowserInputNode extends SourceMediaNode {
336
+ updateConfig(settings: BrowserInputSettingsUpdate): void;
337
+ nudge(nudge: number): void;
338
+ close(): void;
339
+ }
340
+
341
+ /** @public */
342
+ export declare interface BrowserInputSettings extends SourceNodeSettings<BrowserInputNode>, StreamStatsMixin {
343
+ url: string;
344
+ resolution: {
345
+ width: number;
346
+ height: number;
347
+ };
348
+ sourceName: string;
349
+ frameRate: FrameRate;
350
+ onBrowserEvent?: (event: BrowserEvent) => void;
351
+ }
352
+
353
+ /** @public */
354
+ export declare interface BrowserInputSettingsUpdate {
355
+ url?: string;
356
+ resolution?: {
357
+ width: number;
358
+ height: number;
359
+ };
360
+ }
361
+
362
+ /** @public */
363
+ export declare interface BrowserOnLoaded {
364
+ url: string;
365
+ }
366
+
367
+ /** @public */
368
+ export declare interface BrowserOnLoadEnd {
369
+ url: string;
370
+ statusCode: number;
371
+ }
372
+
373
+ /** @public */
374
+ export declare interface BrowserOnLoadError {
375
+ url: string;
376
+ errorText: string;
377
+ errorCode: number;
378
+ }
379
+
380
+ /** @public */
381
+ export declare interface BrowserOnLoadStart {
382
+ url: string;
383
+ }
384
+
385
+ /** @public Channel layout for an audio stream */
386
+ export declare type ChannelLayout = "mono" | "stereo" | "surround" | "4.0" | "5.0" | "5.1" | "7.1" | "5.1.4" | "7.1.4" | (ChannelName | ExplicitChannel)[];
387
+
388
+ /** @public */
389
+ export declare interface ChannelLevels {
390
+ rms?: Db;
391
+ peak?: Db;
392
+ }
393
+
394
+ /** @public */
395
+ export declare type ChannelName =
396
+ /** Left front */
397
+ "l"
398
+ /** Right front */
399
+ | "r"
400
+ /** Centre front */
401
+ | "c"
402
+ /** Low frequency enhancement */
403
+ | "lfe"
404
+ /** Left surround */
405
+ | "ls"
406
+ /** Right surround */
407
+ | "rs"
408
+ /** Left front centre */
409
+ | "lc"
410
+ /** Right front centre */
411
+ | "rc"
412
+ /** Rear surround left */
413
+ | "lsr"
414
+ /** Rear Surround Right */
415
+ | "rsr"
416
+ /** Rear centre */
417
+ | "cs"
418
+ /** Left surround direct */
419
+ | "lsd"
420
+ /** Right surround direct */
421
+ | "rsd"
422
+ /** Left side surround */
423
+ | "lss"
424
+ /** Right side surround */
425
+ | "rss"
426
+ /** Left wide front */
427
+ | "lw"
428
+ /** Right wide front */
429
+ | "rw"
430
+ /** Left front vertical height */
431
+ | "lv"
432
+ /** Right front vertical height */
433
+ | "rv"
434
+ /** Centre front vertical height */
435
+ | "cv"
436
+ /** Left surround vertical height rear */
437
+ | "lvr"
438
+ /** Right surround vertical height rear */
439
+ | "rvr"
440
+ /** Centre vertical height rear */
441
+ | "cvr"
442
+ /** Left vertical height side surround */
443
+ | "lvss"
444
+ /** Right vertical height side surround */
445
+ | "rvss"
446
+ /** Top centre surround */
447
+ | "ts"
448
+ /** Low frequency enhancement 2 */
449
+ | "lfe2"
450
+ /** Left front vertical bottom */
451
+ | "lb"
452
+ /** Right front vertical bottom */
453
+ | "rb"
454
+ /** Centre front vertical bottom */
455
+ | "cb"
456
+ /** Left vertical height surround */
457
+ | "lvs"
458
+ /** Right vertical height surround */
459
+ | "rvs"
460
+ /** Low frequency enhancement 3 */
461
+ | "lfe3"
462
+ /** Left edge of screen */
463
+ | "leos"
464
+ /** Right edge of screen */
465
+ | "reos"
466
+ /** Halfway between centre of screen and left edge of screen */
467
+ | "hwbcal"
468
+ /** Halfway between centre of screen and right edge of screen */
469
+ | "hwbcar"
470
+ /** Left back surround */
471
+ | "lbs"
472
+ /** Right back surround */
473
+ | "rbs"
474
+ /** Unknown */
475
+ | "unknown";
476
+
477
+ /**
478
+ * @public
479
+ * {@link NorskTransform.chaosMonkey}
480
+ */
481
+ export declare class ChaosMonkeyNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
482
+ close(): void;
483
+ }
484
+
485
+ /** @public */
486
+ export declare interface ChaosMonkeySettings extends ProcessorNodeSettings<ChaosMonkeyNode> {
487
+ frameDrop?: DropRandom | DropEvery;
488
+ }
489
+
490
+ /** @public */
491
+ export declare type CMAFDestinationSettings = HlsPushDestinationSettings | AwsS3PushDestinationSettings | LocalPullDestinationSettings;
492
+
493
+ declare class CommonHlsNode<ClientMessage, Pins extends string, T extends MediaNodeState> extends AutoSinkMediaNode<Pins> {
494
+ constructor(client: MediaClient, settings: NodeSettings<T> & StreamStatsMixin, grpcInit: () => grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, subscribeFn: (subscription: Subscription) => void, subscribedStreamsChangedFn?: (streams: StreamMetadata[]) => void);
495
+ close(): void;
496
+ }
497
+
498
+ /** @public */
499
+ export declare type ComposeMissingStreamBehaviour =
500
+ /**
501
+ * Produce frames on output by dropping the part(s) of the composition which
502
+ * cannot be fulfilled.
503
+ */
504
+ "drop_part"
505
+ /**
506
+ * Wait for all streams to be present before producing (further) output
507
+ */
508
+ | "wait_for_all";
509
+
510
+ /** @public */
511
+ export declare interface ComposePart<Pins> {
512
+ pin: Pins;
513
+ sourceRect: OffsetRect;
514
+ destRect: OffsetRect;
515
+ zIndex: number;
516
+ opacity: number;
517
+ id?: string;
518
+ transition?: PartTransition;
519
+ referenceResolution?: Resolution;
520
+ }
521
+
522
+ /**
523
+ * @public
524
+ * {@link NorskTransform.composeOverlay}
525
+ */
526
+ export declare class ComposeVideoNode<Pins extends string> extends ProcessorMediaNode<Pins> {
527
+ updateConfig(settings: ComposeVideoSettingsUpdate<Pins>): void;
528
+ close(): void;
529
+ }
530
+
531
+ /** @public */
532
+ export declare interface ComposeVideoSettings<Pins extends string> extends ProcessorNodeSettings<ComposeVideoNode<Pins>> {
533
+ referenceStream: Pins;
534
+ parts: readonly ComposePart<Pins>[];
535
+ referenceResolution?: Resolution;
536
+ outputResolution: Resolution;
537
+ outputPixelFormat?: PixelFormat;
538
+ missingStreamBehaviour?: ComposeMissingStreamBehaviour;
539
+ onTransitionComplete?: () => void;
540
+ }
541
+
542
+ /** @public */
543
+ export declare interface ComposeVideoSettingsUpdate<Pins extends string> {
544
+ parts: readonly ComposePart<Pins>[];
545
+ }
546
+
547
+ /** @public */
548
+ export declare interface Core {
549
+ logicalCpuIds: LogicalCpuId[];
550
+ }
551
+
552
+ /** @public */
553
+ export declare interface CpuTopology {
554
+ numaNodes: NumaNode[];
555
+ }
556
+
557
+ /** @public A decibel (dB). A null value represents -inf. */
558
+ export declare type Db = number | null;
559
+
560
+ /** @public */
561
+ export declare interface DeckLinkCard {
562
+ index: number;
563
+ displayName: string;
564
+ inputConnections: DeckLinkVideoConnection[];
565
+ outputConnections: DeckLinkVideoConnection[];
566
+ ioSupport: DeckLinkVideoIOSupport[];
567
+ }
568
+
569
+ /** @public */
570
+ export declare interface DeckLinkDisplayMode {
571
+ id: DeckLinkDisplayModeId;
572
+ name: string;
573
+ width: number;
574
+ height: number;
575
+ frameRate: FrameRate;
576
+ }
577
+
578
+ /** @public */
579
+ export declare type DeckLinkDisplayModeId = "auto" | "sd_ntsc" | "sd_nt23" | "sd_pal" | "sd_ntsp" | "sd_palp" | "hd1080_23ps" | "hd1080_24ps" | "hd1080_p25" | "hd1080_p29" | "hd1080_p30" | "hd1080_p47" | "hd1080_p48" | "hd1080_i50" | "hd1080_i59" | "hd1080_i60" | "hd1080_p95" | "hd1080_p96" | "hd1080_p10" | "hd1080_p11" | "hd1080_p12" | "hd1080_p50" | "hd1080_p59" | "hd1080_p60" | "hd720_p50" | "hd720_p59" | "hd720_p60" | "two_k_23" | "two_k_24" | "two_k_25" | "two_k_dci_23" | "two_k_dci_24" | "two_k_dci_25" | "two_k_dci_29" | "two_k_dci_30" | "two_k_dci_47" | "two_k_dci_48" | "two_k_dci_50" | "two_k_dci_59" | "two_k_dci_60" | "two_k_dci_95" | "two_k_dci_96" | "two_k_dci_10" | "two_k_dci_11" | "two_k_dci_12" | "four_k_23" | "four_k_24" | "four_k_25" | "four_k_29" | "four_k_30" | "four_k_47" | "four_k_48" | "four_k_50" | "four_k_59" | "four_k_60" | "four_k_95" | "four_k_96" | "four_k_10" | "four_k_11" | "four_k_12" | "four_k_dci_23" | "four_k_dci_24" | "four_k_dci_25" | "four_k_dci_29" | "four_k_dci_30" | "four_k_dci_47" | "four_k_dci_48" | "four_k_dci_50" | "four_k_dci_59" | "four_k_dci_60" | "four_k_dci_95" | "four_k_dci_96" | "four_k_dci_10" | "four_k_dci_11" | "four_k_dci_12" | "eight_k_23" | "eight_k_24" | "eight_k_25" | "eight_k_29" | "eight_k_30" | "eight_k_47" | "eight_k_48" | "eight_k_50" | "eight_k_59" | "eight_k_60" | "eight_k_dci_23" | "eight_k_dci_24" | "eight_k_dci_25" | "eight_k_dci_29" | "eight_k_dci_30" | "eight_k_dci_47" | "eight_k_dci_48" | "eight_k_dci_50" | "eight_k_dci_59" | "eight_k_dci_60" | "pc_vga6" | "pc_svg6" | "pc_wxg5" | "pc_wxg6" | "pc_sxg5" | "pc_sxg6" | "pc_uxg5" | "pc_uxg6" | "pc_wux5" | "pc_wux6" | "pc_1945" | "pc_1946" | "pc_wqh5" | "pc_wqh6" | "pc_wqx5" | "pc_wqx6" | "special_iunk";
580
+
581
+ /**
582
+ * @public
583
+ * SDI capture through a DeckLink card. Create an ingest with {@link NorskInput.deckLink}.
584
+ */
585
+ export declare class DeckLinkInputNode extends SourceMediaNode {
586
+ nudge(nudge: number): void;
587
+ close(): void;
588
+ }
589
+
590
+ /**
591
+ * @public
592
+ * Settings to control SDI capture through a DeckLink card
593
+ */
594
+ export declare interface DeckLinkInputSettings extends InputSettings<DeckLinkInputNode>, StreamStatsMixin {
595
+ /** Which card to use */
596
+ cardIndex: number;
597
+ /** The audio channel layout for the input */
598
+ channelLayout: ChannelLayout;
599
+ /** SDI or HDMI capture */
600
+ videoConnection: DeckLinkVideoConnection;
601
+ /** Typically left undefined, but can be used to force capture for a specific {@link DeckLinkDisplayModeId}. If
602
+ * the source is not currently in this mode, then no capture will occur. */
603
+ displayModeId?: DeckLinkDisplayModeId;
604
+ pixelFormat?: DeckLinkPixelFormat;
605
+ }
606
+
607
+ /** @public */
608
+ export declare type DeckLinkPixelFormat = "uyvy" | "argb";
609
+
610
+ /** @public */
611
+ export declare type DeckLinkVideoConnection = "sdi" | "hdmi" | "optical_sdi" | "component" | "composite" | "svideo";
612
+
613
+ /** @public */
614
+ export declare type DeckLinkVideoIOSupport = "capture" | "playback";
615
+
616
+ /** @public */
617
+ export declare interface DropEvery {
618
+ kind: "every";
619
+ every: number;
620
+ }
621
+
622
+ /** @public */
623
+ export declare interface DropRandom {
624
+ kind: "random";
625
+ percentage: number;
626
+ }
627
+
628
+ /** @public */
629
+ export declare interface FrameRate {
630
+ frames: number;
631
+ seconds: number;
632
+ }
633
+
634
+ /**
635
+ * @public
636
+ * A relative change in decibels, expressing a power ratio.
637
+ *
638
+ * A value of 0dB means no change, positive values mean an increase in power, and negative values mean a decrease in power.
639
+ */
640
+ export declare type Gain = Db;
641
+
642
+ /** @public */
643
+ export declare function getAmountOfChannels(layout: ChannelLayout): number;
644
+
645
+ /**
646
+ * @public
647
+ * {@link NorskControl.hardSwitcher}
648
+ */
649
+ export declare class HardSwitcherNode<Pins extends string> extends ProcessorMediaNode<Pins> {
650
+ switchSource(newSource: Pins): void;
651
+ close(): void;
652
+ }
653
+
654
+ /** @public */
655
+ export declare interface HardSwitcherSettings<Pins extends string> extends ProcessorNodeSettings<HardSwitcherNode<Pins>> {
656
+ activeSource: Pins;
657
+ outputSource: string;
658
+ }
659
+
660
+ /**
661
+ * @public
662
+ *
663
+ */
664
+ /**
665
+ * @public
666
+ */
667
+ export declare interface HardwareInfo {
668
+ cpuTopology: CpuTopology;
669
+ systemMemory: number;
670
+ deckLinkCards: DeckLinkCard[];
671
+ }
672
+
673
+ /**
674
+ * @public
675
+ * {@link NorskOutput.hlsAudio}
676
+ */
677
+ export declare class HlsAudioOutputNode extends HlsNodeWithPlaylist<HlsAudioMessage, "audio", HlsAudioOutputNode> {
678
+ updateCredentials(settings: UpdateCredentials): void;
679
+ }
680
+
681
+ /** @public */
682
+ export declare interface HlsAudioOutputSettings extends NodeSettings<HlsAudioOutputNode> {
683
+ segmentDurationSeconds: number;
684
+ partDurationSeconds: number;
685
+ delayOutputMs?: number;
686
+ encryption?: {
687
+ encryptionKey: string;
688
+ encryptionKeyId: string;
689
+ };
690
+ destinations: CMAFDestinationSettings[];
691
+ }
692
+
693
+ /**
694
+ * @public
695
+ * {@link NorskOutput.hlsMaster}
696
+ */
697
+ export declare class HlsMasterOutputNode extends CommonHlsNode<HlsMasterMessage, "video" | "audio" | "subtitle", HlsMasterOutputNode> {
698
+ /** @public The URL of the master playlist */
699
+ playlistUrl: string;
700
+ updateCredentials(settings: UpdateCredentials): void;
701
+ }
702
+
703
+ /** @public */
704
+ export declare interface HlsMasterOutputSettings extends NodeSettings<HlsMasterOutputNode> {
705
+ playlistName: string;
706
+ pathPrefix?: string;
707
+ destinations: CMAFDestinationSettings[];
708
+ }
709
+
710
+ /**
711
+ * @public
712
+ * {@link NorskOutput.hlsMasterPush}
713
+ */
714
+ export declare class HlsMasterPushOutputNode extends CommonHlsNode<HlsMasterPushMessage, "video" | "audio" | "subtitle", HlsMasterPushOutputNode> {
715
+ }
716
+
717
+ /** @public */
718
+ export declare interface HlsMasterPushOutputSettings extends NodeSettings<HlsMasterPushOutputNode> {
719
+ playlistName: string;
720
+ destination: CMAFDestinationSettings;
721
+ }
722
+
723
+ declare class HlsNodeWithPlaylist<ClientMessage, Pins extends string, T extends MediaNodeState> extends CommonHlsNode<ClientMessage, Pins, T> {
724
+ constructor(client: MediaClient, settings: NodeSettings<T> & StreamStatsMixin, grpcInit: () => grpc.ClientDuplexStream<ClientMessage, HlsOutputEvent>, subscribeFn: (subscription: Subscription) => void);
725
+ /** @public
726
+ * Returns the URL to the HLS playlist entry. Note this can only be evaluated once the stream is active as it
727
+ * varies with the stream subscribed to. Useful during development, but you probably want to
728
+ * use {@link NorskOutput.hlsMaster} for production.
729
+ */
730
+ url(): Promise<string>;
731
+ }
732
+
733
+ /** @public */
734
+ export declare interface HlsPushDestinationSettings {
735
+ type: "generic";
736
+ host: string;
737
+ port: number;
738
+ pathPrefix: string;
739
+ sessionId?: string;
740
+ id: string;
741
+ retentionPeriodSeconds: number;
742
+ }
743
+
744
+ /**
745
+ * @public
746
+ * {@link NorskOutput.hlsTsAudio}
747
+ */
748
+ export declare class HlsTsAudioOutputNode extends CommonHlsNode<HlsTsAudioMessage, "audio", HlsTsAudioOutputNode> {
749
+ }
750
+
751
+ /** @public */
752
+ export declare interface HlsTsAudioOutputSettings extends NodeSettings<HlsTsAudioOutputNode> {
753
+ segmentDurationSeconds: number;
754
+ delayOutputMs?: number;
755
+ }
756
+
757
+ /**
758
+ * @public
759
+ * {@link NorskOutput.hlsTsAudioPush}
760
+ */
761
+ export declare class HlsTsAudioPushOutputNode extends CommonHlsNode<HlsTsAudioPushMessage, "audio", HlsTsAudioOutputNode> {
762
+ }
763
+
764
+ /** @public */
765
+ export declare interface HlsTsAudioPushOutputSettings extends NodeSettings<HlsTsAudioPushOutputNode> {
766
+ segmentDurationSeconds: number;
767
+ delayOutputMs?: number;
768
+ destination: CMAFDestinationSettings;
769
+ }
770
+
771
+ /**
772
+ * @public
773
+ * {@link NorskOutput.hlsTsCombinedPush}
774
+ */
775
+ export declare class HlsTsCombinedPushOutputNode extends CommonHlsNode<HlsTsCombinedPushMessage, "audio" | "video", HlsTsCombinedPushOutputNode> {
776
+ }
777
+
778
+ /** @public */
779
+ export declare interface HlsTsCombinedPushOutputSettings extends NodeSettings<HlsTsCombinedPushOutputNode> {
780
+ segmentDurationSeconds: number;
781
+ delayOutputMs?: number;
782
+ destination: CMAFDestinationSettings;
783
+ playlistName: string;
784
+ }
785
+
786
+ /**
787
+ * @public
788
+ * {@link NorskOutput.hlsTsVideo}
789
+ */
790
+ export declare class HlsTsVideoOutputNode extends CommonHlsNode<HlsTsVideoMessage, "video", HlsTsVideoOutputNode> {
791
+ }
792
+
793
+ /** @public */
794
+ export declare interface HlsTsVideoOutputSettings extends NodeSettings<HlsTsVideoOutputNode> {
795
+ segmentDurationSeconds: number;
796
+ delayOutputMs?: number;
797
+ }
798
+
799
+ /**
800
+ * @public
801
+ * {@link NorskOutput.hlsTsVideoPush}
802
+ */
803
+ export declare class HlsTsVideoPushOutputNode extends CommonHlsNode<HlsTsVideoPushMessage, "video", HlsTsVideoOutputNode> {
804
+ }
805
+
806
+ /** @public */
807
+ export declare interface HlsTsVideoPushOutputSettings extends NodeSettings<HlsTsVideoPushOutputNode> {
808
+ segmentDurationSeconds: number;
809
+ delayOutputMs?: number;
810
+ destination: CMAFDestinationSettings;
811
+ }
812
+
813
+ /**
814
+ * @public
815
+ * {@link NorskOutput.hlsVideo}
816
+ */
817
+ export declare class HlsVideoOutputNode extends HlsNodeWithPlaylist<HlsVideoMessage, "video", HlsVideoOutputNode> {
818
+ updateCredentials(settings: UpdateCredentials): void;
819
+ }
820
+
821
+ /** @public */
822
+ export declare interface HlsVideoOutputSettings extends NodeSettings<HlsVideoOutputNode> {
823
+ segmentDurationSeconds: number;
824
+ partDurationSeconds: number;
825
+ delayOutputMs?: number;
826
+ sessionId?: string;
827
+ id: string;
828
+ encryption?: {
829
+ encryptionKey: string;
830
+ encryptionKeyId: string;
831
+ };
832
+ destinations: CMAFDestinationSettings[];
833
+ }
834
+
835
+ /**
836
+ * @public
837
+ * {@link NorskOutput.hlsWebVtt}
838
+ */
839
+ export declare class HlsWebVttOutputNode extends HlsNodeWithPlaylist<HlsWebVttMessage, "subtitle", HlsWebVttOutputNode> {
840
+ }
841
+
842
+ /** @public */
843
+ export declare interface HlsWebVttOutputSettings extends NodeSettings<HlsWebVttOutputNode> {
844
+ segmentDurationSeconds: number;
845
+ sessionId?: string;
846
+ delayOutputMs?: number;
847
+ }
848
+
849
+ /**
850
+ * @public
851
+ * {@link NorskOutput.hlsWebVttPush}
852
+ */
853
+ export declare class HlsWebVttPushOutputNode extends CommonHlsNode<HlsWebVttPushMessage, "subtitle", HlsWebVttPushOutputNode> {
854
+ }
855
+
856
+ export declare interface HlsWebVttPushOutputSettings extends NodeSettings<HlsWebVttPushOutputNode> {
857
+ segmentDurationSeconds: number;
858
+ delayOutputMs?: number;
859
+ destination: CMAFDestinationSettings;
860
+ retentionPeriodSeconds: number;
861
+ hlsCacheDirectory: string;
862
+ maximumPlaylistSegments: number;
863
+ }
864
+
865
+ /**
866
+ * @public
867
+ * {@link NorskInput.imageFile}
868
+ */
869
+ export declare class ImageFileInputNode extends SourceMediaNode {
870
+ close(): void;
871
+ }
872
+
873
+ /** @public */
874
+ export declare interface ImageFileInputSettings extends SourceNodeSettings<ImageFileInputNode>, StreamStatsMixin {
875
+ sourceName: string;
876
+ fileName: string;
877
+ /** The file format for the image. Will be inferred from the file name if not specified. */
878
+ imageFormat?: ImageFormat;
879
+ }
880
+
881
+ /** @public */
882
+ export declare type ImageFormat = "png" | "jpeg" | "gif" | "webp" | "pnm" | "tiff" | "tga" | "dds" | "bmp" | "ico" | "hdr" | "openexr" | "farbfeld" | "avif";
883
+
884
+ /** @public */
885
+ export declare interface InputSettings<T extends MediaNodeState> extends SourceNodeSettings<T> {
886
+ sourceName: string;
887
+ }
888
+
889
+ /** @public A time interval measured as ticks / (ticks per second) */
890
+ export declare interface Interval {
891
+ n: number;
892
+ d: number;
893
+ }
894
+
895
+ /** @public */
896
+ export declare interface LocalFileInputSettings extends InputSettings<SourceMediaNode> {
897
+ fileName: string;
898
+ onEof?: () => void;
899
+ }
900
+
901
+ /** @public */
902
+ export declare interface LocalPullDestinationSettings {
903
+ type: "local";
904
+ sessionId?: string;
905
+ retentionPeriodSeconds: number;
906
+ }
907
+
908
+ /**
909
+ * @public
910
+ * {@link NorskDuplex.localWebRTC}
911
+ */
912
+ export declare class LocalWebRTCNode extends AutoProcessorMediaNode<"audio" | "video"> {
913
+ /** @public The URL of the local player */
914
+ playerUrl: string;
915
+ close(): void;
916
+ }
917
+
918
+ /** @public */
919
+ export declare interface LocalWebRTCSettings extends ProcessorNodeSettings<LocalWebRTCNode>, StreamStatsMixin {
920
+ }
921
+
922
+ /** @public */
923
+ export declare type Log = {
924
+ level: "emergency" | "alert" | "critical" | "error" | "warning" | "notice" | "info" | "debug";
925
+ timestamp: Date;
926
+ message: string;
927
+ };
928
+
929
+ /** @public */
930
+ export declare type LogicalCpuId = number;
931
+
932
+ /** @public */
933
+ export declare type MediaNodeId = string;
934
+
935
+ /** @public */
936
+ export declare class MediaNodeState {
937
+ id: MediaNodeId | undefined;
938
+ constructor(client: MediaClient);
939
+ close(): void;
940
+ }
941
+
942
+ /**
943
+ * @public
944
+ * {@link NorskTransform.metadataOverride}
945
+ */
946
+ export declare class MetadataOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
947
+ updateConfig(settings: MetadataOverrideSettingsUpdate): void;
948
+ close(): void;
949
+ }
950
+
951
+ /** @public */
952
+ export declare interface MetadataOverrideSettings extends ProcessorNodeSettings<MetadataOverrideNode>, MetadataOverrideSettingsUpdate {
953
+ }
954
+
955
+ /** @public */
956
+ export declare interface MetadataOverrideSettingsUpdate {
957
+ video?: {
958
+ /** Override the bitrate metadata of a compressed video stream, or `0` to clear */
959
+ bitrate?: number;
960
+ };
961
+ audio?: {
962
+ /** Override the bitrate metadata of a compressed audio stream, or `0` to clear */
963
+ bitrate?: number;
964
+ /** Override the language metadata of an audio stream, or `""` to clear */
965
+ language?: string;
966
+ };
967
+ subtitles?: {
968
+ /** Override the language metadata of a subtitles stream, or `""` to clear */
969
+ language?: string;
970
+ };
971
+ }
972
+
973
+ /**
974
+ * @public
975
+ * Generate encryption parameters from from an encryption KeyID and Key,
976
+ * in the form KEYID:KEY, both 16byte hexadecimal
977
+ */
978
+ export declare function mkEncryption(encryption: string | undefined): {
979
+ encryptionKey: string;
980
+ encryptionKeyId: string;
981
+ } | undefined;
982
+
983
+ /** @public */
984
+ export declare function mkSine(freq: number): Wave;
985
+
986
+ /** @public */
987
+ export declare interface Mp4FileInfo {
988
+ durationMs?: number;
989
+ byteLength?: number;
990
+ }
991
+
992
+ /**
993
+ * @public
994
+ * {@link NorskInput.localMp4File}
995
+ */
996
+ export declare class Mp4FileInputNode extends SourceMediaNode {
997
+ nudge(nudge: number): void;
998
+ close(): void;
999
+ }
1000
+
1001
+ /**
1002
+ * @public
1003
+ * Settings to control reading from an MP4 file
1004
+ */
1005
+ export declare interface Mp4FileInputSettings extends SourceNodeSettings<Mp4FileInputNode>, StreamStatsMixin {
1006
+ sourceName: string;
1007
+ /** Path to the MP4 file to read */
1008
+ fileName: string;
1009
+ /** Callback to be notified when the file ends */
1010
+ onEof?: () => void;
1011
+ onInfo?: (info: Mp4FileInfo) => void;
1012
+ }
1013
+
1014
+ /**
1015
+ * @public
1016
+ * {@link NorskOutput.localMp4File}
1017
+ */
1018
+ export declare class Mp4FileOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
1019
+ close(): void;
1020
+ writeFile(nonfragmentedFileName: string): void;
1021
+ }
1022
+
1023
+ /**
1024
+ * @public
1025
+ * Settings to control MP4 file output
1026
+ */
1027
+ export declare interface Mp4FileOutputSettings extends NodeSettings<Mp4FileOutputNode>, StreamStatsMixin {
1028
+ /**
1029
+ * Required: stream fragmented MP4 to this file.
1030
+ */
1031
+ fragmentedFileName: string;
1032
+ /**
1033
+ * Write non-fragmented MP4 to this file on close, creates a `.tmp` file to
1034
+ * store the frame data.
1035
+ */
1036
+ nonfragmentedFileName?: string;
1037
+ /**
1038
+ * Common encryption (CENC) key and key ID to use, both 16-bytes encoded in
1039
+ * hexadecimal (32 chars).
1040
+ */
1041
+ encryption?: {
1042
+ encryptionKey: string;
1043
+ encryptionKeyId: string;
1044
+ };
1045
+ }
1046
+
1047
+ /** @public */
1048
+ export declare interface MultiStreamStats {
1049
+ allStreams: SingleStreamStats[];
1050
+ sampleSizeSeconds: number;
1051
+ total: StreamStats;
1052
+ audio: StreamStats;
1053
+ video: StreamStats;
1054
+ }
1055
+
1056
+ /** @public */
1057
+ export declare interface NetintH264 {
1058
+ type: "ni-h264";
1059
+ extraOpts?: string;
1060
+ enableAud?: boolean;
1061
+ gpuIndex?: number;
1062
+ bitrate?: number;
1063
+ flushGop?: boolean;
1064
+ enableVfr?: boolean;
1065
+ crf?: number;
1066
+ cbr?: boolean;
1067
+ gopPresetIndex?: number;
1068
+ intraPeriod?: number;
1069
+ rcEnable?: boolean;
1070
+ intraQp?: number;
1071
+ rcInitDelay?: number;
1072
+ profile?: NetintH264Profile;
1073
+ level?: NetintH264Level;
1074
+ }
1075
+
1076
+ /** @public */
1077
+ export declare type NetintH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1078
+
1079
+ /** @public */
1080
+ export declare type NetintH264Profile = "baseline" | "main" | "extended" | "high" | "high10";
1081
+
1082
+ /** @public */
1083
+ export declare interface NetintHevc {
1084
+ type: "ni-hevc";
1085
+ extraOpts?: string;
1086
+ enableAud?: boolean;
1087
+ gpuIndex?: number;
1088
+ bitrate?: number;
1089
+ flushGop?: boolean;
1090
+ enableVfr?: boolean;
1091
+ crf?: number;
1092
+ cbr?: boolean;
1093
+ gopPresetIndex?: number;
1094
+ intraPeriod?: number;
1095
+ rcEnable?: boolean;
1096
+ intraQp?: number;
1097
+ rcInitDelay?: number;
1098
+ profile?: NetintHevcProfile;
1099
+ level?: NetintHevcLevel;
1100
+ tier?: NetintHevcTier;
1101
+ lossless?: boolean;
1102
+ hrdEnable?: boolean;
1103
+ dolbyVisionProfile?: number;
1104
+ }
1105
+
1106
+ /** @public */
1107
+ export declare type NetintHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1108
+
1109
+ /** @public */
1110
+ export declare type NetintHevcProfile = "main" | "main10";
1111
+
1112
+ /** @public */
1113
+ export declare type NetintHevcTier = "main" | "high";
1114
+
1115
+ /** @public */
1116
+ export declare function newSilentMatrix(rows: number, cols: number): Gain[][];
1117
+
1118
+ /**
1119
+ * @public
1120
+ * Settings common to all media nodes
1121
+ */
1122
+ export declare interface NodeSettings<T extends MediaNodeState> {
1123
+ /**
1124
+ * Media Node identifier. If one is not specified, a random identifier will be generated.
1125
+ */
1126
+ id?: string;
1127
+ /**
1128
+ * Called when the Norsk session ends
1129
+ */
1130
+ onEnd?: () => void;
1131
+ /**
1132
+ * Called with any errors from the Node
1133
+ */
1134
+ onError?: (error: Error) => void;
1135
+ /**
1136
+ * Called when the Node closes
1137
+ */
1138
+ onClose?: () => void;
1139
+ /**
1140
+ * Callback to synchronously perform an action when node creation is complete
1141
+ * (e.g. subscribe a downstream node before the first context/frame might arrive)
1142
+ */
1143
+ onCreate?: (node: T) => void;
1144
+ }
1145
+
1146
+ /**
1147
+ * @public
1148
+ * The entrypoint for all Norsk Media applications
1149
+ *
1150
+ * @example
1151
+ * ```ts
1152
+ * const norsk = new Norsk();
1153
+ * ```
1154
+ */
1155
+ export declare class Norsk {
1156
+ /**
1157
+ * Implements the {@link NorskInput} interface
1158
+ */
1159
+ input: NorskInput;
1160
+ /**
1161
+ * Implements the {@link NorskOutput} interface
1162
+ */
1163
+ output: NorskOutput;
1164
+ /**
1165
+ * Implements the {@link NorskDuplex} interface
1166
+ */
1167
+ duplex: NorskDuplex;
1168
+ /**
1169
+ * Implements the {@link NorskProcessor} interface
1170
+ */
1171
+ processor: NorskProcessor;
1172
+ /**
1173
+ * Implements the {@link NorskSystem} interface
1174
+ */
1175
+ system: NorskSystem;
1176
+ /**
1177
+ * Norsk Runtime version informatio
1178
+ */
1179
+ version: Version;
1180
+ close(): void;
1181
+ /** @public */
1182
+ static connect(settings?: NorskSettings): Promise<Norsk>;
1183
+ }
1184
+
1185
+ /**
1186
+ * @public
1187
+ * Methods that allow you to control and monitor media streams
1188
+ */
1189
+ export declare interface NorskControl {
1190
+ /**
1191
+ * Switch between multiple input sources via a hard cut. May be used to switch between
1192
+ * sources of possibly different configurations or without decoding.
1193
+ * @param settings - Options for the switcher
1194
+ */
1195
+ hardSwitcher<Pins extends string>(settings: HardSwitcherSettings<Pins>): Promise<HardSwitcherNode<Pins>>;
1196
+ /**
1197
+ * Switch between multiple input sources without interruption, via a transition.
1198
+ * @param settings - Options for the switcher
1199
+ */
1200
+ smoothSwitcher<Pins extends string>(settings: SmoothSwitcherSettings<Pins>): Promise<SmoothSwitcherNode<Pins>>;
1201
+ /**
1202
+ * Record statistical information about media streams, including bitrate,
1203
+ * frame rate, and number of keyframes, measured over some configurable
1204
+ * sampling windows.
1205
+ * @param settings - Callback and sampling intervals
1206
+ */
1207
+ streamStats(settings: StreamStatsSettings): Promise<StreamStatsNode>;
1208
+ /**
1209
+ * Monitor the volume of an audio stream
1210
+ * @param settings - Callback and options for the level data
1211
+ */
1212
+ audioLevels(settings: AudioLevelsSettings): Promise<AudioLevelsNode>;
1213
+ }
1214
+
1215
+ /**
1216
+ * @public
1217
+ * Methods that allow you to both ingest and egest media from your application
1218
+ * at the same time
1219
+ */
1220
+ export declare interface NorskDuplex {
1221
+ /**
1222
+ * Playback audio/video via webrtc to a browser, and accept audio/video input from a browser.
1223
+ * The browser client must conform to a custom protocol as implemented in the hosted test page.
1224
+ * For general WebRTC ingest prefer the WHIP input node, and for egest to a downstream media server
1225
+ * use the WHIP output node.
1226
+ * @param settings - Options for the webrtc node
1227
+ */
1228
+ localWebRTC(settings: LocalWebRTCSettings): Promise<LocalWebRTCNode>;
1229
+ }
1230
+
1231
+ /**
1232
+ * @public
1233
+ * Methods that allow you to ingest media into your application
1234
+ */
1235
+ export declare interface NorskInput {
1236
+ /** Create an RTMP Server to receive RTMP streams into your application
1237
+ * @param settings - Configuration for the RTMP server
1238
+ */
1239
+ rtmpServer(settings: RtmpServerInputSettings): Promise<RtmpServerInputNode>;
1240
+ localTsFile(settings: LocalFileInputSettings): Promise<TsFileInputNode>;
1241
+ /**
1242
+ * Stream from a SRT source
1243
+ * @param settings - Configuration for the SRT input
1244
+ */
1245
+ srt(settings: SrtInputSettings): Promise<SrtInputNode>;
1246
+ whip(settings: WhipInputSettings): Promise<WhipInputNode>;
1247
+ udpTs(settings: RemoteInputSettings<UdpTsInputNode>): Promise<UdpTsInputNode>;
1248
+ localWebVttFile(settings: LocalFileInputSettings): Promise<WebVttFileInputNode>;
1249
+ /**
1250
+ * Read an image from a file. Various image formats are supported, see the
1251
+ * documentation for {@link ImageFileInputSettings} for more details.
1252
+ * @param settings - Configuration for the file input
1253
+ */
1254
+ imageFile(settings: ImageFileInputSettings): Promise<ImageFileInputNode>;
1255
+ /**
1256
+ * Read a MP4 (fragmented or not) from a file with realtime playback.
1257
+ * This will not play frames that are written to the file after the node
1258
+ * starts.
1259
+ * @param settings - Configuration for the file input
1260
+ */
1261
+ localMp4File(settings: Mp4FileInputSettings): Promise<Mp4FileInputNode>;
1262
+ /**
1263
+ * Stream from a remote RTP source
1264
+ * @param settings - Configuration for the RTP input
1265
+ */
1266
+ rtp(settings: RtpInputSettings): Promise<RtpInputNode>;
1267
+ /**
1268
+ * Generate a test audio signal with a configurable waveform.
1269
+ * @param settings - Configuration for the audio signal
1270
+ */
1271
+ audioSignal(settings: AudioSignalGeneratorSettings): Promise<AudioSignalGeneratorNode>;
1272
+ /**
1273
+ * Video source from rendering an HTML page
1274
+ * @param settings - Settings for the web page
1275
+ */
1276
+ browser(settings: BrowserInputSettings): Promise<BrowserInputNode>;
1277
+ /**
1278
+ * SDI/HDMI Input using a BlackMagic DeckLink card.
1279
+ * The available cards on the machine can be enumerated using the {@link NorskSystem.hardwareInfo} API.
1280
+ *
1281
+ * Multiple cards and both SDI and HDMI inputs are supported, with all DeckLink-supported
1282
+ * input resolutions and framerates are supported. The capture format is currently 8-bit only,
1283
+ * but 10-bit captures will be supported soon. All supported audio channels can be captured.
1284
+ * At present, additional data such as closed-captions and HDR metadata is not captured.
1285
+ * @param settings - Settings for the SDI capture
1286
+ */
1287
+ deckLink(settings: DeckLinkInputSettings): Promise<DeckLinkInputNode>;
1288
+ }
1289
+
1290
+ /**
1291
+ * @public
1292
+ * * Methods that allow you to egest media from your application
1293
+ */
1294
+ export declare interface NorskOutput {
1295
+ hlsVideo(settings: HlsVideoOutputSettings): Promise<HlsVideoOutputNode>;
1296
+ hlsAudio(settings: HlsAudioOutputSettings): Promise<HlsAudioOutputNode>;
1297
+ hlsWebVtt(settings: HlsWebVttOutputSettings): Promise<HlsWebVttOutputNode>;
1298
+ hlsWebVttPush(settings: HlsWebVttPushOutputSettings): Promise<HlsWebVttPushOutputNode>;
1299
+ hlsTsVideo(settings: HlsTsVideoOutputSettings): Promise<HlsTsVideoOutputNode>;
1300
+ tsUdp(settings: TsUdpOutputSettings): Promise<TsUdpOutputNode>;
1301
+ srt(settings: SrtOutputSettings): Promise<SrtOutputNode>;
1302
+ hlsTsAudio(settings: HlsTsAudioOutputSettings): Promise<HlsTsAudioOutputNode>;
1303
+ hlsTsVideoPush(settings: HlsTsVideoPushOutputSettings): Promise<HlsTsVideoPushOutputNode>;
1304
+ hlsTsAudioPush(settings: HlsTsAudioPushOutputSettings): Promise<HlsTsAudioPushOutputNode>;
1305
+ hlsTsCombinedPush(settings: HlsTsCombinedPushOutputSettings): Promise<HlsTsCombinedPushOutputNode>;
1306
+ hlsMaster(settings: HlsMasterOutputSettings): Promise<HlsMasterOutputNode>;
1307
+ hlsMasterPush(settings: HlsMasterPushOutputSettings): Promise<HlsMasterPushOutputNode>;
1308
+ webRTCWhip(settings: WebRTCWhipOutputSettings): Promise<WebRTCWhipOutputNode>;
1309
+ rtmp(settings: RtmpOutputSettings): Promise<RtmpOutputNode>;
1310
+ localTsFile(settings: TsFileOutputSettings): Promise<TsFileOutputNode>;
1311
+ /**
1312
+ * Output MP4 files to disk, both fragmented and non-fragmented.
1313
+ *
1314
+ * The fragmented output is required.
1315
+ *
1316
+ * The optional non-fragmented filename will be written when calling
1317
+ * {@link Mp4FileOutputNode.close} and will be fully written by the time
1318
+ * {@link NodeSettings.onEnd} is called. This sets up a temp file to
1319
+ * store the frame data by appending the extension `.tmp`.
1320
+ *
1321
+ * A non-fragmented MP4 file can be written on request with
1322
+ * {@link Mp4FileOutputNode.writeFile}, which uses the frame data store if
1323
+ * {@link Mp4FileOutputSettings.nonfragmentedFileName} was given or reads
1324
+ * back the fragmented mp4 if there is no non-fragmented file.
1325
+ */
1326
+ localMp4File(settings: Mp4FileOutputSettings): Promise<Mp4FileOutputNode>;
1327
+ }
1328
+
1329
+ /** @public */
1330
+ export declare class NorskProcessor {
1331
+ /**
1332
+ * Implements the {@link NorskControl} interface
1333
+ */
1334
+ control: NorskControl;
1335
+ /**
1336
+ * Implements the {@link NorskTransform} interface
1337
+ */
1338
+ transform: NorskTransform;
1339
+ close(): Promise<void>;
1340
+ constructor(client: MediaClient);
1341
+ }
1342
+
1343
+ /**
1344
+ * @public
1345
+ * Top level Norsk configuration
1346
+ */
1347
+ export declare interface NorskSettings {
1348
+ /**
1349
+ * Callback URL to listen on for gRPC session with Norsk Media
1350
+ * Defaults to $NORSK_HOST:$NORSK_PORT if the environment variables are set
1351
+ * where NORSK_HOST defaults to "127.0.0.1" and NORSK_PORT to "6790"
1352
+ * (so "127.0.0.1:6790" if neither variable is set)
1353
+ */
1354
+ url?: string;
1355
+ onAttemptingToConnect?: () => void;
1356
+ onConnecting?: () => void;
1357
+ onReady?: () => void;
1358
+ onFailedToConnect?: () => void;
1359
+ /** Code to execute if the Norsk node is shutdown - by default if logs and exits the client application */
1360
+ onShutdown?: () => void;
1361
+ onCurrentLoad?: (load: CurrentLoad) => void;
1362
+ onHello?: (version: Version) => void;
1363
+ onLogEvent?: (log: Log) => void;
1364
+ /**
1365
+ * Manually handle license events, such as missing/invalid licenses and
1366
+ * sandbox timeout. (Logs messages to console by default.)
1367
+ */
1368
+ onLicenseEvent?: (message: string) => void;
1369
+ }
1370
+
1371
+ /**
1372
+ * @public
1373
+ * Methods that allow you query the features of the system that Norsk is running in
1374
+ */
1375
+ export declare interface NorskSystem {
1376
+ hardwareInfo(): Promise<HardwareInfo>;
1377
+ }
1378
+
1379
+ /**
1380
+ * @public
1381
+ * Methods that allow you to manipulate your media streams
1382
+ */
1383
+ export declare interface NorskTransform {
1384
+ /**
1385
+ * Encode a video stream to one or more renditions
1386
+ * @param settings - Encode ladder settings
1387
+ */
1388
+ videoEncodeLadder(settings: VideoEncodeLadderSettings): Promise<VideoEncodeLadderNode>;
1389
+ /**
1390
+ * Transform a video stream (rescale, etc)
1391
+ * @param settings - Transform settings
1392
+ */
1393
+ videoTransform(settings: VideoTransformSettings): Promise<VideoTransformNode>;
1394
+ /**
1395
+ * Interferes with a stream by dropping frames
1396
+ * @param settings - Chaos monkey settings
1397
+ */
1398
+ chaosMonkey(settings: ChaosMonkeySettings): Promise<ChaosMonkeyNode>;
1399
+ /**
1400
+ * Compose multiple video streams together into a single output
1401
+ * @param settings - Composition setting
1402
+ */
1403
+ composeOverlay<Pins extends string>(settings: ComposeVideoSettings<Pins>): Promise<ComposeVideoNode<Pins>>;
1404
+ awsTranscribe(settings: AwsTranscribeSettings): Promise<AwsTranscribeNode>;
1405
+ /**
1406
+ * Mix multiple audio streams together into a single output,
1407
+ * with optional gain control on each input.
1408
+ * @param settings - Settings for the mixer, including the gain vectors
1409
+ */
1410
+ audioMixer<Pins extends string>(settings: AudioMixerSettings<Pins>): Promise<AudioMixerNode<Pins>>;
1411
+ /**
1412
+ * Given an audio stream of N channels, mix it down to M channels through a matrix of NxM gains.
1413
+ * @param settings - Settings for the mixer, including the gain matrix
1414
+ */
1415
+ audioMatrixMixer(settings: AudioMatrixMixerSettings): Promise<AudioMatrixMixerNode>;
1416
+ /**
1417
+ * Apply gain to an audio stream
1418
+ * @param settings - Settings for the gain node
1419
+ */
1420
+ audioGain(settings: AudioGainSettings): Promise<AudioGainNode>;
1421
+ audioBuildMultichannel(settings: AudioBuildMultichannelSettings): Promise<AudioBuildMultichannelNode>;
1422
+ audioSplitMultichannel(settings: AudioSplitMultichannelSettings): Promise<AudioSplitMultichannelNode>;
1423
+ audioEncoder(settings: AudioEncoderSettings): Promise<AudioEncoderNode>;
1424
+ timestampNudge(settings: TimestampNudgerSettings): Promise<TimestampNudgerNode>;
1425
+ streamKeyOverride(settings: StreamKeyOverrideSettings): Promise<StreamKeyOverrideNode>;
1426
+ metadataOverride(settings: MetadataOverrideSettings): Promise<MetadataOverrideNode>;
1427
+ sync(settings: SyncSettings): Promise<SyncNode>;
1428
+ }
1429
+
1430
+ /** @public */
1431
+ export declare interface NumaNode {
1432
+ processors: Processor[];
1433
+ }
1434
+
1435
+ /** @public */
1436
+ export declare interface NvidiaH264 {
1437
+ type: "nv-h264";
1438
+ preset?: NvidiaPreset;
1439
+ idrPeriod?: number;
1440
+ gopInterval?: number;
1441
+ frameIntervalP?: number;
1442
+ maxNumRefFrames?: number;
1443
+ level?: NvidiaH264Level;
1444
+ profile?: NvidiaH264Profile;
1445
+ outputAud?: boolean;
1446
+ rateControl?: NvidiaRateControl;
1447
+ }
1448
+
1449
+ /** @public */
1450
+ export declare type NvidiaH264Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1451
+
1452
+ /** @public */
1453
+ export declare type NvidiaH264Profile = "baseline" | "main" | "high" | "high444";
1454
+
1455
+ /** @public */
1456
+ export declare interface NvidiaHevc {
1457
+ type: "nv-hevc";
1458
+ preset?: NvidiaPreset;
1459
+ idrPeriod?: number;
1460
+ gopInterval?: number;
1461
+ frameIntervalP?: number;
1462
+ maxNumRefFrames?: number;
1463
+ level?: NvidiaHevcLevel;
1464
+ outputAud?: boolean;
1465
+ tier?: NvidiaHevcTier;
1466
+ profile?: NvidiaHevcProfile;
1467
+ rateControl?: NvidiaRateControl;
1468
+ }
1469
+
1470
+ /** @public */
1471
+ export declare type NvidiaHevcLevel = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;
1472
+
1473
+ /** @public */
1474
+ export declare type NvidiaHevcProfile = "main" | "main10";
1475
+
1476
+ /** @public */
1477
+ export declare type NvidiaHevcTier = "main" | "high";
1478
+
1479
+ /** @public */
1480
+ export declare type NvidiaPreset = "p1" | "p2" | "p3" | "p4" | "p5" | "p6" | "p7";
1481
+
1482
+ /** @public */
1483
+ export declare interface NvidiaRateControl {
1484
+ mode: NvidiaRateControlMode;
1485
+ averageBitrate: number;
1486
+ maxBitrate?: number;
1487
+ vbvBufferSize?: number;
1488
+ vbvInitialDelay?: number;
1489
+ enableLookahead?: boolean;
1490
+ strictGopTarget?: boolean;
1491
+ lookaheadDepth?: number;
1492
+ }
1493
+
1494
+ /** @public */
1495
+ export declare type NvidiaRateControlMode = "con_stqp" | "vbr" | "cbr";
1496
+
1497
+ /** @public */
1498
+ export declare interface OffsetRect {
1499
+ x: number;
1500
+ y: number;
1501
+ width: number;
1502
+ height: number;
1503
+ }
1504
+
1505
+ /**
1506
+ * @public
1507
+ * Return type to enable control of an RTMP stream once media arrives on it
1508
+ */
1509
+ export declare type OnStreamResult =
1510
+ /** Accept the stream */
1511
+ {
1512
+ accept: true;
1513
+ videoStreamKey: StreamKey_2 | StreamKeySettings;
1514
+ audioStreamKey: StreamKey_2 | StreamKeySettings;
1515
+ }
1516
+ /** Reject the stream */
1517
+ | {
1518
+ accept: false;
1519
+ reason: string;
1520
+ };
1521
+
1522
+ /** @public */
1523
+ export declare interface OpusSettings {
1524
+ kind: "opus";
1525
+ }
1526
+
1527
+ /** @public */
1528
+ export declare interface PartTransition {
1529
+ durationMs: number;
1530
+ easing?: SimpleEasing;
1531
+ }
1532
+
1533
+ /** @public */
1534
+ export declare type PinToKey<Pins extends string> = Nullable<Partial<Record<Pins, StreamKey[]>>>;
1535
+
1536
+ /** @public */
1537
+ export declare type PixelFormat = "bgra" | "rgba" | "yuv420p" | "yuv422p" | "yuv444p" | "yuva420p" | "yuva422p" | "yuva444p";
1538
+
1539
+ /** @public */
1540
+ export declare interface Processor {
1541
+ cores: Core[];
1542
+ }
1543
+
1544
+ declare interface ProcessorMediaNode<Pins extends string> extends SourceMediaNode, AutoSinkMediaNode<Pins> {
1545
+ }
1546
+
1547
+ declare class ProcessorMediaNode<Pins extends string> {
1548
+ constructor(client: MediaClient, subscribeFn: (subscription: Subscription) => void);
1549
+ }
1550
+
1551
+ /** @public */
1552
+ export declare interface ProcessorNodeSettings<T extends MediaNodeState> extends SourceNodeSettings<T> {
1553
+ }
1554
+
1555
+ /** @public */
1556
+ export declare type ReceiveFromAddress<Pins extends string> = {
1557
+ source: SourceMediaNode;
1558
+ sourceSelector: (streams: StreamMetadata[]) => PinToKey<Pins>;
1559
+ };
1560
+
1561
+ /** @public */
1562
+ export declare type ReceiveFromAddressAuto = {
1563
+ source: SourceMediaNode;
1564
+ sourceSelector: (streams: StreamMetadata[]) => StreamKey[];
1565
+ };
1566
+
1567
+ /** @public */
1568
+ export declare interface RemoteInputSettings<T extends MediaNodeState> extends InputSettings<T> {
1569
+ ip: string;
1570
+ port: number;
1571
+ }
1572
+
1573
+ /** @public */
1574
+ export declare interface Resolution {
1575
+ width: number;
1576
+ height: number;
1577
+ }
1578
+
1579
+ /** @public */
1580
+ export declare type RtmpInputStatus = "disconnected";
1581
+
1582
+ /**
1583
+ * @public
1584
+ * {@link NorskOutput.rtmp}
1585
+ */
1586
+ export declare class RtmpOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
1587
+ close(): void;
1588
+ }
1589
+
1590
+ /** @public */
1591
+ export declare interface RtmpOutputSettings extends NodeSettings<RtmpOutputNode>, StreamStatsMixin {
1592
+ url: string;
1593
+ numberOfStreams?: number;
1594
+ }
1595
+
1596
+ /**
1597
+ * @public
1598
+ * {@link NorskInput.rtmpServer}
1599
+ */
1600
+ export declare class RtmpServerInputNode extends SourceMediaNode {
1601
+ close(): void;
1602
+ nudge(sourceName: string, programNumber: number, nudge: number): void;
1603
+ }
1604
+
1605
+ /**
1606
+ * @public
1607
+ * Settings to control how RTMP streams can be included as sources in your media workflow
1608
+ */
1609
+ export declare interface RtmpServerInputSettings extends SourceNodeSettings<RtmpServerInputNode>, StreamStatsMixin {
1610
+ /** The port the RTMP server should listen on */
1611
+ port: number;
1612
+ /**
1613
+ * On connect callback, use to accept/reject connections given app/url in use
1614
+ * @eventProperty
1615
+ */
1616
+ onConnection?: (
1617
+ /** The RTMP "app" field from the connection string */
1618
+ app: string,
1619
+ /** The full URL of the RTMP connection string */
1620
+ url: string) => {
1621
+ accept: true;
1622
+ } | {
1623
+ accept: false;
1624
+ reason: string;
1625
+ };
1626
+ /**
1627
+ * On stream callback, set up the stream keys for a given stream or reject the stream
1628
+ * @eventProperty
1629
+ */
1630
+ onStream?: (
1631
+ /** The RTMP "app" field from the connection string */
1632
+ app: string,
1633
+ /** The full URL of the RTMP connection string */
1634
+ url: string,
1635
+ /** The Norsk streamId of this media stream */
1636
+ streamId: number,
1637
+ /** TODO - publishingName */
1638
+ publishingName: string) => OnStreamResult;
1639
+ /**
1640
+ * Called when the connection status has changed (e.g. when the RTMP connection drops)
1641
+ * @eventProperty
1642
+ */
1643
+ onConnectionStatusChange?: (
1644
+ /** The new connection state */
1645
+ status: RtmpInputStatus,
1646
+ /** The audio and video stream keys that were present in the stream at the time of the status change */
1647
+ streamKeys: {
1648
+ audioStreamKey: StreamKey_2;
1649
+ videoStreamKey: StreamKey_2;
1650
+ }[]) => void;
1651
+ }
1652
+
1653
+ /** @public */
1654
+ export declare interface RtpEac3 {
1655
+ kind: "eac3";
1656
+ clockRate: number;
1657
+ languageCode?: string;
1658
+ ec3Extension: boolean;
1659
+ complexityIndex: number;
1660
+ }
1661
+
1662
+ /** @public */
1663
+ export declare interface RtpH264 {
1664
+ kind: "h264";
1665
+ clockRate: number;
1666
+ }
1667
+
1668
+ /** @public */
1669
+ export declare interface RtpHEVC {
1670
+ kind: "hevc";
1671
+ clockRate: number;
1672
+ }
1673
+
1674
+ /**
1675
+ * @public
1676
+ * {@link NorskInput.rtp}
1677
+ */
1678
+ export declare class RtpInputNode extends SourceMediaNode {
1679
+ nudge(nudge: number): void;
1680
+ close(): void;
1681
+ }
1682
+
1683
+ /** @public */
1684
+ export declare interface RtpInputSettings extends SourceNodeSettings<RtpInputNode>, StreamStatsMixin {
1685
+ sourceName: string;
1686
+ streams: readonly RtpStreamSettings[];
1687
+ }
1688
+
1689
+ /** @public */
1690
+ export declare interface RtpLinearPcm {
1691
+ kind: "linearpcm";
1692
+ sampleRate: SampleRate;
1693
+ channelLayout: ChannelLayout;
1694
+ bitDepth: RtpLinearPcmBitDepth;
1695
+ }
1696
+
1697
+ /** @public */
1698
+ export declare type RtpLinearPcmBitDepth = 16 | 24;
1699
+
1700
+ /** @public */
1701
+ export declare interface RtpMpeg4GenericAacHbr {
1702
+ kind: "mpeg4-generic-aac-hbr";
1703
+ config: string;
1704
+ }
1705
+
1706
+ /** @public */
1707
+ export declare interface RtpStreamSettings {
1708
+ streamId: number;
1709
+ ip: string;
1710
+ iface: string;
1711
+ rtpPort: number;
1712
+ rtcpPort: number;
1713
+ streamType: RtpLinearPcm | RtpEac3 | RtpMpeg4GenericAacHbr | RtpH264 | RtpHEVC;
1714
+ }
1715
+
1716
+ /** @public */
1717
+ export declare interface SampleAspectRatio {
1718
+ x: number;
1719
+ y: number;
1720
+ }
1721
+
1722
+ /** @public */
1723
+ export declare type SampleFormat = "flt" | "fltp" | "s16" | "s16p";
1724
+
1725
+ /** @public Audio sample rate, in Hz */
1726
+ export declare type SampleRate = 8000 | 11025 | 12000 | 16000 | 22050 | 24000 | 32000 | 44100 | 48000 | 64000 | 88200 | 96000;
1727
+
1728
+ /** @public */
1729
+ export declare function selectAllAudios(count: number): (streams: readonly StreamMetadata[]) => StreamKey[];
1730
+
1731
+ /** @public */
1732
+ export declare function selectAllVideos(count: number): (streams: readonly StreamMetadata[]) => StreamKey[];
1733
+
1734
+ /** @public */
1735
+ export declare function selectAudio(streams: readonly StreamMetadata[]): StreamKey[];
1736
+
1737
+ /**
1738
+ * @public
1739
+ * Select all the audio and video streams from the input
1740
+ * @param streams - The streams from the inbound Context
1741
+ * @returns Array of selected StreamKeys
1742
+ */
1743
+ export declare function selectAV(streams: readonly StreamMetadata[]): StreamKey[];
1744
+
1745
+ /** @public */
1746
+ export declare function selectSubtitles(streams: readonly StreamMetadata[]): StreamKey[];
1747
+
1748
+ /** @public */
1749
+ export declare function selectVideo(streams: readonly StreamMetadata[]): StreamKey[];
1750
+
1751
+ /** @public */
1752
+ export declare type SentenceBuildMode = "raw" | "stable" | "partial" | "complete";
1753
+
1754
+ /** @public */
1755
+ export declare type SimpleEasing = "linear" | "ease_in" | "ease_in_out" | "ease_out";
1756
+
1757
+ /** @public */
1758
+ export declare interface SingleStreamStats extends StreamStats {
1759
+ streamKey: StreamKey;
1760
+ metadata: StreamMetadataMessage;
1761
+ }
1762
+
1763
+ /** @public */
1764
+ export declare class SinkMediaNode<Pins extends string> extends MediaNodeState {
1765
+ /** Subscribe to the given sources,
1766
+ * This version of the function call accepts the target pins of an output
1767
+ * and is suitable for advanced use where a node is capable of subscribing to
1768
+ * multiple video streams and provides a means of distinguishing them via pins
1769
+ * discarding any existing subscriptions */
1770
+ subscribeToPins(sources: ReceiveFromAddress<Pins>[], validation?: (context: Context) => boolean): void;
1771
+ sourceContextChange(responseCallback: () => void): Promise<void>;
1772
+ }
1773
+
1774
+ /**
1775
+ * @public
1776
+ * {@link NorskControl.smoothSwitcher}
1777
+ */
1778
+ export declare class SmoothSwitcherNode<Pins extends string> extends ProcessorMediaNode<Pins> {
1779
+ switchSource(newSource: Pins): void;
1780
+ close(): void;
1781
+ }
1782
+
1783
+ /** @public */
1784
+ export declare interface SmoothSwitcherSettings<Pins extends string> extends ProcessorNodeSettings<SmoothSwitcherNode<Pins>> {
1785
+ activeSource?: Pins;
1786
+ outputSource: string;
1787
+ transitionDurationMs?: number;
1788
+ outputResolution: Resolution;
1789
+ sampleRate: SampleRate;
1790
+ }
1791
+
1792
/**
 * Base class for media nodes that produce (source) media for other nodes.
 * @public
 */
export declare class SourceMediaNode extends MediaNodeState {
    /** Metadata for the streams this node currently outputs */
    outputStreams: StreamMetadata[];
    /** Register a sink to be notified when this node's output context changes */
    registerForContextChange(subscriber: SinkMediaNode<string>): void;
    /** Remove a previously registered sink from context-change notifications */
    unregisterForContextChange(subscriber: SinkMediaNode<string>): void;
}

/**
 * Common settings for source nodes.
 * @public
 */
export declare interface SourceNodeSettings<T extends MediaNodeState> extends NodeSettings<T> {
    /**
     * Called when the set of streams produced by this node changes
     * @eventProperty
     */
    onOutboundContextChange?: (streams: StreamMetadata[]) => Promise<void>;
}
1803
+
1804
/**
 * Result returned by {@link SrtInputSettings.onConnection}, deciding whether an
 * incoming SRT connection is accepted (and if so, under which source name).
 * @public
 */
export declare type SrtConnectionResult =
/** Accept the stream */
{
    accept: true;
    /** The source name to assign to the accepted stream */
    sourceName: string;
}
/** Reject the stream */
 | {
    accept: false;
};
1817
+
1818
/**
 * An SRT input source. Create with {@link NorskInput.srt}.
 * @public
 */
export declare class SrtInputNode extends SourceMediaNode {
    /**
     * Apply a timestamp nudge to the given program of the given source.
     * NOTE(review): the units of `nudge` are not stated in this declaration — confirm in the Norsk docs.
     */
    nudge(sourceName: string, programNumber: number, nudge: number): void;
    /** Close a single connected stream, identified by its index */
    closeStream(streamIndex: number): void;
    /** Close the node and release its resources */
    close(): void;
}
1827
+
1828
/**
 * Settings for an {@link SrtInputNode}.
 * @public
 */
export declare interface SrtInputSettings extends RemoteInputSettings<SrtInputNode>, StreamStatsMixin {
    /** Whether the socket acts as a listener (awaits callers) or a caller (initiates) */
    mode: SrtMode;
    /**
     * Passphrase for encryption
     */
    passphrase?: string;
    /**
     * Stream ID to set on the socket when acting in caller mode
     */
    streamId?: string;
    /**
     * On connect callback, notifying that a new caller has connected (in listener mode) and set the source name accordingly
     * @eventProperty
     */
    onConnection?: (
    /** The stream_id sent on the SRT socket (or empty if none was set) */
    streamId: string,
    /** The connection index */
    index: number,
    /** Remote host address */
    remoteHost: string) => SrtConnectionResult;
    /**
     * Called when the connection status has changed (e.g. when the SRT socket is closed)
     * @eventProperty
     */
    onConnectionStatusChange?: (
    /** The new connection state */
    status: SrtInputStatus,
    /** The source name assigned to the connection which changed status */
    sourceName: string | undefined) => void;
}

/**
 * Connection states reported via {@link SrtInputSettings.onConnectionStatusChange}.
 * Only disconnection is reported at present.
 * @public
 */
export declare type SrtInputStatus = "disconnected";

/**
 * SRT socket mode: a "listener" waits for incoming callers, a "caller"
 * initiates the connection.
 * @public
 */
export declare type SrtMode = "listener" | "caller";
1868
+
1869
/**
 * An SRT output. Create with {@link NorskOutput.srt}.
 * @public
 */
export declare class SrtOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for an {@link SrtOutputNode}.
 * @public
 */
export declare interface SrtOutputSettings extends NodeSettings<SrtOutputNode>, StreamStatsMixin {
    /**
     * Passphrase for encryption
     */
    passphrase?: string;
    /**
     * Stream ID to set on the socket when acting in caller mode
     */
    streamId?: string;
    /** Whether the socket acts as a listener or a caller */
    mode: SrtMode;
    /** IP address to connect to (caller mode) — presumably the bind address in listener mode; confirm */
    ip: string;
    /** Port to connect to or listen on */
    port: number;
    /**
     * On connect callback, notifying that a new caller has connected (in listener mode) and providing the stream_id that was set on the socket
     * @eventProperty
     */
    onConnection?: (
    /** The stream_id sent on the SRT socket (or empty if none was set) */
    streamId: string,
    /** The stream index (count of connections that have been made) */
    streamIndex: number,
    /** The remote host address */
    remoteHost: string) => void;
}
1902
+
1903
/**
 * Strength of stabilization.
 * NOTE(review): the component consuming this type is outside this view — confirm.
 * @public
 */
export declare type StabilizationMode = "low" | "medium" | "high";

/**
 * Uniquely identifies a stream within Norsk. See {@link StreamKeySettings}
 * for how each field is defaulted when keys are generated.
 * @public
 */
export declare interface StreamKey {
    /** Numeric stream id within the program */
    streamId: number;
    /** Program number the stream belongs to */
    programNumber: number;
    /** Name of the source that produced the stream */
    sourceName: string;
    /** Name of the rendition (e.g. an encode-ladder rung) */
    renditionName: string;
}
1913
+
1914
/**
 * Overrides the stream key of the streams passing through it.
 * Create with {@link NorskTransform.streamKeyOverride}.
 * @public
 */
export declare class StreamKeyOverrideNode extends AutoProcessorMediaNode<"audio" | "video" | "subtitle"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link StreamKeyOverrideNode}.
 * @public
 */
export declare interface StreamKeyOverrideSettings extends ProcessorNodeSettings<StreamKeyOverrideNode> {
    /** The stream key to apply to streams passing through the node */
    streamKey: StreamKey;
}

/**
 * Per-field overrides for a generated stream key; each field falls back to
 * the documented default when omitted.
 * @public
 */
export declare interface StreamKeySettings {
    /** Source name. Default: the rtmp app */
    sourceName?: string;
    /** Program number. Default: 1 */
    programNumber?: number;
    /** Stream Id. Default: 1 for audio, 2 for video */
    streamId?: number;
    /** Rendition name. Default: the stream publishing name */
    renditionName?: string;
}
1938
+
1939
/**
 * Metadata for a single stream: its key (if assigned) plus the
 * media-type-specific metadata message.
 * @public
 */
export declare interface StreamMetadata {
    /** The key identifying this stream, if one has been assigned */
    streamKey?: StreamKey;
    /** The media-specific (audio/video/subtitle) metadata */
    message: StreamMetadataMessage;
}

/**
 * Discriminated union over the supported media types; `case` selects which
 * metadata is present, and `case: undefined` means no metadata is set.
 * @public
 */
export declare type StreamMetadataMessage = {
    case: "audio";
    value: AudioStreamMetadata;
} | {
    case: "video";
    value: VideoStreamMetadata;
} | {
    case: "subtitle";
    value: SubtitleStreamMetadata;
} | {
    case: undefined;
    value?: undefined;
};
1959
+
1960
/**
 * Measurements taken over a sampling window of a media stream.
 * @public
 */
export declare interface StreamStats {
    /** The size of the sample window in seconds */
    sampleSizeSeconds: number;
    /** The number of bits over the sample window */
    bitsForSample: number;
    /** The bitrate, in bits per second */
    bitrate: number;
    /** The number of frames over the sample window */
    framesForSample: number;
    /** The frame rate, in frames per second */
    framerate: number;
    /** The number of key frames over the sample window */
    keyFramesForSample: number;
}
1975
+
1976
/**
 * Mixin adding stream-statistics sampling configuration and reporting to a
 * node's settings.
 * @public
 */
export declare interface StreamStatsMixin {
    /**
     * Sampling rates for stream stats, in seconds
     */
    statsSampling?: PlainMessage<StreamStatsSampling>;
    /**
     * Called at periodic intervals when stream statistics are ready.
     * @eventProperty
     */
    onStreamStats?: (
    /** The stats */
    stats: MultiStreamStats) => void;
}
1990
+
1991
/**
 * Monitor statistics about a media stream. Create a new instance of this node
 * with {@link NorskControl.streamStats}.
 * @public
 */
export declare class StreamStatsNode extends AutoProcessorMediaNode<"audio" | "video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for an {@link StreamStatsNode}
 * @public
 */
export declare interface StreamStatsSettings extends ProcessorNodeSettings<StreamStatsNode>, StreamStatsMixin {
    /**
     * Called periodically with the stream stats.
     * Required here, unlike the optional declaration inherited from {@link StreamStatsMixin}.
     * @param stats - The statistics for the stream
     * @eventProperty
     */
    onStreamStats: (stats: MultiStreamStats) => void;
    /**
     * Sampling rates for stream stats, in seconds
     */
    statsSampling?: PlainMessage<StreamStatsSampling>;
}
2015
+
2016
/**
 * Subscription helper: maps the subtitle streams of a context onto the given pin.
 * @param pin - The pin to associate the subtitle streams with
 * @public
 */
export declare const subtitlesToPin: <Pins extends string>(pin: Pins) => (streams: StreamMetadata[]) => PinToKey<Pins>;

/**
 * Returns the stream keys for subtitle streams in a media context
 * @param streams - The media context from which to return the stream keys
 * @returns The subtitle stream keys in the media context
 * @public
 */
export declare function subtitleStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];

/**
 * Metadata for a subtitle stream. Carries no properties at present.
 * @public
 */
export declare interface SubtitleStreamMetadata {
}

/**
 * Filters a context to only the subtitle streams within it
 * @param streams - The media context from which to return the streams
 * @returns The subtitle streams in the media context
 * @public
 */
export declare function subtitleStreams(streams: readonly StreamMetadata[]): StreamMetadata[];
2038
+
2039
/**
 * Stream synchronisation transform for audio and video.
 * Create with {@link NorskTransform.sync}.
 * @public
 */
export declare class SyncNode extends AutoProcessorMediaNode<"audio" | "video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link SyncNode}. No options beyond the common processor settings.
 * @public
 */
export declare interface SyncSettings extends ProcessorNodeSettings<SyncNode> {
}

/**
 * Applies a timestamp nudge to the streams passing through it.
 * Create with {@link NorskTransform.timestampNudge}.
 * @public
 */
export declare class TimestampNudgerNode extends AutoProcessorMediaNode<"audio" | "video"> {
    /** Set the nudge to apply (same quantity as {@link TimestampNudgerSettings.nudge}) */
    nudge(nudge: number): void;
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link TimestampNudgerNode}.
 * @public
 */
export declare interface TimestampNudgerSettings extends ProcessorNodeSettings<TimestampNudgerNode> {
    /** Initial nudge to apply. NOTE(review): units are not stated in this declaration — confirm in the Norsk docs */
    nudge?: number;
}
2064
+
2065
/**
 * Internal base class shared by the transport-stream input nodes
 * (local file, SRT, UDP, M3U8 — see {@link TsInputType}).
 */
declare class TsCommonInputNode<SourceMessage, T extends SourceMediaNode> extends SourceMediaNode {
    /**
     * @param tsType - Which kind of TS input this node represents
     * @param client - The gRPC media client used to talk to the Norsk server
     * @param settings - Source settings plus stream-stats configuration
     * @param nudgeFn - Builds the source-specific message for a program nudge
     * @param onEof - Invoked at end of stream, if provided
     * @param grpcStartFn - Opens the duplex gRPC stream carrying messages/events
     */
    constructor(tsType: TsInputType, client: MediaClient, settings: SourceNodeSettings<T> & StreamStatsMixin, nudgeFn: (nudge: TimestampProgramNudge) => SourceMessage, onEof: (() => void) | undefined, grpcStartFn: () => grpc.ClientDuplexStream<SourceMessage, TsInputEvent>);
    /** Apply a timestamp nudge to the given program */
    nudge(programNumber: number, nudge: number): void;
    /** Close the node and release its resources */
    close(): void;
}

/**
 * A local transport-stream file input. Create with {@link NorskInput.localTsFile}.
 * @public
 */
export declare class TsFileInputNode extends TsCommonInputNode<TsFileInputMessage, TsFileInputNode> {
}
2077
+
2078
/**
 * Writes a transport stream to a local file. Create with {@link NorskOutput.localTsFile}.
 * @public
 */
export declare class TsFileOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link TsFileOutputNode}.
 * @public
 */
export declare interface TsFileOutputSettings extends NodeSettings<TsFileOutputNode>, StreamStatsMixin {
    /** Path of the file to write */
    fileName: string;
}

/**
 * Internal discriminator for the kind of transport-stream input a
 * {@link TsCommonInputNode} represents.
 */
declare enum TsInputType {
    TsFile = 0,
    Srt = 1,
    Udp = 2,
    M3u8 = 3
}
2097
+
2098
/**
 * A transport-stream-over-UDP output. Create with {@link NorskOutput.tsUdp}.
 * @public
 */
export declare class TsUdpOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link TsUdpOutputNode}.
 * @public
 */
export declare interface TsUdpOutputSettings extends NodeSettings<TsUdpOutputNode>, StreamStatsMixin {
    /** Destination IP address for the UDP packets */
    destinationIp: string;
    /** Network interface to send from */
    iface: string;
    /** Destination port */
    port: number;
}

/**
 * A transport-stream-over-UDP input. Create with {@link NorskInput.udpTs}.
 * @public
 */
export declare class UdpTsInputNode extends TsCommonInputNode<UdpTsInputMessage, UdpTsInputNode> {
}

/**
 * Settings for a {@link UdpTsInputNode}. No options beyond the common
 * remote-input settings.
 * @public
 */
export declare interface UdpTsInputSettings extends RemoteInputSettings<UdpTsInputNode> {
}

/**
 * Pairs a destination id with replacement AWS credentials —
 * presumably used to rotate credentials on an AWS-backed destination;
 * confirm against the node that consumes this type.
 * @public
 */
export declare interface UpdateCredentials {
    /** Identifier of the destination whose credentials are being replaced */
    destinationId: string;
    /** The new AWS credentials to use */
    awsCredentials: AwsCredentials;
}
2129
+
2130
+ export { Version }
2131
+
2132
/**
 * Encodes a video stream into a ladder of renditions.
 * Create with {@link NorskTransform.videoEncodeLadder}.
 * @public
 */
export declare class VideoEncodeLadderNode extends AutoProcessorMediaNode<"video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * A single rung (rendition) of a video encode ladder.
 * @public
 */
export declare interface VideoEncodeLadderRung {
    /** Name of the rendition this rung produces */
    name: string;
    /** Output width in pixels */
    width: number;
    /** Output height in pixels */
    height: number;
    /** Output frame rate — presumably the source rate is kept when omitted; confirm */
    frameRate?: FrameRate;
    /**
     * Specifies the input video's Sample Aspect Ratio (SAR) to be used by the
     * encoder in width:height
     */
    sar?: SampleAspectRatio;
    /** The encoder (software x264/x265 or Nvidia/Netint hardware) and its configuration */
    codec: X264Codec | X265Codec | NvidiaH264 | NvidiaHevc | NetintH264 | NetintHevc;
}

/**
 * Settings for a {@link VideoEncodeLadderNode}.
 * @public
 */
export declare interface VideoEncodeLadderSettings extends ProcessorNodeSettings<VideoEncodeLadderNode> {
    /** The rungs to encode; one output rendition per rung */
    rungs: readonly VideoEncodeLadderRung[];
}
2158
+
2159
/**
 * Returns the stream keys for video streams in a media context
 * @param streams - The media context from which to return the stream keys
 * @returns The video stream keys in the media context
 * @public
 */
export declare function videoStreamKeys(streams: readonly StreamMetadata[]): StreamKey[];

/**
 * Metadata for a video stream.
 * @public
 */
export declare interface VideoStreamMetadata {
    /** The video codec name */
    codec: string;
    /** Frame width in pixels */
    width: number;
    /** Frame height in pixels */
    height: number;
}

/**
 * Filters a context to only the video streams within it
 * @param streams - The media context from which to return the streams
 * @returns The video streams in the media context
 * @public
 */
export declare function videoStreams(streams: readonly StreamMetadata[]): StreamMetadata[];

/**
 * Subscription helper: maps the video streams of a context onto the given pin.
 * @param pin - The pin to associate the video streams with
 * @public
 */
export declare const videoToPin: <Pins extends string>(pin: Pins) => (streams: StreamMetadata[]) => PinToKey<Pins>;
2184
+
2185
/**
 * Transforms video properties (resolution, frame rate, sample aspect ratio).
 * Create with {@link NorskTransform.videoTransform}.
 * @public
 */
export declare class VideoTransformNode extends AutoProcessorMediaNode<"video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link VideoTransformNode}.
 * NOTE(review): presumably omitted fields leave that property unchanged — confirm.
 * @public
 */
export declare interface VideoTransformSettings extends ProcessorNodeSettings<VideoTransformNode> {
    /** Target output resolution */
    resolution?: Resolution;
    /** Target output frame rate */
    frameRate?: FrameRate;
    /** Target sample aspect ratio */
    sar?: SampleAspectRatio;
}
2199
+
2200
/**
 * Publishes audio and video to a WHIP (WebRTC-HTTP Ingestion Protocol) endpoint.
 * Create with {@link NorskOutput.webRTCWhip}.
 * @public
 */
export declare class WebRTCWhipOutputNode extends AutoSinkMediaNode<"audio" | "video"> {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link WebRTCWhipOutputNode}.
 * @public
 */
export declare interface WebRTCWhipOutputSettings extends NodeSettings<WebRTCWhipOutputNode>, StreamStatsMixin {
    /** URI of the WHIP endpoint to publish to */
    uri: string;
    /** Authorization header value sent to the WHIP endpoint */
    authHeader: string;
}
2215
+
2216
/**
 * A local WebVTT subtitle file input. Create with {@link NorskInput.localWebVttFile}.
 * @public
 */
export declare class WebVttFileInputNode extends SourceMediaNode {
    /** Close the node and release its resources */
    close(): void;
}

/**
 * A WHIP (WebRTC-HTTP Ingestion Protocol) input. Create with {@link NorskInput.whip}.
 * @public
 */
export declare class WhipInputNode extends SourceMediaNode {
    /**
     * Apply a timestamp nudge to the given program of the given source.
     * NOTE(review): the units of `nudge` are not stated in this declaration — confirm in the Norsk docs.
     */
    nudge(sourceName: string, programNumber: number, nudge: number): void;
    /** Close the node and release its resources */
    close(): void;
}

/**
 * Settings for a {@link WhipInputNode}. No options beyond the common input
 * settings and stream-stats configuration.
 * @public
 */
export declare interface WhipInputSettings extends InputSettings<WhipInputNode>, StreamStatsMixin {
}
2236
+
2237
/**
 * Configuration for the x264 (software H.264/AVC) encoder.
 * @public
 */
export declare interface X264Codec {
    /** Discriminant selecting the x264 encoder */
    type: "x264";
    /** Number of encoder threads */
    threads?: number;
    /**
     * Rate control options - one of abr, cqp or crf
     */
    bitrateMode?: BitrateMode;
    /**
     * Set the H264 profile of the output
     */
    profile?: X264Profile;
    /**
     * Sets the level flag in the output
     */
    level?: X264Level;
    /**
     * Sets the minimum length between IDR frames
     */
    keyFrameIntervalMin?: number;
    /**
     * Sets the maximum length between IDR frames
     */
    keyFrameIntervalMax?: number;
    /**
     * Sets the maximum number of concurrent B-frames
     */
    bframes?: number;
    /**
     * Tune options to further optimize them for your input content. If you
     * specify a tuning, the changes will be applied after 'preset' but before all
     * other parameters.
     */
    tune?: X264Tune;
    /**
     * Change options to trade off compression efficiency against encoding speed.
     * If you specify a preset, the changes it makes will be applied before all
     * other parameters are applied.
     */
    preset?: X264Preset;
    /**
     * Maximum number of reference frames, i.e., the number of previous frames
     * each P-frame can use as references
     */
    frameReference?: number;
    /**
     * Enables CABAC (Context Adaptive Binary Arithmetic Coder) stream compression
     * instead of the less efficient CAVLC (Context Adaptive Variable Length
     * Coder) system. Significantly improves both the compression efficiency
     * (10-20% typically) and the decoding requirements, at the expense of encode
     * CPU requirements
     */
    cabac?: boolean;
    /**
     * Sets the maximum rate the VBV buffer should be assumed to refill at
     */
    vbvMaxRate?: number;
    /**
     * Sets the size of the VBV buffer in kilobits
     */
    vbvBufferSize?: number;
    /**
     * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
     * disables adaptive I-frame decisioning
     */
    sceneCut?: number;
    /**
     * Use access unit delimiters in the output
     */
    aud?: boolean;
    /**
     * Disables the loop filter. Not Recommended.
     */
    noDeblock?: boolean;
    /**
     * Signal HRD information
     */
    nalHrd?: X264NalHrd;
}

/**
 * H.264 level flag values accepted for {@link X264Codec.level}.
 * @public
 */
export declare type X264Level = 1 | 1.1 | 1.2 | 1.3 | 2 | 2.1 | 2.2 | 3 | 3.1 | 3.2 | 4 | 4.1 | 4.2 | 5 | 5.1;

/**
 * Three possible values:
 * - "none": specify no HRD information
 * - "vbr": specify HRD information
 * - "cbr": specify HRD information and pack the bitstream to the bitrate specified
 * @public
 */
export declare type X264NalHrd = "none" | "vbr" | "cbr";

/**
 * x264 speed/efficiency presets, ordered fastest ("ultrafast") to slowest ("placebo").
 * @public
 */
export declare type X264Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";

/**
 * H.264 profile for {@link X264Codec.profile}.
 * @public
 */
export declare type X264Profile = "baseline" | "main" | "high" | "high10" | "high422" | "high444";

/**
 * x264 tuning options for particular content or latency requirements
 * (see {@link X264Codec.tune}).
 * @public
 */
export declare type X264Tune = "film" | "animation" | "grain" | "stillimage" | "psnr" | "ssim" | "fastdecode" | "zerolatency";
2337
+
2338
/**
 * Configuration for the x265 (software H.265/HEVC) encoder.
 * @public
 */
export declare interface X265Codec {
    /** Discriminant selecting the x265 encoder */
    type: "x265";
    /** Number of encoder threads */
    threads?: number;
    /** Rate control options */
    bitrateMode?: BitrateMode;
    /** Set the HEVC profile of the output */
    profile?: X265Profile;
    /**
     * Sets the level flag in the output
     */
    level?: X265Level;
    /**
     * Sets the minimum length between IDR frames
     */
    keyFrameIntervalMin?: number;
    /**
     * Sets the maximum length between IDR frames
     */
    keyFrameIntervalMax?: number;
    /**
     * Sets the maximum number of concurrent B-frames
     */
    bframes?: number;
    /**
     * Tune options to further optimize them for your input content. If you
     * specify a tuning, the changes will be applied after 'preset' but before all
     * other parameters.
     */
    tune?: X265Tune;
    /**
     * Change options to trade off compression efficiency against encoding speed.
     * If you specify a preset, the changes it makes will be applied before all
     * other parameters are applied.
     */
    preset?: X265Preset;
    /**
     * Maximum number of reference frames, i.e., the number of previous frames
     * each P-frame can use as references
     */
    frameReference?: number;
    /**
     * Sets the maximum rate the VBV buffer should be assumed to refill at
     */
    vbvMaxRate?: number;
    /**
     * Sets the size of the VBV buffer in kilobits
     */
    vbvBufferSize?: number;
    /**
     * Sets the threshold for I/IDR frame placement. Setting sceneCut to zero
     * disables adaptive I-frame decisioning
     */
    sceneCut?: number;
    /**
     * Use access unit delimiters in the output
     */
    aud?: boolean;
    /**
     * Disables the loop filter. Not Recommended.
     */
    noDeblock?: boolean;
}

/**
 * HEVC level flag values accepted for {@link X265Codec.level}.
 * @public
 */
export declare type X265Level = 1 | 2 | 2.1 | 3 | 3.1 | 4 | 4.1 | 5 | 5.1 | 5.2 | 6 | 6.1 | 6.2;

/**
 * x265 speed/efficiency presets, ordered fastest ("ultrafast") to slowest ("placebo").
 * @public
 */
export declare type X265Preset = "ultrafast" | "superfast" | "veryfast" | "faster" | "fast" | "medium" | "slow" | "slower" | "veryslow" | "placebo";

/**
 * HEVC profile for {@link X265Codec.profile}.
 * @public
 */
export declare type X265Profile = "main" | "main10" | "main444_8" | "main422_10" | "main444_10";

/**
 * x265 tuning options for particular content or latency requirements
 * (see {@link X265Codec.tune}).
 * @public
 */
export declare type X265Tune = "psnr" | "ssim" | "grain" | "zerolatency" | "fastdecode" | "animation";
2411
+
2412
+ export { }