@vidtreo/recorder 0.9.4 → 0.9.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -152,7 +152,12 @@ class AudioLevelAnalyzer {
  }
  }
  // src/core/config/config-constants.ts
- import { QUALITY_HIGH } from "mediabunny";
+ import {
+ QUALITY_HIGH,
+ QUALITY_LOW,
+ QUALITY_MEDIUM,
+ QUALITY_VERY_HIGH
+ } from "mediabunny";
 
  // src/core/processor/format-codec-mapper.ts
  var FORMAT_DEFAULT_CODECS = {
@@ -172,15 +177,32 @@ function getAudioCodecForFormat(format, overrideCodec) {
  }
 
  // src/core/config/config-constants.ts
+ var QUALITY_MAP = {
+ sd: QUALITY_LOW,
+ hd: QUALITY_MEDIUM,
+ fhd: QUALITY_HIGH,
+ "4k": QUALITY_VERY_HIGH
+ };
+ var RESOLUTION_MAP = {
+ sd: { width: 854, height: 480 },
+ hd: { width: 1280, height: 720 },
+ fhd: { width: 1920, height: 1080 },
+ "4k": { width: 3840, height: 2160 }
+ };
  var DEFAULT_BACKEND_URL = "https://api.vidtreo.com";
  var DEFAULT_TRANSCODE_CONFIG = Object.freeze({
  format: "mp4",
  fps: 30,
- width: 1920,
- height: 1080,
- bitrate: QUALITY_HIGH,
+ width: RESOLUTION_MAP.fhd.width,
+ height: RESOLUTION_MAP.fhd.height,
+ bitrate: QUALITY_MAP.fhd,
  audioCodec: "aac",
- audioBitrate: 96000
+ audioBitrate: 96000,
+ watermark: {
+ url: "https://avatars.githubusercontent.com/u/244247750?s=200&v=4",
+ opacity: 1,
+ position: "bottom-right"
+ }
  });
  function getDefaultConfigForFormat(format) {
  return {
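With the maps above in place, the frozen defaults now resolve through them instead of hard-coded numbers, and they gain a built-in watermark entry. A quick sketch of what the 0.9.8 defaults evaluate to, with values read straight from the maps in this diff:

```js
// What the new defaults resolve to (values taken from the maps in this diff).
DEFAULT_TRANSCODE_CONFIG.width;   // 1920, via RESOLUTION_MAP.fhd.width
DEFAULT_TRANSCODE_CONFIG.height;  // 1080, via RESOLUTION_MAP.fhd.height
DEFAULT_TRANSCODE_CONFIG.bitrate; // QUALITY_MAP.fhd, i.e. mediabunny's QUALITY_HIGH
DEFAULT_TRANSCODE_CONFIG.watermark;
// { url: "https://avatars.githubusercontent.com/u/244247750?s=200&v=4",
//   opacity: 1, position: "bottom-right" }
```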
@@ -190,39 +212,31 @@ function getDefaultConfigForFormat(format) {
  };
  }
  // src/core/config/preset-mapper.ts
- import {
- QUALITY_HIGH as QUALITY_HIGH2,
- QUALITY_LOW,
- QUALITY_MEDIUM,
- QUALITY_VERY_HIGH
- } from "mediabunny";
- var QUALITY_MAP = {
- sd: QUALITY_LOW,
- hd: QUALITY_MEDIUM,
- fhd: QUALITY_HIGH2,
- "4k": QUALITY_VERY_HIGH
- };
  var AUDIO_BITRATE = 128000;
- function mapPresetToConfig(preset, maxWidth, maxHeight, outputFormat) {
+ function mapPresetToConfig(options) {
+ const { preset, outputFormat, watermark } = options;
  if (!(preset in QUALITY_MAP)) {
  throw new Error(`Invalid preset: ${preset}`);
  }
- if (typeof maxWidth !== "number" || maxWidth <= 0) {
- throw new Error("maxWidth must be a positive number");
- }
- if (typeof maxHeight !== "number" || maxHeight <= 0) {
- throw new Error("maxHeight must be a positive number");
- }
+ const { width, height } = RESOLUTION_MAP[preset];
  const format = outputFormat || "mp4";
  const audioCodec = getDefaultAudioCodecForFormat(format);
- return {
+ const config = {
  format,
- width: maxWidth,
- height: maxHeight,
+ width,
+ height,
  bitrate: QUALITY_MAP[preset],
  audioCodec,
  audioBitrate: AUDIO_BITRATE
  };
+ if (watermark) {
+ config.watermark = {
+ url: watermark.url,
+ opacity: watermark.opacity,
+ position: watermark.position
+ };
+ }
+ return config;
  }
 
  // src/core/config/config-service.ts
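mapPresetToConfig now takes a single options object; width and height are derived from RESOLUTION_MAP rather than passed in, and an optional watermark is copied onto the result. A minimal sketch of the new call shape, where the concrete values are illustrative rather than defaults:

```js
// Sketch of the 0.9.8 signature; the values are illustrative.
const config = mapPresetToConfig({
  preset: "fhd",                 // one of "sd" | "hd" | "fhd" | "4k"
  outputFormat: "webm",          // optional, falls back to "mp4"
  watermark: {                   // optional; attached to the config when present
    url: "https://example.com/logo.png",
    opacity: 0.8,
    position: "bottom-right"
  }
});
// config.width/height come from RESOLUTION_MAP.fhd (1920x1080),
// config.bitrate from QUALITY_MAP.fhd; an unknown preset still throws.
```

ConfigService (next hunk) builds exactly this object from the backend's presetEncoding, outputFormat and watermark fields, so max_width/max_height are no longer required in the response.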
@@ -308,10 +322,14 @@ class ConfigService {
  throw new Error(`Failed to fetch config: ${response.status} ${response.statusText}`);
  }
  const data = await response.json();
- if (!data.presetEncoding || typeof data.max_width !== "number" || typeof data.max_height !== "number") {
- throw new Error("Invalid config response from backend");
+ if (!data.presetEncoding) {
+ throw new Error("Invalid config response from backend: missing presetEncoding");
  }
- return mapPresetToConfig(data.presetEncoding, data.max_width, data.max_height, data.outputFormat);
+ return mapPresetToConfig({
+ preset: data.presetEncoding,
+ outputFormat: data.outputFormat,
+ watermark: data.watermark
+ });
  }
  }
 
@@ -3029,7 +3047,7 @@ function isScreenCaptureStream(stream) {
 
  // src/core/processor/bitrate-utils.ts
  import {
- QUALITY_HIGH as QUALITY_HIGH3,
+ QUALITY_HIGH as QUALITY_HIGH2,
  QUALITY_LOW as QUALITY_LOW2,
  QUALITY_MEDIUM as QUALITY_MEDIUM2,
  QUALITY_VERY_HIGH as QUALITY_VERY_HIGH2
@@ -3047,7 +3065,7 @@ function serializeBitrate(bitrate) {
  if (bitrate === QUALITY_MEDIUM2) {
  return "medium";
  }
- if (bitrate === QUALITY_HIGH3) {
+ if (bitrate === QUALITY_HIGH2) {
  return "high";
  }
  if (bitrate === QUALITY_VERY_HIGH2) {
@@ -4148,27 +4166,32 @@ var removeEmulationPreventionBytes = (data) => {
  }
  return new Uint8Array(result);
  };
- var transformAnnexBToLengthPrefixed = (packetData) => {
- const NAL_UNIT_LENGTH_SIZE = 4;
- const nalUnits = findNalUnitsInAnnexB(packetData);
- if (nalUnits.length === 0) {
- return null;
- }
- let totalSize = 0;
- for (const nalUnit of nalUnits) {
- totalSize += NAL_UNIT_LENGTH_SIZE + nalUnit.byteLength;
- }
- const avccData = new Uint8Array(totalSize);
- const dataView = new DataView(avccData.buffer);
+ var ANNEX_B_START_CODE = new Uint8Array([0, 0, 0, 1]);
+ var concatNalUnitsInLengthPrefixed = (nalUnits, lengthSize) => {
+ const totalLength = nalUnits.reduce((a, b) => a + lengthSize + b.byteLength, 0);
+ const result = new Uint8Array(totalLength);
  let offset = 0;
  for (const nalUnit of nalUnits) {
- const length = nalUnit.byteLength;
- dataView.setUint32(offset, length, false);
- offset += 4;
- avccData.set(nalUnit, offset);
+ const dataView = new DataView(result.buffer, result.byteOffset, result.byteLength);
+ switch (lengthSize) {
+ case 1:
+ dataView.setUint8(offset, nalUnit.byteLength);
+ break;
+ case 2:
+ dataView.setUint16(offset, nalUnit.byteLength, false);
+ break;
+ case 3:
+ setUint24(dataView, offset, nalUnit.byteLength, false);
+ break;
+ case 4:
+ dataView.setUint32(offset, nalUnit.byteLength, false);
+ break;
+ }
+ offset += lengthSize;
+ result.set(nalUnit, offset);
  offset += nalUnit.byteLength;
  }
- return avccData;
+ return result;
  };
  var extractNalUnitTypeForAvc = (data) => {
  return data[0] & 31;
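The former transformAnnexBToLengthPrefixed helper is replaced by concatNalUnitsInLengthPrefixed, which accepts a length-prefix size of 1 to 4 bytes instead of hard-coding 4. A standalone sketch of what the 4-byte case produces for two small NAL units; the input here is assumed data, standing in for what findNalUnitsInAnnexB supplies in the bundle:

```js
// Two NAL units as they might come out of findNalUnitsInAnnexB (assumed data).
const nalUnits = [new Uint8Array([0x65, 0x88]), new Uint8Array([0x41])];
// With lengthSize = 4, each unit is preceded by a big-endian 32-bit length:
// [0,0,0,2, 0x65,0x88, 0,0,0,1, 0x41] -> 11 bytes total.
const out = concatNalUnitsInLengthPrefixed(nalUnits, 4);
console.log(out.byteLength); // 11
```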
@@ -5006,6 +5029,32 @@ if (typeof FinalizationRegistry !== "undefined") {
  }
  });
  }
+ var VIDEO_SAMPLE_PIXEL_FORMATS = [
+ "I420",
+ "I420P10",
+ "I420P12",
+ "I420A",
+ "I420AP10",
+ "I420AP12",
+ "I422",
+ "I422P10",
+ "I422P12",
+ "I422A",
+ "I422AP10",
+ "I422AP12",
+ "I444",
+ "I444P10",
+ "I444P12",
+ "I444A",
+ "I444AP10",
+ "I444AP12",
+ "NV12",
+ "RGBA",
+ "RGBX",
+ "BGRA",
+ "BGRX"
+ ];
+ var VIDEO_SAMPLE_PIXEL_FORMATS_SET = new Set(VIDEO_SAMPLE_PIXEL_FORMATS);
 
  class VideoSample {
  get displayWidth() {
@@ -5029,8 +5078,8 @@ class VideoSample {
  if (!init || typeof init !== "object") {
  throw new TypeError("init must be an object.");
  }
- if (!("format" in init) || typeof init.format !== "string") {
- throw new TypeError("init.format must be a string.");
+ if (init.format === undefined || !VIDEO_SAMPLE_PIXEL_FORMATS_SET.has(init.format)) {
+ throw new TypeError("init.format must be one of: " + VIDEO_SAMPLE_PIXEL_FORMATS.join(", "));
  }
  if (!Number.isInteger(init.codedWidth) || init.codedWidth <= 0) {
  throw new TypeError("init.codedWidth must be a positive integer.");
@@ -5048,13 +5097,14 @@ class VideoSample {
  throw new TypeError("init.duration, when provided, must be a non-negative number.");
  }
  this._data = toUint8Array(data).slice();
+ this._layout = init.layout ?? createDefaultPlaneLayout(init.format, init.codedWidth, init.codedHeight);
  this.format = init.format;
  this.codedWidth = init.codedWidth;
  this.codedHeight = init.codedHeight;
  this.rotation = init.rotation ?? 0;
  this.timestamp = init.timestamp;
  this.duration = init.duration ?? 0;
- this.colorSpace = new VideoColorSpace(init.colorSpace);
+ this.colorSpace = new VideoSampleColorSpace(init.colorSpace);
  } else if (typeof VideoFrame !== "undefined" && data instanceof VideoFrame) {
  if (init?.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {
  throw new TypeError("init.rotation, when provided, must be 0, 90, 180, or 270.");
@@ -5066,13 +5116,14 @@ class VideoSample {
  throw new TypeError("init.duration, when provided, must be a non-negative number.");
  }
  this._data = data;
+ this._layout = null;
  this.format = data.format;
  this.codedWidth = data.displayWidth;
  this.codedHeight = data.displayHeight;
  this.rotation = init?.rotation ?? 0;
  this.timestamp = init?.timestamp ?? data.timestamp / 1e6;
  this.duration = init?.duration ?? (data.duration ?? 0) / 1e6;
- this.colorSpace = data.colorSpace;
+ this.colorSpace = new VideoSampleColorSpace(data.colorSpace);
  } else if (typeof HTMLImageElement !== "undefined" && data instanceof HTMLImageElement || typeof SVGImageElement !== "undefined" && data instanceof SVGImageElement || typeof ImageBitmap !== "undefined" && data instanceof ImageBitmap || typeof HTMLVideoElement !== "undefined" && data instanceof HTMLVideoElement || typeof HTMLCanvasElement !== "undefined" && data instanceof HTMLCanvasElement || typeof OffscreenCanvas !== "undefined" && data instanceof OffscreenCanvas) {
  if (!init || typeof init !== "object") {
  throw new TypeError("init must be an object.");
@@ -5115,13 +5166,14 @@ class VideoSample {
  assert(context);
  context.drawImage(data, 0, 0);
  this._data = canvas;
+ this._layout = null;
  this.format = "RGBX";
  this.codedWidth = width;
  this.codedHeight = height;
  this.rotation = init.rotation ?? 0;
  this.timestamp = init.timestamp;
  this.duration = init.duration ?? 0;
- this.colorSpace = new VideoColorSpace({
+ this.colorSpace = new VideoSampleColorSpace({
  matrix: "rgb",
  primaries: "bt709",
  transfer: "iec61966-2-1",
@@ -5144,8 +5196,10 @@ class VideoSample {
  rotation: this.rotation
  });
  } else if (this._data instanceof Uint8Array) {
- return new VideoSample(this._data.slice(), {
+ assert(this._layout);
+ return new VideoSample(this._data, {
  format: this.format,
+ layout: this._layout,
  codedWidth: this.codedWidth,
  codedHeight: this.codedHeight,
  timestamp: this.timestamp,
@@ -5177,32 +5231,58 @@ class VideoSample {
  }
  this._closed = true;
  }
- allocationSize() {
+ allocationSize(options = {}) {
+ validateVideoFrameCopyToOptions(options);
  if (this._closed) {
  throw new Error("VideoSample is closed.");
  }
+ if ((options.format ?? this.format) === null) {
+ throw new Error("Cannot get allocation size when format is null. Please manually provide an RGB pixel format in the" + " options instead.");
+ }
  assert(this._data !== null);
+ if (!isVideoFrame(this._data)) {
+ if (options.colorSpace || options.format && options.format !== this.format || options.layout || options.rect) {
+ const videoFrame = this.toVideoFrame();
+ const size = videoFrame.allocationSize(options);
+ videoFrame.close();
+ return size;
+ }
+ }
  if (isVideoFrame(this._data)) {
- return this._data.allocationSize();
+ return this._data.allocationSize(options);
  } else if (this._data instanceof Uint8Array) {
  return this._data.byteLength;
  } else {
  return this.codedWidth * this.codedHeight * 4;
  }
  }
- async copyTo(destination) {
+ async copyTo(destination, options = {}) {
  if (!isAllowSharedBufferSource(destination)) {
  throw new TypeError("destination must be an ArrayBuffer or an ArrayBuffer view.");
  }
+ validateVideoFrameCopyToOptions(options);
  if (this._closed) {
  throw new Error("VideoSample is closed.");
  }
+ if ((options.format ?? this.format) === null) {
+ throw new Error("Cannot copy video sample data when format is null. Please manually provide an RGB pixel format in the" + " options instead.");
+ }
  assert(this._data !== null);
+ if (!isVideoFrame(this._data)) {
+ if (options.colorSpace || options.format && options.format !== this.format || options.layout || options.rect) {
+ const videoFrame = this.toVideoFrame();
+ const layout = await videoFrame.copyTo(destination, options);
+ videoFrame.close();
+ return layout;
+ }
+ }
  if (isVideoFrame(this._data)) {
- await this._data.copyTo(destination);
+ return this._data.copyTo(destination, options);
  } else if (this._data instanceof Uint8Array) {
+ assert(this._layout);
  const dest = toUint8Array(destination);
  dest.set(this._data);
+ return this._layout;
  } else {
  const canvas = this._data;
  const context = canvas.getContext("2d");
@@ -5210,6 +5290,10 @@ class VideoSample {
  const imageData = context.getImageData(0, 0, this.codedWidth, this.codedHeight);
  const dest = toUint8Array(destination);
  dest.set(imageData.data);
+ return [{
+ offset: 0,
+ stride: 4 * this.codedWidth
+ }];
  }
  }
  toVideoFrame() {
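allocationSize and copyTo now accept VideoFrame-style copy options, and copyTo resolves to a plane layout; when the sample is not backed by a VideoFrame and a conversion (format, layout, rect, colorSpace) is requested, the call is routed through a temporary VideoFrame. A hedged sketch, assuming `sample` is an existing VideoSample and the requested conversion is supported by the environment's VideoFrame implementation:

```js
// Assumes `sample` is a VideoSample; format conversion support ultimately
// depends on the browser's VideoFrame.copyTo implementation.
const size = sample.allocationSize({ format: "RGBA" });
const destination = new Uint8Array(size);
const layout = await sample.copyTo(destination, { format: "RGBA" });
// `layout` is an array of { offset, stride } objects, one entry per plane.
```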
@@ -5419,6 +5503,23 @@ class VideoSample {
  this.close();
  }
  }
+
+ class VideoSampleColorSpace {
+ constructor(init) {
+ this.primaries = init?.primaries ?? null;
+ this.transfer = init?.transfer ?? null;
+ this.matrix = init?.matrix ?? null;
+ this.fullRange = init?.fullRange ?? null;
+ }
+ toJSON() {
+ return {
+ primaries: this.primaries,
+ transfer: this.transfer,
+ matrix: this.matrix,
+ fullRange: this.fullRange
+ };
+ }
+ }
  var isVideoFrame = (x) => {
  return typeof VideoFrame !== "undefined" && x instanceof VideoFrame;
  };
@@ -5447,6 +5548,127 @@ var validateCropRectangle = (crop, prefix) => {
  throw new TypeError(prefix + "crop.height must be a non-negative integer.");
  }
  };
+ var validateVideoFrameCopyToOptions = (options) => {
+ if (!options || typeof options !== "object") {
+ throw new TypeError("options must be an object.");
+ }
+ if (options.colorSpace !== undefined && !["display-p3", "srgb"].includes(options.colorSpace)) {
+ throw new TypeError("options.colorSpace, when provided, must be 'display-p3' or 'srgb'.");
+ }
+ if (options.format !== undefined && typeof options.format !== "string") {
+ throw new TypeError("options.format, when provided, must be a string.");
+ }
+ if (options.layout !== undefined) {
+ if (!Array.isArray(options.layout)) {
+ throw new TypeError("options.layout, when provided, must be an array.");
+ }
+ for (const plane of options.layout) {
+ if (!plane || typeof plane !== "object") {
+ throw new TypeError("Each entry in options.layout must be an object.");
+ }
+ if (!Number.isInteger(plane.offset) || plane.offset < 0) {
+ throw new TypeError("plane.offset must be a non-negative integer.");
+ }
+ if (!Number.isInteger(plane.stride) || plane.stride < 0) {
+ throw new TypeError("plane.stride must be a non-negative integer.");
+ }
+ }
+ }
+ if (options.rect !== undefined) {
+ if (!options.rect || typeof options.rect !== "object") {
+ throw new TypeError("options.rect, when provided, must be an object.");
+ }
+ if (options.rect.x !== undefined && (!Number.isInteger(options.rect.x) || options.rect.x < 0)) {
+ throw new TypeError("options.rect.x, when provided, must be a non-negative integer.");
+ }
+ if (options.rect.y !== undefined && (!Number.isInteger(options.rect.y) || options.rect.y < 0)) {
+ throw new TypeError("options.rect.y, when provided, must be a non-negative integer.");
+ }
+ if (options.rect.width !== undefined && (!Number.isInteger(options.rect.width) || options.rect.width < 0)) {
+ throw new TypeError("options.rect.width, when provided, must be a non-negative integer.");
+ }
+ if (options.rect.height !== undefined && (!Number.isInteger(options.rect.height) || options.rect.height < 0)) {
+ throw new TypeError("options.rect.height, when provided, must be a non-negative integer.");
+ }
+ }
+ };
+ var createDefaultPlaneLayout = (format, codedWidth, codedHeight) => {
+ const planes = getPlaneConfigs(format);
+ const layouts = [];
+ let currentOffset = 0;
+ for (const plane of planes) {
+ const planeWidth = Math.ceil(codedWidth / plane.widthDivisor);
+ const planeHeight = Math.ceil(codedHeight / plane.heightDivisor);
+ const stride = planeWidth * plane.sampleBytes;
+ const planeSize = stride * planeHeight;
+ layouts.push({
+ offset: currentOffset,
+ stride
+ });
+ currentOffset += planeSize;
+ }
+ return layouts;
+ };
+ var getPlaneConfigs = (format) => {
+ const yuv = (yBytes, uvBytes, subX, subY, hasAlpha) => {
+ const configs = [
+ { sampleBytes: yBytes, widthDivisor: 1, heightDivisor: 1 },
+ { sampleBytes: uvBytes, widthDivisor: subX, heightDivisor: subY },
+ { sampleBytes: uvBytes, widthDivisor: subX, heightDivisor: subY }
+ ];
+ if (hasAlpha) {
+ configs.push({ sampleBytes: yBytes, widthDivisor: 1, heightDivisor: 1 });
+ }
+ return configs;
+ };
+ switch (format) {
+ case "I420":
+ return yuv(1, 1, 2, 2, false);
+ case "I420P10":
+ case "I420P12":
+ return yuv(2, 2, 2, 2, false);
+ case "I420A":
+ return yuv(1, 1, 2, 2, true);
+ case "I420AP10":
+ case "I420AP12":
+ return yuv(2, 2, 2, 2, true);
+ case "I422":
+ return yuv(1, 1, 2, 1, false);
+ case "I422P10":
+ case "I422P12":
+ return yuv(2, 2, 2, 1, false);
+ case "I422A":
+ return yuv(1, 1, 2, 1, true);
+ case "I422AP10":
+ case "I422AP12":
+ return yuv(2, 2, 2, 1, true);
+ case "I444":
+ return yuv(1, 1, 1, 1, false);
+ case "I444P10":
+ case "I444P12":
+ return yuv(2, 2, 1, 1, false);
+ case "I444A":
+ return yuv(1, 1, 1, 1, true);
+ case "I444AP10":
+ case "I444AP12":
+ return yuv(2, 2, 1, 1, true);
+ case "NV12":
+ return [
+ { sampleBytes: 1, widthDivisor: 1, heightDivisor: 1 },
+ { sampleBytes: 2, widthDivisor: 2, heightDivisor: 2 }
+ ];
+ case "RGBA":
+ case "RGBX":
+ case "BGRA":
+ case "BGRX":
+ return [
+ { sampleBytes: 4, widthDivisor: 1, heightDivisor: 1 }
+ ];
+ default:
+ assertNever(format);
+ assert(false);
+ }
+ };
  var AUDIO_SAMPLE_FORMATS = new Set(["f32", "f32-planar", "s16", "s16-planar", "s32", "s32-planar", "u8", "u8-planar"]);
 
  class AudioSample {
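createDefaultPlaneLayout walks the per-plane configs and packs the planes back to back, so offsets and strides follow directly from the coded size. A worked example for I420 at 1280x720:

```js
// Worked example for the code above: I420 at 1280x720.
createDefaultPlaneLayout("I420", 1280, 720);
// -> [
//   { offset: 0,       stride: 1280 }, // Y plane, 1280 x 720 = 921600 bytes
//   { offset: 921600,  stride: 640  }, // U plane,  640 x 360 = 230400 bytes
//   { offset: 1152000, stride: 640  }  // V plane,  640 x 360 = 230400 bytes
// ]
```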
@@ -7898,11 +8120,11 @@ class IsobmffMuxer extends Muxer {
  const trackData = this.getVideoTrackData(track, packet, meta);
  let packetData = packet.data;
  if (trackData.info.requiresAnnexBTransformation) {
- const transformedData = transformAnnexBToLengthPrefixed(packetData);
- if (!transformedData) {
+ const nalUnits = findNalUnitsInAnnexB(packetData);
+ if (nalUnits.length === 0) {
  throw new Error("Failed to transform packet data. Make sure all packets are provided in Annex B format, as" + " specified in ITU-T-REC-H.264 and ITU-T-REC-H.265.");
  }
- packetData = transformedData;
+ packetData = concatNalUnitsInLengthPrefixed(nalUnits, 4);
  }
  const timestamp = this.validateAndNormalizeTimestamp(trackData.track, packet.timestamp, packet.type === "key");
  const internalSample = this.createSampleForTrack(trackData, packetData, timestamp, packet.duration, packet.type);
@@ -10144,6 +10366,41 @@ function requireInitialized(value, componentName) {
  return value;
  }
 
+ // src/core/processor/worker/watermark-utils.ts
+ function calculateWatermarkTargetSize(videoWidth, imageWidth, imageHeight) {
+ const targetWidth = Math.round(videoWidth * 0.07);
+ const scaleFactor = targetWidth / imageWidth;
+ const targetHeight = Math.round(imageHeight * scaleFactor);
+ return { width: targetWidth, height: targetHeight };
+ }
+ function getWatermarkPosition(options) {
+ const { watermarkWidth, watermarkHeight, videoWidth, videoHeight, position } = options;
+ const padding = 20;
+ switch (position) {
+ case "top-left":
+ return { x: padding, y: padding };
+ case "top-right":
+ return { x: videoWidth - watermarkWidth - padding, y: padding };
+ case "bottom-left":
+ return { x: padding, y: videoHeight - watermarkHeight - padding };
+ case "bottom-right":
+ return {
+ x: videoWidth - watermarkWidth - padding,
+ y: videoHeight - watermarkHeight - padding
+ };
+ case "center":
+ return {
+ x: (videoWidth - watermarkWidth) / 2,
+ y: (videoHeight - watermarkHeight) / 2
+ };
+ default:
+ return {
+ x: videoWidth - watermarkWidth - padding,
+ y: videoHeight - watermarkHeight - padding
+ };
+ }
+ }
+
  // src/core/processor/worker/recorder-worker.ts
  var CHUNK_SIZE = 16 * 1024 * 1024;
  var OVERLAY_BACKGROUND_OPACITY = 0.6;
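calculateWatermarkTargetSize scales the image to roughly 7% of the video width while keeping its aspect ratio, and getWatermarkPosition keeps a 20px margin from the chosen edge, with bottom-right as the fallback. A worked example for a 200x200 logo on a 1920x1080 frame:

```js
// Math.round(1920 * 0.07) = 134; 200 * (134 / 200) = 134.
calculateWatermarkTargetSize(1920, 200, 200);
// -> { width: 134, height: 134 }

getWatermarkPosition({
  watermarkWidth: 134,
  watermarkHeight: 134,
  videoWidth: 1920,
  videoHeight: 1080,
  position: "bottom-right"
});
// -> { x: 1766, y: 926 }  (1920 - 134 - 20, 1080 - 134 - 20)
```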
@@ -10181,6 +10438,7 @@ class RecorderWorker {
  overlayCanvas = null;
  compositionCanvas = null;
  compositionCtx = null;
+ watermarkCanvas = null;
  hiddenIntervals = [];
  currentHiddenIntervalStart = null;
  recordingStartTime = 0;
@@ -10298,6 +10556,7 @@ class RecorderWorker {
  this.hiddenIntervals = [];
  this.currentHiddenIntervalStart = null;
  this.pendingVisibilityUpdates = [];
+ this.watermarkCanvas = null;
  }
  setupOverlayConfig(overlayConfig) {
  this.overlayConfig = overlayConfig ? { enabled: overlayConfig.enabled, text: overlayConfig.text } : null;
@@ -10331,7 +10590,9 @@ class RecorderWorker {
  const keyFrameIntervalSeconds = config.keyFrameInterval / fps;
  const videoSourceOptions = {
  codec: config.codec,
- sizeChangeBehavior: "passThrough",
+ width: config.width,
+ height: config.height,
+ sizeChangeBehavior: "contain",
  bitrateMode: "variable",
  latencyMode: "quality",
  contentHint: "detail",
@@ -10393,6 +10654,9 @@ class RecorderWorker {
  }
  this.setupAudioSource(audioStream, config);
  const output = requireNonNull(this.output, "Output must be initialized before starting");
+ if (this.config?.watermark) {
+ this.prepareWatermark();
+ }
  await output.start();
  this.startBufferUpdates();
  this.sendReady();
@@ -10536,6 +10800,79 @@ class RecorderWorker {
  y: padding
  };
  }
+ async prepareWatermark() {
+ if (!this.config?.watermark || this.watermarkCanvas) {
+ return;
+ }
+ const { url: url2, opacity = 1 } = this.config.watermark;
+ try {
+ const response = await fetch(url2, { mode: "cors" });
+ if (!response.ok) {
+ throw new Error(`HTTP error! status: ${response.status}`);
+ }
+ const blob = await response.blob();
+ const isSvg = url2.toLowerCase().endsWith(".svg") || blob.type === "image/svg+xml";
+ if (isSvg) {
+ logger.warn("[RecorderWorker] Loading SVG watermark. Note: Some environments may not support SVG in createImageBitmap inside workers. If the watermark doesn't appear, consider using a PNG or a Data URL.");
+ }
+ const bitmap = await createImageBitmap(blob).catch((err) => {
+ throw new Error(`Failed to create ImageBitmap from blob (${blob.type}). Errors can happen with SVGs in workers or invalid formats: ${err.message}`);
+ });
+ const videoWidth = this.config?.width || 1280;
+ const { width: targetWidth, height: targetHeight } = calculateWatermarkTargetSize(videoWidth, bitmap.width, bitmap.height);
+ const scaleFactor = targetWidth / bitmap.width;
+ const canvas = new OffscreenCanvas(targetWidth, targetHeight);
+ const ctx = canvas.getContext("2d", { willReadFrequently: false });
+ if (!ctx) {
+ bitmap.close();
+ throw new Error("Failed to get watermark canvas context");
+ }
+ ctx.globalAlpha = opacity;
+ ctx.drawImage(bitmap, 0, 0, targetWidth, targetHeight);
+ ctx.globalAlpha = 1;
+ bitmap.close();
+ this.watermarkCanvas = canvas;
+ logger.debug("[RecorderWorker] Watermark prepared with pre-applied opacity", {
+ width: canvas.width,
+ height: canvas.height,
+ opacity,
+ scaleFactor
+ });
+ } catch (error) {
+ const errorMessage = error instanceof Error ? error.message : String(error);
+ logger.error("[RecorderWorker] Failed to load watermark. This is often caused by CORS if the image is on another domain. Try using a Data URL (base64) or ensure the server has Access-Control-Allow-Origin: *.", {
+ url: url2,
+ error: errorMessage
+ });
+ }
+ }
+ ensureCompositionCanvas(width, height) {
+ if (!this.compositionCanvas) {
+ this.compositionCanvas = new OffscreenCanvas(width, height);
+ this.compositionCtx = requireNonNull(this.compositionCanvas.getContext("2d", { willReadFrequently: false }), "Failed to get composition canvas context");
+ logger.debug("[RecorderWorker] Composition canvas created", {
+ width,
+ height
+ });
+ return this.compositionCtx;
+ }
+ if (!this.compositionCtx) {
+ this.compositionCtx = requireNonNull(this.compositionCanvas.getContext("2d", { willReadFrequently: false }), "Failed to get composition canvas context");
+ return this.compositionCtx;
+ }
+ const widthChanged = this.compositionCanvas.width !== width;
+ const heightChanged = this.compositionCanvas.height !== height;
+ if (widthChanged || heightChanged) {
+ this.compositionCanvas = new OffscreenCanvas(width, height);
+ this.compositionCtx = requireNonNull(this.compositionCanvas.getContext("2d", { willReadFrequently: false }), "Failed to get composition canvas context");
+ logger.debug("[RecorderWorker] Composition canvas resized", {
+ width,
+ height
+ });
+ return this.compositionCtx;
+ }
+ return this.compositionCtx;
+ }
  shouldApplyOverlay(timestamp) {
  if (!this.overlayConfig?.enabled) {
  return false;
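prepareWatermark fetches the image inside the worker, so remote URLs are subject to CORS, which is exactly what the error log above warns about; a data URL sidesteps the cross-origin fetch. A hedged sketch of the watermark object the worker config carries (how it is supplied from the public recorder API is not shown in this diff):

```js
// Assumption: this is the `config.watermark` shape the worker receives (see the
// TranscodeConfig changes earlier in this diff); a data: URL avoids CORS entirely.
const watermark = {
  url: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUg...", // truncated example payload
  opacity: 0.8,
  position: "bottom-right"
};
```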
@@ -10603,39 +10940,49 @@ class RecorderWorker {
  const frameDuration = 1 / this.frameRate;
  let frameToProcess = videoFrame;
  let imageBitmap = null;
- if (this.shouldApplyOverlay(frameTimestamp)) {
+ const needsOverlay = this.shouldApplyOverlay(frameTimestamp);
+ const needsWatermark = !!(this.config?.watermark && this.watermarkCanvas);
+ const needsComposition = needsOverlay || needsWatermark;
+ if (needsComposition) {
  const width = videoFrame.displayWidth;
  const height = videoFrame.displayHeight;
  if (width <= 0 || height <= 0) {
- logger.warn("[RecorderWorker] Invalid video frame dimensions, skipping overlay", { width, height });
- } else if (this.overlayConfig) {
- if (!this.overlayCanvas) {
- this.overlayCanvas = this.createOverlayCanvas(this.overlayConfig.text);
- logger.debug("[RecorderWorker] Overlay canvas created", {
- overlayWidth: this.overlayCanvas.width,
- overlayHeight: this.overlayCanvas.height
+ logger.warn("[RecorderWorker] Invalid video frame dimensions, skipping composition", { width, height });
+ } else {
+ const ctx = this.ensureCompositionCanvas(width, height);
+ ctx.clearRect(0, 0, width, height);
+ ctx.drawImage(videoFrame, 0, 0, width, height);
+ if (needsOverlay && this.overlayConfig) {
+ if (!this.overlayCanvas) {
+ this.overlayCanvas = this.createOverlayCanvas(this.overlayConfig.text);
+ logger.debug("[RecorderWorker] Overlay canvas created", {
+ overlayWidth: this.overlayCanvas.width,
+ overlayHeight: this.overlayCanvas.height
+ });
+ }
+ const overlayPosition = this.getOverlayPosition(this.overlayCanvas.width, width);
+ ctx.drawImage(this.overlayCanvas, overlayPosition.x, overlayPosition.y);
+ }
+ if (needsWatermark && this.config?.watermark && this.watermarkCanvas) {
+ const watermarkPosition = getWatermarkPosition({
+ watermarkWidth: this.watermarkCanvas.width,
+ watermarkHeight: this.watermarkCanvas.height,
+ videoWidth: width,
+ videoHeight: height,
+ position: this.config.watermark.position
  });
+ ctx.drawImage(this.watermarkCanvas, watermarkPosition.x, watermarkPosition.y);
  }
- if (this.overlayCanvas) {
- if (!(this.compositionCanvas && this.compositionCtx) || this.compositionCanvas.width !== width || this.compositionCanvas.height !== height) {
- this.compositionCanvas = new OffscreenCanvas(width, height);
- this.compositionCtx = requireNonNull(this.compositionCanvas.getContext("2d"), "Failed to get composition canvas context");
- }
- requireNonNull(this.compositionCtx, "Composition context must be available");
- this.compositionCtx.clearRect(0, 0, width, height);
- this.compositionCtx.drawImage(videoFrame, 0, 0, width, height);
- const position = this.getOverlayPosition(this.overlayCanvas.width, width);
- this.compositionCtx.drawImage(this.overlayCanvas, position.x, position.y);
- imageBitmap = this.compositionCanvas.transferToImageBitmap();
- const frameInit = {};
- if (typeof videoFrame.timestamp === "number" && videoFrame.timestamp !== null) {
- frameInit.timestamp = videoFrame.timestamp;
- }
- if (typeof videoFrame.duration === "number" && videoFrame.duration !== null) {
- frameInit.duration = videoFrame.duration;
- }
- frameToProcess = new VideoFrame(imageBitmap, frameInit);
+ const compositionCanvas = requireNonNull(this.compositionCanvas, "Composition canvas must exist after ensureCompositionCanvas");
+ imageBitmap = compositionCanvas.transferToImageBitmap();
+ const frameInit = {};
+ if (typeof videoFrame.timestamp === "number") {
+ frameInit.timestamp = videoFrame.timestamp;
+ }
+ if (typeof videoFrame.duration === "number") {
+ frameInit.duration = videoFrame.duration;
  }
+ frameToProcess = new VideoFrame(imageBitmap, frameInit);
  }
  }
  const maxLead = 0.05;
@@ -11006,6 +11353,7 @@ class RecorderWorker {
  this.pausedDuration = 0;
  this.pauseStartedAt = null;
  this.overlayCanvas = null;
+ this.watermarkCanvas = null;
  this.overlayConfig = null;
  this.hiddenIntervals = [];
  this.currentHiddenIntervalStart = null;
@@ -11210,7 +11558,8 @@ class WorkerProcessor {
  audioBitrate: config.audioBitrate,
  codec,
  keyFrameInterval: KEY_FRAME_INTERVAL,
- format
+ format,
+ watermark: config.watermark
  };
  const videoTracks = stream.getVideoTracks();
  const audioTracks = stream.getAudioTracks();