@stefanmartin/expo-video-watermark 0.3.2 → 0.4.0
@@ -228,6 +228,9 @@ class ExpoVideoWatermarkModule : Module() {
     val frameRate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE)?.toFloatOrNull() ?: 0f
     val colorStandard = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COLOR_STANDARD) ?: "unknown"
     val colorTransfer = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COLOR_TRANSFER) ?: "unknown"
+    val colorTransferInt = colorTransfer.toIntOrNull()
+    // Check for HDR color transfer characteristics. 6 = PQ (ST2084), 7 = HLG. See MediaFormat constants.
+    val isHdr = colorTransferInt == 6 || colorTransferInt == 7
     retriever.release()
 
     // Build comprehensive video info string for debugging
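The magic numbers in the new isHdr check map to android.media.MediaFormat constants: COLOR_TRANSFER_ST2084 is 6 (PQ) and COLOR_TRANSFER_HLG is 7. A minimal sketch of the same check written against those named constants (the helper name is illustrative, not part of the package):

    import android.media.MediaFormat

    // Same test as the diff's isHdr, but with named constants instead of 6/7.
    fun isHdrColorTransfer(colorTransfer: String?): Boolean {
        val transfer = colorTransfer?.toIntOrNull() ?: return false
        return transfer == MediaFormat.COLOR_TRANSFER_ST2084 || // 6, PQ
            transfer == MediaFormat.COLOR_TRANSFER_HLG           // 7, HLG
    }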
@@ -237,7 +240,7 @@ class ExpoVideoWatermarkModule : Module() {
         append("duration=${duration}ms, ")
         append("frameRate=$frameRate, ")
         append("colorStandard=$colorStandard, ")
-        append("colorTransfer=$colorTransfer")
+        append("colorTransfer=$colorTransfer (isHdr=$isHdr)")
     }
     Log.d(TAG, "[Step 6] Video metadata: $videoInfo")
 
@@ -254,34 +257,55 @@ class ExpoVideoWatermarkModule : Module() {
         rawVideoWidth to rawVideoHeight
     }
 
-    // Step 8:
-    val
-    val
-    val
+    // Step 8: Pre-scale watermark bitmap to match video width if needed
+    val originalWidth = watermarkBitmap.width
+    val originalHeight = watermarkBitmap.height
+    val targetWidth = videoWidth.toInt()
+    val scale = targetWidth.toFloat() / originalWidth.toFloat()
+    val targetHeight = (originalHeight * scale).toInt()
+
+    // Skip scaling if watermark already matches video width
+    val scaledWatermark: Bitmap = if (originalWidth == targetWidth) {
+        Log.d(TAG, "[Step 8] Watermark already matches video width (${originalWidth}x${originalHeight}), skipping scale")
+        watermarkBitmap
+    } else {
+        Log.d(TAG, "[Step 8] Pre-scaling watermark: ${originalWidth}x${originalHeight} -> ${targetWidth}x${targetHeight} (scale: $scale)")
+        try {
+            val scaled = Bitmap.createScaledBitmap(watermarkBitmap, targetWidth, targetHeight, true)
+            // Recycle original if we created a new scaled version
+            if (scaled !== watermarkBitmap) {
+                watermarkBitmap.recycle()
+            }
+            scaled
+        } catch (e: Exception) {
+            watermarkBitmap.recycle()
+            promise.reject("STEP8_SCALE_ERROR", "[Step 8] Failed to scale watermark bitmap: ${e.message}", e)
+            return
+        }
+    }
 
-    // Step 9: Create overlay settings for
+    // Step 9: Create overlay settings for bottom positioning (no GPU scaling needed)
     // In Media3, coordinates are normalized: (0,0) is center
     // x range [-1, 1] (left to right), y range [-1, 1] (bottom to top)
     val overlaySettings = try {
         StaticOverlaySettings.Builder()
-            .setScale(scale, scale) // Scale uniformly to match video width
             .setOverlayFrameAnchor(0f, -1f) // Anchor at bottom-center of watermark
             .setBackgroundFrameAnchor(0f, -1f) // Position at very bottom of video
             .build()
     } catch (e: Exception) {
-
+        scaledWatermark.recycle()
         promise.reject("STEP9_OVERLAY_SETTINGS_ERROR", "[Step 9] Failed to create overlay settings: ${e.message}", e)
         return
     }
 
-    // Step 10: Create the bitmap overlay with
+    // Step 10: Create the bitmap overlay with pre-scaled bitmap
     val bitmapOverlay = try {
         BitmapOverlay.createStaticBitmapOverlay(
-
+            scaledWatermark,
             overlaySettings
         )
     } catch (e: Exception) {
-
+        scaledWatermark.recycle()
         promise.reject("STEP10_BITMAP_OVERLAY_ERROR", "[Step 10] Failed to create bitmap overlay: ${e.message}", e)
         return
     }
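With the watermark now pre-scaled on the CPU (scale = targetWidth / originalWidth, so a 1080-wide bitmap over a 1080-wide video gets scale 1.0 and is left untouched), the GPU overlay only has to position it. In Media3's normalized overlay space, setOverlayFrameAnchor(0f, -1f) picks the bottom-center of the watermark and setBackgroundFrameAnchor(0f, -1f) pins that point to the bottom-center of the video frame. A sketch of the same builder aimed at a different corner, assuming the same StaticOverlaySettings API used in the hunk above (variable names are illustrative):

    // Pin the overlay's top-right corner to the video's top-right corner.
    // (0,0) is center; x grows to the right, y grows upward, both in [-1, 1].
    val topRightSettings = StaticOverlaySettings.Builder()
        .setOverlayFrameAnchor(1f, 1f)    // top-right point of the watermark
        .setBackgroundFrameAnchor(1f, 1f) // top-right point of the video frame
        .build()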
@@ -289,8 +313,9 @@ class ExpoVideoWatermarkModule : Module() {
     // Step 11: Create overlay effect with proper typing
     val overlayEffect = try {
         OverlayEffect(ImmutableList.of<TextureOverlay>(bitmapOverlay))
-    }
-
+    }
+    catch (e: Exception) {
+        scaledWatermark.recycle()
         promise.reject("STEP11_OVERLAY_EFFECT_ERROR", "[Step 11] Failed to create overlay effect: ${e.message}", e)
         return
     }
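OverlayEffect takes an ImmutableList, so several TextureOverlays can ride in one effect, with later entries drawn over earlier ones. A sketch with two static bitmap overlays; the bitmaps and settings here are hypothetical placeholders, not values from the package:

    // Two watermarks composited by a single OverlayEffect.
    val logoOverlay = BitmapOverlay.createStaticBitmapOverlay(logoBitmap, logoSettings)
    val creditOverlay = BitmapOverlay.createStaticBitmapOverlay(creditBitmap, creditSettings)
    val combinedEffect = OverlayEffect(
        ImmutableList.of<TextureOverlay>(logoOverlay, creditOverlay)
    )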
@@ -304,17 +329,34 @@ class ExpoVideoWatermarkModule : Module() {
     // Step 13: Create media item from video
     val mediaItem = MediaItem.fromUri("file://$cleanVideoPath")
 
-    // Step 14: Create edited media item
+    // Step 14: Create edited media item, requesting HDR to SDR tone-mapping if needed.
+    // This is the modern API for handling HDR in Media3, replacing the deprecated setHdrMode.
     val editedMediaItem = try {
-        EditedMediaItem.Builder(mediaItem)
-
-        .
+        val builder = EditedMediaItem.Builder(mediaItem).setEffects(effects)
+        if (isHdr) {
+            Log.d(TAG, "[Step 14] HDR video detected. Requesting tone-mapping.")
+            builder.setForceHdrToSdrToneMap(true)
+        } else {
+            Log.d(TAG, "[Step 14] SDR video detected. No tone-mapping needed.")
+        }
+        builder.build()
     } catch (e: Exception) {
-
+        scaledWatermark.recycle()
         promise.reject("STEP14_EDITED_MEDIA_ERROR", "[Step 14] Failed to create edited media item: ${e.message}", e)
         return
     }
 
+    // Step 14b: Create composition
+    val composition = try {
+        Composition.Builder(listOf(editedMediaItem))
+            .build()
+    } catch (e: Exception) {
+        scaledWatermark.recycle()
+        promise.reject("STEP14B_COMPOSITION_ERROR", "[Step 14b] Failed to create composition: ${e.message}", e)
+        return
+    }
+    Log.d(TAG, "[Step 14b] Composition created successfully.")
+
     // Handler for main thread callbacks
     val mainHandler = Handler(Looper.getMainLooper())
 
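setEffects(effects) consumes an Effects object whose construction sits outside these hunks. In Media3 Transformer that class wraps the item's audio processors and video effects, so for this module it presumably carries the overlay effect, roughly as below (a sketch of the assumed shape, not code from the package):

    // Assumed shape of the effects value passed to setEffects(effects):
    // no audio processing, one video effect (the watermark overlay).
    val effects = Effects(
        /* audioProcessors = */ emptyList(),
        /* videoEffects = */ listOf(overlayEffect)
    )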
@@ -338,7 +380,7 @@ class ExpoVideoWatermarkModule : Module() {
         "averageAudioBitrate: ${exportResult.averageAudioBitrate}, " +
         "averageVideoBitrate: ${exportResult.averageVideoBitrate}, " +
         "videoFrameCount: ${exportResult.videoFrameCount}")
-
+    scaledWatermark.recycle()
 
     // Step 16: Re-encode to H.265 if device supports HEVC encoder
     val supportsHevc = hasHevcEncoder()
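hasHevcEncoder() is defined elsewhere in the module; the usual way to answer that question on Android is to walk MediaCodecList looking for an encoder that advertises video/hevc. A sketch of what such a check commonly looks like (the package's real implementation may differ):

    import android.media.MediaCodecList
    import android.media.MediaFormat

    // True if any codec on the device is an encoder that handles video/hevc.
    fun hasHevcEncoder(): Boolean =
        MediaCodecList(MediaCodecList.REGULAR_CODECS).codecInfos.any { info ->
            info.isEncoder && info.supportedTypes.any {
                it.equals(MediaFormat.MIMETYPE_VIDEO_HEVC, ignoreCase = true)
            }
        }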
@@ -452,7 +494,8 @@ class ExpoVideoWatermarkModule : Module() {
     appendLine("Input path: $cleanVideoPath")
     appendLine()
     appendLine("--- Watermark Info ---")
-    appendLine("
+    appendLine("Original dimensions: ${originalWidth}x${originalHeight}")
+    appendLine("Scaled dimensions: ${targetWidth}x${targetHeight}")
     appendLine("Bitmap info: $bitmapInfo")
     appendLine("Scale factor: $scale")
     appendLine()
@@ -478,14 +521,14 @@ class ExpoVideoWatermarkModule : Module() {
         causeLevel++
     }
 
-
+    scaledWatermark.recycle()
 
     // Reject with comprehensive error message
     val errorMessage = "[Step 15] Transform failed - " +
         "ErrorCode: $errorCodeName (${exportException.errorCode}), " +
         "Device: ${Build.MANUFACTURER} ${Build.MODEL} (API ${Build.VERSION.SDK_INT}), " +
         "Video: ${videoWidth.toInt()}x${videoHeight.toInt()} $mimeType, " +
-        "Watermark: ${
+        "Watermark: ${originalWidth}x${originalHeight} -> ${targetWidth}x${targetHeight}, " +
         "Scale: $scale, " +
         "Message: ${exportException.message ?: "Unknown error"}"
 
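The exportResult fields logged earlier and the exportException handled just above arrive through a Transformer.Listener, and the next hunk then starts the export with transformer.start(composition, cleanOutputPath). A minimal sketch of that listener wiring, with context standing in for whatever the module actually passes to Transformer.Builder (the real builder adds more configuration):

    // Assumed wiring, simplified from what this module does.
    val transformer = Transformer.Builder(context)
        .addListener(object : Transformer.Listener {
            override fun onCompleted(composition: Composition, exportResult: ExportResult) {
                // Success path: the Step 15 logging and Step 16 HEVC re-encode run from here.
            }

            override fun onError(
                composition: Composition,
                exportResult: ExportResult,
                exportException: ExportException
            ) {
                // Failure path: the [Step 15] errorMessage above is assembled from here.
            }
        })
        .build()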
@@ -499,13 +542,13 @@ class ExpoVideoWatermarkModule : Module() {
             .build()
 
     Log.d(TAG, "[Step 15] Transformer built, starting export...")
-    transformer.start(
+    transformer.start(composition, cleanOutputPath)
     Log.d(TAG, "[Step 15] Transformer.start() called, waiting for completion...")
 } catch (e: Exception) {
     Log.e(TAG, "[Step 15] Exception building/starting transformer", e)
     Log.e(TAG, "[Step 15] Device info: $deviceInfo")
     Log.e(TAG, "[Step 15] GL info: $glInfo")
-
+    scaledWatermark.recycle()
     promise.reject(
         "STEP15_TRANSFORMER_BUILD_ERROR",
         "[Step 15] Failed to build/start transformer on ${Build.MANUFACTURER} ${Build.MODEL}: ${e.message}",