@stefanmartin/expo-video-watermark 0.2.7 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/android/src/main/java/expo/modules/videowatermark/ExpoVideoWatermarkModule.kt
CHANGED
@@ -3,11 +3,15 @@ package expo.modules.videowatermark
 import android.content.Context
 import android.graphics.Bitmap
 import android.graphics.BitmapFactory
+import android.media.MediaCodecList
 import android.media.MediaMetadataRetriever
+import android.os.Build
 import android.os.Handler
 import android.os.Looper
+import android.util.Log
 import androidx.annotation.OptIn
 import androidx.media3.common.MediaItem
+import androidx.media3.common.MimeTypes
 import androidx.media3.common.util.UnstableApi
 import androidx.media3.effect.BitmapOverlay
 import androidx.media3.effect.OverlayEffect
@@ -25,8 +29,103 @@ import expo.modules.kotlin.exception.Exceptions
 import expo.modules.kotlin.modules.Module
 import expo.modules.kotlin.modules.ModuleDefinition
 import java.io.File
+import javax.microedition.khronos.egl.EGL10
+import javax.microedition.khronos.egl.EGLConfig
+import javax.microedition.khronos.egl.EGLContext
 
 class ExpoVideoWatermarkModule : Module() {
+  companion object {
+    private const val TAG = "ExpoVideoWatermark"
+
+    /**
+     * Check if the device has a hardware H.265/HEVC encoder
+     */
+    fun hasHevcEncoder(): Boolean {
+      return try {
+        val codecList = MediaCodecList(MediaCodecList.REGULAR_CODECS)
+        codecList.codecInfos.any { codecInfo ->
+          codecInfo.isEncoder && codecInfo.supportedTypes.any { type ->
+            type.equals(MimeTypes.VIDEO_H265, ignoreCase = true)
+          }
+        }
+      } catch (e: Exception) {
+        Log.w(TAG, "Failed to check HEVC encoder support: ${e.message}")
+        false
+      }
+    }
+
+    /**
+     * Get device information for debugging
+     */
+    fun getDeviceInfo(): String {
+      return buildString {
+        append("Device: ${Build.MANUFACTURER} ${Build.MODEL}")
+        append(", Android ${Build.VERSION.RELEASE} (API ${Build.VERSION.SDK_INT})")
+        append(", Board: ${Build.BOARD}")
+        append(", Hardware: ${Build.HARDWARE}")
+        append(", SOC: ${Build.SOC_MANUFACTURER} ${Build.SOC_MODEL}")
+      }
+    }
+
+    /**
+     * Get OpenGL ES info (call on GL thread or main thread)
+     */
+    fun getGLInfo(): String {
+      return try {
+        val egl = EGLContext.getEGL() as? EGL10
+        if (egl != null) {
+          val display = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY)
+          egl.eglInitialize(display, IntArray(2))
+
+          val configAttribs = intArrayOf(
+            EGL10.EGL_RENDERABLE_TYPE, 4, // EGL_OPENGL_ES2_BIT
+            EGL10.EGL_NONE
+          )
+          val configs = arrayOfNulls<EGLConfig>(1)
+          val numConfigs = IntArray(1)
+          egl.eglChooseConfig(display, configAttribs, configs, 1, numConfigs)
+
+          val vendor = egl.eglQueryString(display, EGL10.EGL_VENDOR) ?: "unknown"
+          val version = egl.eglQueryString(display, EGL10.EGL_VERSION) ?: "unknown"
+          val extensions = egl.eglQueryString(display, EGL10.EGL_EXTENSIONS) ?: ""
+
+          egl.eglTerminate(display)
+
+          "EGL Vendor: $vendor, Version: $version, Has OES_EGL_image_external: ${extensions.contains("EGL_KHR_image_base")}"
+        } else {
+          "EGL not available"
+        }
+      } catch (e: Exception) {
+        "GL info error: ${e.message}"
+      }
+    }
+
+    /**
+     * Map ExportException error code to human-readable string
+     */
+    @OptIn(UnstableApi::class)
+    fun getExportErrorCodeName(errorCode: Int): String {
+      return when (errorCode) {
+        ExportException.ERROR_CODE_UNSPECIFIED -> "ERROR_CODE_UNSPECIFIED"
+        ExportException.ERROR_CODE_IO_UNSPECIFIED -> "ERROR_CODE_IO_UNSPECIFIED"
+        ExportException.ERROR_CODE_IO_FILE_NOT_FOUND -> "ERROR_CODE_IO_FILE_NOT_FOUND"
+        ExportException.ERROR_CODE_IO_NETWORK_CONNECTION_FAILED -> "ERROR_CODE_IO_NETWORK_CONNECTION_FAILED"
+        ExportException.ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT -> "ERROR_CODE_IO_NETWORK_CONNECTION_TIMEOUT"
+        ExportException.ERROR_CODE_DECODER_INIT_FAILED -> "ERROR_CODE_DECODER_INIT_FAILED"
+        ExportException.ERROR_CODE_DECODING_FAILED -> "ERROR_CODE_DECODING_FAILED"
+        ExportException.ERROR_CODE_DECODING_FORMAT_UNSUPPORTED -> "ERROR_CODE_DECODING_FORMAT_UNSUPPORTED"
+        ExportException.ERROR_CODE_ENCODER_INIT_FAILED -> "ERROR_CODE_ENCODER_INIT_FAILED"
+        ExportException.ERROR_CODE_ENCODING_FAILED -> "ERROR_CODE_ENCODING_FAILED"
+        ExportException.ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED -> "ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED"
+        ExportException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED -> "ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED"
+        ExportException.ERROR_CODE_AUDIO_PROCESSING_FAILED -> "ERROR_CODE_AUDIO_PROCESSING_FAILED"
+        ExportException.ERROR_CODE_MUXING_FAILED -> "ERROR_CODE_MUXING_FAILED"
+        ExportException.ERROR_CODE_MUXING_TIMEOUT -> "ERROR_CODE_MUXING_TIMEOUT"
+        else -> "UNKNOWN_ERROR_CODE($errorCode)"
+      }
+    }
+  }
+
   private val context: Context
     get() = appContext.reactContext ?: throw Exceptions.AppContextLost()
 
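Note on the new `hasHevcEncoder()` helper above: `MediaCodecList(MediaCodecList.REGULAR_CODECS)` enumerates software encoders as well as hardware ones, so the check passes for any codec advertising `video/hevc`. A stricter, hardware-only variant is sketched below; `hasHardwareHevcEncoder` is a hypothetical name, not part of this package, and `MediaCodecInfo.isHardwareAccelerated` is only available from API 29.

import android.media.MediaCodecList
import android.os.Build

// Hypothetical stricter variant: only count HEVC encoders the OS reports as
// hardware-accelerated (API 29+); older API levels fall back to the same
// "any HEVC encoder" behaviour as hasHevcEncoder().
fun hasHardwareHevcEncoder(): Boolean {
  val codecList = MediaCodecList(MediaCodecList.REGULAR_CODECS)
  return codecList.codecInfos.any { info ->
    val isHevcEncoder = info.isEncoder &&
      info.supportedTypes.any { it.equals("video/hevc", ignoreCase = true) }
    if (!isHevcEncoder) return@any false
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) info.isHardwareAccelerated else true
  }
}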
@@ -77,6 +176,7 @@ class ExpoVideoWatermarkModule : Module() {
 
     // Step 4: Ensure bitmap is in ARGB_8888 format (required for Media3 GPU processing)
    val watermarkBitmap: Bitmap = if (decodedBitmap.config != Bitmap.Config.ARGB_8888) {
+      Log.d(TAG, "[Step 4] Converting bitmap from ${decodedBitmap.config} to ARGB_8888")
       val converted = decodedBitmap.copy(Bitmap.Config.ARGB_8888, false)
       decodedBitmap.recycle()
       if (converted == null) {
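Step 4 above decodes the watermark first and then copies it into ARGB_8888 when the decoded config differs. An alternative, shown here only as a minimal sketch rather than what the package does, is to request ARGB_8888 at decode time through `BitmapFactory.Options`, which usually avoids the extra copy; `inPreferredConfig` is a hint, so verifying `config` afterwards, as the module does, is still worthwhile. `watermarkPath` is a placeholder variable.

import android.graphics.Bitmap
import android.graphics.BitmapFactory

// Sketch: ask the decoder for ARGB_8888 up front instead of copying afterwards.
// Returns null if the file cannot be decoded.
fun decodeWatermarkArgb8888(watermarkPath: String): Bitmap? {
  val options = BitmapFactory.Options().apply {
    inPreferredConfig = Bitmap.Config.ARGB_8888
  }
  return BitmapFactory.decodeFile(watermarkPath, options)
}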
@@ -88,6 +188,16 @@ class ExpoVideoWatermarkModule : Module() {
       decodedBitmap
     }
 
+    // Log bitmap details for debugging
+    val bitmapInfo = buildString {
+      append("size=${watermarkBitmap.width}x${watermarkBitmap.height}, ")
+      append("config=${watermarkBitmap.config}, ")
+      append("byteCount=${watermarkBitmap.byteCount / 1024}KB, ")
+      append("hasAlpha=${watermarkBitmap.hasAlpha()}, ")
+      append("isPremultiplied=${watermarkBitmap.isPremultiplied}")
+    }
+    Log.d(TAG, "[Step 4] Watermark bitmap: $bitmapInfo")
+
     // Step 5: Ensure output directory exists
     val outputFile = File(cleanOutputPath)
     outputFile.parentFile?.mkdirs()
@@ -97,7 +207,7 @@ class ExpoVideoWatermarkModule : Module() {
       outputFile.delete()
     }
 
-    // Step 6: Get video dimensions to calculate scale
+    // Step 6: Get video dimensions and metadata to calculate scale
     val retriever = MediaMetadataRetriever()
     try {
       retriever.setDataSource(cleanVideoPath)
@@ -110,8 +220,27 @@ class ExpoVideoWatermarkModule : Module() {
       val rawVideoWidth = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)?.toFloatOrNull() ?: 0f
       val rawVideoHeight = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)?.toFloatOrNull() ?: 0f
       val rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION)?.toIntOrNull() ?: 0
+
+      // Capture additional video metadata for debugging
+      val mimeType = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_MIMETYPE) ?: "unknown"
+      val bitrate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)?.toLongOrNull() ?: 0L
+      val duration = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)?.toLongOrNull() ?: 0L
+      val frameRate = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE)?.toFloatOrNull() ?: 0f
+      val colorStandard = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COLOR_STANDARD) ?: "unknown"
+      val colorTransfer = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COLOR_TRANSFER) ?: "unknown"
       retriever.release()
 
+      // Build comprehensive video info string for debugging
+      val videoInfo = buildString {
+        append("mime=$mimeType, ")
+        append("bitrate=${bitrate / 1000}kbps, ")
+        append("duration=${duration}ms, ")
+        append("frameRate=$frameRate, ")
+        append("colorStandard=$colorStandard, ")
+        append("colorTransfer=$colorTransfer")
+      }
+      Log.d(TAG, "[Step 6] Video metadata: $videoInfo")
+
       if (rawVideoWidth <= 0 || rawVideoHeight <= 0) {
         watermarkBitmap.recycle()
         promise.reject("STEP6_VIDEO_DIMENSIONS_ERROR", "[Step 6] Failed to get video dimensions (width=$rawVideoWidth, height=$rawVideoHeight)", null)
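One caveat on the metadata logging added in Step 6: `METADATA_KEY_CAPTURE_FRAMERATE` is only populated when the recording app stored a capture frame rate, so `frameRate` will often log 0 for downloaded or re-muxed files. On API 28+ a rough playback frame rate can instead be derived from the frame count and duration; a minimal sketch with a hypothetical helper name:

import android.media.MediaMetadataRetriever
import android.os.Build

// Sketch: estimate frames-per-second from frame count / duration when
// METADATA_KEY_CAPTURE_FRAMERATE is absent. Returns 0f if metadata is missing.
fun estimateFrameRate(retriever: MediaMetadataRetriever): Float {
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) return 0f
  val frames = retriever.extractMetadata(
    MediaMetadataRetriever.METADATA_KEY_VIDEO_FRAME_COUNT
  )?.toLongOrNull() ?: return 0f
  val durationMs = retriever.extractMetadata(
    MediaMetadataRetriever.METADATA_KEY_DURATION
  )?.toLongOrNull() ?: return 0f
  return if (durationMs > 0) frames * 1000f / durationMs else 0f
}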
@@ -189,14 +318,111 @@ class ExpoVideoWatermarkModule : Module() {
     // Handler for main thread callbacks
     val mainHandler = Handler(Looper.getMainLooper())
 
+    // Gather device and GL info for debugging (do this before posting to main thread)
+    val deviceInfo = getDeviceInfo()
+    val glInfo = getGLInfo()
+    Log.d(TAG, "[Step 15] Starting transform on device: $deviceInfo")
+    Log.d(TAG, "[Step 15] GL info: $glInfo")
+
     // Step 15: Build and start transformer
     mainHandler.post {
       try {
         val transformer = Transformer.Builder(context)
+          // Force H.264 output for maximum compatibility
+          .setVideoMimeType(MimeTypes.VIDEO_H264)
+          // Enable HDR to SDR tone mapping for videos with HDR content
+          .setHdrMode(Transformer.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL)
           .addListener(object : Transformer.Listener {
             override fun onCompleted(composition: Composition, exportResult: ExportResult) {
+              Log.d(TAG, "[Step 15] Transform completed successfully")
+              Log.d(TAG, "[Step 15] Export result - durationMs: ${exportResult.durationMs}, " +
+                "fileSizeBytes: ${exportResult.fileSizeBytes}, " +
+                "averageAudioBitrate: ${exportResult.averageAudioBitrate}, " +
+                "averageVideoBitrate: ${exportResult.averageVideoBitrate}, " +
+                "videoFrameCount: ${exportResult.videoFrameCount}")
               watermarkBitmap.recycle()
-
+
+              // Step 16: Re-encode to H.265 if device supports HEVC encoder
+              val supportsHevc = hasHevcEncoder()
+              Log.d(TAG, "[Step 16] HEVC encoder support: $supportsHevc")
+
+              if (!supportsHevc) {
+                Log.d(TAG, "[Step 16] Skipping H.265 re-encode - no HEVC encoder available")
+                promise.resolve("file://$cleanOutputPath")
+                return
+              }
+
+              // Create temp path for the H.264 watermarked video, rename current output
+              val h264File = File(cleanOutputPath)
+              val h264TempPath = cleanOutputPath.replace(".mp4", "_h264_temp.mp4")
+              val h264TempFile = File(h264TempPath)
+
+              if (!h264File.renameTo(h264TempFile)) {
+                Log.e(TAG, "[Step 16] Failed to rename H.264 file for re-encoding, returning H.264 output")
+                promise.resolve("file://$cleanOutputPath")
+                return
+              }
+
+              Log.d(TAG, "[Step 16] Starting H.265 re-encode from: $h264TempPath to: $cleanOutputPath")
+
+              // Build transformer for H.265 re-encoding
+              val hevcTransformer = Transformer.Builder(context)
+                .setVideoMimeType(MimeTypes.VIDEO_H265)
+                .addListener(object : Transformer.Listener {
+                  override fun onCompleted(composition: Composition, hevcExportResult: ExportResult) {
+                    Log.d(TAG, "[Step 16] H.265 re-encode completed successfully")
+                    Log.d(TAG, "[Step 16] Export result - durationMs: ${hevcExportResult.durationMs}, " +
+                      "fileSizeBytes: ${hevcExportResult.fileSizeBytes}, " +
+                      "averageAudioBitrate: ${hevcExportResult.averageAudioBitrate}, " +
+                      "averageVideoBitrate: ${hevcExportResult.averageVideoBitrate}, " +
+                      "videoFrameCount: ${hevcExportResult.videoFrameCount}")
+
+                    // Calculate compression ratio
+                    val h264Size = exportResult.fileSizeBytes
+                    val h265Size = hevcExportResult.fileSizeBytes
+                    if (h264Size > 0 && h265Size > 0) {
+                      val savings = ((h264Size - h265Size) * 100.0 / h264Size)
+                      Log.d(TAG, "[Step 16] Size reduction: H.264=${h264Size/1024}KB -> H.265=${h265Size/1024}KB (${String.format("%.1f", savings)}% smaller)")
+                    }
+
+                    // Clean up temp H.264 file
+                    if (h264TempFile.exists()) {
+                      h264TempFile.delete()
+                      Log.d(TAG, "[Step 16] Cleaned up temp H.264 file")
+                    }
+
+                    promise.resolve("file://$cleanOutputPath")
+                  }
+
+                  override fun onError(
+                    composition: Composition,
+                    hevcExportResult: ExportResult,
+                    hevcExportException: ExportException
+                  ) {
+                    val errorCodeName = getExportErrorCodeName(hevcExportException.errorCode)
+                    Log.e(TAG, "[Step 16] H.265 re-encode failed: $errorCodeName - ${hevcExportException.message}")
+                    Log.e(TAG, Log.getStackTraceString(hevcExportException))
+
+                    // Restore H.264 file as output on failure
+                    if (h264TempFile.exists()) {
+                      val outputFile = File(cleanOutputPath)
+                      if (outputFile.exists()) {
+                        outputFile.delete()
+                      }
+                      h264TempFile.renameTo(outputFile)
+                      Log.d(TAG, "[Step 16] Restored H.264 output after H.265 failure")
+                    }
+
+                    // Still resolve with the H.264 version rather than failing completely
+                    Log.d(TAG, "[Step 16] Returning H.264 output instead")
+                    promise.resolve("file://$cleanOutputPath")
+                  }
+                })
+                .build()
+
+              val hevcMediaItem = MediaItem.fromUri("file://$h264TempPath")
+              val hevcEditedMediaItem = EditedMediaItem.Builder(hevcMediaItem).build()
+              hevcTransformer.start(hevcEditedMediaItem, cleanOutputPath)
             }
 
             override fun onError(
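The listener added in Step 16 above implements a two-pass pipeline: the first Transformer writes the watermarked H.264 file, that file is renamed to a temp path, and a second Transformer re-encodes it to H.265 back into the original output path; on any failure the temp H.264 file is restored so the promise can still resolve. A condensed sketch of that stage-and-restore pattern, with hypothetical helper names (not the module's API):

import java.io.File

// Sketch of the rename / re-encode / restore-on-failure pattern used in Step 16.
// `reencode` stands in for the second Transformer pass and reports success
// through its callback; all names here are illustrative only.
fun reencodeWithFallback(
  outputPath: String,
  reencode: (inputPath: String, outputPath: String, onDone: (Boolean) -> Unit) -> Unit,
  onResult: (String) -> Unit
) {
  val output = File(outputPath)
  val temp = File(outputPath.replace(".mp4", "_h264_temp.mp4"))
  if (!output.renameTo(temp)) {
    onResult(outputPath)           // could not stage the temp file: keep the H.264 result
    return
  }
  reencode(temp.absolutePath, outputPath) { succeeded ->
    if (succeeded) {
      temp.delete()                // H.265 now lives at outputPath, drop the temp copy
    } else {
      File(outputPath).delete()    // discard any partial H.265 output
      temp.renameTo(output)        // restore the original H.264 result
    }
    onResult(outputPath)           // resolve with whatever now lives at outputPath
  }
}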
@@ -204,20 +430,89 @@ class ExpoVideoWatermarkModule : Module() {
               exportResult: ExportResult,
               exportException: ExportException
             ) {
+              // Build comprehensive error message with all diagnostic info
+              val errorCodeName = getExportErrorCodeName(exportException.errorCode)
+
+              val diagnosticInfo = buildString {
+                appendLine("=== STEP 15 TRANSFORM ERROR ===")
+                appendLine("Error code: ${exportException.errorCode} ($errorCodeName)")
+                appendLine("Error message: ${exportException.message ?: "null"}")
+                appendLine("Cause: ${exportException.cause?.message ?: "null"}")
+                appendLine("Cause class: ${exportException.cause?.javaClass?.name ?: "null"}")
+                appendLine()
+                appendLine("--- Device Info ---")
+                appendLine(deviceInfo)
+                appendLine()
+                appendLine("--- GL Info ---")
+                appendLine(glInfo)
+                appendLine()
+                appendLine("--- Video Info ---")
+                appendLine("Dimensions (raw): ${rawVideoWidth.toInt()}x${rawVideoHeight.toInt()}")
+                appendLine("Dimensions (adjusted): ${videoWidth.toInt()}x${videoHeight.toInt()}")
+                appendLine("Rotation: $rotation")
+                appendLine("Metadata: $videoInfo")
+                appendLine("Input path: $cleanVideoPath")
+                appendLine()
+                appendLine("--- Watermark Info ---")
+                appendLine("Dimensions: ${watermarkWidth.toInt()}x${watermarkHeight.toInt()}")
+                appendLine("Bitmap info: $bitmapInfo")
+                appendLine("Scale factor: $scale")
+                appendLine()
+                appendLine("--- Output Info ---")
+                appendLine("Output path: $cleanOutputPath")
+                appendLine("Partial export result - durationMs: ${exportResult.durationMs}, " +
+                  "fileSizeBytes: ${exportResult.fileSizeBytes}")
+                appendLine()
+                appendLine("--- Full Stack Trace ---")
+                append(Log.getStackTraceString(exportException))
+              }
+
+              // Log full diagnostics
+              Log.e(TAG, diagnosticInfo)
+
+              // Also log any nested causes
+              var cause: Throwable? = exportException.cause
+              var causeLevel = 1
+              while (cause != null) {
+                Log.e(TAG, "[Step 15] Cause level $causeLevel: ${cause.javaClass.name}: ${cause.message}")
+                Log.e(TAG, Log.getStackTraceString(cause))
+                cause = cause.cause
+                causeLevel++
+              }
+
               watermarkBitmap.recycle()
+
+              // Reject with comprehensive error message
+              val errorMessage = "[Step 15] Transform failed - " +
+                "ErrorCode: $errorCodeName (${exportException.errorCode}), " +
+                "Device: ${Build.MANUFACTURER} ${Build.MODEL} (API ${Build.VERSION.SDK_INT}), " +
+                "Video: ${videoWidth.toInt()}x${videoHeight.toInt()} $mimeType, " +
+                "Watermark: ${watermarkWidth.toInt()}x${watermarkHeight.toInt()}, " +
+                "Scale: $scale, " +
+                "Message: ${exportException.message ?: "Unknown error"}"
+
               promise.reject(
                 "STEP15_TRANSFORM_ERROR",
-
+                errorMessage,
                 exportException
               )
             }
           })
           .build()
 
+        Log.d(TAG, "[Step 15] Transformer built, starting export...")
         transformer.start(editedMediaItem, cleanOutputPath)
+        Log.d(TAG, "[Step 15] Transformer.start() called, waiting for completion...")
       } catch (e: Exception) {
+        Log.e(TAG, "[Step 15] Exception building/starting transformer", e)
+        Log.e(TAG, "[Step 15] Device info: $deviceInfo")
+        Log.e(TAG, "[Step 15] GL info: $glInfo")
         watermarkBitmap.recycle()
-        promise.reject(
+        promise.reject(
+          "STEP15_TRANSFORMER_BUILD_ERROR",
+          "[Step 15] Failed to build/start transformer on ${Build.MANUFACTURER} ${Build.MODEL}: ${e.message}",
+          e
+        )
       }
     }
   }