@stefanmartin/expo-video-watermark 0.2.3 → 0.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,7 +11,7 @@ import androidx.media3.common.MediaItem
 import androidx.media3.common.util.UnstableApi
 import androidx.media3.effect.BitmapOverlay
 import androidx.media3.effect.OverlayEffect
-import androidx.media3.effect.
+import androidx.media3.effect.StaticOverlaySettings
 import androidx.media3.effect.TextureOverlay
 import androidx.media3.transformer.Composition
 import androidx.media3.transformer.EditedMediaItem
@@ -51,28 +51,44 @@ class ExpoVideoWatermarkModule : Module() {
       val cleanImagePath = imagePath.removePrefix("file://")
       val cleanOutputPath = outputPath.removePrefix("file://")
 
-      // Validate video file exists
+      // Step 1: Validate video file exists
       val videoFile = File(cleanVideoPath)
       if (!videoFile.exists()) {
-        promise.reject("
+        promise.reject("STEP1_VIDEO_NOT_FOUND", "[Step 1] Video file not found at path: $cleanVideoPath", null)
         return
       }
 
-      // Validate image file exists
+      // Step 2: Validate image file exists
       val imageFile = File(cleanImagePath)
       if (!imageFile.exists()) {
-        promise.reject("
+        promise.reject("STEP2_IMAGE_NOT_FOUND", "[Step 2] Watermark image not found at path: $cleanImagePath", null)
         return
       }
 
-      // Load the watermark bitmap
-      val
-
-
+      // Step 3: Load the watermark bitmap with ARGB_8888 config for GPU compatibility
+      val options = BitmapFactory.Options().apply {
+        inPreferredConfig = Bitmap.Config.ARGB_8888
+      }
+      val decodedBitmap: Bitmap? = BitmapFactory.decodeFile(cleanImagePath, options)
+      if (decodedBitmap == null) {
+        promise.reject("STEP3_IMAGE_DECODE_ERROR", "[Step 3] Failed to decode image at: $cleanImagePath", null)
         return
       }
 
-      // Ensure
+      // Step 4: Ensure bitmap is in ARGB_8888 format (required for Media3 GPU processing)
+      val watermarkBitmap: Bitmap = if (decodedBitmap.config != Bitmap.Config.ARGB_8888) {
+        val converted = decodedBitmap.copy(Bitmap.Config.ARGB_8888, false)
+        decodedBitmap.recycle()
+        if (converted == null) {
+          promise.reject("STEP4_IMAGE_CONVERT_ERROR", "[Step 4] Failed to convert image to ARGB_8888 format", null)
+          return
+        }
+        converted
+      } else {
+        decodedBitmap
+      }
+
+      // Step 5: Ensure output directory exists
       val outputFile = File(cleanOutputPath)
       outputFile.parentFile?.mkdirs()
 
@@ -81,88 +97,128 @@ class ExpoVideoWatermarkModule : Module() {
         outputFile.delete()
       }
 
-      // Get video dimensions to calculate scale
+      // Step 6: Get video dimensions to calculate scale
       val retriever = MediaMetadataRetriever()
       try {
         retriever.setDataSource(cleanVideoPath)
       } catch (e: Exception) {
-
+        watermarkBitmap.recycle()
+        promise.reject("STEP6_VIDEO_METADATA_ERROR", "[Step 6] Failed to read video metadata: ${e.message}", e)
         return
       }
 
-      val
-      val
+      val rawVideoWidth = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)?.toFloatOrNull() ?: 0f
+      val rawVideoHeight = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)?.toFloatOrNull() ?: 0f
+      val rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION)?.toIntOrNull() ?: 0
       retriever.release()
 
-      if (
-
+      if (rawVideoWidth <= 0 || rawVideoHeight <= 0) {
+        watermarkBitmap.recycle()
+        promise.reject("STEP6_VIDEO_DIMENSIONS_ERROR", "[Step 6] Failed to get video dimensions (width=$rawVideoWidth, height=$rawVideoHeight)", null)
        return
       }
 
-      //
+      // Step 7: Account for video rotation - swap dimensions if rotated 90 or 270 degrees
+      val (videoWidth, videoHeight) = if (rotation == 90 || rotation == 270) {
+        rawVideoHeight to rawVideoWidth
+      } else {
+        rawVideoWidth to rawVideoHeight
+      }
+
+      // Step 8: Calculate scale to make watermark span full video width, maintaining aspect ratio
       val watermarkWidth = watermarkBitmap.width.toFloat()
+      val watermarkHeight = watermarkBitmap.height.toFloat()
       val scale = videoWidth / watermarkWidth
 
-      // Create overlay settings for full-width bottom positioning
+      // Step 9: Create overlay settings for full-width bottom positioning
       // In Media3, coordinates are normalized: (0,0) is center
       // x range [-1, 1] (left to right), y range [-1, 1] (bottom to top)
-      val overlaySettings =
-        .
-
-
-
-
-
-
-
-
-
+      val overlaySettings = try {
+        StaticOverlaySettings.Builder()
+          .setScale(scale, scale) // Scale uniformly to match video width
+          .setOverlayFrameAnchor(0f, -1f) // Anchor at bottom-center of watermark
+          .setBackgroundFrameAnchor(0f, -1f) // Position at very bottom of video
+          .build()
+      } catch (e: Exception) {
+        watermarkBitmap.recycle()
+        promise.reject("STEP9_OVERLAY_SETTINGS_ERROR", "[Step 9] Failed to create overlay settings: ${e.message}", e)
+        return
+      }
 
-      // Create overlay
-      val
+      // Step 10: Create the bitmap overlay with settings
+      val bitmapOverlay = try {
+        BitmapOverlay.createStaticBitmapOverlay(
+          watermarkBitmap,
+          overlaySettings
+        )
+      } catch (e: Exception) {
+        watermarkBitmap.recycle()
+        promise.reject("STEP10_BITMAP_OVERLAY_ERROR", "[Step 10] Failed to create bitmap overlay: ${e.message}", e)
+        return
+      }
+
+      // Step 11: Create overlay effect with proper typing
+      val overlayEffect = try {
+        OverlayEffect(ImmutableList.of<TextureOverlay>(bitmapOverlay))
+      } catch (e: Exception) {
+        watermarkBitmap.recycle()
+        promise.reject("STEP11_OVERLAY_EFFECT_ERROR", "[Step 11] Failed to create overlay effect: ${e.message}", e)
+        return
+      }
 
-      // Create effects with video overlay
+      // Step 12: Create effects with video overlay
       val effects = Effects(
         /* audioProcessors= */ listOf(),
         /* videoEffects= */ listOf(overlayEffect)
       )
 
-      // Create media item from video
+      // Step 13: Create media item from video
       val mediaItem = MediaItem.fromUri("file://$cleanVideoPath")
 
-      // Create edited media item with effects
-      val editedMediaItem =
-        .
-
+      // Step 14: Create edited media item with effects
+      val editedMediaItem = try {
+        EditedMediaItem.Builder(mediaItem)
+          .setEffects(effects)
+          .build()
+      } catch (e: Exception) {
+        watermarkBitmap.recycle()
+        promise.reject("STEP14_EDITED_MEDIA_ERROR", "[Step 14] Failed to create edited media item: ${e.message}", e)
+        return
+      }
 
       // Handler for main thread callbacks
       val mainHandler = Handler(Looper.getMainLooper())
 
-      // Build and start transformer
+      // Step 15: Build and start transformer
      mainHandler.post {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        try {
+          val transformer = Transformer.Builder(context)
+            .addListener(object : Transformer.Listener {
+              override fun onCompleted(composition: Composition, exportResult: ExportResult) {
+                watermarkBitmap.recycle()
+                promise.resolve(cleanOutputPath)
+              }
+
+              override fun onError(
+                composition: Composition,
+                exportResult: ExportResult,
+                exportException: ExportException
+              ) {
+                watermarkBitmap.recycle()
+                promise.reject(
+                  "STEP15_TRANSFORM_ERROR",
+                  "[Step 15] Video transform failed (video: ${videoWidth.toInt()}x${videoHeight.toInt()}, rotation: $rotation, watermark: ${watermarkWidth.toInt()}x${watermarkHeight.toInt()}, scale: $scale): ${exportException.message ?: "Unknown error"}",
+                  exportException
+                )
+              }
+            })
+            .build()
+
+          transformer.start(editedMediaItem, cleanOutputPath)
+        } catch (e: Exception) {
+          watermarkBitmap.recycle()
+          promise.reject("STEP15_TRANSFORMER_BUILD_ERROR", "[Step 15] Failed to build/start transformer: ${e.message}", e)
+        }
       }
     }
   }