@movementinfra/expo-twostep-video 0.1.9 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,12 +11,14 @@ class ExpoTwoStepVideoView: ExpoView {
11
11
// Layer that renders the player's video output; created in setupPlayerLayer().
private var playerLayer: AVPlayerLayer?
// Token returned by the player's periodic time observer; held so it can be removed.
private var timeObserver: Any?
// True while KVO on the player item's status is registered.
private var isObservingStatus: Bool = false
// Set once setupAudioSession() has successfully configured the shared session.
private var isAudioSessionConfigured: Bool = false

/// Event dispatchers (events emitted across the bridge to JavaScript)
let onPlaybackStatusChange = EventDispatcher()
let onProgress = EventDispatcher()
let onEnd = EventDispatcher()
let onError = EventDispatcher()
let onPanZoomChange = EventDispatcher()

/// Current composition ID being played
private var currentCompositionId: String?
@@ -25,6 +27,34 @@ class ExpoTwoStepVideoView: ExpoView {
25
27
/// Whether to loop playback continuously
var shouldLoop: Bool = false

// MARK: - Pan/Zoom Properties

/// Current zoom level (1.0 = no zoom, 2.0 = 2x zoom, etc.)
private var currentZoom: CGFloat = 1.0

/// Current horizontal pan (-1.0 to 1.0, 0 = center)
private var currentPanX: CGFloat = 0.0

/// Current vertical pan (-1.0 to 1.0, 0 = center)
private var currentPanY: CGFloat = 0.0

/// Last pinch scale reported by the recognizer; used to compute incremental
/// zoom deltas in handlePinchGesture(_:).
private var lastPinchScale: CGFloat = 1.0

/// Pan values captured when a two-finger pan gesture begins, so translation
/// is applied relative to the gesture's starting state.
private var panStartX: CGFloat = 0.0
private var panStartY: CGFloat = 0.0

/// Minimum zoom level (configurable)
var minZoom: CGFloat = 1.0

/// Maximum zoom level (configurable)
var maxZoom: CGFloat = 5.0

/// Gesture recognizers, retained so the delegate callback can identify them.
private var pinchGesture: UIPinchGestureRecognizer?
private var panGesture: UIPanGestureRecognizer?
28
58
  // MARK: - Initialization
29
59
 
30
60
  required init(appContext: AppContext? = nil) {
@@ -32,6 +62,9 @@ class ExpoTwoStepVideoView: ExpoView {
32
62
  clipsToBounds = true
33
63
  backgroundColor = .black
34
64
  setupPlayerLayer()
65
+ setupAudioSession()
66
+ setupAudioSessionObservers()
67
+ setupGestureRecognizers()
35
68
  }
36
69
 
37
70
  private func setupPlayerLayer() {
@@ -43,9 +76,259 @@ class ExpoTwoStepVideoView: ExpoView {
43
76
  }
44
77
  }
45
78
 
79
/// Configure the shared audio session for video playback.
/// Uses the .playback category so audio keeps playing even when the silent
/// switch is on; .playback already routes to the speaker by default.
private func setupAudioSession() {
    do {
        let audioSession = AVAudioSession.sharedInstance()
        // Bug fix: `.defaultToSpeaker` is only valid with the `.playAndRecord`
        // category — passing it with `.playback` makes setCategory throw, so the
        // primary configuration always failed and the fallback always ran.
        try audioSession.setCategory(.playback, mode: .moviePlayback)
        // Bug fix: `.notifyOthersOnDeactivation` is only valid when deactivating
        // (active == false); activate with no options.
        try audioSession.setActive(true)
        isAudioSessionConfigured = true
    } catch {
        print("ExpoTwoStepVideoView: Failed to configure audio session: \(error)")
        // Try a simpler configuration as a fallback.
        do {
            try AVAudioSession.sharedInstance().setCategory(.playback)
            try AVAudioSession.sharedInstance().setActive(true)
            isAudioSessionConfigured = true
        } catch {
            print("ExpoTwoStepVideoView: Fallback audio session also failed: \(error)")
        }
    }
}
101
+
102
/// Register for audio session notifications: interruptions (phone calls,
/// other apps taking over audio) and output route changes.
private func setupAudioSessionObservers() {
    let session = AVAudioSession.sharedInstance()
    let center = NotificationCenter.default

    center.addObserver(
        self,
        selector: #selector(handleAudioSessionInterruption),
        name: AVAudioSession.interruptionNotification,
        object: session
    )

    center.addObserver(
        self,
        selector: #selector(handleAudioSessionRouteChange),
        name: AVAudioSession.routeChangeNotification,
        object: session
    )
}
118
+
119
/// Handles audio session interruption notifications (phone calls, Siri,
/// other apps claiming the session): pauses on begin, and resumes on end
/// when the system indicates resumption is appropriate.
@objc private func handleAudioSessionInterruption(notification: Notification) {
    // The interruption type arrives in userInfo as a raw UInt.
    guard let userInfo = notification.userInfo,
        let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
        let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
        return
    }

    switch type {
    case .began:
        // Interruption began - pause playback and report it to JS.
        player?.pause()
        onPlaybackStatusChange(["status": "interrupted"])
    case .ended:
        // Interruption ended - check if we should resume.
        guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
        let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
        if options.contains(.shouldResume) {
            // Re-activate audio session and resume.
            // NOTE(review): this resumes even if the user had paused playback
            // before the interruption — confirm that is the intended behavior.
            setupAudioSession()
            player?.play()
            onPlaybackStatusChange(["status": "playing"])
        }
    @unknown default:
        break
    }
}
145
+
146
/// Responds to audio route changes. When the previously active output device
/// disappears (e.g. headphones unplugged), playback is paused — matching the
/// platform convention of not blasting audio out of the speaker unexpectedly.
@objc private func handleAudioSessionRouteChange(notification: Notification) {
    guard
        let rawReason = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt,
        let reason = AVAudioSession.RouteChangeReason(rawValue: rawReason)
    else {
        return
    }

    if case .oldDeviceUnavailable = reason {
        player?.pause()
        onPlaybackStatusChange(["status": "paused"])
    }
}
159
+
160
// MARK: - Gesture Setup

/// Install the pinch (zoom) and two-finger pan recognizers on this view.
private func setupGestureRecognizers() {
    // Make sure touches reach the recognizers at all.
    isUserInteractionEnabled = true

    let pinchRecognizer = UIPinchGestureRecognizer(
        target: self,
        action: #selector(handlePinchGesture(_:))
    )
    pinchRecognizer.delegate = self
    addGestureRecognizer(pinchRecognizer)
    pinchGesture = pinchRecognizer

    // Require exactly two fingers so single-finger scrubbing stays free.
    let panRecognizer = UIPanGestureRecognizer(
        target: self,
        action: #selector(handlePanGesture(_:))
    )
    panRecognizer.minimumNumberOfTouches = 2
    panRecognizer.maximumNumberOfTouches = 2
    panRecognizer.delegate = self
    addGestureRecognizer(panRecognizer)
    panGesture = panRecognizer
}
180
+
181
/// Pinch handler: turns incremental pinch-scale deltas into a clamped zoom
/// level, then re-clamps the pan and redraws.
@objc private func handlePinchGesture(_ gesture: UIPinchGestureRecognizer) {
    switch gesture.state {
    case .began:
        lastPinchScale = 1.0
    case .changed:
        // Work with the delta since the previous callback so zoom accumulates
        // smoothly across the gesture.
        let delta = gesture.scale / lastPinchScale
        lastPinchScale = gesture.scale

        // Apply the delta and clamp to the configured zoom range.
        currentZoom = min(max(currentZoom * delta, minZoom), maxZoom)

        // Pan limits depend on the zoom level, so re-constrain before drawing.
        constrainPan()
        updateLayerTransform()
        emitPanZoomChange()
    case .ended, .cancelled:
        lastPinchScale = 1.0
    default:
        break
    }
}
205
+
206
/// Two-finger pan handler: converts finger translation into normalized pan
/// offsets relative to where the gesture started. Only active while zoomed in.
@objc private func handlePanGesture(_ gesture: UIPanGestureRecognizer) {
    // Only allow panning when zoomed in.
    guard currentZoom > 1.0 else { return }
    // Bug fix: bounds can be .zero before the first layout pass; dividing by a
    // zero width/height would poison the pan state with NaN/inf values.
    guard bounds.width > 0, bounds.height > 0 else { return }

    switch gesture.state {
    case .began:
        // Remember where the pan started so translation is applied relatively.
        panStartX = currentPanX
        panStartY = currentPanY
    case .changed:
        let translation = gesture.translation(in: self)

        // Fraction of the content that can be panned at the current zoom:
        // at 2x, (2 - 1) / 2 = half the viewport per axis.
        let availablePanX = (currentZoom - 1.0) / currentZoom
        let availablePanY = (currentZoom - 1.0) / currentZoom

        // Normalize the drag to the view size.
        let normalizedDeltaX = translation.x / bounds.width
        let normalizedDeltaY = translation.y / bounds.height

        // Negative: dragging right moves the visible window left.
        currentPanX = panStartX - normalizedDeltaX * 2 * availablePanX
        currentPanY = panStartY - normalizedDeltaY * 2 * availablePanY

        // Keep the content covering the viewport.
        constrainPan()

        updateLayerTransform()
        emitPanZoomChange()
    case .ended, .cancelled:
        break
    default:
        break
    }
}
241
+
242
/// Clamp the pan offsets so the scaled content always covers the viewport.
/// When not zoomed in there is nothing to pan, so the offsets snap to center.
private func constrainPan() {
    // No zoom → the only valid pan is dead center.
    guard currentZoom > 1.0 else {
        currentPanX = 0
        currentPanY = 0
        return
    }

    // At zoom z the content overflows by (z - 1) / z of the viewport per axis;
    // that is the furthest normalized pan in either direction.
    let limit = (currentZoom - 1.0) / currentZoom
    currentPanX = min(max(currentPanX, -limit), limit)
    currentPanY = min(max(currentPanY, -limit), limit)
}
257
+
258
/// Apply the current pan/zoom state to the player layer as a CATransform3D.
private func updateLayerTransform() {
    guard let layer = playerLayer else { return }

    var transform = CATransform3DIdentity

    // Zoom: uniform scale (the layer scales about its anchor point).
    transform = CATransform3DScale(transform, currentZoom, currentZoom, 1.0)

    // Pan: the (zoom - 1) / (2 * zoom) factor maps the normalized pan range
    // (-1...1) onto the number of points the content can shift while still
    // covering the viewport; negated because panning right moves content left.
    let translateX = -currentPanX * bounds.width * (currentZoom - 1.0) / (2.0 * currentZoom)
    let translateY = -currentPanY * bounds.height * (currentZoom - 1.0) / (2.0 * currentZoom)
    transform = CATransform3DTranslate(transform, translateX, translateY, 0)

    // Commit inside a transaction with implicit animations disabled so the
    // layer tracks the gesture without animation lag.
    CATransaction.begin()
    CATransaction.setDisableActions(true) // Disable implicit animations for responsiveness
    layer.transform = transform
    CATransaction.commit()
}
278
+
279
/// Send the current pan/zoom values across the bridge to JavaScript.
private func emitPanZoomChange() {
    let payload: [String: CGFloat] = [
        "panX": currentPanX,
        "panY": currentPanY,
        "zoomLevel": currentZoom,
    ]
    onPanZoomChange(payload)
}
287
+
288
// MARK: - Public Pan/Zoom Methods

/// Snapshot of the current pan/zoom state, keyed for the JS side.
func getPanZoomState() -> [String: CGFloat] {
    [
        "panX": currentPanX,
        "panY": currentPanY,
        "zoomLevel": currentZoom,
    ]
}
298
+
299
/// Programmatically update any subset of the pan/zoom state.
/// Passing nil for a component leaves it unchanged; the zoom is clamped to
/// [minZoom, maxZoom] and the pan is re-constrained before being applied.
func setPanZoomState(panX: CGFloat?, panY: CGFloat?, zoomLevel: CGFloat?) {
    if let zoom = zoomLevel {
        currentZoom = min(max(zoom, minZoom), maxZoom)
    }
    currentPanX = panX ?? currentPanX
    currentPanY = panY ?? currentPanY

    constrainPan()
    updateLayerTransform()
    emitPanZoomChange()
}
315
+
316
/// Restore the default (un-zoomed, centered) state and notify JS.
func resetPanZoom() {
    currentZoom = 1.0
    currentPanX = 0.0
    currentPanY = 0.0

    updateLayerTransform()
    emitPanZoomChange()
}
324
+
46
325
override func layoutSubviews() {
    super.layoutSubviews()
    playerLayer?.frame = bounds

    // The pan/zoom transform is expressed in view coordinates, so it must be
    // recomputed whenever the frame changes.
    let hasActiveTransform = currentZoom != 1.0 || currentPanX != 0 || currentPanY != 0
    if hasActiveTransform {
        updateLayerTransform()
    }
}
50
333
 
51
334
  // MARK: - Public Methods (called from module)
@@ -117,6 +400,21 @@ class ExpoTwoStepVideoView: ExpoView {
117
400
  }
118
401
 
119
402
/// Start playback, making sure the shared audio session is active first.
/// Another component may have reconfigured or deactivated the session since
/// this view set it up, so activation is re-checked on every play.
func play() {
    if !isAudioSessionConfigured {
        setupAudioSession()
    } else {
        // Re-activate in case it was deactivated.
        do {
            // Bug fix: `.notifyOthersOnDeactivation` is only valid when
            // deactivating a session; passing it to setActive(true) errors.
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print("ExpoTwoStepVideoView: Failed to activate audio session: \(error)")
            // Fall back to a full reconfiguration.
            setupAudioSession()
        }
    }

    player?.play()
    onPlaybackStatusChange(["status": "playing"])
}
@@ -216,8 +514,25 @@ class ExpoTwoStepVideoView: ExpoView {
216
514
 
217
515
deinit {
    // Release the player, observers, and time observer (defined elsewhere).
    cleanup()
    // Remove the audio session observers registered in setupAudioSessionObservers().
    NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
    NotificationCenter.default.removeObserver(self, name: AVAudioSession.routeChangeNotification, object: nil)
    // Also remove the layer to break any potential retain cycles
    playerLayer?.removeFromSuperlayer()
    playerLayer = nil
}
223
524
  }
525
+
526
// MARK: - UIGestureRecognizerDelegate

extension ExpoTwoStepVideoView: UIGestureRecognizerDelegate {
    /// Let the pinch and the two-finger pan run at the same time so users can
    /// zoom and reposition in one continuous gesture; any other pairing is
    /// kept exclusive.
    func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
        let involvesPinch =
            gestureRecognizer == pinchGesture || otherGestureRecognizer == pinchGesture
        let involvesPan =
            gestureRecognizer == panGesture || otherGestureRecognizer == panGesture
        return involvesPinch && involvesPan
    }
}
@@ -333,8 +333,175 @@ public class VideoTransformer {
333
333
  }
334
334
  }
335
335
 
336
// MARK: - Pan and Zoom

/// Create a composition with pan and zoom applied.
/// - Parameters:
///   - asset: The video asset to transform.
///   - panX: Horizontal pan (-1.0 to 1.0, 0 = center).
///   - panY: Vertical pan (-1.0 to 1.0, 0 = center).
///   - zoomLevel: Zoom level (1.0 = 100%, 2.0 = 200%, etc.).
///   - timeRange: Optional time range to apply (nil = full video).
/// - Returns: A tuple of (composition, videoComposition) for export.
/// - Throws: `VideoEditingError` if validation or composition building fails.
public func panZoom(
    asset: VideoAsset,
    panX: CGFloat,
    panY: CGFloat,
    zoomLevel: CGFloat,
    timeRange: TimeRange? = nil
) async throws -> (AVMutableComposition, AVMutableVideoComposition) {
    // Validate inputs up front so we fail before any expensive track loading.
    guard zoomLevel >= 1.0 && zoomLevel <= 5.0 else {
        throw VideoEditingError.invalidConfiguration(
            reason: "Zoom level must be between 1.0 and 5.0"
        )
    }
    guard panX >= -1.0 && panX <= 1.0 && panY >= -1.0 && panY <= 1.0 else {
        throw VideoEditingError.invalidConfiguration(
            reason: "Pan values must be between -1.0 and 1.0"
        )
    }

    let videoTracks = try await asset.avAsset.loadTracks(withMediaType: .video)
    guard let sourceVideoTrack = videoTracks.first else {
        throw VideoEditingError.noVideoTrack
    }

    let composition = AVMutableComposition()

    // Determine the portion of the source to include.
    let effectiveRange: CMTimeRange
    if let timeRange = timeRange {
        try timeRange.validate(against: asset)
        effectiveRange = timeRange.cmTimeRange
    } else {
        effectiveRange = CMTimeRange(start: .zero, duration: asset.duration)
    }

    // Add video track.
    guard let compositionVideoTrack = composition.addMutableTrack(
        withMediaType: .video,
        preferredTrackID: kCMPersistentTrackID_Invalid
    ) else {
        throw VideoEditingError.compositionFailed(
            reason: "Failed to add video track to composition"
        )
    }

    do {
        try compositionVideoTrack.insertTimeRange(
            effectiveRange,
            of: sourceVideoTrack,
            at: .zero
        )
    } catch {
        throw VideoEditingError.compositionFailed(
            reason: "Failed to insert video track: \(error.localizedDescription)"
        )
    }

    // Add the audio track if present (best effort: video-only assets are fine).
    let audioTracks = try? await asset.avAsset.loadTracks(withMediaType: .audio)
    if let sourceAudioTrack = audioTracks?.first {
        if let compositionAudioTrack = composition.addMutableTrack(
            withMediaType: .audio,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) {
            try? compositionAudioTrack.insertTimeRange(
                effectiveRange,
                of: sourceAudioTrack,
                at: .zero
            )
        }
    }

    // Get video properties.
    let preferredTransform = try await sourceVideoTrack.load(.preferredTransform)
    let naturalSize = try await sourceVideoTrack.load(.naturalSize)

    // The render size must account for the orientation transform: a portrait
    // track reports a landscape naturalSize plus a rotation transform.
    let transformedSize = naturalSize.applying(preferredTransform)
    let renderSize = CGSize(
        width: abs(transformedSize.width),
        height: abs(transformedSize.height)
    )

    // Create video composition for pan/zoom.
    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = renderSize

    // Bug fix: CMTimeScale(Float) truncates (29.97 → 29), which skews the
    // frame duration; round to the nearest integer rate instead.
    let frameRate = try await sourceVideoTrack.load(.nominalFrameRate)
    let timescale = CMTimeScale(frameRate > 0 ? frameRate.rounded() : 30)
    videoComposition.frameDuration = CMTime(value: 1, timescale: timescale)

    // One instruction spanning the whole composition.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)

    // Create layer instruction with pan/zoom transform.
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)

    let panZoomTransform = createPanZoomTransform(
        panX: panX,
        panY: panY,
        zoomLevel: zoomLevel,
        size: renderSize,
        originalTransform: preferredTransform
    )

    layerInstruction.setTransform(panZoomTransform, at: .zero)

    instruction.layerInstructions = [layerInstruction]
    videoComposition.instructions = [instruction]

    return (composition, videoComposition)
}
462
+
336
463
  // MARK: - Private Helpers
337
464
 
465
/// Create the affine transform for pan and zoom.
/// Scales about the center of the render size, then applies the pan translation.
private func createPanZoomTransform(
    panX: CGFloat,
    panY: CGFloat,
    zoomLevel: CGFloat,
    size: CGSize,
    originalTransform: CGAffineTransform
) -> CGAffineTransform {
    // Start with the original transform (preserves the track's rotation/orientation).
    var transform = originalTransform

    // Center of the render area — the zoom is anchored here.
    let centerX = size.width / 2
    let centerY = size.height / 2

    // Step 1: Translate so the center is at the origin (so the scale below is
    // center-anchored rather than anchored at the top-left corner).
    transform = transform.concatenating(CGAffineTransform(translationX: -centerX, y: -centerY))

    // Step 2: Apply zoom (scale).
    transform = transform.concatenating(CGAffineTransform(scaleX: zoomLevel, y: zoomLevel))

    // Step 3: Translate back to the original position.
    transform = transform.concatenating(CGAffineTransform(translationX: centerX, y: centerY))

    // Step 4: Apply pan translation.
    // At zoom z the content overflows the frame by size * (z - 1) points,
    // so half of that is the furthest it can shift per direction.
    let maxPanX = (size.width * zoomLevel - size.width) / 2
    let maxPanY = (size.height * zoomLevel - size.height) / 2

    // Negative because moving the visible window right means translating the
    // content left.
    let translateX = -panX * maxPanX
    let translateY = -panY * maxPanY

    transform = transform.concatenating(CGAffineTransform(translationX: translateX, y: translateY))

    return transform
}
504
+
338
505
  /// Create the transform matrix for mirroring
339
506
  private func createMirrorTransform(
340
507
  axis: MirrorAxis,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@movementinfra/expo-twostep-video",
3
- "version": "0.1.9",
3
+ "version": "0.1.11",
4
4
  "description": "Minimal video editing for React Native using AVFoundation",
5
5
  "main": "build/index.js",
6
6
  "types": "build/index.d.ts",