@movementinfra/expo-twostep-video 0.1.11 → 0.1.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,6 +9,7 @@ class ExpoTwoStepVideoView: ExpoView {
 
  private var player: AVPlayer?
  private var playerLayer: AVPlayerLayer?
+ private var playerContainerView: UIView!
  private var timeObserver: Any?
  private var isObservingStatus: Bool = false
  private var isAudioSessionConfigured: Bool = false
@@ -29,26 +30,16 @@ class ExpoTwoStepVideoView: ExpoView {
 
  // MARK: - Pan/Zoom Properties
 
- /// Current zoom level (1.0 = no zoom, 2.0 = 2x zoom, etc.)
- private var currentZoom: CGFloat = 1.0
+ /// Current transform applied to the container view
+ private var currentTransform: CGAffineTransform = .identity
 
- /// Current horizontal pan (-1.0 to 1.0, 0 = center)
- private var currentPanX: CGFloat = 0.0
+ /// Transform at gesture start (for incremental changes)
+ private var gestureStartTransform: CGAffineTransform = .identity
 
- /// Current vertical pan (-1.0 to 1.0, 0 = center)
- private var currentPanY: CGFloat = 0.0
-
- /// Last pinch scale for incremental zoom calculation
- private var lastPinchScale: CGFloat = 1.0
-
- /// Starting pan position when gesture begins
- private var panStartX: CGFloat = 0.0
- private var panStartY: CGFloat = 0.0
-
- /// Minimum zoom level (configurable)
+ /// Minimum zoom level
  var minZoom: CGFloat = 1.0
 
- /// Maximum zoom level (configurable)
+ /// Maximum zoom level
  var maxZoom: CGFloat = 5.0
 
  /// Gesture recognizers
@@ -61,34 +52,34 @@ class ExpoTwoStepVideoView: ExpoView {
  super.init(appContext: appContext)
  clipsToBounds = true
  backgroundColor = .black
- setupPlayerLayer()
+ setupPlayerContainerView()
  setupAudioSession()
  setupAudioSessionObservers()
  setupGestureRecognizers()
  }
 
- private func setupPlayerLayer() {
+ private func setupPlayerContainerView() {
+ // Create container view that will receive transforms
+ playerContainerView = UIView()
+ playerContainerView.backgroundColor = .black
+ addSubview(playerContainerView)
+
+ // Create player layer inside container
  playerLayer = AVPlayerLayer()
  playerLayer?.videoGravity = .resizeAspect
  playerLayer?.backgroundColor = UIColor.black.cgColor
- if let playerLayer = playerLayer {
- layer.addSublayer(playerLayer)
- }
+ playerContainerView.layer.addSublayer(playerLayer!)
  }
 
  /// Configure audio session for video playback
- /// Uses .playback category to ensure audio plays even when silent switch is on
  private func setupAudioSession() {
  do {
  let audioSession = AVAudioSession.sharedInstance()
- // .playback category: audio plays even with silent switch, stops other audio
- // .defaultToSpeaker: routes audio to speaker by default (not earpiece)
  try audioSession.setCategory(.playback, mode: .moviePlayback, options: [.defaultToSpeaker])
  try audioSession.setActive(true, options: [.notifyOthersOnDeactivation])
  isAudioSessionConfigured = true
  } catch {
  print("ExpoTwoStepVideoView: Failed to configure audio session: \(error)")
- // Try a simpler configuration as fallback
  do {
  try AVAudioSession.sharedInstance().setCategory(.playback)
  try AVAudioSession.sharedInstance().setActive(true)
@@ -99,7 +90,7 @@ class ExpoTwoStepVideoView: ExpoView {
  }
  }
 
- /// Listen for audio session interruptions (phone calls, other apps)
+ /// Listen for audio session interruptions
  private func setupAudioSessionObservers() {
  NotificationCenter.default.addObserver(
  self,
@@ -125,15 +116,12 @@ class ExpoTwoStepVideoView: ExpoView {
 
  switch type {
  case .began:
- // Interruption began - pause playback
  player?.pause()
  onPlaybackStatusChange(["status": "interrupted"])
  case .ended:
- // Interruption ended - check if we should resume
  guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
  let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
  if options.contains(.shouldResume) {
- // Re-activate audio session and resume
  setupAudioSession()
  player?.play()
  onPlaybackStatusChange(["status": "playing"])
@@ -150,7 +138,6 @@ class ExpoTwoStepVideoView: ExpoView {
  return
  }
 
- // Pause when headphones are unplugged
  if reason == .oldDeviceUnavailable {
  player?.pause()
  onPlaybackStatusChange(["status": "paused"])
@@ -166,122 +153,190 @@ class ExpoTwoStepVideoView: ExpoView {
  addGestureRecognizer(pinch)
  pinchGesture = pinch
 
- // Pan with 2 fingers (to avoid conflict with single-finger scrubbing)
+ // Single finger pan (only works when zoomed, controlled by delegate)
  let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePanGesture(_:)))
- pan.minimumNumberOfTouches = 2
- pan.maximumNumberOfTouches = 2
  pan.delegate = self
  addGestureRecognizer(pan)
  panGesture = pan
 
- // Enable user interaction
  isUserInteractionEnabled = true
  }
 
+ // MARK: - Video Content Rect Calculation
+
+ /// Calculate the rect where the actual video content is displayed within the player layer.
+ /// This accounts for letterboxing/pillarboxing when using .resizeAspect video gravity.
+ private func videoContentRect() -> CGRect {
+ guard let playerLayer = playerLayer,
+ let playerItem = player?.currentItem else {
+ return playerContainerView.bounds
+ }
+
+ // Get the natural size of the video
+ let videoSize = playerItem.presentationSize
+ guard videoSize.width > 0 && videoSize.height > 0 else {
+ return playerContainerView.bounds
+ }
+
+ // Use AVFoundation's utility to calculate the rect
+ let layerBounds = playerLayer.bounds
+ return AVMakeRect(aspectRatio: videoSize, insideRect: layerBounds)
+ }
+
+ /// Adjust an anchor point to be relative to the video content rect.
+ /// Clamps points outside the video area to the nearest edge.
+ private func adjustedAnchorPoint(for point: CGPoint) -> CGPoint {
+ let contentRect = videoContentRect()
+
+ // Clamp the point to the video content rect
+ let clampedX = min(max(point.x, contentRect.minX), contentRect.maxX)
+ let clampedY = min(max(point.y, contentRect.minY), contentRect.maxY)
+
+ return CGPoint(x: clampedX, y: clampedY)
+ }
+
+ // MARK: - Gesture Handlers
+
+ /// Get the current visual transform, reading from the presentation layer if an animation is in progress
+ private func captureVisualTransform() -> CGAffineTransform {
+ if let presentationLayer = playerContainerView.layer.presentation() {
+ return CGAffineTransform(
+ a: presentationLayer.transform.m11,
+ b: presentationLayer.transform.m12,
+ c: presentationLayer.transform.m21,
+ d: presentationLayer.transform.m22,
+ tx: presentationLayer.transform.m41,
+ ty: presentationLayer.transform.m42
+ )
+ }
+ return playerContainerView.transform
+ }
+
+ /// Prepare for a new gesture by capturing current visual state and cancelling animations
+ private func beginGesture() {
+ let visualTransform = captureVisualTransform()
+ playerContainerView.layer.removeAllAnimations()
+ playerContainerView.transform = visualTransform
+ gestureStartTransform = visualTransform
+ currentTransform = visualTransform
+ }
+
  @objc private func handlePinchGesture(_ gesture: UIPinchGestureRecognizer) {
  switch gesture.state {
  case .began:
- lastPinchScale = 1.0
- case .changed:
- // Calculate incremental scale change
- let scaleChange = gesture.scale / lastPinchScale
- lastPinchScale = gesture.scale
-
- // Apply to current zoom
- let newZoom = currentZoom * scaleChange
- currentZoom = min(max(newZoom, minZoom), maxZoom)
+ beginGesture()
 
- // Constrain pan when zoom changes
- constrainPan()
+ case .changed:
+ let rawAnchor = gesture.location(in: self)
+ // Adjust anchor to account for letterboxing/pillarboxing
+ let anchor = adjustedAnchorPoint(for: rawAnchor)
+ let scale = gesture.scale
+ let scaleTransform = CGAffineTransform.anchoredScale(scale: scale, anchor: anchor)
+ let newTransform = gestureStartTransform.concatenating(scaleTransform)
+ currentTransform = newTransform
+ playerContainerView.transform = newTransform
 
- updateLayerTransform()
- emitPanZoomChange()
  case .ended, .cancelled:
- lastPinchScale = 1.0
+ onGestureEnded()
+
  default:
  break
  }
  }
 
  @objc private func handlePanGesture(_ gesture: UIPanGestureRecognizer) {
- // Only allow panning when zoomed in
- guard currentZoom > 1.0 else { return }
-
  switch gesture.state {
  case .began:
- panStartX = currentPanX
- panStartY = currentPanY
+ beginGesture()
+
  case .changed:
  let translation = gesture.translation(in: self)
+ let scale = max(gestureStartTransform.scaleX, 1.0)
 
- // Convert translation to normalized pan values
- // When zoomed in 2x, a full-width drag should change pan by the available pan range
- let availablePanX = (currentZoom - 1.0) / currentZoom
- let availablePanY = (currentZoom - 1.0) / currentZoom
+ // Translate in the scaled coordinate space
+ let newTransform = gestureStartTransform.translatedBy(
+ x: translation.x / scale,
+ y: translation.y / scale
+ )
+ currentTransform = newTransform
+ playerContainerView.transform = newTransform
 
- // Normalize translation to view size
- let normalizedDeltaX = translation.x / bounds.width
- let normalizedDeltaY = translation.y / bounds.height
-
- // Scale by available pan range and apply
- currentPanX = panStartX - normalizedDeltaX * 2 * availablePanX
- currentPanY = panStartY - normalizedDeltaY * 2 * availablePanY
-
- // Constrain pan
- constrainPan()
-
- updateLayerTransform()
- emitPanZoomChange()
  case .ended, .cancelled:
- break
+ onGestureEnded()
+
  default:
  break
  }
  }
 
- /// Constrain pan values so content stays visible
- private func constrainPan() {
- // When zoomed, limit how far we can pan
- // At zoom level 2x, we can pan at most to show the edge (normalized to -1...1)
- let maxPanAmount = (currentZoom - 1.0) / currentZoom
+ // MARK: - Transform Helpers
+
+ /// Called when a gesture ends - applies bounds limiting with spring animation
+ private func onGestureEnded() {
+ let limitedTransform = limitTransform(currentTransform)
+
+ // Update state immediately so new gestures start from correct position
+ currentTransform = limitedTransform
+ gestureStartTransform = limitedTransform
+
+ UIView.animate(
+ withDuration: 0.3,
+ delay: 0,
+ usingSpringWithDamping: 0.8,
+ initialSpringVelocity: 0,
+ options: [.allowUserInteraction],
+ animations: {
+ self.playerContainerView.transform = limitedTransform
+ },
+ completion: { _ in
+ self.emitPanZoomChange()
+ }
+ )
+ }
 
- currentPanX = min(max(currentPanX, -maxPanAmount), maxPanAmount)
- currentPanY = min(max(currentPanY, -maxPanAmount), maxPanAmount)
+ /// Limit transform to valid bounds (min/max zoom, pan within content)
+ private func limitTransform(_ transform: CGAffineTransform) -> CGAffineTransform {
+ let scaleX = transform.scaleX
+ let scaleY = transform.scaleY
 
- // If not zoomed in, reset pan to center
- if currentZoom <= 1.0 {
- currentPanX = 0
- currentPanY = 0
+ // If zoomed out too far, reset to identity
+ if scaleX < minZoom || scaleY < minZoom {
+ return .identity
  }
- }
 
- /// Apply the current pan/zoom transform to the player layer
- private func updateLayerTransform() {
- guard let layer = playerLayer else { return }
+ var capped = transform
 
- var transform = CATransform3DIdentity
+ // Cap maximum zoom
+ let currentScale = max(scaleX, scaleY)
+ if currentScale > maxZoom {
+ let factor = maxZoom / currentScale
+ let center = CGPoint(x: bounds.width / 2, y: bounds.height / 2)
+ let capTransform = CGAffineTransform.anchoredScale(scale: factor, anchor: center)
+ capped = capped.concatenating(capTransform)
+ }
 
- // Apply zoom (scale)
- transform = CATransform3DScale(transform, currentZoom, currentZoom, 1.0)
+ // Constrain pan to keep content visible
+ let contentSize = bounds.size
+ let maxX = contentSize.width * (capped.scaleX - 1)
+ let maxY = contentSize.height * (capped.scaleY - 1)
 
- // Apply pan (translation) - scale the translation by zoom to account for scaled coordinate space
- let translateX = -currentPanX * bounds.width * (currentZoom - 1.0) / (2.0 * currentZoom)
- let translateY = -currentPanY * bounds.height * (currentZoom - 1.0) / (2.0 * currentZoom)
- transform = CATransform3DTranslate(transform, translateX, translateY, 0)
+ // tx/ty constraints: can't pan past edges
+ capped.tx = min(max(capped.tx, -maxX), 0)
+ capped.ty = min(max(capped.ty, -maxY), 0)
 
- // Apply transform with animation for smoothness
- CATransaction.begin()
- CATransaction.setDisableActions(true) // Disable implicit animations for responsiveness
- layer.transform = transform
- CATransaction.commit()
+ return capped
  }
 
  /// Emit the current pan/zoom state to JavaScript
  private func emitPanZoomChange() {
+ let scale = currentTransform.scaleX
+ let panX = scale > 1.0 ? currentTransform.tx / (bounds.width * (scale - 1)) : 0
+ let panY = scale > 1.0 ? currentTransform.ty / (bounds.height * (scale - 1)) : 0
+
  onPanZoomChange([
- "panX": currentPanX,
- "panY": currentPanY,
- "zoomLevel": currentZoom
+ "panX": -panX, // Normalize to -1 to 0 range
+ "panY": -panY,
+ "zoomLevel": scale
  ])
  }
 
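Scaling around an anchor is translate-to-anchor, scale, translate-back, which leaves the anchor itself fixed; that is what keeps the content under the user's fingers stationary in handlePinchGesture above. A minimal standalone sketch of the construction (the 2x scale and anchor values are made up for illustration; anchoredScale itself is defined in the CGAffineTransform extension at the end of this file):

import CoreGraphics

// Same shape as CGAffineTransform.anchoredScale(scale:anchor:), spelled out
// for a hypothetical 2x pinch anchored at (200, 150).
let anchor = CGPoint(x: 200, y: 150)
let t = CGAffineTransform(translationX: anchor.x, y: anchor.y)
    .scaledBy(x: 2, y: 2)
    .translatedBy(x: -anchor.x, y: -anchor.y)

print(anchor.applying(t))                  // (200.0, 150.0): the anchor is a fixed point
print(CGPoint(x: 250, y: 150).applying(t)) // (300.0, 150.0): other points move away from it
print(t)                                   // a: 2, d: 2, tx: -200, ty: -150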
@@ -289,61 +344,86 @@ class ExpoTwoStepVideoView: ExpoView {
 
  /// Get the current pan/zoom state
  func getPanZoomState() -> [String: CGFloat] {
+ let scale = currentTransform.scaleX
+ let panX = scale > 1.0 ? currentTransform.tx / (bounds.width * (scale - 1)) : 0
+ let panY = scale > 1.0 ? currentTransform.ty / (bounds.height * (scale - 1)) : 0
+
  return [
- "panX": currentPanX,
- "panY": currentPanY,
- "zoomLevel": currentZoom
+ "panX": -panX,
+ "panY": -panY,
+ "zoomLevel": scale
  ]
  }
 
  /// Set the pan/zoom state programmatically
  func setPanZoomState(panX: CGFloat?, panY: CGFloat?, zoomLevel: CGFloat?) {
+ var newTransform = currentTransform
+
  if let zoom = zoomLevel {
- currentZoom = min(max(zoom, minZoom), maxZoom)
+ let clampedZoom = min(max(zoom, minZoom), maxZoom)
+ let currentScale = currentTransform.scaleX
+ let scaleFactor = clampedZoom / currentScale
+ let center = CGPoint(x: bounds.width / 2, y: bounds.height / 2)
+ newTransform = newTransform.concatenating(
+ CGAffineTransform.anchoredScale(scale: scaleFactor, anchor: center)
+ )
  }
- if let x = panX {
- currentPanX = x
- }
- if let y = panY {
- currentPanY = y
+
+ if let x = panX, let y = panY {
+ let scale = newTransform.scaleX
+ if scale > 1.0 {
+ newTransform.tx = -x * bounds.width * (scale - 1)
+ newTransform.ty = -y * bounds.height * (scale - 1)
+ }
  }
 
- constrainPan()
- updateLayerTransform()
+ currentTransform = limitTransform(newTransform)
+ gestureStartTransform = currentTransform
+ playerContainerView.transform = currentTransform
  emitPanZoomChange()
  }
 
- /// Reset pan/zoom to default state
+ /// Reset pan/zoom to default state with animation
  func resetPanZoom() {
- currentZoom = 1.0
- currentPanX = 0.0
- currentPanY = 0.0
- updateLayerTransform()
- emitPanZoomChange()
+ UIView.animate(
+ withDuration: 0.2,
+ delay: 0,
+ options: [.allowUserInteraction],
+ animations: {
+ self.playerContainerView.transform = .identity
+ },
+ completion: { _ in
+ self.currentTransform = .identity
+ self.gestureStartTransform = .identity
+ self.emitPanZoomChange()
+ }
+ )
  }
 
  override func layoutSubviews() {
  super.layoutSubviews()
- playerLayer?.frame = bounds
- // Reapply transform after frame changes
- if currentZoom != 1.0 || currentPanX != 0 || currentPanY != 0 {
- updateLayerTransform()
- }
+
+ // Important: When a view has a non-identity transform, setting `frame` is undefined behavior.
+ // We must temporarily reset the transform, update bounds/center, then restore it.
+ let savedTransform = playerContainerView.transform
+ playerContainerView.transform = .identity
+
+ playerContainerView.frame = bounds
+ playerLayer?.frame = playerContainerView.bounds
+
+ // Restore the transform
+ playerContainerView.transform = savedTransform
  }
 
  // MARK: - Public Methods (called from module)
 
  func loadComposition(compositionId: String, composition: AVMutableComposition, videoComposition: AVMutableVideoComposition?) {
- // Clean up previous player
  cleanup()
 
  currentCompositionId = compositionId
  currentAssetId = nil
 
- // Create player item from composition
  let playerItem = AVPlayerItem(asset: composition)
-
- // Apply video composition for transforms (mirror, etc.)
  if let videoComposition = videoComposition {
  playerItem.videoComposition = videoComposition
  }
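The pan values reported by getPanZoomState and emitPanZoomChange are the container's translation normalized by the maximum pannable distance, bounds * (scale - 1), and then negated; setPanZoomState applies the same formula in reverse. A hypothetical round trip with made-up numbers (a 2x, center-anchored zoom of a 400x300 view; the view size is illustrative, not taken from the package):

import CoreGraphics

// Assumed view size and a 2x, center-anchored zoom (a = d = 2, tx = -200, ty = -150).
let viewSize = CGSize(width: 400, height: 300)
let zoomed = CGAffineTransform(a: 2, b: 0, c: 0, d: 2, tx: -200, ty: -150)

// Reading the state (mirrors getPanZoomState; .a equals scaleX here since there is no rotation):
let scale = zoomed.a
let panX = -(zoomed.tx / (viewSize.width * (scale - 1)))  // 0.5
let panY = -(zoomed.ty / (viewSize.height * (scale - 1))) // 0.5

// Writing it back (mirrors setPanZoomState):
let tx = -panX * viewSize.width * (scale - 1)             // -200
let ty = -panY * viewSize.height * (scale - 1)            // -150
print(scale, panX, panY, tx, ty)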
@@ -352,7 +432,6 @@ class ExpoTwoStepVideoView: ExpoView {
  }
 
  func loadAsset(assetId: String, asset: AVAsset) {
- // Clean up previous player
  cleanup()
 
  currentAssetId = assetId
@@ -363,15 +442,12 @@ class ExpoTwoStepVideoView: ExpoView {
  }
 
  private func setupPlayer(with playerItem: AVPlayerItem) {
- // Create player
  player = AVPlayer(playerItem: playerItem)
  playerLayer?.player = player
 
- // Observe playback status (track that we added this observer)
  playerItem.addObserver(self, forKeyPath: "status", options: [.new, .initial], context: nil)
  isObservingStatus = true
 
- // Observe when playback ends
  NotificationCenter.default.addObserver(
  self,
  selector: #selector(playerDidFinishPlaying),
@@ -379,7 +455,6 @@ class ExpoTwoStepVideoView: ExpoView {
  object: playerItem
  )
 
- // Add periodic time observer for progress (0.25s interval is sufficient and reduces overhead)
  let interval = CMTime(seconds: 0.25, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
  timeObserver = player?.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
  guard let self = self,
@@ -400,17 +475,13 @@ class ExpoTwoStepVideoView: ExpoView {
  }
 
  func play() {
- // Ensure audio session is properly configured before playing
- // This handles cases where another component may have changed the audio session
  if !isAudioSessionConfigured {
  setupAudioSession()
  } else {
- // Re-activate in case it was deactivated
  do {
  try AVAudioSession.sharedInstance().setActive(true, options: [.notifyOthersOnDeactivation])
  } catch {
  print("ExpoTwoStepVideoView: Failed to activate audio session: \(error)")
- // Try full reconfiguration
  setupAudioSession()
  }
  }
@@ -472,7 +543,6 @@ class ExpoTwoStepVideoView: ExpoView {
 
  @objc private func playerDidFinishPlaying() {
  if shouldLoop {
- // Seek back to start and play again
  player?.seek(to: .zero) { [weak self] _ in
  self?.player?.play()
  }
@@ -485,24 +555,20 @@ class ExpoTwoStepVideoView: ExpoView {
  // MARK: - Cleanup
 
  private func cleanup() {
- // Remove time observer first
  if let observer = timeObserver, let player = player {
  player.removeTimeObserver(observer)
  }
  timeObserver = nil
 
- // Remove KVO observer only if we added it
  if isObservingStatus, let playerItem = player?.currentItem {
  playerItem.removeObserver(self, forKeyPath: "status")
  isObservingStatus = false
  }
 
- // Remove notification observer
  if let playerItem = player?.currentItem {
  NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
  }
 
- // Stop and clear player
  player?.pause()
  player?.replaceCurrentItem(with: nil)
  player = nil
@@ -514,10 +580,8 @@ class ExpoTwoStepVideoView: ExpoView {
 
  deinit {
  cleanup()
- // Remove audio session observers
  NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
  NotificationCenter.default.removeObserver(self, name: AVAudioSession.routeChangeNotification, object: nil)
- // Also remove the layer to break any potential retain cycles
  playerLayer?.removeFromSuperlayer()
  playerLayer = nil
  }
@@ -528,11 +592,58 @@ class ExpoTwoStepVideoView: ExpoView {
  extension ExpoTwoStepVideoView: UIGestureRecognizerDelegate {
  /// Allow pinch and pan gestures to work simultaneously
  func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
- // Allow pinch and pan to work together
+ // Allow our pinch and pan to work together
  if (gestureRecognizer == pinchGesture && otherGestureRecognizer == panGesture) ||
  (gestureRecognizer == panGesture && otherGestureRecognizer == pinchGesture) {
  return true
  }
+ // Allow pinch to work simultaneously with external gestures (like ScrollView)
+ if gestureRecognizer == pinchGesture {
+ return true
+ }
+ return false
+ }
+
+ /// Only allow pan gesture when zoomed in
+ override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
+ if gestureRecognizer == panGesture {
+ // Read from actual view transform to be accurate
+ return playerContainerView.transform.scaleX > 1.01
+ }
+ return true
+ }
+
+ /// Our pinch gesture should take priority over parent scroll view gestures
+ func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldBeRequiredToFailBy otherGestureRecognizer: UIGestureRecognizer) -> Bool {
+ // If this is our pinch gesture, don't require it to fail for other gestures
+ if gestureRecognizer == pinchGesture {
+ return false
+ }
+ // If this is our pan gesture and we're zoomed in, don't require it to fail
+ if gestureRecognizer == panGesture && playerContainerView.transform.scaleX > 1.01 {
+ return false
+ }
  return false
  }
  }
+
+ // MARK: - CGAffineTransform Extension
+
+ extension CGAffineTransform {
+ /// Create a scale transform anchored at a specific point
+ static func anchoredScale(scale: CGFloat, anchor: CGPoint) -> CGAffineTransform {
+ return CGAffineTransform(translationX: anchor.x, y: anchor.y)
+ .scaledBy(x: scale, y: scale)
+ .translatedBy(x: -anchor.x, y: -anchor.y)
+ }
+
+ /// Get the X scale factor from the transform
+ var scaleX: CGFloat {
+ return sqrt(a * a + c * c)
+ }
+
+ /// Get the Y scale factor from the transform
+ var scaleY: CGFloat {
+ return sqrt(b * b + d * d)
+ }
+ }
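The scaleX/scaleY helpers read the scale back out of the matrix columns. For the pure scale-and-translate transforms this view builds, c is 0 and scaleX reduces to a, but the sqrt form also stays correct if a rotation were ever concatenated in. A small standalone check (the angle and scale are arbitrary illustration values, not from the package):

import CoreGraphics

// For a rotation by theta scaled by s, a = s * cos(theta) and c = -s * sin(theta),
// so sqrt(a * a + c * c) recovers s.
let s: CGFloat = 3
let rotatedAndScaled = CGAffineTransform(rotationAngle: .pi / 6).scaledBy(x: s, y: s)
let recovered = (rotatedAndScaled.a * rotatedAndScaled.a + rotatedAndScaled.c * rotatedAndScaled.c).squareRoot()
print(recovered) // 3.0

// For the scale + translate transforms used here, it is just the plain scale factor.
let zoomed = CGAffineTransform(a: 2, b: 0, c: 0, d: 2, tx: -200, ty: -150)
print((zoomed.a * zoomed.a + zoomed.c * zoomed.c).squareRoot()) // 2.0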
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@movementinfra/expo-twostep-video",
- "version": "0.1.11",
+ "version": "0.1.13",
  "description": "Minimal video editing for React Native using AVFoundation",
  "main": "build/index.js",
  "types": "build/index.d.ts",