@movementinfra/expo-twostep-video 0.1.12 → 0.1.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,6 +9,7 @@ class ExpoTwoStepVideoView: ExpoView {
 
   private var player: AVPlayer?
   private var playerLayer: AVPlayerLayer?
+  private var playerContainerView: UIView!
   private var timeObserver: Any?
   private var isObservingStatus: Bool = false
   private var isAudioSessionConfigured: Bool = false
@@ -29,84 +30,56 @@ class ExpoTwoStepVideoView: ExpoView {
 
   // MARK: - Pan/Zoom Properties
 
-  /// Current zoom level (1.0 = no zoom, 2.0 = 2x zoom, etc.)
-  private var currentZoom: CGFloat = 1.0
+  /// Current transform applied to the container view
+  private var currentTransform: CGAffineTransform = .identity
 
-  /// Current horizontal pan (-1.0 to 1.0, 0 = center)
-  private var currentPanX: CGFloat = 0.0
+  /// Transform at gesture start (for incremental changes)
+  private var gestureStartTransform: CGAffineTransform = .identity
 
-  /// Current vertical pan (-1.0 to 1.0, 0 = center)
-  private var currentPanY: CGFloat = 0.0
-
-  /// Last pinch scale for incremental zoom calculation
-  private var lastPinchScale: CGFloat = 1.0
-
-  /// Starting pan position when gesture begins
-  private var panStartPosition: CGPoint = .zero
-
-  /// Minimum zoom level (configurable)
+  /// Minimum zoom level
   var minZoom: CGFloat = 1.0
 
-  /// Maximum zoom level (configurable)
+  /// Maximum zoom level
   var maxZoom: CGFloat = 5.0
 
   /// Gesture recognizers
   private var pinchGesture: UIPinchGestureRecognizer?
   private var panGesture: UIPanGestureRecognizer?
 
-  // MARK: - Pan/Zoom Helpers
-
-  /// Maximum pan amount allowed at current zoom level
-  /// At zoom 2x, this is 0.5 (can pan halfway). At zoom 1x, this is 0.
-  private var maxPanAmount: CGFloat {
-    guard currentZoom > 1.0 else { return 0 }
-    return (currentZoom - 1.0) / currentZoom
-  }
-
-  /// Clamp a value between min and max bounds
-  private func clamp(_ value: CGFloat, min minValue: CGFloat, max maxValue: CGFloat) -> CGFloat {
-    return Swift.min(Swift.max(value, minValue), maxValue)
-  }
-
-  /// Whether pan/zoom is at the default (untransformed) state
-  private var isAtDefaultTransform: Bool {
-    return currentZoom == 1.0 && currentPanX == 0 && currentPanY == 0
-  }
-
   // MARK: - Initialization
 
   required init(appContext: AppContext? = nil) {
     super.init(appContext: appContext)
     clipsToBounds = true
     backgroundColor = .black
-    setupPlayerLayer()
+    setupPlayerContainerView()
     setupAudioSession()
     setupAudioSessionObservers()
     setupGestureRecognizers()
   }
 
-  private func setupPlayerLayer() {
+  private func setupPlayerContainerView() {
+    // Create container view that will receive transforms
+    playerContainerView = UIView()
+    playerContainerView.backgroundColor = .black
+    addSubview(playerContainerView)
+
+    // Create player layer inside container
     playerLayer = AVPlayerLayer()
     playerLayer?.videoGravity = .resizeAspect
     playerLayer?.backgroundColor = UIColor.black.cgColor
-    if let playerLayer = playerLayer {
-      layer.addSublayer(playerLayer)
-    }
+    playerContainerView.layer.addSublayer(playerLayer!)
   }
 
   /// Configure audio session for video playback
-  /// Uses .playback category to ensure audio plays even when silent switch is on
   private func setupAudioSession() {
     do {
       let audioSession = AVAudioSession.sharedInstance()
-      // .playback category: audio plays even with silent switch, stops other audio
-      // .defaultToSpeaker: routes audio to speaker by default (not earpiece)
       try audioSession.setCategory(.playback, mode: .moviePlayback, options: [.defaultToSpeaker])
       try audioSession.setActive(true, options: [.notifyOthersOnDeactivation])
       isAudioSessionConfigured = true
    } catch {
      print("ExpoTwoStepVideoView: Failed to configure audio session: \(error)")
-      // Try a simpler configuration as fallback
      do {
        try AVAudioSession.sharedInstance().setCategory(.playback)
        try AVAudioSession.sharedInstance().setActive(true)
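
The hunks above replace the old scalar pan/zoom state (`currentZoom`, `currentPanX`, `currentPanY`) with a single `CGAffineTransform`, and move the transform target from the `AVPlayerLayer` onto a new `playerContainerView` that hosts it. The sketch below is a minimal, standalone version of that container pattern; `ContainerPlayerView` is a hypothetical name for illustration, not the package's class. It also shows the frame/transform dance the later `layoutSubviews` hunk relies on: `frame` is undefined while a non-identity transform is set, so layout resets the transform, sizes the container, then restores it.

```swift
import AVFoundation
import UIKit

// Minimal sketch of the container-view pattern this release adopts.
// `ContainerPlayerView` is a hypothetical name, not part of the package.
final class ContainerPlayerView: UIView {
  let containerView = UIView()          // receives the pan/zoom transform
  let playerLayer = AVPlayerLayer()     // hosted inside the container

  override init(frame: CGRect) {
    super.init(frame: frame)
    clipsToBounds = true
    containerView.backgroundColor = .black
    addSubview(containerView)
    playerLayer.videoGravity = .resizeAspect
    containerView.layer.addSublayer(playerLayer)
  }

  required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }

  override func layoutSubviews() {
    super.layoutSubviews()
    // `frame` is undefined while a non-identity transform is set, so reset,
    // lay out, then restore -- the same order of operations the diff uses.
    let saved = containerView.transform
    containerView.transform = .identity
    containerView.frame = bounds
    playerLayer.frame = containerView.bounds
    containerView.transform = saved
  }
}
```
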
@@ -117,7 +90,7 @@ class ExpoTwoStepVideoView: ExpoView {
     }
   }
 
-  /// Listen for audio session interruptions (phone calls, other apps)
+  /// Listen for audio session interruptions
   private func setupAudioSessionObservers() {
     NotificationCenter.default.addObserver(
       self,
@@ -143,15 +116,12 @@ class ExpoTwoStepVideoView: ExpoView {
 
     switch type {
     case .began:
-      // Interruption began - pause playback
       player?.pause()
       onPlaybackStatusChange(["status": "interrupted"])
     case .ended:
-      // Interruption ended - check if we should resume
       guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
       let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
       if options.contains(.shouldResume) {
-        // Re-activate audio session and resume
        setupAudioSession()
        player?.play()
        onPlaybackStatusChange(["status": "playing"])
@@ -168,7 +138,6 @@ class ExpoTwoStepVideoView: ExpoView {
       return
     }
 
-    // Pause when headphones are unplugged
     if reason == .oldDeviceUnavailable {
       player?.pause()
       onPlaybackStatusChange(["status": "paused"])
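
The next hunk rewrites the gesture handlers so that a pinch composes a scale anchored at the touch location onto the transform captured when the gesture began, instead of mutating normalized zoom/pan scalars. The following condensed sketch is illustrative only: `zoomTarget` and the free-standing `gestureStartTransform` stand in for the package's container view and stored state, and the real handler additionally clamps the result (`limitTransform`) and spring-animates it when the gesture ends.

```swift
import UIKit

// Condensed sketch of the anchored-pinch pattern used by the new handlePinchGesture.
// `zoomTarget` and `gestureStartTransform` are stand-ins, not package API.
var gestureStartTransform = CGAffineTransform.identity

func handlePinch(_ gesture: UIPinchGestureRecognizer, zoomTarget: UIView) {
  switch gesture.state {
  case .began:
    // Capture the transform at gesture start so .changed updates compose against it.
    gestureStartTransform = zoomTarget.transform
  case .changed:
    // Scale about the touch point: translate to the anchor, scale, translate back.
    let anchor = gesture.location(in: zoomTarget.superview)
    let anchoredScale = CGAffineTransform(translationX: anchor.x, y: anchor.y)
      .scaledBy(x: gesture.scale, y: gesture.scale)
      .translatedBy(x: -anchor.x, y: -anchor.y)
    zoomTarget.transform = gestureStartTransform.concatenating(anchoredScale)
  case .ended, .cancelled:
    // The package additionally clamps the result and spring-animates it here.
    break
  default:
    break
  }
}
```
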
@@ -184,109 +153,190 @@ class ExpoTwoStepVideoView: ExpoView {
     addGestureRecognizer(pinch)
     pinchGesture = pinch
 
-    // Pan with 2 fingers (to avoid conflict with single-finger scrubbing)
+    // Single finger pan (only works when zoomed, controlled by delegate)
     let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePanGesture(_:)))
-    pan.minimumNumberOfTouches = 2
-    pan.maximumNumberOfTouches = 2
     pan.delegate = self
     addGestureRecognizer(pan)
     panGesture = pan
 
-    // Enable user interaction
     isUserInteractionEnabled = true
   }
 
+  // MARK: - Video Content Rect Calculation
+
+  /// Calculate the rect where the actual video content is displayed within the player layer.
+  /// This accounts for letterboxing/pillarboxing when using .resizeAspect video gravity.
+  private func videoContentRect() -> CGRect {
+    guard let playerLayer = playerLayer,
+          let playerItem = player?.currentItem else {
+      return playerContainerView.bounds
+    }
+
+    // Get the natural size of the video
+    let videoSize = playerItem.presentationSize
+    guard videoSize.width > 0 && videoSize.height > 0 else {
+      return playerContainerView.bounds
+    }
+
+    // Use AVFoundation's utility to calculate the rect
+    let layerBounds = playerLayer.bounds
+    return AVMakeRect(aspectRatio: videoSize, insideRect: layerBounds)
+  }
+
+  /// Adjust an anchor point to be relative to the video content rect.
+  /// Clamps points outside the video area to the nearest edge.
+  private func adjustedAnchorPoint(for point: CGPoint) -> CGPoint {
+    let contentRect = videoContentRect()
+
+    // Clamp the point to the video content rect
+    let clampedX = min(max(point.x, contentRect.minX), contentRect.maxX)
+    let clampedY = min(max(point.y, contentRect.minY), contentRect.maxY)
+
+    return CGPoint(x: clampedX, y: clampedY)
+  }
+
+  // MARK: - Gesture Handlers
+
+  /// Get the current visual transform, reading from the presentation layer if an animation is in progress
+  private func captureVisualTransform() -> CGAffineTransform {
+    if let presentationLayer = playerContainerView.layer.presentation() {
+      return CGAffineTransform(
+        a: presentationLayer.transform.m11,
+        b: presentationLayer.transform.m12,
+        c: presentationLayer.transform.m21,
+        d: presentationLayer.transform.m22,
+        tx: presentationLayer.transform.m41,
+        ty: presentationLayer.transform.m42
+      )
+    }
+    return playerContainerView.transform
+  }
+
+  /// Prepare for a new gesture by capturing current visual state and cancelling animations
+  private func beginGesture() {
+    let visualTransform = captureVisualTransform()
+    playerContainerView.layer.removeAllAnimations()
+    playerContainerView.transform = visualTransform
+    gestureStartTransform = visualTransform
+    currentTransform = visualTransform
+  }
+
   @objc private func handlePinchGesture(_ gesture: UIPinchGestureRecognizer) {
     switch gesture.state {
     case .began:
-      lastPinchScale = 1.0
-    case .changed:
-      // Calculate incremental scale change from last gesture update
-      let scaleChange = gesture.scale / lastPinchScale
-      lastPinchScale = gesture.scale
+      beginGesture()
 
-      // Apply zoom with clamping to valid range
-      currentZoom = clamp(currentZoom * scaleChange, min: minZoom, max: maxZoom)
+    case .changed:
+      let rawAnchor = gesture.location(in: self)
+      // Adjust anchor to account for letterboxing/pillarboxing
+      let anchor = adjustedAnchorPoint(for: rawAnchor)
+      let scale = gesture.scale
+      let scaleTransform = CGAffineTransform.anchoredScale(scale: scale, anchor: anchor)
+      let newTransform = gestureStartTransform.concatenating(scaleTransform)
+      currentTransform = newTransform
+      playerContainerView.transform = newTransform
 
-      // Constrain pan (may need adjustment when zoom decreases)
-      constrainPan()
-      applyTransformAndNotify()
     case .ended, .cancelled:
-      lastPinchScale = 1.0
+      onGestureEnded()
+
     default:
       break
     }
   }
 
   @objc private func handlePanGesture(_ gesture: UIPanGestureRecognizer) {
-    // Only allow panning when zoomed in
-    guard currentZoom > 1.0 else { return }
-
     switch gesture.state {
     case .began:
-      panStartPosition = CGPoint(x: currentPanX, y: currentPanY)
+      beginGesture()
+
     case .changed:
       let translation = gesture.translation(in: self)
+      let scale = max(gestureStartTransform.scaleX, 1.0)
 
-      // Convert screen translation to normalized pan delta
-      // Dragging full width changes pan by 2x the available pan range
-      let panDelta = CGPoint(
-        x: (translation.x / bounds.width) * 2 * maxPanAmount,
-        y: (translation.y / bounds.height) * 2 * maxPanAmount
+      // Translate in the scaled coordinate space
+      let newTransform = gestureStartTransform.translatedBy(
+        x: translation.x / scale,
+        y: translation.y / scale
       )
+      currentTransform = newTransform
+      playerContainerView.transform = newTransform
 
-      // Apply delta from start position (negative because dragging right shows left content)
-      currentPanX = panStartPosition.x - panDelta.x
-      currentPanY = panStartPosition.y - panDelta.y
-
-      constrainPan()
-      applyTransformAndNotify()
     case .ended, .cancelled:
-      break
+      onGestureEnded()
+
     default:
       break
     }
   }
 
-  /// Constrain pan values to keep content visible at current zoom level
-  private func constrainPan() {
-    let limit = maxPanAmount
-    currentPanX = clamp(currentPanX, min: -limit, max: limit)
-    currentPanY = clamp(currentPanY, min: -limit, max: limit)
-  }
+  // MARK: - Transform Helpers
 
-  /// Apply transform to layer and notify JS of the change
-  private func applyTransformAndNotify() {
-    updateLayerTransform()
-    emitPanZoomChange()
+  /// Called when a gesture ends - applies bounds limiting with spring animation
+  private func onGestureEnded() {
+    let limitedTransform = limitTransform(currentTransform)
+
+    // Update state immediately so new gestures start from correct position
+    currentTransform = limitedTransform
+    gestureStartTransform = limitedTransform
+
+    UIView.animate(
+      withDuration: 0.3,
+      delay: 0,
+      usingSpringWithDamping: 0.8,
+      initialSpringVelocity: 0,
+      options: [.allowUserInteraction],
+      animations: {
+        self.playerContainerView.transform = limitedTransform
+      },
+      completion: { _ in
+        self.emitPanZoomChange()
+      }
+    )
   }
 
-  /// Apply the current pan/zoom transform to the player layer
-  private func updateLayerTransform() {
-    guard let layer = playerLayer else { return }
+  /// Limit transform to valid bounds (min/max zoom, pan within content)
+  private func limitTransform(_ transform: CGAffineTransform) -> CGAffineTransform {
+    let scaleX = transform.scaleX
+    let scaleY = transform.scaleY
 
-    // Start with identity and apply zoom
-    var transform = CATransform3DScale(CATransform3DIdentity, currentZoom, currentZoom, 1.0)
+    // If zoomed out too far, reset to identity
+    if scaleX < minZoom || scaleY < minZoom {
+      return .identity
+    }
+
+    var capped = transform
 
-    // Apply pan translation (scaled for zoomed coordinate space)
-    // Formula: translate by half the extra visible area in the zoom direction
-    let translateX = -currentPanX * bounds.width * maxPanAmount / 2.0
-    let translateY = -currentPanY * bounds.height * maxPanAmount / 2.0
-    transform = CATransform3DTranslate(transform, translateX, translateY, 0)
+    // Cap maximum zoom
+    let currentScale = max(scaleX, scaleY)
+    if currentScale > maxZoom {
+      let factor = maxZoom / currentScale
+      let center = CGPoint(x: bounds.width / 2, y: bounds.height / 2)
+      let capTransform = CGAffineTransform.anchoredScale(scale: factor, anchor: center)
+      capped = capped.concatenating(capTransform)
+    }
 
-    // Apply without implicit animations for responsive feel
-    CATransaction.begin()
-    CATransaction.setDisableActions(true)
-    layer.transform = transform
-    CATransaction.commit()
+    // Constrain pan to keep content visible
+    let contentSize = bounds.size
+    let maxX = contentSize.width * (capped.scaleX - 1)
+    let maxY = contentSize.height * (capped.scaleY - 1)
+
+    // tx/ty constraints: can't pan past edges
+    capped.tx = min(max(capped.tx, -maxX), 0)
+    capped.ty = min(max(capped.ty, -maxY), 0)
+
+    return capped
   }
 
   /// Emit the current pan/zoom state to JavaScript
   private func emitPanZoomChange() {
+    let scale = currentTransform.scaleX
+    let panX = scale > 1.0 ? currentTransform.tx / (bounds.width * (scale - 1)) : 0
+    let panY = scale > 1.0 ? currentTransform.ty / (bounds.height * (scale - 1)) : 0
+
     onPanZoomChange([
-      "panX": currentPanX,
-      "panY": currentPanY,
-      "zoomLevel": currentZoom
+      "panX": -panX, // Normalize to -1 to 0 range
+      "panY": -panY,
+      "zoomLevel": scale
    ])
  }
 
@@ -294,59 +344,86 @@ class ExpoTwoStepVideoView: ExpoView {
 
   /// Get the current pan/zoom state
   func getPanZoomState() -> [String: CGFloat] {
+    let scale = currentTransform.scaleX
+    let panX = scale > 1.0 ? currentTransform.tx / (bounds.width * (scale - 1)) : 0
+    let panY = scale > 1.0 ? currentTransform.ty / (bounds.height * (scale - 1)) : 0
+
     return [
-      "panX": currentPanX,
-      "panY": currentPanY,
-      "zoomLevel": currentZoom
+      "panX": -panX,
+      "panY": -panY,
+      "zoomLevel": scale
     ]
   }
 
   /// Set the pan/zoom state programmatically
   func setPanZoomState(panX: CGFloat?, panY: CGFloat?, zoomLevel: CGFloat?) {
+    var newTransform = currentTransform
+
     if let zoom = zoomLevel {
-      currentZoom = clamp(zoom, min: minZoom, max: maxZoom)
-    }
-    if let x = panX {
-      currentPanX = x
+      let clampedZoom = min(max(zoom, minZoom), maxZoom)
+      let currentScale = currentTransform.scaleX
+      let scaleFactor = clampedZoom / currentScale
+      let center = CGPoint(x: bounds.width / 2, y: bounds.height / 2)
+      newTransform = newTransform.concatenating(
+        CGAffineTransform.anchoredScale(scale: scaleFactor, anchor: center)
+      )
     }
-    if let y = panY {
-      currentPanY = y
+
+    if let x = panX, let y = panY {
+      let scale = newTransform.scaleX
+      if scale > 1.0 {
+        newTransform.tx = -x * bounds.width * (scale - 1)
+        newTransform.ty = -y * bounds.height * (scale - 1)
+      }
     }
 
-    constrainPan()
-    applyTransformAndNotify()
+    currentTransform = limitTransform(newTransform)
+    gestureStartTransform = currentTransform
+    playerContainerView.transform = currentTransform
+    emitPanZoomChange()
   }
 
-  /// Reset pan/zoom to default state
+  /// Reset pan/zoom to default state with animation
   func resetPanZoom() {
-    currentZoom = 1.0
-    currentPanX = 0.0
-    currentPanY = 0.0
-    applyTransformAndNotify()
+    UIView.animate(
+      withDuration: 0.2,
+      delay: 0,
+      options: [.allowUserInteraction],
+      animations: {
+        self.playerContainerView.transform = .identity
+      },
+      completion: { _ in
+        self.currentTransform = .identity
+        self.gestureStartTransform = .identity
+        self.emitPanZoomChange()
+      }
+    )
   }
 
   override func layoutSubviews() {
     super.layoutSubviews()
-    playerLayer?.frame = bounds
-    // Reapply transform after frame changes (only if not at default)
-    if !isAtDefaultTransform {
-      updateLayerTransform()
-    }
+
+    // Important: When a view has a non-identity transform, setting `frame` is undefined behavior.
+    // We must temporarily reset the transform, update bounds/center, then restore it.
+    let savedTransform = playerContainerView.transform
+    playerContainerView.transform = .identity
+
+    playerContainerView.frame = bounds
+    playerLayer?.frame = playerContainerView.bounds
+
+    // Restore the transform
+    playerContainerView.transform = savedTransform
   }
 
   // MARK: - Public Methods (called from module)
 
   func loadComposition(compositionId: String, composition: AVMutableComposition, videoComposition: AVMutableVideoComposition?) {
-    // Clean up previous player
     cleanup()
 
     currentCompositionId = compositionId
     currentAssetId = nil
 
-    // Create player item from composition
     let playerItem = AVPlayerItem(asset: composition)
-
-    // Apply video composition for transforms (mirror, etc.)
     if let videoComposition = videoComposition {
       playerItem.videoComposition = videoComposition
     }
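
The hunk above converts between the JS-facing normalized values and the transform in both directions: `getPanZoomState` and `emitPanZoomChange` divide `tx`/`ty` by `bounds * (scale - 1)` and negate the result, while `setPanZoomState` multiplies back. A quick worked check of the pan half of that round trip, using bounds and pan values that are assumptions for illustration only:

```swift
import CoreGraphics

// Round-trip check of the pan normalization used by setPanZoomState and
// getPanZoomState. The 400x300 bounds and requested values are hypothetical.
let bounds = CGSize(width: 400, height: 300)
let zoom: CGFloat = 2.0
let requestedPanX: CGFloat = 0.25
let requestedPanY: CGFloat = 0.5

// setPanZoomState direction: normalized pan -> translation components.
let tx = -requestedPanX * bounds.width * (zoom - 1)    // -0.25 * 400 * 1 = -100
let ty = -requestedPanY * bounds.height * (zoom - 1)   // -0.5  * 300 * 1 = -150

// Both values stay inside the clamp limitTransform applies: tx in [-width*(zoom-1), 0].

// getPanZoomState direction: translation -> normalized pan, negated on the way out.
let panXBack = -(tx / (bounds.width * (zoom - 1)))     // 0.25
let panYBack = -(ty / (bounds.height * (zoom - 1)))    // 0.5

print(panXBack, panYBack)  // recovers the requested values
```
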
@@ -355,7 +432,6 @@ class ExpoTwoStepVideoView: ExpoView {
   }
 
   func loadAsset(assetId: String, asset: AVAsset) {
-    // Clean up previous player
     cleanup()
 
     currentAssetId = assetId
@@ -366,15 +442,12 @@ class ExpoTwoStepVideoView: ExpoView {
   }
 
   private func setupPlayer(with playerItem: AVPlayerItem) {
-    // Create player
     player = AVPlayer(playerItem: playerItem)
     playerLayer?.player = player
 
-    // Observe playback status (track that we added this observer)
     playerItem.addObserver(self, forKeyPath: "status", options: [.new, .initial], context: nil)
     isObservingStatus = true
 
-    // Observe when playback ends
     NotificationCenter.default.addObserver(
       self,
       selector: #selector(playerDidFinishPlaying),
@@ -382,7 +455,6 @@ class ExpoTwoStepVideoView: ExpoView {
       object: playerItem
     )
 
-    // Add periodic time observer for progress (0.25s interval is sufficient and reduces overhead)
     let interval = CMTime(seconds: 0.25, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
     timeObserver = player?.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in
       guard let self = self,
@@ -403,17 +475,13 @@ class ExpoTwoStepVideoView: ExpoView {
   }
 
   func play() {
-    // Ensure audio session is properly configured before playing
-    // This handles cases where another component may have changed the audio session
     if !isAudioSessionConfigured {
       setupAudioSession()
     } else {
-      // Re-activate in case it was deactivated
      do {
        try AVAudioSession.sharedInstance().setActive(true, options: [.notifyOthersOnDeactivation])
      } catch {
        print("ExpoTwoStepVideoView: Failed to activate audio session: \(error)")
-        // Try full reconfiguration
        setupAudioSession()
      }
    }
@@ -475,7 +543,6 @@ class ExpoTwoStepVideoView: ExpoView {
 
   @objc private func playerDidFinishPlaying() {
     if shouldLoop {
-      // Seek back to start and play again
       player?.seek(to: .zero) { [weak self] _ in
        self?.player?.play()
      }
@@ -488,24 +555,20 @@ class ExpoTwoStepVideoView: ExpoView {
   // MARK: - Cleanup
 
   private func cleanup() {
-    // Remove time observer first
     if let observer = timeObserver, let player = player {
       player.removeTimeObserver(observer)
     }
     timeObserver = nil
 
-    // Remove KVO observer only if we added it
     if isObservingStatus, let playerItem = player?.currentItem {
       playerItem.removeObserver(self, forKeyPath: "status")
       isObservingStatus = false
     }
 
-    // Remove notification observer
     if let playerItem = player?.currentItem {
       NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
     }
 
-    // Stop and clear player
     player?.pause()
     player?.replaceCurrentItem(with: nil)
     player = nil
@@ -517,10 +580,8 @@ class ExpoTwoStepVideoView: ExpoView {
 
   deinit {
     cleanup()
-    // Remove audio session observers
     NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
     NotificationCenter.default.removeObserver(self, name: AVAudioSession.routeChangeNotification, object: nil)
-    // Also remove the layer to break any potential retain cycles
     playerLayer?.removeFromSuperlayer()
     playerLayer = nil
   }
@@ -531,11 +592,58 @@
 extension ExpoTwoStepVideoView: UIGestureRecognizerDelegate {
   /// Allow pinch and pan gestures to work simultaneously
   func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
-    // Allow pinch and pan to work together
+    // Allow our pinch and pan to work together
     if (gestureRecognizer == pinchGesture && otherGestureRecognizer == panGesture) ||
       (gestureRecognizer == panGesture && otherGestureRecognizer == pinchGesture) {
       return true
     }
+    // Allow pinch to work simultaneously with external gestures (like ScrollView)
+    if gestureRecognizer == pinchGesture {
+      return true
+    }
+    return false
+  }
+
+  /// Only allow pan gesture when zoomed in
+  override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
+    if gestureRecognizer == panGesture {
+      // Read from actual view transform to be accurate
+      return playerContainerView.transform.scaleX > 1.01
+    }
+    return true
+  }
+
+  /// Our pinch gesture should take priority over parent scroll view gestures
+  func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldBeRequiredToFailBy otherGestureRecognizer: UIGestureRecognizer) -> Bool {
+    // If this is our pinch gesture, don't require it to fail for other gestures
+    if gestureRecognizer == pinchGesture {
+      return false
+    }
+    // If this is our pan gesture and we're zoomed in, don't require it to fail
+    if gestureRecognizer == panGesture && playerContainerView.transform.scaleX > 1.01 {
+      return false
+    }
     return false
   }
 }
+
+// MARK: - CGAffineTransform Extension
+
+extension CGAffineTransform {
+  /// Create a scale transform anchored at a specific point
+  static func anchoredScale(scale: CGFloat, anchor: CGPoint) -> CGAffineTransform {
+    return CGAffineTransform(translationX: anchor.x, y: anchor.y)
+      .scaledBy(x: scale, y: scale)
+      .translatedBy(x: -anchor.x, y: -anchor.y)
+  }
+
+  /// Get the X scale factor from the transform
+  var scaleX: CGFloat {
+    return sqrt(a * a + c * c)
+  }
+
+  /// Get the Y scale factor from the transform
+  var scaleY: CGFloat {
+    return sqrt(b * b + d * d)
+  }
+}
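
The new `CGAffineTransform.anchoredScale(scale:anchor:)` helper builds the transform as translate-to-anchor, scale, translate-back, which leaves the anchor point fixed, and `scaleX`/`scaleY` read the scale back out of the matrix. A small self-contained check of both properties; the helper is re-declared locally here so the snippet stands alone:

```swift
import CoreGraphics

// Quick check of the anchored-scale construction added in this release.
func anchoredScale(scale: CGFloat, anchor: CGPoint) -> CGAffineTransform {
  CGAffineTransform(translationX: anchor.x, y: anchor.y)
    .scaledBy(x: scale, y: scale)
    .translatedBy(x: -anchor.x, y: -anchor.y)
}

let anchor = CGPoint(x: 100, y: 50)
let t = anchoredScale(scale: 2, anchor: anchor)

// The anchor is a fixed point of the transform...
print(anchor.applying(t))                     // (100.0, 50.0)
// ...while other points double their distance from it.
print(CGPoint(x: 150, y: 50).applying(t))     // (200.0, 50.0)

// The scale can be recovered from the matrix, mirroring the extension's
// scaleX property (sqrt(a*a + c*c)).
print((t.a * t.a + t.c * t.c).squareRoot())   // 2.0
```
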
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@movementinfra/expo-twostep-video",
-  "version": "0.1.12",
+  "version": "0.1.13",
   "description": "Minimal video editing for React Native using AVFoundation",
   "main": "build/index.js",
   "types": "build/index.d.ts",