@rejourneyco/react-native 1.0.7 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/README.md +1 -1
  2. package/android/src/main/java/com/rejourney/RejourneyModuleImpl.kt +20 -18
  3. package/android/src/main/java/com/rejourney/recording/InteractionRecorder.kt +28 -0
  4. package/android/src/main/java/com/rejourney/recording/ReplayOrchestrator.kt +42 -33
  5. package/android/src/main/java/com/rejourney/recording/SegmentDispatcher.kt +242 -34
  6. package/android/src/main/java/com/rejourney/recording/SpecialCases.kt +572 -0
  7. package/android/src/main/java/com/rejourney/recording/TelemetryPipeline.kt +6 -4
  8. package/android/src/main/java/com/rejourney/recording/VisualCapture.kt +156 -64
  9. package/ios/Engine/RejourneyImpl.swift +3 -18
  10. package/ios/Recording/InteractionRecorder.swift +28 -0
  11. package/ios/Recording/ReplayOrchestrator.swift +50 -17
  12. package/ios/Recording/SegmentDispatcher.swift +147 -13
  13. package/ios/Recording/SpecialCases.swift +614 -0
  14. package/ios/Recording/StabilityMonitor.swift +2 -2
  15. package/ios/Recording/TelemetryPipeline.swift +21 -3
  16. package/ios/Recording/VisualCapture.swift +50 -20
  17. package/lib/commonjs/index.js +4 -5
  18. package/lib/commonjs/sdk/constants.js +2 -2
  19. package/lib/commonjs/sdk/utils.js +1 -1
  20. package/lib/module/index.js +4 -5
  21. package/lib/module/sdk/constants.js +2 -2
  22. package/lib/module/sdk/utils.js +1 -1
  23. package/lib/typescript/sdk/constants.d.ts +2 -2
  24. package/lib/typescript/types/index.d.ts +1 -6
  25. package/package.json +2 -2
  26. package/src/index.ts +9 -10
  27. package/src/sdk/constants.ts +2 -2
  28. package/src/sdk/utils.ts +1 -1
  29. package/src/types/index.ts +1 -6
@@ -26,7 +26,9 @@ import android.graphics.Rect
26
26
  import android.os.Handler
27
27
  import android.os.Looper
28
28
  import android.os.SystemClock
29
+ import android.view.TextureView
29
30
  import android.view.View
31
+ import android.view.ViewGroup
30
32
  import android.view.WindowManager
31
33
  import com.rejourney.engine.DiagnosticLog
32
34
  import com.rejourney.utility.gzipCompress
@@ -60,7 +62,7 @@ class VisualCapture private constructor(private val context: Context) {
60
62
  get() = instance
61
63
  }
62
64
 
63
- var snapshotInterval: Double = 0.5
65
+ var snapshotInterval: Double = 1.0
64
66
  var quality: Float = 0.5f
65
67
 
66
68
  val isCapturing: Boolean
@@ -91,18 +93,17 @@ class VisualCapture private constructor(private val context: Context) {
91
93
 
92
94
  // Current activity reference
93
95
  private var currentActivity: WeakReference<Activity>? = null
96
+
94
97
 
95
98
  fun setCurrentActivity(activity: Activity?) {
96
99
  currentActivity = if (activity != null) WeakReference(activity) else null
97
- DiagnosticLog.notice("[VisualCapture] setCurrentActivity: ${activity?.javaClass?.simpleName ?: "null"}")
100
+ DiagnosticLog.trace("[VisualCapture] setCurrentActivity: ${activity?.javaClass?.simpleName ?: "null"}")
98
101
  }
99
102
 
100
103
  fun beginCapture(sessionOrigin: Long) {
101
- DiagnosticLog.notice("[VisualCapture] beginCapture called, currentActivity=${currentActivity?.get()?.javaClass?.simpleName ?: "null"}, state=${stateMachine.currentState}")
102
- DiagnosticLog.trace("[VisualCapture] beginCapture called, currentActivity=${currentActivity?.get()?.javaClass?.simpleName ?: "null"}")
104
+ DiagnosticLog.trace("[VisualCapture] beginCapture called, currentActivity=${currentActivity?.get()?.javaClass?.simpleName ?: "null"}, state=${stateMachine.currentState}")
103
105
  if (!stateMachine.transition(CaptureState.CAPTURING)) {
104
- DiagnosticLog.notice("[VisualCapture] beginCapture REJECTED - state transition failed from ${stateMachine.currentState}")
105
- DiagnosticLog.trace("[VisualCapture] beginCapture failed - state transition rejected")
106
+ DiagnosticLog.trace("[VisualCapture] beginCapture REJECTED - state transition failed from ${stateMachine.currentState}")
106
107
  return
107
108
  }
108
109
  sessionEpoch = sessionOrigin
@@ -116,7 +117,6 @@ class VisualCapture private constructor(private val context: Context) {
116
117
  }
117
118
  }
118
119
 
119
- DiagnosticLog.notice("[VisualCapture] Starting capture timer with interval=${snapshotInterval}s")
120
120
  DiagnosticLog.trace("[VisualCapture] Starting capture timer with interval=${snapshotInterval}s")
121
121
  startCaptureTimer()
122
122
  }
@@ -138,6 +138,21 @@ class VisualCapture private constructor(private val context: Context) {
138
138
  flushBufferToDisk()
139
139
  }
140
140
 
141
+ /** Submit any buffered frames to the upload pipeline immediately
142
+ * (regardless of batch size threshold). Packages synchronously to
143
+ * avoid race conditions during backgrounding. */
144
+ fun flushBufferToNetwork() {
145
+ // Take frames from buffer synchronously (not via async sendScreenshots)
146
+ val images = stateLock.withLock {
147
+ val copy = screenshots.toList()
148
+ screenshots.clear()
149
+ copy
150
+ }
151
+ if (images.isEmpty()) return
152
+ // Package and submit synchronously on this thread
153
+ packageAndShip(images, sessionEpoch)
154
+ }
155
+
141
156
  fun activateDeferredMode() {
142
157
  deferredUntilCommit = true
143
158
  }
@@ -165,14 +180,14 @@ class VisualCapture private constructor(private val context: Context) {
165
180
  }
166
181
 
167
182
  fun snapshotNow() {
168
- mainHandler.post { captureFrame() }
183
+ mainHandler.post { captureFrame(force = true) }
169
184
  }
170
185
 
171
186
  private fun startCaptureTimer() {
172
187
  stopCaptureTimer()
173
188
  captureRunnable = object : Runnable {
174
189
  override fun run() {
175
- captureFrame()
190
+ captureFrame(force = false)
176
191
  mainHandler.postDelayed(this, (snapshotInterval * 1000).toLong())
177
192
  }
178
193
  }
@@ -184,32 +199,43 @@ class VisualCapture private constructor(private val context: Context) {
184
199
  captureRunnable = null
185
200
  }
186
201
 
187
- private fun captureFrame() {
202
+ private fun captureFrame(force: Boolean = false) {
188
203
  val currentFrameNum = frameCounter.get()
189
- // Log first 3 frames at notice level
190
204
  if (currentFrameNum < 3) {
191
- DiagnosticLog.notice("[VisualCapture] captureFrame #$currentFrameNum, state=${stateMachine.currentState}, activity=${currentActivity?.get()?.javaClass?.simpleName ?: "null"}")
205
+ DiagnosticLog.trace("[VisualCapture] captureFrame #$currentFrameNum, state=${stateMachine.currentState}, activity=${currentActivity?.get()?.javaClass?.simpleName ?: "null"}")
192
206
  }
193
207
 
194
208
  if (stateMachine.currentState != CaptureState.CAPTURING) {
195
- DiagnosticLog.notice("[VisualCapture] captureFrame skipped - state=${stateMachine.currentState}")
196
209
  DiagnosticLog.trace("[VisualCapture] captureFrame skipped - state=${stateMachine.currentState}")
197
210
  return
198
211
  }
199
212
 
200
213
  val activity = currentActivity?.get()
201
214
  if (activity == null) {
202
- if (currentFrameNum < 3) {
203
- DiagnosticLog.notice("[VisualCapture] captureFrame skipped - NO ACTIVITY")
204
- }
205
215
  DiagnosticLog.trace("[VisualCapture] captureFrame skipped - no activity")
206
216
  return
207
217
  }
208
218
 
219
+ // Refresh map detection state (very cheap shallow walk)
220
+ SpecialCases.shared.refreshMapState(activity)
221
+
222
+ // Map stutter prevention: when a map view is visible and its camera
223
+ // is still moving (user gesture or animation), skip decorView.draw()
224
+ // entirely — this call triggers GPU readback on SurfaceView/TextureView
225
+ // map tiles which causes visible stutter. We resume capture at 1 FPS
226
+ // once the map SDK reports idle.
227
+ if (!force && SpecialCases.shared.mapVisible && !SpecialCases.shared.mapIdle) {
228
+ if (currentFrameNum < 3 || currentFrameNum % 30 == 0L) {
229
+ DiagnosticLog.trace("[VisualCapture] SKIPPING capture - map moving (mapIdle=false)")
230
+ }
231
+ return
232
+ }
233
+
209
234
  val frameStart = SystemClock.elapsedRealtime()
210
235
 
211
236
  try {
212
- val decorView = activity.window?.decorView ?: return
237
+ val window = activity.window ?: return
238
+ val decorView = window.decorView
213
239
  val bounds = Rect()
214
240
  decorView.getWindowVisibleDisplayFrame(bounds)
215
241
 
@@ -217,68 +243,134 @@ class VisualCapture private constructor(private val context: Context) {
217
243
 
218
244
  val redactRects = redactionMask.computeRects()
219
245
 
220
- // Use lower scale to reduce encoding time significantly
221
246
  val screenScale = 1.25f
222
247
  val scaledWidth = (bounds.width() / screenScale).toInt()
223
248
  val scaledHeight = (bounds.height() / screenScale).toInt()
224
249
 
250
+ // 1. Draw the View tree (captures everything except GPU surfaces)
225
251
  val bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888)
226
252
  val canvas = Canvas(bitmap)
227
253
  canvas.scale(1f / screenScale, 1f / screenScale)
228
-
229
254
  decorView.draw(canvas)
230
255
 
231
- // Apply redactions
232
- if (redactRects.isNotEmpty()) {
233
- val paint = Paint().apply {
234
- color = Color.BLACK
235
- style = Paint.Style.FILL
236
- }
237
- for (rect in redactRects) {
238
- if (rect.width() > 0 && rect.height() > 0) {
239
- canvas.drawRect(
240
- rect.left / screenScale,
241
- rect.top / screenScale,
242
- rect.right / screenScale,
243
- rect.bottom / screenScale,
244
- paint
245
- )
246
- }
247
- }
248
- }
249
-
250
- // Compress to JPEG
251
- val stream = ByteArrayOutputStream()
252
- bitmap.compress(Bitmap.CompressFormat.JPEG, (quality * 100).toInt(), stream)
253
- bitmap.recycle()
256
+ // 2. Composite GPU surfaces (TextureView/SurfaceView) on top.
257
+ // decorView.draw() renders these as black; we grab their pixels
258
+ // directly and paint them at the correct position.
259
+ compositeGpuSurfaces(decorView, canvas, screenScale)
254
260
 
255
- val data = stream.toByteArray()
256
- val captureTs = System.currentTimeMillis()
257
- val frameNum = frameCounter.incrementAndGet()
261
+ processCapture(bitmap, redactRects, screenScale, frameStart, force)
258
262
 
259
- // Log first frame and every 30 frames
260
- if (frameNum == 1L) {
261
- DiagnosticLog.notice("[VisualCapture] First frame captured! size=${data.size} bytes")
263
+ } catch (e: Exception) {
264
+ DiagnosticLog.fault("Frame capture failed: ${e.message}")
265
+ }
266
+ }
267
+
268
+ /**
269
+ * Find all TextureView instances in the hierarchy and draw their GPU-rendered
270
+ * content onto the capture canvas at the correct position. decorView.draw()
271
+ * renders TextureView/SurfaceView as black; this fills in the actual pixels.
272
+ *
273
+ * Mapbox uses SurfaceView by default, so we use MapView.snapshot() to capture
274
+ * the map and composite it at the correct position.
275
+ */
276
+ private fun compositeGpuSurfaces(root: View, canvas: Canvas, screenScale: Float) {
277
+ findTextureViews(root) { tv ->
278
+ try {
279
+ val tvBitmap = tv.bitmap ?: return@findTextureViews
280
+ val loc = IntArray(2)
281
+ tv.getLocationInWindow(loc)
282
+ canvas.drawBitmap(tvBitmap, loc[0].toFloat(), loc[1].toFloat(), null)
283
+ tvBitmap.recycle()
284
+ } catch (_: Exception) {
285
+ // Safety: never crash if TextureView.getBitmap() fails
286
+ }
287
+ }
288
+ compositeMapboxSnapshot(root, canvas)
289
+ }
290
+
291
+ /**
292
+ * Mapbox MapView uses SurfaceView; decorView.draw() renders it black.
293
+ * Use MapView.snapshot() (Mapbox SDK API) to capture the map and composite it.
294
+ */
295
+ private fun compositeMapboxSnapshot(root: View, canvas: Canvas) {
296
+ val mapView = SpecialCases.shared.getMapboxMapViewForSnapshot(root) ?: return
297
+ try {
298
+ val snapshot = mapView.javaClass.getMethod("snapshot").invoke(mapView)
299
+ val bitmap = snapshot as? Bitmap ?: return
300
+ val loc = IntArray(2)
301
+ mapView.getLocationInWindow(loc)
302
+ canvas.drawBitmap(bitmap, loc[0].toFloat(), loc[1].toFloat(), null)
303
+ bitmap.recycle()
304
+ } catch (e: Exception) {
305
+ DiagnosticLog.trace("[VisualCapture] Mapbox snapshot failed: ${e.message}")
306
+ }
307
+ }
308
+
309
+ private fun findTextureViews(view: View, action: (TextureView) -> Unit) {
310
+ if (view is TextureView && view.isAvailable) {
311
+ action(view)
312
+ }
313
+ if (view is ViewGroup) {
314
+ for (i in 0 until view.childCount) {
315
+ findTextureViews(view.getChildAt(i), action)
262
316
  }
263
- if (frameNum % 30 == 0L) {
264
- val frameDurationMs = (SystemClock.elapsedRealtime() - frameStart).toDouble()
265
- val isMainThread = Looper.myLooper() == Looper.getMainLooper()
266
- DiagnosticLog.perfFrame("screenshot", frameDurationMs, frameNum.toInt(), isMainThread)
317
+ }
318
+ }
319
+
320
+ private fun processCapture(
321
+ bitmap: Bitmap,
322
+ redactRects: List<Rect>,
323
+ screenScale: Float,
324
+ frameStart: Long,
325
+ force: Boolean
326
+ ) {
327
+ // Apply redactions
328
+ if (redactRects.isNotEmpty()) {
329
+ val canvas = Canvas(bitmap)
330
+ val paint = Paint().apply {
331
+ color = Color.BLACK
332
+ style = Paint.Style.FILL
267
333
  }
268
-
269
- // Store in buffer
270
- stateLock.withLock {
271
- screenshots.add(Pair(data, captureTs))
272
- enforceScreenshotCaps()
273
- val shouldSend = !deferredUntilCommit && screenshots.size >= batchSize
274
-
275
- if (shouldSend) {
276
- sendScreenshots()
334
+ for (rect in redactRects) {
335
+ if (rect.width() > 0 && rect.height() > 0) {
336
+ canvas.drawRect(
337
+ rect.left / screenScale,
338
+ rect.top / screenScale,
339
+ rect.right / screenScale,
340
+ rect.bottom / screenScale,
341
+ paint
342
+ )
277
343
  }
278
344
  }
345
+ }
346
+
347
+ // Compress to JPEG
348
+ val stream = ByteArrayOutputStream()
349
+ bitmap.compress(Bitmap.CompressFormat.JPEG, (quality * 100).toInt(), stream)
350
+ bitmap.recycle()
351
+
352
+ val data = stream.toByteArray()
353
+ val captureTs = System.currentTimeMillis()
354
+ val frameNum = frameCounter.incrementAndGet()
355
+
356
+ if (frameNum == 1L) {
357
+ DiagnosticLog.trace("[VisualCapture] First frame captured! size=${data.size} bytes")
358
+ }
359
+ if (frameNum % 30 == 0L) {
360
+ val frameDurationMs = (SystemClock.elapsedRealtime() - frameStart).toDouble()
361
+ val isMainThread = Looper.myLooper() == Looper.getMainLooper()
362
+ DiagnosticLog.perfFrame("screenshot", frameDurationMs, frameNum.toInt(), isMainThread)
363
+ }
364
+
365
+ // Store in buffer
366
+ stateLock.withLock {
367
+ screenshots.add(Pair(data, captureTs))
368
+ enforceScreenshotCaps()
369
+ val shouldSend = !deferredUntilCommit && screenshots.size >= batchSize
279
370
 
280
- } catch (e: Exception) {
281
- DiagnosticLog.fault("Frame capture failed: ${e.message}")
371
+ if (shouldSend) {
372
+ sendScreenshots()
373
+ }
282
374
  }
283
375
  }
284
376
 
@@ -302,7 +394,7 @@ class VisualCapture private constructor(private val context: Context) {
302
394
  return
303
395
  }
304
396
 
305
- DiagnosticLog.notice("[VisualCapture] sendScreenshots: sending ${images.size} frames")
397
+ DiagnosticLog.trace("[VisualCapture] sendScreenshots: sending ${images.size} frames")
306
398
 
307
399
  // All heavy work happens in background
308
400
  encodeExecutor.execute {
@@ -609,7 +609,7 @@ public final class RejourneyImpl: NSObject {
609
609
  resolve: @escaping RCTPromiseResolveBlock,
610
610
  reject: @escaping RCTPromiseRejectBlock
611
611
  ) {
612
- DiagnosticLog.notice("[Rejourney] setRemoteConfig: rejourneyEnabled=\(rejourneyEnabled), recordingEnabled=\(recordingEnabled), sampleRate=\(sampleRate), maxRecording=\(maxRecordingMinutes)min")
612
+ DiagnosticLog.trace("[Rejourney] setRemoteConfig: rejourneyEnabled=\(rejourneyEnabled), recordingEnabled=\(recordingEnabled), sampleRate=\(sampleRate), maxRecording=\(maxRecordingMinutes)min")
613
613
 
614
614
  ReplayOrchestrator.shared.setRemoteConfig(
615
615
  rejourneyEnabled: rejourneyEnabled,
@@ -631,23 +631,8 @@ public final class RejourneyImpl: NSObject {
631
631
  resolve: @escaping RCTPromiseResolveBlock,
632
632
  reject: @escaping RCTPromiseRejectBlock
633
633
  ) {
634
- resolve([
635
- "uploadSuccessCount": 0,
636
- "uploadFailureCount": 0,
637
- "retryAttemptCount": 0,
638
- "circuitBreakerOpenCount": 0,
639
- "memoryEvictionCount": 0,
640
- "offlinePersistCount": 0,
641
- "sessionStartCount": (ReplayOrchestrator.shared.replayId != nil) ? 1 : 0,
642
- "crashCount": 0,
643
- "uploadSuccessRate": 1.0,
644
- "avgUploadDurationMs": 0.0,
645
- "currentQueueDepth": 0,
646
- "lastUploadTime": NSNull(),
647
- "lastRetryTime": NSNull(),
648
- "totalBytesUploaded": 0,
649
- "totalBytesEvicted": 0
650
- ])
634
+ let queueDepth = TelemetryPipeline.shared.getQueueDepth()
635
+ resolve(SegmentDispatcher.shared.sdkTelemetrySnapshot(currentQueueDepth: queueDepth))
651
636
  }
652
637
 
653
638
  @objc(getDeviceInfo:reject:)
@@ -28,6 +28,7 @@ public final class InteractionRecorder: NSObject {
28
28
  private var _inputObservers = NSMapTable<UITextField, AnyObject>.weakToStrongObjects()
29
29
  private var _navigationStack: [String] = []
30
30
  private let _coalesceWindow: TimeInterval = 0.3
31
+ private var _lastInteractionTimestampMs: UInt64 = 0
31
32
 
32
33
  private override init() {
33
34
  super.init()
@@ -48,6 +49,11 @@ public final class InteractionRecorder: NSObject {
48
49
  _gestureAggregator = nil
49
50
  _inputObservers.removeAllObjects()
50
51
  _navigationStack.removeAll()
52
+ _lastInteractionTimestampMs = 0
53
+ }
54
+
55
+ @objc public func latestInteractionTimestampMs() -> UInt64 {
56
+ _lastInteractionTimestampMs
51
57
  }
52
58
 
53
59
  @objc public func observeTextField(_ field: UITextField) {
@@ -88,6 +94,21 @@ public final class InteractionRecorder: NSObject {
88
94
  @objc public func processRawTouches(_ event: UIEvent, in window: UIWindow) {
89
95
  guard isTracking, let agg = _gestureAggregator else { return }
90
96
  guard let touches = event.allTouches else { return }
97
+ _lastInteractionTimestampMs = UInt64(Date().timeIntervalSince1970 * 1000)
98
+
99
+ // Notify SpecialCases about touch phases for touch-based map idle detection
100
+ // (used by Mapbox v10+ where SDK idle callbacks can't be hooked).
101
+ for touch in touches {
102
+ switch touch.phase {
103
+ case .began:
104
+ SpecialCases.shared.notifyTouchBegan()
105
+ case .ended, .cancelled:
106
+ SpecialCases.shared.notifyTouchEnded()
107
+ default:
108
+ break
109
+ }
110
+ }
111
+
91
112
  for touch in touches {
92
113
  agg.processTouch(touch, in: window)
93
114
  }
@@ -304,6 +325,13 @@ private final class GestureAggregator: NSObject {
304
325
  }
305
326
 
306
327
  private func _resolveTarget(at point: CGPoint, in window: UIWindow) -> (label: String, isInteractive: Bool) {
328
+ // When a map view is visible, skip hitTest entirely — performing
329
+ // hitTest on a deep Metal/OpenGL map hierarchy is expensive and
330
+ // causes micro-stutter during pan/zoom gestures.
331
+ if SpecialCases.shared.mapVisible {
332
+ return ("map", false)
333
+ }
334
+
307
335
  guard let hit = window.hitTest(point, with: nil) else { return ("window", false) }
308
336
 
309
337
  let label = hit.accessibilityIdentifier ?? hit.accessibilityLabel ?? String(describing: type(of: hit))
@@ -38,7 +38,7 @@ public final class ReplayOrchestrator: NSObject {
38
38
  }
39
39
  }
40
40
 
41
- @objc public var snapshotInterval: Double = 0.33
41
+ @objc public var snapshotInterval: Double = 1.0
42
42
  @objc public var compressionLevel: Double = 0.5
43
43
  @objc public var visualCaptureEnabled: Bool = true
44
44
  @objc public var interactionCaptureEnabled: Bool = true
@@ -168,7 +168,7 @@ public final class ReplayOrchestrator: NSObject {
168
168
  _initSession()
169
169
  TelemetryPipeline.shared.activateDeferredMode()
170
170
 
171
- let renderCfg = _computeRender(fps: 3, tier: "standard")
171
+ let renderCfg = _computeRender(fps: 1, tier: "standard")
172
172
 
173
173
  if visualCaptureEnabled {
174
174
  VisualCapture.shared.configure(snapshotInterval: renderCfg.interval, jpegQuality: renderCfg.quality)
@@ -224,6 +224,7 @@ public final class ReplayOrchestrator: NSObject {
224
224
  "screensVisited": _visitedScreens,
225
225
  "screenCount": Set(_visitedScreens).count
226
226
  ]
227
+ let queueDepthAtFinalize = TelemetryPipeline.shared.getQueueDepth()
227
228
 
228
229
  SegmentDispatcher.shared.evaluateReplayRetention(replayId: sid, metrics: metrics) { [weak self] retain, reason in
229
230
  guard let self else { return }
@@ -246,7 +247,13 @@ public final class ReplayOrchestrator: NSObject {
246
247
  }
247
248
  self._finalized = true
248
249
 
249
- SegmentDispatcher.shared.concludeReplay(replayId: sid, concludedAt: termMs, backgroundDurationMs: self._bgTimeMs, metrics: metrics) { [weak self] ok in
250
+ SegmentDispatcher.shared.concludeReplay(
251
+ replayId: sid,
252
+ concludedAt: termMs,
253
+ backgroundDurationMs: self._bgTimeMs,
254
+ metrics: metrics,
255
+ currentQueueDepth: queueDepthAtFinalize
256
+ ) { [weak self] ok in
250
257
  if ok { self?._clearRecovery() }
251
258
  completion?(true, ok)
252
259
  }
@@ -285,7 +292,7 @@ public final class ReplayOrchestrator: NSObject {
285
292
  // If recording is disabled, disable visual capture
286
293
  if !recordingEnabled {
287
294
  visualCaptureEnabled = false
288
- DiagnosticLog.notice("[ReplayOrchestrator] Visual capture disabled by remote config (recordingEnabled=false)")
295
+ DiagnosticLog.trace("[ReplayOrchestrator] Visual capture disabled by remote config (recordingEnabled=false)")
289
296
  }
290
297
 
291
298
  // If already recording, restart the duration limit timer with updated config
@@ -293,7 +300,7 @@ public final class ReplayOrchestrator: NSObject {
293
300
  _startDurationLimitTimer()
294
301
  }
295
302
 
296
- DiagnosticLog.notice("[ReplayOrchestrator] Remote config applied: rejourneyEnabled=\(rejourneyEnabled), recordingEnabled=\(recordingEnabled), sampleRate=\(sampleRate)%, maxRecording=\(maxRecordingMinutes)min, isSampledIn=\(recordingEnabled)")
303
+ DiagnosticLog.trace("[ReplayOrchestrator] Remote config applied: rejourneyEnabled=\(rejourneyEnabled), recordingEnabled=\(recordingEnabled), sampleRate=\(sampleRate)%, maxRecording=\(maxRecordingMinutes)min, isSampledIn=\(recordingEnabled)")
297
304
  }
298
305
 
299
306
  @objc public func attachAttribute(key: String, value: String) {
@@ -344,8 +351,15 @@ public final class ReplayOrchestrator: NSObject {
344
351
  "crashCount": 1,
345
352
  "durationSeconds": Int((nowMs - origStart) / 1000)
346
353
  ]
347
-
348
- SegmentDispatcher.shared.concludeReplay(replayId: recId, concludedAt: nowMs, backgroundDurationMs: 0, metrics: crashMetrics) { [weak self] ok in
354
+ let queueDepthAtFinalize = TelemetryPipeline.shared.getQueueDepth()
355
+
356
+ SegmentDispatcher.shared.concludeReplay(
357
+ replayId: recId,
358
+ concludedAt: nowMs,
359
+ backgroundDurationMs: 0,
360
+ metrics: crashMetrics,
361
+ currentQueueDepth: queueDepthAtFinalize
362
+ ) { [weak self] ok in
349
363
  self?._clearRecovery()
350
364
  completion(ok ? recId : nil)
351
365
  }
@@ -483,7 +497,7 @@ public final class ReplayOrchestrator: NSObject {
483
497
  SegmentDispatcher.shared.activate()
484
498
  TelemetryPipeline.shared.activate()
485
499
 
486
- let renderCfg = _computeRender(fps: 3, tier: "high")
500
+ let renderCfg = _computeRender(fps: 1, tier: "standard")
487
501
  VisualCapture.shared.configure(snapshotInterval: renderCfg.interval, jpegQuality: renderCfg.quality)
488
502
 
489
503
  if visualCaptureEnabled { VisualCapture.shared.beginCapture(sessionOrigin: replayStartMs) }
@@ -510,20 +524,20 @@ public final class ReplayOrchestrator: NSObject {
510
524
  let remaining = maxMs > elapsed ? maxMs - elapsed : 0
511
525
 
512
526
  guard remaining > 0 else {
513
- DiagnosticLog.notice("[ReplayOrchestrator] Duration limit already exceeded, stopping session")
527
+ DiagnosticLog.trace("[ReplayOrchestrator] Duration limit already exceeded, stopping session")
514
528
  endReplay()
515
529
  return
516
530
  }
517
531
 
518
532
  let workItem = DispatchWorkItem { [weak self] in
519
533
  guard let self, self._live else { return }
520
- DiagnosticLog.notice("[ReplayOrchestrator] Recording duration limit reached (\(maxMinutes)min), stopping session")
534
+ DiagnosticLog.trace("[ReplayOrchestrator] Recording duration limit reached (\(maxMinutes)min), stopping session")
521
535
  self.endReplay()
522
536
  }
523
537
  _durationLimitTimer = workItem
524
538
  DispatchQueue.main.asyncAfter(deadline: .now() + .milliseconds(Int(remaining)), execute: workItem)
525
539
 
526
- DiagnosticLog.notice("[ReplayOrchestrator] Duration limit timer set: \(remaining / 1000)s remaining (max \(maxMinutes)min)")
540
+ DiagnosticLog.trace("[ReplayOrchestrator] Duration limit timer set: \(remaining / 1000)s remaining (max \(maxMinutes)min)")
527
541
  }
528
542
 
529
543
  private func _stopDurationLimitTimer() {
@@ -585,6 +599,13 @@ public final class ReplayOrchestrator: NSObject {
585
599
  return
586
600
  }
587
601
 
602
+ // Throttle hierarchy capture when map is visible and animating —
603
+ // hierarchy scanning traverses the full view tree including the
604
+ // map's deep Metal/GL subviews, adding main-thread pressure.
605
+ if SpecialCases.shared.mapVisible && !SpecialCases.shared.mapIdle {
606
+ return
607
+ }
608
+
588
609
  guard let hierarchy = ViewHierarchyScanner.shared.captureHierarchy() else { return }
589
610
 
590
611
  let hash = _hierarchyHash(hierarchy)
@@ -608,13 +629,25 @@ public final class ReplayOrchestrator: NSObject {
608
629
  }
609
630
 
610
631
  private func _computeRender(fps: Int, tier: String) -> (interval: Double, quality: Double) {
611
- let interval = 1.0 / Double(max(1, min(fps, 99)))
632
+ let tierLower = tier.lowercased()
633
+ let interval: Double
612
634
  let quality: Double
613
- switch tier.lowercased() {
614
- case "low": quality = 0.4
615
- case "standard": quality = 0.5
616
- case "high": quality = 0.6
617
- default: quality = 0.5
635
+ switch tierLower {
636
+ case "minimal":
637
+ interval = 2.0 // 0.5 fps for maximum size reduction
638
+ quality = 0.4
639
+ case "low":
640
+ interval = 1.0 / Double(max(1, min(fps, 99)))
641
+ quality = 0.4
642
+ case "standard":
643
+ interval = 1.0 / Double(max(1, min(fps, 99)))
644
+ quality = 0.5
645
+ case "high":
646
+ interval = 1.0 / Double(max(1, min(fps, 99)))
647
+ quality = 0.55
648
+ default:
649
+ interval = 1.0 / Double(max(1, min(fps, 99)))
650
+ quality = 0.5
618
651
  }
619
652
  return (interval, quality)
620
653
  }