blix-expo-settings 0.1.11 → 0.1.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,12 +2,57 @@ import ExpoModulesCore
  import HaishinKit
  import AVFoundation
  import VideoToolbox
+ import Logboard
+
+ // MARK: - RTMP Event Observer
+
+ final class RTMPEventObserver: NSObject {
+ var onStatus: ((String, String, String) -> Void)?
+ var onError: ((String) -> Void)?
+
+ @objc func rtmpStatusHandler(_ notification: Notification) {
+ let e: Event = Event.from(notification)
+ guard let data = e.data as? [String: Any] else { return }
+ let code = data["code"] as? String ?? ""
+ let level = data["level"] as? String ?? ""
+ let desc = data["description"] as? String ?? ""
+ onStatus?(code, level, desc)
+ }
+
+ @objc func rtmpErrorHandler(_ notification: Notification) {
+ let e: Event = Event.from(notification)
+ onError?("ioError: \(e)")
+ }
+ }
+
+ // MARK: - Module

  public class ExpoSettingsModule: Module {
  private var rtmpConnection: RTMPConnection?
  private var rtmpStream: RTMPStream?
  private var currentStreamStatus: String = "stopped"
+ private let rtmpObserver = RTMPEventObserver()
+
+ private var pendingPublish: (url: String, streamKey: String)?
+ private var statsTimer: Timer?
+
+ // Timing/debug
+ private var previewInitTime: Date?
+ private var publishRequestTime: Date?
+ private var firstDataSentTime: Date?
+ private var stopRequestTime: Date?
+ private var lastDataSentTime: Date?
+
+ // Stream config
+ private let TARGET_ASPECT_RATIO: CGFloat = 9.0 / 16.0
+ private var calculatedVideoWidth: Int = 720
+ private var calculatedVideoHeight: Int = 1280
+ private var configuredBitrate: Int = 2_500_000
+ private var configuredFrameRate: Float64 = 30

+ // Monitor cancellation
+ private var dataMonitorToken: UUID?
+ private var stopFlushToken: UUID?

  public func definition() -> ModuleDefinition {
  Name("ExpoSettings")
@@ -18,226 +63,542 @@ public class ExpoSettingsModule: Module {
  // nothing needs to go here if you don't have Props
  }

- Events("onStreamStatus")
+ Events("onStreamStatus", "onStreamStats", "onStreamTiming")

  Function("getStreamStatus") {
  return self.currentStreamStatus
  }

+ Function("getStreamInfo") { () -> [String: Any] in
+ let w = self.calculatedVideoWidth
+ let h = self.calculatedVideoHeight
+ let ar = (h == 0) ? 0.0 : (Double(w) / Double(h))
+ return [
+ "videoWidth": w,
+ "videoHeight": h,
+ "aspectRatio": String(format: "%.4f", ar),
+ "bitrate": self.configuredBitrate,
+ "frameRate": self.configuredFrameRate
+ ]
+ }
+
+ Function("getDeviceDimensions") { () -> [String: Any] in
+ let screen = UIScreen.main.bounds
+ let scale = UIScreen.main.scale
+ return [
+ "screenWidth": Int(screen.width),
+ "screenHeight": Int(screen.height),
+ "scale": scale,
+ "pixelWidth": Int(screen.width * scale),
+ "pixelHeight": Int(screen.height * scale),
+ "streamWidth": self.calculatedVideoWidth,
+ "streamHeight": self.calculatedVideoHeight,
+ "aspectRatio": String(format: "%.4f", Double(self.calculatedVideoWidth) / Double(max(self.calculatedVideoHeight, 1)))
+ ]
+ }
+
+ Function("getStreamTiming") { () -> [String: Any] in
+ var result: [String: Any] = [:]
+ let fmt = ISO8601DateFormatter()
+
+ if let t = self.previewInitTime { result["previewInitTime"] = fmt.string(from: t) }
+ if let t = self.publishRequestTime { result["publishRequestTime"] = fmt.string(from: t) }
+ if let t = self.firstDataSentTime { result["firstDataSentTime"] = fmt.string(from: t) }
+ if let t = self.stopRequestTime { result["stopRequestTime"] = fmt.string(from: t) }
+ if let t = self.lastDataSentTime { result["lastDataSentTime"] = fmt.string(from: t) }
+
+ if let publish = self.publishRequestTime, let first = self.firstDataSentTime {
+ result["startDelayMs"] = Int(first.timeIntervalSince(publish) * 1000)
+ }
+
+ if let stop = self.stopRequestTime, let last = self.lastDataSentTime {
+ // Positive means stop happened after last data timestamp
+ result["timeSinceLastDataMs"] = Int(stop.timeIntervalSince(last) * 1000)
+ }
+
+ return result
+ }
+
  Function("initializePreview") { () -> Void in
28
- Task {
29
- self.currentStreamStatus = "previewInitializing"
30
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
31
-
32
- do {
33
-
34
- // 0) Configura e ativa o AVAudioSession
35
- let session = AVAudioSession.sharedInstance()
36
- do {
37
- try session.setCategory(.playAndRecord,
38
- mode: .default,
39
- options: [.defaultToSpeaker, .allowBluetooth])
40
- try session.setActive(true)
41
- } catch {
42
- print("[ExpoSettings] AVAudioSession error:", error)
43
- }
123
+ DispatchQueue.main.async { self.initializePreview() }
124
+ }
44
125
 
45
- // 1) Conectar ao servidor RTMP, mas não publica
46
- let connection = RTMPConnection()
47
- self.rtmpConnection = connection
48
-
49
- // 2) Criar RTMPStream, mas não publica pro servidor ainda
50
- let stream = RTMPStream(connection: connection)
51
- self.rtmpStream = stream
52
- print("[ExpoSettings] RTMPStream initialized")
53
-
54
- // 3) Configurar captura: frame rate e preset
55
- stream.sessionPreset = .hd1280x720
56
- stream.frameRate = 30
57
- stream.videoOrientation = .portrait
58
- stream.configuration { captureSession in
59
- captureSession.automaticallyConfiguresApplicationAudioSession = true
60
- }
126
+ Function("publishStream") { (url: String, streamKey: String) -> Void in
127
+ DispatchQueue.main.async { self.publishStream(url: url, streamKey: streamKey) }
128
+ }
61
129
 
62
- // 4) Configurar áudio: anexa microfone
63
- if let audioDevice = AVCaptureDevice.default(for: .audio) {
64
- print("[ExpoSettings] Attaching audio device")
65
- stream.attachAudio(audioDevice)
66
- } else {
67
- print("[ExpoSettings] No audio device found")
68
- }
130
+ Function("stopStream") { () -> Void in
131
+ DispatchQueue.main.async { self.stopStream() }
132
+ }
133
+ }
69
134
 
70
- // 5) Configurar vídeo: anexa câmera frontal
71
- if let camera = AVCaptureDevice.default(.builtInWideAngleCamera,
72
- for: .video,
73
- position: .front) {
74
- print("[ExpoSettings] Attaching camera device")
75
- stream.attachCamera(camera) { videoUnit, error in
76
- guard let unit = videoUnit else {
77
- print("[ExpoSettings] attachCamera error:", error?.localizedDescription ?? "unknown")
78
- return
79
- }
80
- unit.isVideoMirrored = true
81
- unit.videoOrientation = .portrait
82
- unit.preferredVideoStabilizationMode = .standard
83
- unit.colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
135
+ // MARK: - Helpers
84
136
 
85
- }
86
- if let preview = await ExpoSettingsView.current {
87
- print("[ExpoSettings] Attaching stream to preview view")
88
- await preview.attachStream(stream)
89
- } else {
90
- print("[ExpoSettings] ERROR: Preview view not found!")
91
- }
92
- } else {
93
- print("[ExpoSettings] No camera device found")
94
- }
137
+ private func setStatus(_ s: String) {
138
+ guard currentStreamStatus != s else { return }
139
+ currentStreamStatus = s
140
+ sendEvent("onStreamStatus", [
141
+ "status": s,
142
+ "timestamp": ISO8601DateFormatter().string(from: Date())
143
+ ])
144
+ }
145
+
146
+ private func sanitizeRTMPUrl(_ url: String) -> String {
147
+ var u = url.trimmingCharacters(in: .whitespacesAndNewlines)
148
+ while u.hasSuffix("/") { u.removeLast() }
149
+ return u
150
+ }
95
151
 
96
- //6) Definir configurações de codec
97
- print("[ExpoSettings] Setting audio and video codecs")
98
- var audioSettings = AudioCodecSettings()
99
- audioSettings.bitRate = 128 * 1000
100
- stream.audioSettings = audioSettings
152
+ private func calculateStreamDimensions() -> (width: Int, height: Int) {
+ let width = 720
+ let height = 1280
+
+ let aspectRatio = CGFloat(width) / CGFloat(height)
+ let expected = TARGET_ASPECT_RATIO
+
+ assert(abs(aspectRatio - expected) < 0.001, "Aspect ratio mismatch!")
+
+ print("[ExpoSettings] 📐 Stream dimensions: \(width)x\(height)")
+ print("[ExpoSettings] 📐 Aspect ratio: \(String(format: "%.4f", aspectRatio)) expected \(String(format: "%.4f", expected))")
+ return (width, height)
+ }
+
+ // MARK: - Permissions
+
+ private func requestAVPermissions(completion: @escaping (Bool) -> Void) {
+ let group = DispatchGroup()
+ var camOK = false
+ var micOK = false
+
+ group.enter()
+ AVCaptureDevice.requestAccess(for: .video) { granted in
+ camOK = granted
+ group.leave()
+ }
+
+ group.enter()
+ AVCaptureDevice.requestAccess(for: .audio) { granted in
+ micOK = granted
+ group.leave()
+ }
+
+ group.notify(queue: .main) {
+ print("[ExpoSettings] camera permission \(camOK)")
+ print("[ExpoSettings] mic permission \(micOK)")
+ completion(camOK && micOK)
+ }
+ }
+
+ // MARK: - Preview init
+
+ private func initializePreview() {
+ previewInitTime = Date()
+ LBLogger.with("com.haishinkit.HaishinKit").level = .trace
+
+ print("[ExpoSettings] ⏱️ initializePreview at \(ISO8601DateFormatter().string(from: previewInitTime!))")
+ setStatus("previewInitializing")
+
+ requestAVPermissions { [weak self] ok in
+ guard let self else { return }
+ guard ok else {
+ print("[ExpoSettings] ❌ Missing camera/mic permissions")
+ self.setStatus("error")
+ return
+ }
+
+ // Audio session
+ let session = AVAudioSession.sharedInstance()
+ do {
+ try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
+ try session.setPreferredSampleRate(44_100)
+ try session.setActive(true)
+ print("[ExpoSettings] ✅ AudioSession OK")
+ } catch {
+ print("[ExpoSettings] ❌ AudioSession error: \(error)")
+ }
+
+ let connection = RTMPConnection()
+ self.rtmpConnection = connection
+
+ let stream = RTMPStream(connection: connection)
+
+ // Attach listeners
+ connection.addEventListener(.rtmpStatus,
+ selector: #selector(RTMPEventObserver.rtmpStatusHandler(_:)),
+ observer: self.rtmpObserver)
+ connection.addEventListener(.ioError,
+ selector: #selector(RTMPEventObserver.rtmpErrorHandler(_:)),
+ observer: self.rtmpObserver)
+
+ stream.addEventListener(.rtmpStatus,
+ selector: #selector(RTMPEventObserver.rtmpStatusHandler(_:)),
+ observer: self.rtmpObserver)
+ stream.addEventListener(.ioError,
+ selector: #selector(RTMPEventObserver.rtmpErrorHandler(_:)),
+ observer: self.rtmpObserver)
+
+ self.rtmpStream = stream
+
+ self.rtmpObserver.onStatus = { [weak self] code, level, desc in
+ self?.handleRTMPStatus(code: code, level: level, desc: desc)
+ }
+ self.rtmpObserver.onError = { [weak self] msg in
+ print("[ExpoSettings] ❌ \(msg)")
+ self?.setStatus("error")
+ }
+
+ // Dimensions
+ let dimensions = self.calculateStreamDimensions()
+ self.calculatedVideoWidth = dimensions.width
+ self.calculatedVideoHeight = dimensions.height
+
+ // Video settings
+ self.configuredBitrate = 2_500_000
+ self.configuredFrameRate = 30

  let videoSettings = VideoCodecSettings(
- videoSize: .init(width: 720, height: 1280),
- bitRate: 4000 * 1000,
+ videoSize: CGSize(width: dimensions.width, height: dimensions.height),
+ bitRate: self.configuredBitrate,
  profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
  scalingMode: .trim,
  bitRateMode: .average,
- maxKeyFrameIntervalDuration: 2,
+ maxKeyFrameIntervalDuration: 1, // GOP 1s
  allowFrameReordering: nil,
  isHardwareEncoderEnabled: true
- )
- stream.videoSettings = videoSettings
+ )
+ stream.videoSettings = videoSettings
+ stream.frameRate = self.configuredFrameRate
+
+ print("[ExpoSettings] 📐 VideoSettings videoSize=\(stream.videoSettings.videoSize) bitrate=\(stream.videoSettings.bitRate) GOP=\(stream.videoSettings.maxKeyFrameIntervalDuration) fps=\(stream.frameRate)")
+
+ // Audio settings
+ var audioSettings = AudioCodecSettings()
+ audioSettings.bitRate = 128_000
+ stream.audioSettings = audioSettings
+ print("[ExpoSettings] 🔊 Audio bitRate: 128000")
+
+ // Devices
+ guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
+ print("[ExpoSettings] ❌ No front camera")
+ self.setStatus("error")
+ return
  }
- self.currentStreamStatus = "previewReady"
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
+ guard let microphone = AVCaptureDevice.default(for: .audio) else {
+ print("[ExpoSettings] ❌ No microphone")
+ self.setStatus("error")
+ return
+ }
+
+ // Attach camera (portrait, mirrored)
+ stream.attachCamera(camera) { videoUnit, error in
+ if let error = error {
+ print("[ExpoSettings] ❌ Camera ERROR: \(error)")
+ } else {
+ videoUnit?.isVideoMirrored = true
+ videoUnit?.videoOrientation = .portrait
+ print("[ExpoSettings] ✅ Camera attached (portrait, mirrored)")
+ }
+ }
+
+ stream.attachAudio(microphone) { _, error in
+ if let error = error {
+ print("[ExpoSettings] ❌ Audio ERROR: \(error.localizedDescription)")
+ } else {
+ print("[ExpoSettings] ✅ Audio attached")
+ }
+ }
+
+ // Attach preview
+ if let preview = ExpoSettingsView.current {
+ preview.attachStream(stream) // requires RTMPStream? in view to allow nil later
+ print("[ExpoSettings] ✅ Preview attached")
+ }
+
+ // Wait for encoder warm-up
+ DispatchQueue.main.asyncAfter(deadline: .now() + 1.2) { [weak self] in
+ guard let self, let s = self.rtmpStream else { return }
+ print("[ExpoSettings] 🔍 Warm verify videoSize=\(s.videoSettings.videoSize) fps=\(s.frameRate)")
+ self.setStatus("previewReady")
+ print("[ExpoSettings] ✅ Preview READY")
  }
  }
+ }

- Function("publishStream") { (url: String, streamKey: String) -> Void in
120
- Task {
121
-
122
- print("[ExpoSettings] Publishing stream to URL: \(url) with key: \(streamKey)")
123
-
124
- self.currentStreamStatus = "connecting"
125
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
126
-
127
- // se não houve initializePreview→recria a connection
128
- if self.rtmpConnection == nil || self.rtmpStream == nil {
129
- print("[ExpoSettings] WARNING: Connection or stream not initialized, creating new ones")
130
- // Create new connection
131
- let connection = RTMPConnection()
132
- self.rtmpConnection = connection
133
-
134
- // Create new stream
135
- let stream = RTMPStream(connection: connection)
136
- self.rtmpStream = stream
137
-
138
- // Captura: preset antes do FPS + orientação no stream
139
- stream.sessionPreset = .hd1280x720
140
- stream.frameRate = 30
141
- stream.videoOrientation = .portrait
142
- stream.configuration { captureSession in
143
- captureSession.automaticallyConfiguresApplicationAudioSession = true
144
- }
327
+ // MARK: - Publish
145
328
 
146
- // Áudio
147
- if let audioDevice = AVCaptureDevice.default(for: .audio) {
148
- stream.attachAudio(audioDevice)
149
- }
329
+ private func publishStream(url: String, streamKey: String) {
330
+ publishRequestTime = Date()
331
+ firstDataSentTime = nil
332
+ lastDataSentTime = nil
333
+ stopRequestTime = nil
150
334
 
151
- if let camera = AVCaptureDevice.default(.builtInWideAngleCamera,
152
- for: .video,
153
- position: .front) {
154
- stream.attachCamera(camera) { videoUnit, error in
155
- guard let unit = videoUnit else {
156
- print("[ExpoSettings] attachCamera error:", error?.localizedDescription ?? "unknown")
157
- return
158
- }
159
- unit.isVideoMirrored = true
160
- unit.videoOrientation = .portrait
161
- unit.preferredVideoStabilizationMode = .standard
162
- unit.colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
163
- }
164
- }
335
+ // reset monitors
336
+ dataMonitorToken = UUID()
165
337
 
166
- // Attach preview (se existir)
167
- if let preview = await ExpoSettingsView.current {
168
- await preview.attachStream(stream)
169
- } else {
170
- print("[ExpoSettings] ERROR: Preview view not found during publish!")
171
- }
338
+ let cleanUrl = sanitizeRTMPUrl(url)
339
+ print("[ExpoSettings] ⏱️ publishStream at \(ISO8601DateFormatter().string(from: publishRequestTime!))")
340
+ print("[ExpoSettings] URL: \(cleanUrl)")
341
+ print("[ExpoSettings] Key: \(streamKey)")
172
342
 
173
- var audioSettings = AudioCodecSettings()
174
- audioSettings.bitRate = 128 * 1000 // 128 kbps
175
- stream.audioSettings = audioSettings
176
-
177
- // Vídeo
178
- let videoSettings = VideoCodecSettings(
179
- videoSize: .init(width: 720, height: 1280),
180
- bitRate: 4000 * 1000, // 4 Mbps
181
- profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
182
- scalingMode: .trim,
183
- bitRateMode: .average,
184
- maxKeyFrameIntervalDuration: 2,
185
- allowFrameReordering: nil,
186
- isHardwareEncoderEnabled: true
187
- )
188
- stream.videoSettings = videoSettings
189
-
190
- self.currentStreamStatus = "previewReady"
191
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
192
-
193
- connection.connect(url)
194
- } else {
195
- // Use existing connection
196
- self.rtmpConnection?.connect(url)
197
- }
198
- self.currentStreamStatus = "connected"
199
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
343
+ guard let connection = rtmpConnection, let stream = rtmpStream else {
344
+ print("[ExpoSettings] No connection/stream")
345
+ setStatus("error")
346
+ return
347
+ }
348
+
349
+ print("[ExpoSettings] 🔍 Pre-publish videoSize=\(stream.videoSettings.videoSize)")
350
+
351
+ pendingPublish = (cleanUrl, streamKey)
352
+ setStatus("connecting")
353
+ connection.connect(cleanUrl)
354
+ }
200
355
 
201
- self.currentStreamStatus = "publishing"
202
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
356
+ // MARK: - RTMP status
203
357
 
204
- self.rtmpStream?.publish(streamKey)
- print("[ExpoSettings] Stream published successfully")
+ private func handleRTMPStatus(code: String, level: String, desc: String) {
+ let now = Date()
+ print("[ExpoSettings] ⏱️ RTMP status \(code) at \(ISO8601DateFormatter().string(from: now))")

- self.currentStreamStatus = "started"
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
+ guard let stream = rtmpStream else { return }
+
+ switch code {
+ case "NetConnection.Connect.Success":
+ setStatus("connected")
+ if let p = pendingPublish {
+ pendingPublish = nil
+ setStatus("publishing")
+ print("[ExpoSettings] 📤 Publishing...")
+ stream.publish(p.streamKey, type: .live)
+
+ // Start monitoring for real media egress
+ monitorForRealOutboundMedia()
  }
+
+ case "NetStream.Publish.Start":
+ // IMPORTANT:
+ // Do NOT setStatus("started") here anymore.
+ // This event means publish handshake started, not necessarily that DVR/RTMP has real media yet.
+ print("[ExpoSettings] ✅ Publish.Start received (waiting for data confirmation...)")
+
+ case "NetStream.Publish.BadName",
+ "NetStream.Publish.Rejected",
+ "NetConnection.Connect.Failed":
+ stopStatsTimer()
+ setStatus("error")
+
+ case "NetConnection.Connect.Closed":
+ stopStatsTimer()
+ setStatus("stopped")
+
+ default:
+ break
  }
+ }

- Function("stopStream") { () -> Void in
- Task {
- print("[ExpoSettings] stopStream called")
+ // MARK: - Start confirmation (Fix #1)

- // First stop the publication (if publishing)
- if let stream = self.rtmpStream {
- print("[ExpoSettings] Stopping stream publication")
- stream.close()
+ private func monitorForRealOutboundMedia() {
+ guard let connection = rtmpConnection, let stream = rtmpStream else { return }
+ let token = dataMonitorToken ?? UUID()
+ dataMonitorToken = token

- // Detach the camera and audio to release resources
- stream.attachCamera(nil)
- stream.attachAudio(nil)
- }
+ var checks = 0
+ let maxChecks = 200 // 20s (200 x 100ms)
+ let interval: TimeInterval = 0.1
+
+ // Require a few consecutive "good" checks to avoid flapping
+ var goodStreak = 0
+ let neededGoodStreak = 4 // 400ms stable
+
+ func tick() {
+ // cancelled?
+ guard self.dataMonitorToken == token else { return }
+
+ checks += 1
+
+ let bytesOut = connection.currentBytesOutPerSecond // Int32
+ let fps = stream.currentFPS
+
+ // Track last data time if any egress
+ if bytesOut > 0 && fps > 0 {
+ self.lastDataSentTime = Date()
+ }

- // Then close the RTMP connection
- if let connection = self.rtmpConnection {
- print("[ExpoSettings] Closing RTMP connection")
- connection.close()
+ if bytesOut > 0 && fps > 0 {
+ goodStreak += 1
+ } else {
+ goodStreak = 0
+ }
+
+ // Confirm start ONLY when stable outbound is observed
+ if goodStreak >= neededGoodStreak {
+ if self.firstDataSentTime == nil {
+ self.firstDataSentTime = Date()
  }

- // Clear the references
- self.rtmpStream = nil
- self.rtmpConnection = nil
+ let delayMs = self.publishRequestTime.flatMap { pub in
+ self.firstDataSentTime.map { Int($0.timeIntervalSince(pub) * 1000) }
+ } ?? -1
+
+ print("[ExpoSettings] ✅ Data confirmed (bytesOut=\(bytesOut), fps=\(fps)) after \(delayMs)ms")
+
+ // Emit timing events (send both names to match any JS)
+ self.sendEvent("onStreamTiming", [
+ "event": "dataConfirmed",
+ "delayMs": delayMs,
+ "timestamp": ISO8601DateFormatter().string(from: self.firstDataSentTime ?? Date())
+ ])
+ self.sendEvent("onStreamTiming", [
+ "event": "firstDataSent",
+ "delayMs": delayMs,
+ "timestamp": ISO8601DateFormatter().string(from: self.firstDataSentTime ?? Date())
+ ])
+
+ self.setStatus("started")
+ self.startStatsTimer()
+ return
+ }

- print("[ExpoSettings] Stream and connection closed and resources released")
+ // Timeout
+ if checks >= maxChecks {
+ print("[ExpoSettings] ⚠️ Start confirmation timeout (still no stable outbound media). Keeping status=\(self.currentStreamStatus)")
+ // Keep status as "publishing" or whatever it currently is; do not force started.
+ return
+ }

- self.currentStreamStatus = "stopped"
- sendEvent("onStreamStatus", ["status": self.currentStreamStatus])
+ // Keep checking while in publishing/connected state
+ if self.currentStreamStatus == "publishing" || self.currentStreamStatus == "connected" || self.currentStreamStatus == "connecting" {
+ DispatchQueue.main.asyncAfter(deadline: .now() + interval) { tick() }
  }
  }
+
+ tick()
+ }
+
+ // MARK: - Stats
+
+ private func startStatsTimer() {
+ stopStatsTimer()
+ statsTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
+ guard let self,
+ let c = self.rtmpConnection,
+ let s = self.rtmpStream else { return }
+
+ let fps = s.currentFPS
+ let bps = c.currentBytesOutPerSecond * 8
+
+ if bps > 0 {
+ self.lastDataSentTime = Date()
+ }
+
+ self.sendEvent("onStreamStats", [
+ "fps": fps,
+ "bps": bps,
+ "timestamp": ISO8601DateFormatter().string(from: Date())
+ ])
+ }
+ }
+
+ private func stopStatsTimer() {
+ statsTimer?.invalidate()
+ statsTimer = nil
+ }
+
+ // MARK: - Stop (Fix #2)
+
+ private func stopStream() {
+ stopRequestTime = Date()
+ print("[ExpoSettings] ⏱️ stopStream at \(ISO8601DateFormatter().string(from: stopRequestTime!))")
+
+ // cancel start confirmation monitor
+ dataMonitorToken = UUID()
+
+ stopStatsTimer()
+
+ guard let stream = rtmpStream, let connection = rtmpConnection else {
+ print("[ExpoSettings] No active stream to stop")
+ setStatus("stopped")
+ return
+ }
+
+ setStatus("stopping")
+
+ // Stop capturing new frames but keep connection open for flush
+ print("[ExpoSettings] 📤 Stop capture (keep RTMP open for flush)")
+ stream.attachCamera(nil) { _, _ in }
+ stream.attachAudio(nil) { _, _ in }
+
+ // Adaptive flush: wait until outbound bytes are ~0 for a stable window, OR max time reached
+ stopFlushToken = UUID()
+ let token = stopFlushToken!
+
+ let interval: TimeInterval = 0.2
+ let maxFlushSeconds: TimeInterval = 12.0
+ let stableZeroNeeded: Int = 6 // 6 * 0.2s = 1.2s stable
+
+ var elapsed: TimeInterval = 0
+ var stableZeroCount = 0
+
+ func flushTick() {
+ guard self.stopFlushToken == token else { return }
+
+ let bytesOut = connection.currentBytesOutPerSecond
+ let now = Date()
+
+ // if still sending, update lastDataSentTime
+ if bytesOut > 0 {
+ self.lastDataSentTime = now
+ stableZeroCount = 0
+ } else {
+ stableZeroCount += 1
+ }
+
+ elapsed += interval
+
+ // Condition to proceed: stable no outbound OR max wait
+ if stableZeroCount >= stableZeroNeeded || elapsed >= maxFlushSeconds {
+ print("[ExpoSettings] ✅ Flush condition met (stableZeroCount=\(stableZeroCount), elapsed=\(String(format: "%.1f", elapsed))s). Closing stream...")
+
+ // Close stream then connection
+ self.rtmpStream?.close()
+
+ DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { [weak self] in
+ guard let self else { return }
+ self.rtmpConnection?.close()
+
+ DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
+ guard let self else { return }
+
+ // Detach preview if your view supports optional
+ if let preview = ExpoSettingsView.current {
+ preview.attachStream(nil)
+ }
+
+ let finalTime = Date()
+ let totalMs = self.stopRequestTime.map { Int(finalTime.timeIntervalSince($0) * 1000) } ?? -1
+
+ self.sendEvent("onStreamTiming", [
+ "event": "shutdownComplete",
+ "totalDurationMs": totalMs,
+ "timestamp": ISO8601DateFormatter().string(from: finalTime)
+ ])
+
+ self.rtmpStream = nil
+ self.rtmpConnection = nil
+ self.pendingPublish = nil
+
+ self.setStatus("stopped")
+ print("[ExpoSettings] ✅ Stream stopped (total \(totalMs)ms)")
+ }
+ }
+
+ return
+ }
+
+ // Keep flushing
+ DispatchQueue.main.asyncAfter(deadline: .now() + interval) { flushTick() }
+ }
+
+ flushTick()
  }
  }
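
For orientation, this is how the JavaScript side could consume the API surface added in 0.1.13 (the extra events and functions registered in definition() above). It is a sketch only, not part of the published diff: the wrapper details (requireNativeModule('ExpoSettings') plus the expo-modules-core EventEmitter, and the startBroadcast/stopBroadcast helper names) are assumptions about the package's JS layer, which this diff does not show.

// Hypothetical consumer (TypeScript); the wrapper names are assumptions.
import { EventEmitter, requireNativeModule } from 'expo-modules-core';

const ExpoSettings = requireNativeModule('ExpoSettings');
const emitter = new EventEmitter(ExpoSettings);

export function startBroadcast(url: string, streamKey: string) {
  emitter.addListener('onStreamStatus', ({ status }: { status: string }) => {
    // 0.1.13 status order: previewInitializing → previewReady → connecting →
    // connected → publishing → started → stopping → stopped (or error)
    if (status === 'previewReady') {
      ExpoSettings.publishStream(url, streamKey);
    }
  });

  emitter.addListener('onStreamTiming', (e: { event: string; delayMs?: number; totalDurationMs?: number }) => {
    // 'dataConfirmed' / 'firstDataSent' fire once stable outbound media is observed;
    // 'shutdownComplete' fires after the adaptive flush in stopStream().
    console.log('[timing]', e);
  });

  emitter.addListener('onStreamStats', (s: { fps: number; bps: number; timestamp: string }) => {
    console.log(`[stats] fps=${s.fps} bitrate=${s.bps} bps`);
  });

  ExpoSettings.initializePreview();
}

export function stopBroadcast() {
  ExpoSettings.stopStream();
}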
@@ -11,6 +11,9 @@ public class ExpoSettingsView: ExpoView {
  return view
  }()

+ // Keep the stream so we can reattach if the view is recreated or the layout changes
+ private weak var attachedStream: RTMPStream?
+
  required init(appContext: AppContext? = nil) {
  super.init(appContext: appContext)
  clipsToBounds = true
@@ -24,8 +27,10 @@ public class ExpoSettingsView: ExpoView {
  hkView.frame = bounds
  }

- public func attachStream(_ stream: RTMPStream) {
- hkView.attachStream(stream)
+ // now accepts nil
+ public func attachStream(_ stream: RTMPStream?) {
+ attachedStream = stream
+ hkView.attachStream(stream) // normally accepts nil
  }

  deinit {
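
The view change above is what allows the module to pass nil on shutdown and detach the preview. On the JS side the preview is presumably exposed as a native view component; a minimal rendering sketch, assuming the view manager is registered under the 'ExpoSettings' name and consumed via expo-modules-core's requireNativeViewManager (neither is shown in this diff):

// Hypothetical preview component (TypeScript/TSX); the view-manager name is an assumption.
import * as React from 'react';
import { StyleSheet } from 'react-native';
import { requireNativeViewManager } from 'expo-modules-core';

const NativePreview = requireNativeViewManager('ExpoSettings');

export function StreamPreview() {
  // The native module attaches/detaches the RTMPStream itself (via ExpoSettingsView.current),
  // so JS only needs to keep the view mounted while the preview or stream is active.
  return <NativePreview style={StyleSheet.absoluteFill} />;
}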
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "blix-expo-settings",
- "version": "0.1.11",
+ "version": "0.1.13",
  "description": "LiveStream",
  "main": "build/index.js",
  "types": "build/index.d.ts",