blix-expo-settings 0.1.13 → 0.1.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ios/ExpoSettingsModule.swift +198 -517
- package/ios/ExpoSettingsView.swift +2 -7
- package/package.json +1 -1
package/ios/ExpoSettingsModule.swift
@@ -2,57 +2,28 @@ import ExpoModulesCore
 import HaishinKit
 import AVFoundation
 import VideoToolbox
-import Logboard
-
-// MARK: - RTMP Event Observer
-
-final class RTMPEventObserver: NSObject {
-  var onStatus: ((String, String, String) -> Void)?
-  var onError: ((String) -> Void)?
-
-  @objc func rtmpStatusHandler(_ notification: Notification) {
-    let e: Event = Event.from(notification)
-    guard let data = e.data as? [String: Any] else { return }
-    let code = data["code"] as? String ?? ""
-    let level = data["level"] as? String ?? ""
-    let desc = data["description"] as? String ?? ""
-    onStatus?(code, level, desc)
-  }
-
-  @objc func rtmpErrorHandler(_ notification: Notification) {
-    let e: Event = Event.from(notification)
-    onError?("ioError: \(e)")
-  }
-}
-
-// MARK: - Module
 
 public class ExpoSettingsModule: Module {
   private var rtmpConnection: RTMPConnection?
   private var rtmpStream: RTMPStream?
-  private var
-  private
-
-
-  private
-
-
-  private
-  private
-  private
-
-  private var
-
-
-
-
-
-
-  private var configuredFrameRate: Float64 = 30
-
-  // Monitor cancellation
-  private var dataMonitorToken: UUID?
-  private var stopFlushToken: UUID?
+  private var currentStatus: String = "idle"
+  private var operationStartTime: Date?
+
+  // MARK: - Stream Configuration (Portrait 9:16)
+  private let videoWidth = 720
+  private let videoHeight = 1280
+  private let videoBitrate = 4_000_000
+  private let audioBitrate = 128_000
+  private let frameRate: Float64 = 30
+  private let gopSeconds: Int32 = 1
+
+  private static var audioSessionConfigured = false
+
+  private static let isoFormatter: ISO8601DateFormatter = {
+    let f = ISO8601DateFormatter()
+    f.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
+    return f
+  }()
 
   public func definition() -> ModuleDefinition {
     Name("ExpoSettings")
@@ -63,542 +34,252 @@ public class ExpoSettingsModule: Module {
       // nothing needs to go here if you don't have Props
     }
 
-
+    Events("onStreamStatus", "onStreamTiming")
 
     Function("getStreamStatus") {
-      return self.
-    }
-
-    Function("getStreamInfo") { () -> [String: Any] in
-      let w = self.calculatedVideoWidth
-      let h = self.calculatedVideoHeight
-      let ar = (h == 0) ? 0.0 : (Double(w) / Double(h))
-      return [
-        "videoWidth": w,
-        "videoHeight": h,
-        "aspectRatio": String(format: "%.4f", ar),
-        "bitrate": self.configuredBitrate,
-        "frameRate": self.configuredFrameRate
-      ]
-    }
-
-    Function("getDeviceDimensions") { () -> [String: Any] in
-      let screen = UIScreen.main.bounds
-      let scale = UIScreen.main.scale
-      return [
-        "screenWidth": Int(screen.width),
-        "screenHeight": Int(screen.height),
-        "scale": scale,
-        "pixelWidth": Int(screen.width * scale),
-        "pixelHeight": Int(screen.height * scale),
-        "streamWidth": self.calculatedVideoWidth,
-        "streamHeight": self.calculatedVideoHeight,
-        "aspectRatio": String(format: "%.4f", Double(self.calculatedVideoWidth) / Double(max(self.calculatedVideoHeight, 1)))
-      ]
-    }
-
-    Function("getStreamTiming") { () -> [String: Any] in
-      var result: [String: Any] = [:]
-      let fmt = ISO8601DateFormatter()
-
-      if let t = self.previewInitTime { result["previewInitTime"] = fmt.string(from: t) }
-      if let t = self.publishRequestTime { result["publishRequestTime"] = fmt.string(from: t) }
-      if let t = self.firstDataSentTime { result["firstDataSentTime"] = fmt.string(from: t) }
-      if let t = self.stopRequestTime { result["stopRequestTime"] = fmt.string(from: t) }
-      if let t = self.lastDataSentTime { result["lastDataSentTime"] = fmt.string(from: t) }
-
-      if let publish = self.publishRequestTime, let first = self.firstDataSentTime {
-        result["startDelayMs"] = Int(first.timeIntervalSince(publish) * 1000)
-      }
-
-      if let stop = self.stopRequestTime, let last = self.lastDataSentTime {
-        // Positive means stop happened after last data timestamp
-        result["timeSinceLastDataMs"] = Int(stop.timeIntervalSince(last) * 1000)
-      }
-
-      return result
-    }
-
-    Function("initializePreview") { () -> Void in
-      DispatchQueue.main.async { self.initializePreview() }
-    }
-
-    Function("publishStream") { (url: String, streamKey: String) -> Void in
-      DispatchQueue.main.async { self.publishStream(url: url, streamKey: streamKey) }
+      return self.currentStatus
     }
 
-
-
-
-  }
-
-  // MARK: - Helpers
-
-  private func setStatus(_ s: String) {
-    guard currentStreamStatus != s else { return }
-    currentStreamStatus = s
-    sendEvent("onStreamStatus", [
-      "status": s,
-      "timestamp": ISO8601DateFormatter().string(from: Date())
-    ])
-  }
-
-  private func sanitizeRTMPUrl(_ url: String) -> String {
-    var u = url.trimmingCharacters(in: .whitespacesAndNewlines)
-    while u.hasSuffix("/") { u.removeLast() }
-    return u
-  }
-
-  private func calculateStreamDimensions() -> (width: Int, height: Int) {
-    let width = 720
-    let height = 1280
-
-    let aspectRatio = CGFloat(width) / CGFloat(height)
-    let expected = TARGET_ASPECT_RATIO
-
-    assert(abs(aspectRatio - expected) < 0.001, "Aspect ratio mismatch!")
-
-    print("[ExpoSettings] 📐 Stream dimensions: \(width)x\(height)")
-    print("[ExpoSettings] 📐 Aspect ratio: \(String(format: "%.4f", aspectRatio)) expected \(String(format: "%.4f", expected))")
-    return (width, height)
-  }
-
-  // MARK: - Permissions
-
-  private func requestAVPermissions(completion: @escaping (Bool) -> Void) {
-    let group = DispatchGroup()
-    var camOK = false
-    var micOK = false
-
-    group.enter()
-    AVCaptureDevice.requestAccess(for: .video) { granted in
-      camOK = granted
-      group.leave()
-    }
+    Function("initializePreview") {
+      Task { await self.initializePreview() }
+    }
 
-
-
-
-      group.leave()
-    }
+    Function("publishStream") { (url: String, streamKey: String) in
+      Task { await self.publishStream(url: url, streamKey: streamKey) }
+    }
 
-
-
-    print("[ExpoSettings] mic permission \(micOK)")
-    completion(camOK && micOK)
-    }
-  }
-
-  // MARK: - Preview init
-
-  private func initializePreview() {
-    previewInitTime = Date()
-    LBLogger.with("com.haishinkit.HaishinKit").level = .trace
-
-    print("[ExpoSettings] ⏱️ initializePreview at \(ISO8601DateFormatter().string(from: previewInitTime!))")
-    setStatus("previewInitializing")
-
-    requestAVPermissions { [weak self] ok in
-      guard let self else { return }
-      guard ok else {
-        print("[ExpoSettings] ❌ Missing camera/mic permissions")
-        self.setStatus("error")
-        return
-      }
-
-      // Audio session
-      let session = AVAudioSession.sharedInstance()
-      do {
-        try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .allowBluetooth])
-        try session.setPreferredSampleRate(44_100)
-        try session.setActive(true)
-        print("[ExpoSettings] ✅ AudioSession OK")
-      } catch {
-        print("[ExpoSettings] ❌ AudioSession error: \(error)")
-      }
-
-      let connection = RTMPConnection()
-      self.rtmpConnection = connection
-
-      let stream = RTMPStream(connection: connection)
-
-      // Attach listeners
-      connection.addEventListener(.rtmpStatus,
-        selector: #selector(RTMPEventObserver.rtmpStatusHandler(_:)),
-        observer: self.rtmpObserver)
-      connection.addEventListener(.ioError,
-        selector: #selector(RTMPEventObserver.rtmpErrorHandler(_:)),
-        observer: self.rtmpObserver)
-
-      stream.addEventListener(.rtmpStatus,
-        selector: #selector(RTMPEventObserver.rtmpStatusHandler(_:)),
-        observer: self.rtmpObserver)
-      stream.addEventListener(.ioError,
-        selector: #selector(RTMPEventObserver.rtmpErrorHandler(_:)),
-        observer: self.rtmpObserver)
-
-      self.rtmpStream = stream
-
-      self.rtmpObserver.onStatus = { [weak self] code, level, desc in
-        self?.handleRTMPStatus(code: code, level: level, desc: desc)
-      }
-      self.rtmpObserver.onError = { [weak self] msg in
-        print("[ExpoSettings] ❌ \(msg)")
-        self?.setStatus("error")
-      }
-
-      // Dimensions
-      let dimensions = self.calculateStreamDimensions()
-      self.calculatedVideoWidth = dimensions.width
-      self.calculatedVideoHeight = dimensions.height
-
-      // Video settings
-      self.configuredBitrate = 2_500_000
-      self.configuredFrameRate = 30
-
-      let videoSettings = VideoCodecSettings(
-        videoSize: CGSize(width: dimensions.width, height: dimensions.height),
-        bitRate: self.configuredBitrate,
-        profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
-        scalingMode: .trim,
-        bitRateMode: .average,
-        maxKeyFrameIntervalDuration: 1, // GOP 1s
-        allowFrameReordering: nil,
-        isHardwareEncoderEnabled: true
-      )
-      stream.videoSettings = videoSettings
-      stream.frameRate = self.configuredFrameRate
-
-      print("[ExpoSettings] 📐 VideoSettings videoSize=\(stream.videoSettings.videoSize) bitrate=\(stream.videoSettings.bitRate) GOP=\(stream.videoSettings.maxKeyFrameIntervalDuration) fps=\(stream.frameRate)")
-
-      // Audio settings
-      var audioSettings = AudioCodecSettings()
-      audioSettings.bitRate = 128_000
-      stream.audioSettings = audioSettings
-      print("[ExpoSettings] 🔊 Audio bitRate: 128000")
-
-      // Devices
-      guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
-        print("[ExpoSettings] ❌ No front camera")
-        self.setStatus("error")
-        return
-      }
-      guard let microphone = AVCaptureDevice.default(for: .audio) else {
-        print("[ExpoSettings] ❌ No microphone")
-        self.setStatus("error")
-        return
-      }
-
-      // Attach camera (portrait, mirrored)
-      stream.attachCamera(camera) { videoUnit, error in
-        if let error = error {
-          print("[ExpoSettings] ❌ Camera ERROR: \(error)")
-        } else {
-          videoUnit?.isVideoMirrored = true
-          videoUnit?.videoOrientation = .portrait
-          print("[ExpoSettings] ✅ Camera attached (portrait, mirrored)")
+    Function("stopStream") {
+      Task { await self.stopStream() }
     }
-        }
 

-
-
-
-        } else {
-          print("[ExpoSettings] ✅ Audio attached")
+    Function("forceCleanup") {
+      self.cleanup()
+      self.setStatus("idle")
     }
-      }
-
-      // Attach preview
-      if let preview = ExpoSettingsView.current {
-        preview.attachStream(stream) // requires RTMPStream? in view to allow nil later
-        print("[ExpoSettings] ✅ Preview attached")
-      }
-
-      // Wait for encoder warm-up
-      DispatchQueue.main.asyncAfter(deadline: .now() + 1.2) { [weak self] in
-        guard let self, let s = self.rtmpStream else { return }
-        print("[ExpoSettings] 🔍 Warm verify videoSize=\(s.videoSettings.videoSize) fps=\(s.frameRate)")
-        self.setStatus("previewReady")
-        print("[ExpoSettings] ✅ Preview READY")
-      }
   }
-    }
 
-
+  private func setStatus(_ status: String) {
+    guard currentStatus != status else { return }
 
-
-
-    firstDataSentTime = nil
-    lastDataSentTime = nil
-    stopRequestTime = nil
+    print("[ExpoSettings] \(currentStatus) → \(status)")
+    currentStatus = status
 
-
-
-
-
-    print("[ExpoSettings] ⏱️ publishStream at \(ISO8601DateFormatter().string(from: publishRequestTime!))")
-    print("[ExpoSettings] URL: \(cleanUrl)")
-    print("[ExpoSettings] Key: \(streamKey)")
-
-    guard let connection = rtmpConnection, let stream = rtmpStream else {
-      print("[ExpoSettings] ❌ No connection/stream")
-      setStatus("error")
-      return
+    sendEvent("onStreamStatus", [
+      "status": status,
+      "timestamp": Self.isoFormatter.string(from: Date())
+    ])
   }
 
-
+  private func setupAudioSession() -> Bool {
 
-
-    setStatus("connecting")
-    connection.connect(cleanUrl)
-  }
+    if Self.audioSessionConfigured { return true }
 
-
+    do {
+      let session = AVAudioSession.sharedInstance()
 
-
-
-
+      try session.setCategory(
+        .playAndRecord,
+        mode: .default,
+        options: [.defaultToSpeaker, .allowBluetooth]
+      )
 
-
+      try session.setActive(true)
 
-
-
-
-    if let p = pendingPublish {
-      pendingPublish = nil
-      setStatus("publishing")
-      print("[ExpoSettings] 📤 Publishing...")
-      stream.publish(p.streamKey, type: .live)
-
-      // Start monitoring for real media egress
-      monitorForRealOutboundMedia()
-    }
-
-    case "NetStream.Publish.Start":
-      // IMPORTANT:
-      // Do NOT setStatus("started") here anymore.
-      // This event means publish handshake started, not necessarily that DVR/RTMP has real media yet.
-      print("[ExpoSettings] ✅ Publish.Start received (waiting for data confirmation...)")
-
-    case "NetStream.Publish.BadName",
-         "NetStream.Publish.Rejected",
-         "NetConnection.Connect.Failed":
-      stopStatsTimer()
-      setStatus("error")
-
-    case "NetConnection.Connect.Closed":
-      stopStatsTimer()
-      setStatus("stopped")
-
-    default:
-      break
-    }
-  }
+      Self.audioSessionConfigured = true
+      print("[ExpoSettings] Audio session ready")
+      return true
 
-
+    } catch {
+      print("[ExpoSettings] Audio session error:", error)
+      return false
+    }
+  }
 
-
-    guard let connection = rtmpConnection, let stream = rtmpStream else { return }
-    let token = dataMonitorToken ?? UUID()
-    dataMonitorToken = token
+  private func initializePreview() async {
 
-
-
-    let interval: TimeInterval = 0.1
+    print("[ExpoSettings] initializePreview")
+    operationStartTime = Date()
 
-
-
-    let neededGoodStreak = 4 // 400ms stable
+    cleanup()
+    try? await Task.sleep(nanoseconds: 200_000_000)
 
-
-    // cancelled?
-    guard self.dataMonitorToken == token else { return }
+    setStatus("previewInitializing")
 
-
+    guard setupAudioSession() else {
+      setStatus("error")
+      return
+    }
 
-
-
+    let connection = RTMPConnection()
+    // 2) Create the RTMPStream, but don't publish to the server yet
+    let stream = RTMPStream(connection: connection)
+    self.rtmpConnection = connection
+    self.rtmpStream = stream
+
+    // ---------- Stream Base ----------
+    stream.sessionPreset = .hd1280x720
+    stream.frameRate = frameRate
+    stream.videoOrientation = .portrait
+    stream.configuration { captureSession in
+      captureSession.automaticallyConfiguresApplicationAudioSession = true
+    }
 
-
-
-
-
+    // ---------- Audio ----------
+    stream.audioSettings = AudioCodecSettings(
+      bitRate: audioBitrate
+    )
+
+    // ---------- Video ----------
+    stream.videoSettings = VideoCodecSettings(
+      videoSize: .init(width: videoWidth, height: videoHeight),
+      bitRate: videoBitrate,
+      profileLevel: kVTProfileLevel_H264_Main_4_1 as String,
+      scalingMode: .letterbox,
+      bitRateMode: .average,
+      maxKeyFrameIntervalDuration: gopSeconds,
+      allowFrameReordering: nil,
+      isHardwareEncoderEnabled: true
+    )
+
+    // ---------- Attach Audio ----------
+    if let mic = AVCaptureDevice.default(for: .audio) {
+      stream.attachAudio(mic)
+    }
 
-
-
-
-
-
+    // ---------- Attach Camera ----------
+    if let cam = AVCaptureDevice.default(
+      .builtInWideAngleCamera,
+      for: .video,
+      position: .front
+    ) {
+      stream.attachCamera(cam) { unit, _ in
+        guard let unit else { return }
+        unit.videoOrientation = .portrait
+        unit.isVideoMirrored = true
+        unit.preferredVideoStabilizationMode = .standard
+        unit.colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+      }
+    }
 
-
-
-
-
+    // ---------- Preview ----------
+    if let preview = await ExpoSettingsView.current {
+      await preview.attachStream(stream)
+    } else {
+      print("[ExpoSettings] ERROR: Preview view not found during publish!")
     }
 
-    let
-      self.firstDataSentTime.map { Int($0.timeIntervalSince(pub) * 1000) }
-    } ?? -1
+    let ms = Int(Date().timeIntervalSince(operationStartTime!) * 1000)
 
-    print("[ExpoSettings]
+    print("[ExpoSettings] Preview ready in \(ms)ms")
+    setStatus("previewReady")
 
-
-
-
-      "delayMs": delayMs,
-      "timestamp": ISO8601DateFormatter().string(from: self.firstDataSentTime ?? Date())
-    ])
-    self.sendEvent("onStreamTiming", [
-      "event": "firstDataSent",
-      "delayMs": delayMs,
-      "timestamp": ISO8601DateFormatter().string(from: self.firstDataSentTime ?? Date())
+    sendEvent("onStreamTiming", [
+      "event": "previewReady",
+      "durationMs": ms
     ])
-
-    self.setStatus("started")
-    self.startStatsTimer()
-    return
-  }
-
-  // Timeout
-  if checks >= maxChecks {
-    print("[ExpoSettings] ⚠️ Start confirmation timeout (still no stable outbound media). Keeping status=\(self.currentStreamStatus)")
-    // Keep status as "publishing" or whatever it currently is; do not force started.
-    return
-  }
-
-  // Keep checking while in publishing/connected state
-  if self.currentStreamStatus == "publishing" || self.currentStreamStatus == "connected" || self.currentStreamStatus == "connecting" {
-    DispatchQueue.main.asyncAfter(deadline: .now() + interval) { tick() }
-  }
   }
 
-
-  }
+  private func publishStream(url: String, streamKey: String) async {
 
-
-
-
-
-
-    guard let self,
-          let c = self.rtmpConnection,
-          let s = self.rtmpStream else { return }
+    guard let connection = rtmpConnection,
+          let stream = rtmpStream else {
+      setStatus("error")
+      return
+    }
 
-
-
+    operationStartTime = Date()
+    setStatus("connecting")
 
-
-    self.lastDataSentTime = Date()
-  }
+    self.rtmpConnection?.connect(url)
 
-
-      "fps": fps,
-      "bps": bps,
-      "timestamp": ISO8601DateFormatter().string(from: Date())
-    ])
-    }
-  }
+    let deadline = Date().addingTimeInterval(10)
 
-
-
-
-
+    while Date() < deadline {
+      if connection.connected { break }
+      try? await Task.sleep(nanoseconds: 50_000_000)
+    }
 
-
+    guard connection.connected else {
+      print("[ExpoSettings] Connect timeout")
+      setStatus("error")
+      return
+    }
 
-
-
-    print("[ExpoSettings] ⏱️ stopStream at \(ISO8601DateFormatter().string(from: stopRequestTime!))")
+    setStatus("connected")
+    try? await Task.sleep(nanoseconds: 150_000_000)
 
-
-
+    setStatus("publishing")
+    stream.publish(streamKey)
 
-
+    try? await Task.sleep(nanoseconds: 200_000_000)
 
-
-    print("[ExpoSettings] No active stream to stop")
-    setStatus("stopped")
-    return
-  }
+    let ms = Int(Date().timeIntervalSince(operationStartTime!) * 1000)
 
-
+    print("[ExpoSettings] STREAM STARTED in \(ms)ms")
 
-
-    print("[ExpoSettings] 📤 Stop capture (keep RTMP open for flush)")
-    stream.attachCamera(nil) { _, _ in }
-    stream.attachAudio(nil) { _, _ in }
+    setStatus("started")
 
-
-
-
+    sendEvent("onStreamTiming", [
+      "event": "firstDataSent",
+      "delayMs": ms,
+      "timestamp": Self.isoFormatter.string(from: Date())
+    ])
+  }
 
-
-    let maxFlushSeconds: TimeInterval = 12.0
-    let stableZeroNeeded: Int = 6 // 6 * 0.2s = 1.2s stable
+  private func stopStream() async {
 
-
-    var stableZeroCount = 0
+    print("[ExpoSettings] stopStream")
 
-
-
+    guard let stream = rtmpStream,
+          let connection = rtmpConnection else {
+      cleanup()
+      setStatus("stopped")
+      return
+    }
 
-
-
+    operationStartTime = Date()
+    setStatus("stopping")
 
-
-
-
-      stableZeroCount = 0
-    } else {
-      stableZeroCount += 1
-    }
+    // Stop capture
+    stream.attachCamera(nil)
+    stream.attachAudio(nil)
 
-
+    // Flush encoder (GOP + 0.5s)
+    let flushNs = UInt64(gopSeconds) * 1_000_000_000 + 500_000_000
+    try? await Task.sleep(nanoseconds: flushNs)
 
-
-
-
+    // Close stream
+    stream.close()
+    try? await Task.sleep(nanoseconds: 300_000_000)
 
-    // Close
-
+    // Close socket
+    connection.close()
 
-
-    guard let self else { return }
-    self.rtmpConnection?.close()
+    cleanup()
 
-
-    guard let self else { return }
+    let ms = Int(Date().timeIntervalSince(operationStartTime!) * 1000)
 
-
-    if let preview = ExpoSettingsView.current {
-      preview.attachStream(nil)
-    }
+    print("[ExpoSettings] STOPPED in \(ms)ms")
 
-
-    let totalMs = self.stopRequestTime.map { Int(finalTime.timeIntervalSince($0) * 1000) } ?? -1
+    setStatus("stopped")
 
-
-
-
-
-
+    sendEvent("onStreamTiming", [
+      "event": "shutdownComplete",
+      "totalDurationMs": ms,
+      "timestamp": Self.isoFormatter.string(from: Date())
+    ])
+  }
 
-
-    self.rtmpConnection = nil
-    self.pendingPublish = nil
+  private func cleanup() {
 
-
-    print("[ExpoSettings] ✅ Stream stopped (total \(totalMs)ms)")
-  }
-  }
+    print("[ExpoSettings] Cleanup")
 
-
-
+    rtmpStream?.attachCamera(nil)
+    rtmpStream?.attachAudio(nil)
+    rtmpStream?.close()
+    rtmpStream = nil
 
-
-
+    rtmpConnection?.close()
+    rtmpConnection = nil
   }
-
-    flushTick()
-  }
 }
package/ios/ExpoSettingsView.swift
@@ -11,9 +11,6 @@ public class ExpoSettingsView: ExpoView {
     return view
   }()
 
-  // Keep the stream around so it can be reattached if the view is recreated or the layout changes
-  private weak var attachedStream: RTMPStream?
-
   required init(appContext: AppContext? = nil) {
     super.init(appContext: appContext)
     clipsToBounds = true
@@ -27,10 +24,8 @@ public class ExpoSettingsView: ExpoView {
     hkView.frame = bounds
   }
 
-
-
-    attachedStream = stream
-    hkView.attachStream(stream) // normally accepts nil
+  public func attachStream(_ stream: RTMPStream) {
+    hkView.attachStream(stream)
   }
 
   deinit {
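For context, a minimal consumer-side sketch of how the reworked native API might be driven from JavaScript. This wrapper is not part of the diff; the import path and call pattern are assumptions based on the standard expo-modules-core usage, and the RTMP URL and key are placeholders. Only names declared in the module definition above (Name, Events, Function) are used.

import { requireNativeModule } from "expo-modules-core";

// Hypothetical usage sketch; "ExpoSettings" matches Name("ExpoSettings") in the module definition.
const ExpoSettings = requireNativeModule("ExpoSettings");

ExpoSettings.initializePreview();                                      // warm up camera, mic and preview
ExpoSettings.publishStream("rtmp://example.test/live", "STREAM_KEY");  // connect, then publish the stream key
console.log(ExpoSettings.getStreamStatus());                           // e.g. "connecting", "publishing", "started"
ExpoSettings.stopStream();                                             // flush the encoder, then close the socket

Progress and timing updates would arrive through the "onStreamStatus" and "onStreamTiming" events declared by Events(); how they are subscribed to depends on the Expo SDK version in use.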