blix-expo-settings 0.1.12 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ios/ExpoSettingsModule.swift +197 -515
- package/ios/ExpoSettingsView.swift +2 -7
- package/package.json +1 -5
package/ios/ExpoSettingsModule.swift
CHANGED

@@ -2,57 +2,28 @@ import ExpoModulesCore
 import HaishinKit
 import AVFoundation
 import VideoToolbox
-import Logboard
-
-// MARK: - RTMP Event Observer
-
-final class RTMPEventObserver: NSObject {
-  var onStatus: ((String, String, String) -> Void)?
-  var onError: ((String) -> Void)?
-
-  @objc func rtmpStatusHandler(_ notification: Notification) {
-    let e: Event = Event.from(notification)
-    guard let data = e.data as? [String: Any] else { return }
-    let code = data["code"] as? String ?? ""
-    let level = data["level"] as? String ?? ""
-    let desc = data["description"] as? String ?? ""
-    onStatus?(code, level, desc)
-  }
-
-  @objc func rtmpErrorHandler(_ notification: Notification) {
-    let e: Event = Event.from(notification)
-    onError?("ioError: \(e)")
-  }
-}
-
-// MARK: - Module
 
 public class ExpoSettingsModule: Module {
   private var rtmpConnection: RTMPConnection?
   private var rtmpStream: RTMPStream?
-  private var
-  private
-
-
-  private
-
-
-  private
-  private
-  private
-
-  private var
-
-
-
-
-
-  private var configuredFrameRate: Float64 = 30
-
-  // Monitor cancellation
-  private var dataMonitorToken: UUID?
-  private var stopFlushToken: UUID?
+  private var currentStatus: String = "idle"
+  private var operationStartTime: Date?
+
+  // MARK: - Stream Configuration (Portrait 9:16)
+  private let videoWidth = 720
+  private let videoHeight = 1280
+  private let videoBitrate = 4_000_000
+  private let audioBitrate = 128_000
+  private let frameRate: Float64 = 30
+  private let gopSeconds: Int32 = 1
+
+  private static var audioSessionConfigured = false
+
+  private static let isoFormatter: ISO8601DateFormatter = {
+    let f = ISO8601DateFormatter()
+    f.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
+    return f
+  }()
 
   public func definition() -> ModuleDefinition {
     Name("ExpoSettings")
@@ -63,541 +34,252 @@ public class ExpoSettingsModule: Module {
       // não precisa colocar nada aqui se você não tiver Props
     }
 
-
+    Events("onStreamStatus", "onStreamTiming")
 
     Function("getStreamStatus") {
       return self.currentStreamStatus
     }
 
-
-
-
-      let ar = (h == 0) ? 0.0 : (Double(w) / Double(h))
-      return [
-        "videoWidth": w,
-        "videoHeight": h,
-        "aspectRatio": String(format: "%.4f", ar),
-        "bitrate": self.configuredBitrate,
-        "frameRate": self.configuredFrameRate
-      ]
-    }
-
-    Function("getDeviceDimensions") { () -> [String: Any] in
-      let screen = UIScreen.main.bounds
-      let scale = UIScreen.main.scale
-      return [
-        "screenWidth": Int(screen.width),
-        "screenHeight": Int(screen.height),
-        "scale": scale,
-        "pixelWidth": Int(screen.width * scale),
-        "pixelHeight": Int(screen.height * scale),
-        "streamWidth": self.calculatedVideoWidth,
-        "streamHeight": self.calculatedVideoHeight,
-        "aspectRatio": String(format: "%.4f", Double(self.calculatedVideoWidth) / Double(max(self.calculatedVideoHeight, 1)))
-      ]
-    }
-
-    Function("getStreamTiming") { () -> [String: Any] in
-      var result: [String: Any] = [:]
-      let fmt = ISO8601DateFormatter()
-
-      if let t = self.previewInitTime { result["previewInitTime"] = fmt.string(from: t) }
-      if let t = self.publishRequestTime { result["publishRequestTime"] = fmt.string(from: t) }
-      if let t = self.firstDataSentTime { result["firstDataSentTime"] = fmt.string(from: t) }
-      if let t = self.stopRequestTime { result["stopRequestTime"] = fmt.string(from: t) }
-      if let t = self.lastDataSentTime { result["lastDataSentTime"] = fmt.string(from: t) }
-
-      if let publish = self.publishRequestTime, let first = self.firstDataSentTime {
-        result["startDelayMs"] = Int(first.timeIntervalSince(publish) * 1000)
-      }
-
-      if let stop = self.stopRequestTime, let last = self.lastDataSentTime {
-        // Positive means stop happened after last data timestamp
-        result["timeSinceLastDataMs"] = Int(stop.timeIntervalSince(last) * 1000)
-      }
-
-      return result
-    }
-
-    Function("initializePreview") { () -> Void in
-      DispatchQueue.main.async { self.initializePreview() }
-    }
-
-    Function("publishStream") { (url: String, streamKey: String) -> Void in
-      DispatchQueue.main.async { self.publishStream(url: url, streamKey: streamKey) }
-    }
-
-    Function("stopStream") { () -> Void in
-      DispatchQueue.main.async { self.stopStream() }
-    }
-  }
-
-  // MARK: - Helpers
-
-  private func setStatus(_ s: String) {
-    guard currentStreamStatus != s else { return }
-    currentStreamStatus = s
-    sendEvent("onStreamStatus", [
-      "status": s,
-      "timestamp": ISO8601DateFormatter().string(from: Date())
-    ])
-  }
-
-  private func sanitizeRTMPUrl(_ url: String) -> String {
-    var u = url.trimmingCharacters(in: .whitespacesAndNewlines)
-    while u.hasSuffix("/") { u.removeLast() }
-    return u
-  }
-
-  private func calculateStreamDimensions() -> (width: Int, height: Int) {
-    let width = 720
-    let height = 1280
-
-    let aspectRatio = CGFloat(width) / CGFloat(height)
-    let expected = TARGET_ASPECT_RATIO
-
-    assert(abs(aspectRatio - expected) < 0.001, "Aspect ratio mismatch!")
-
-    print("[ExpoSettings] 📐 Stream dimensions: \(width)x\(height)")
-    print("[ExpoSettings] 📐 Aspect ratio: \(String(format: "%.4f", aspectRatio)) expected \(String(format: "%.4f", expected))")
-    return (width, height)
-  }
-
-  // MARK: - Permissions
-
-  private func requestAVPermissions(completion: @escaping (Bool) -> Void) {
-    let group = DispatchGroup()
-    var camOK = false
-    var micOK = false
-
-    group.enter()
-    AVCaptureDevice.requestAccess(for: .video) { granted in
-      camOK = granted
-      group.leave()
-    }
+    Function("initializePreview") {
+      Task { await self.initializePreview() }
+    }
 
-
-
-
-      group.leave()
-    }
+    Function("publishStream") { (url: String, streamKey: String) in
+      Task { await self.publishStream(url: url, streamKey: streamKey) }
+    }
 
-
-
-      print("[ExpoSettings] mic permission \(micOK)")
-      completion(camOK && micOK)
-    }
-  }
-
-  // MARK: - Preview init
-
-  private func initializePreview() {
-    previewInitTime = Date()
-    LBLogger.with("com.haishinkit.HaishinKit").level = .trace
-
-    print("[ExpoSettings] ⏱️ initializePreview at \(ISO8601DateFormatter().string(from: previewInitTime!))")
-    setStatus("previewInitializing")
-
-    requestAVPermissions { [weak self] ok in
-      guard let self else { return }
-      guard ok else {
-        print("[ExpoSettings] ❌ Missing camera/mic permissions")
-        self.setStatus("error")
-        return
-      }
-
-      // Audio session
-      let session = AVAudioSession.sharedInstance()
-      do {
-        try session.setCategory(.playAndRecord, mode: .videoRecording, options: [.defaultToSpeaker, .allowBluetooth])
-        try session.setPreferredSampleRate(44_100)
-        try session.setActive(true)
-        print("[ExpoSettings] ✅ AudioSession OK")
-      } catch {
-        print("[ExpoSettings] ❌ AudioSession error: \(error)")
-      }
-
-      let connection = RTMPConnection()
-      let stream = RTMPStream(connection: connection)
-
-      // Attach listeners
-      connection.addEventListener(.rtmpStatus,
-                                  selector: #selector(RTMPEventObserver.rtmpStatusHandler(_:)),
-                                  observer: self.rtmpObserver)
-      connection.addEventListener(.ioError,
-                                  selector: #selector(RTMPEventObserver.rtmpErrorHandler(_:)),
-                                  observer: self.rtmpObserver)
-
-      stream.addEventListener(.rtmpStatus,
-                              selector: #selector(RTMPEventObserver.rtmpStatusHandler(_:)),
-                              observer: self.rtmpObserver)
-      stream.addEventListener(.ioError,
-                              selector: #selector(RTMPEventObserver.rtmpErrorHandler(_:)),
-                              observer: self.rtmpObserver)
-
-      self.rtmpConnection = connection
-      self.rtmpStream = stream
-
-      self.rtmpObserver.onStatus = { [weak self] code, level, desc in
-        self?.handleRTMPStatus(code: code, level: level, desc: desc)
-      }
-      self.rtmpObserver.onError = { [weak self] msg in
-        print("[ExpoSettings] ❌ \(msg)")
-        self?.setStatus("error")
-      }
-
-      // Dimensions
-      let dimensions = self.calculateStreamDimensions()
-      self.calculatedVideoWidth = dimensions.width
-      self.calculatedVideoHeight = dimensions.height
-
-      // Video settings
-      self.configuredBitrate = 2_500_000
-      self.configuredFrameRate = 30
-
-      let videoSettings = VideoCodecSettings(
-        videoSize: CGSize(width: dimensions.width, height: dimensions.height),
-        bitRate: self.configuredBitrate,
-        profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
-        scalingMode: .trim,
-        bitRateMode: .average,
-        maxKeyFrameIntervalDuration: 1, // GOP 1s
-        allowFrameReordering: nil,
-        isHardwareEncoderEnabled: true
-      )
-      stream.videoSettings = videoSettings
-      stream.frameRate = self.configuredFrameRate
-
-      print("[ExpoSettings] 📐 VideoSettings videoSize=\(stream.videoSettings.videoSize) bitrate=\(stream.videoSettings.bitRate) GOP=\(stream.videoSettings.maxKeyFrameIntervalDuration) fps=\(stream.frameRate)")
-
-      // Audio settings
-      var audioSettings = AudioCodecSettings()
-      audioSettings.bitRate = 128_000
-      stream.audioSettings = audioSettings
-      print("[ExpoSettings] 🔊 Audio bitRate: 128000")
-
-      // Devices
-      guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
-        print("[ExpoSettings] ❌ No front camera")
-        self.setStatus("error")
-        return
-      }
-      guard let microphone = AVCaptureDevice.default(for: .audio) else {
-        print("[ExpoSettings] ❌ No microphone")
-        self.setStatus("error")
-        return
-      }
-
-      // Attach camera (portrait, mirrored)
-      stream.attachCamera(camera) { videoUnit, error in
-        if let error = error {
-          print("[ExpoSettings] ❌ Camera ERROR: \(error)")
-        } else {
-          videoUnit?.isVideoMirrored = true
-          videoUnit?.videoOrientation = .portrait
-          print("[ExpoSettings] ✅ Camera attached (portrait, mirrored)")
+    Function("stopStream") {
+      Task { await self.stopStream() }
     }
-      }
 
-
-
-
-        } else {
-          print("[ExpoSettings] ✅ Audio attached")
+    Function("forceCleanup") {
+      self.cleanup()
+      self.setStatus("idle")
     }
-      }
-
-      // Attach preview
-      if let preview = ExpoSettingsView.current {
-        preview.attachStream(stream) // requires RTMPStream? in view to allow nil later
-        print("[ExpoSettings] ✅ Preview attached")
-      }
-
-      // Wait for encoder warm-up
-      DispatchQueue.main.asyncAfter(deadline: .now() + 1.2) { [weak self] in
-        guard let self, let s = self.rtmpStream else { return }
-        print("[ExpoSettings] 🔍 Warm verify videoSize=\(s.videoSettings.videoSize) fps=\(s.frameRate)")
-        self.setStatus("previewReady")
-        print("[ExpoSettings] ✅ Preview READY")
-      }
   }
-  }
 
-
+  private func setStatus(_ status: String) {
+    guard currentStatus != status else { return }
 
-
-
-    firstDataSentTime = nil
-    lastDataSentTime = nil
-    stopRequestTime = nil
+    print("[ExpoSettings] \(currentStatus) → \(status)")
+    currentStatus = status
 
-
-
-
-
-    print("[ExpoSettings] ⏱️ publishStream at \(ISO8601DateFormatter().string(from: publishRequestTime!))")
-    print("[ExpoSettings] URL: \(cleanUrl)")
-    print("[ExpoSettings] Key: \(streamKey)")
-
-    guard let connection = rtmpConnection, let stream = rtmpStream else {
-      print("[ExpoSettings] ❌ No connection/stream")
-      setStatus("error")
-      return
+    sendEvent("onStreamStatus", [
+      "status": status,
+      "timestamp": Self.isoFormatter.string(from: Date())
+    ])
   }
 
-
+  private func setupAudioSession() -> Bool {
 
-
-    setStatus("connecting")
-    connection.connect(cleanUrl)
-  }
+    if Self.audioSessionConfigured { return true }
 
-
+    do {
+      let session = AVAudioSession.sharedInstance()
 
-
-
-
+      try session.setCategory(
+        .playAndRecord,
+        mode: .default,
+        options: [.defaultToSpeaker, .allowBluetooth]
+      )
 
-
+      try session.setActive(true)
 
-
-
-
-    if let p = pendingPublish {
-      pendingPublish = nil
-      setStatus("publishing")
-      print("[ExpoSettings] 📤 Publishing...")
-      stream.publish(p.streamKey, type: .live)
-
-      // Start monitoring for real media egress
-      monitorForRealOutboundMedia()
-    }
-
-    case "NetStream.Publish.Start":
-      // IMPORTANT:
-      // Do NOT setStatus("started") here anymore.
-      // This event means publish handshake started, not necessarily that DVR/RTMP has real media yet.
-      print("[ExpoSettings] ✅ Publish.Start received (waiting for data confirmation...)")
-
-    case "NetStream.Publish.BadName",
-         "NetStream.Publish.Rejected",
-         "NetConnection.Connect.Failed":
-      stopStatsTimer()
-      setStatus("error")
-
-    case "NetConnection.Connect.Closed":
-      stopStatsTimer()
-      setStatus("stopped")
-
-    default:
-      break
-    }
-  }
+      Self.audioSessionConfigured = true
+      print("[ExpoSettings] Audio session ready")
+      return true
 
-
+    } catch {
+      print("[ExpoSettings] Audio session error:", error)
+      return false
+    }
+  }
 
-
-    guard let connection = rtmpConnection, let stream = rtmpStream else { return }
-    let token = dataMonitorToken ?? UUID()
-    dataMonitorToken = token
+  private func initializePreview() async {
 
-
-
-    let interval: TimeInterval = 0.1
+    print("[ExpoSettings] initializePreview")
+    operationStartTime = Date()
 
-
-
-    let neededGoodStreak = 4 // 400ms stable
+    cleanup()
+    try? await Task.sleep(nanoseconds: 200_000_000)
 
-
-      // cancelled?
-      guard self.dataMonitorToken == token else { return }
+    setStatus("previewInitializing")
 
-
+    guard setupAudioSession() else {
+      setStatus("error")
+      return
+    }
 
-
-
+    let connection = RTMPConnection()
+    // 2) Criar RTMPStream, mas não publica pro servidor ainda
+    let stream = RTMPStream(connection: connection)
+    self.rtmpConnection = connection
+    self.rtmpStream = stream
+
+    // ---------- Stream Base ----------
+    stream.sessionPreset = .hd1280x720
+    stream.frameRate = frameRate
+    stream.videoOrientation = .portrait
+    stream.configuration { captureSession in
+      captureSession.automaticallyConfiguresApplicationAudioSession = true
+    }
 
-
-
-
-
+    // ---------- Audio ----------
+    stream.audioSettings = AudioCodecSettings(
+      bitRate: audioBitrate
+    )
+
+    // ---------- Video ----------
+    stream.videoSettings = VideoCodecSettings(
+      videoSize: .init(width: videoWidth, height: videoHeight),
+      bitRate: videoBitrate,
+      profileLevel: kVTProfileLevel_H264_Main_4_1 as String,
+      scalingMode: .letterbox,
+      bitRateMode: .average,
+      maxKeyFrameIntervalDuration: gopSeconds,
+      allowFrameReordering: nil,
+      isHardwareEncoderEnabled: true
+    )
+
+    // ---------- Attach Audio ----------
+    if let mic = AVCaptureDevice.default(for: .audio) {
+      stream.attachAudio(mic)
+    }
 
-
-
-
-
-
+    // ---------- Attach Camera ----------
+    if let cam = AVCaptureDevice.default(
+      .builtInWideAngleCamera,
+      for: .video,
+      position: .front
+    ) {
+      stream.attachCamera(cam) { unit, _ in
+        guard let unit else { return }
+        unit.videoOrientation = .portrait
+        unit.isVideoMirrored = true
+        unit.preferredVideoStabilizationMode = .standard
+        unit.colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+      }
+    }
 
-
-
-
-
+    // ---------- Preview ----------
+    if let preview = await ExpoSettingsView.current {
+      await preview.attachStream(stream)
+    } else {
+      print("[ExpoSettings] ERROR: Preview view not found during publish!")
     }
 
-    let
-      self.firstDataSentTime.map { Int($0.timeIntervalSince(pub) * 1000) }
-    } ?? -1
+    let ms = Int(Date().timeIntervalSince(operationStartTime!) * 1000)
 
-    print("[ExpoSettings]
+    print("[ExpoSettings] Preview ready in \(ms)ms")
+    setStatus("previewReady")
 
-
-
-
-      "delayMs": delayMs,
-      "timestamp": ISO8601DateFormatter().string(from: self.firstDataSentTime ?? Date())
-    ])
-    self.sendEvent("onStreamTiming", [
-      "event": "firstDataSent",
-      "delayMs": delayMs,
-      "timestamp": ISO8601DateFormatter().string(from: self.firstDataSentTime ?? Date())
+    sendEvent("onStreamTiming", [
+      "event": "previewReady",
+      "durationMs": ms
     ])
-
-    self.setStatus("started")
-    self.startStatsTimer()
-    return
-    }
-
-    // Timeout
-    if checks >= maxChecks {
-      print("[ExpoSettings] ⚠️ Start confirmation timeout (still no stable outbound media). Keeping status=\(self.currentStreamStatus)")
-      // Keep status as "publishing" or whatever it currently is; do not force started.
-      return
-    }
-
-    // Keep checking while in publishing/connected state
-    if self.currentStreamStatus == "publishing" || self.currentStreamStatus == "connected" || self.currentStreamStatus == "connecting" {
-      DispatchQueue.main.asyncAfter(deadline: .now() + interval) { tick() }
-    }
   }
 
-
-  }
+  private func publishStream(url: String, streamKey: String) async {
 
-
-
-
-
-
-      guard let self,
-            let c = self.rtmpConnection,
-            let s = self.rtmpStream else { return }
+    guard let connection = rtmpConnection,
+          let stream = rtmpStream else {
+      setStatus("error")
+      return
+    }
 
-
-
+    operationStartTime = Date()
+    setStatus("connecting")
 
-
-      self.lastDataSentTime = Date()
-    }
+    self.rtmpConnection?.connect(url)
 
-
-      "fps": fps,
-      "bps": bps,
-      "timestamp": ISO8601DateFormatter().string(from: Date())
-    ])
-    }
-  }
+    let deadline = Date().addingTimeInterval(10)
 
-
-
-
-
+    while Date() < deadline {
+      if connection.connected { break }
+      try? await Task.sleep(nanoseconds: 50_000_000)
+    }
 
-
+    guard connection.connected else {
+      print("[ExpoSettings] Connect timeout")
+      setStatus("error")
+      return
+    }
 
-
-
-    print("[ExpoSettings] ⏱️ stopStream at \(ISO8601DateFormatter().string(from: stopRequestTime!))")
+    setStatus("connected")
+    try? await Task.sleep(nanoseconds: 150_000_000)
 
-
-
+    setStatus("publishing")
+    stream.publish(streamKey)
 
-
+    try? await Task.sleep(nanoseconds: 200_000_000)
 
-
-    print("[ExpoSettings] No active stream to stop")
-    setStatus("stopped")
-    return
-    }
+    let ms = Int(Date().timeIntervalSince(operationStartTime!) * 1000)
 
-
+    print("[ExpoSettings] STREAM STARTED in \(ms)ms")
 
-
-    print("[ExpoSettings] 📤 Stop capture (keep RTMP open for flush)")
-    stream.attachCamera(nil) { _, _ in }
-    stream.attachAudio(nil) { _, _ in }
+    setStatus("started")
 
-
-
-
+    sendEvent("onStreamTiming", [
+      "event": "firstDataSent",
+      "delayMs": ms,
+      "timestamp": Self.isoFormatter.string(from: Date())
+    ])
+  }
 
-
-    let maxFlushSeconds: TimeInterval = 12.0
-    let stableZeroNeeded: Int = 6 // 6 * 0.2s = 1.2s stable
+  private func stopStream() async {
 
-
-    var stableZeroCount = 0
+    print("[ExpoSettings] stopStream")
 
-
-
+    guard let stream = rtmpStream,
+          let connection = rtmpConnection else {
+      cleanup()
+      setStatus("stopped")
+      return
+    }
 
-
-
+    operationStartTime = Date()
+    setStatus("stopping")
 
-
-
-
-      stableZeroCount = 0
-    } else {
-      stableZeroCount += 1
-    }
+    // Stop capture
+    stream.attachCamera(nil)
+    stream.attachAudio(nil)
 
-
+    // Flush encoder (GOP + 0.5s)
+    let flushNs = UInt64(gopSeconds) * 1_000_000_000 + 500_000_000
+    try? await Task.sleep(nanoseconds: flushNs)
 
-
-
-
+    // Close stream
+    stream.close()
+    try? await Task.sleep(nanoseconds: 300_000_000)
 
-    // Close
-
+    // Close socket
+    connection.close()
 
-
-    guard let self else { return }
-    self.rtmpConnection?.close()
+    cleanup()
 
-
-    guard let self else { return }
+    let ms = Int(Date().timeIntervalSince(operationStartTime!) * 1000)
 
-
-    if let preview = ExpoSettingsView.current {
-      preview.attachStream(nil)
-    }
+    print("[ExpoSettings] STOPPED in \(ms)ms")
 
-
-    let totalMs = self.stopRequestTime.map { Int(finalTime.timeIntervalSince($0) * 1000) } ?? -1
+    setStatus("stopped")
 
-
-
-
-
-
+    sendEvent("onStreamTiming", [
+      "event": "shutdownComplete",
+      "totalDurationMs": ms,
+      "timestamp": Self.isoFormatter.string(from: Date())
+    ])
+  }
 
-
-    self.rtmpConnection = nil
-    self.pendingPublish = nil
+  private func cleanup() {
 
-
-    print("[ExpoSettings] ✅ Stream stopped (total \(totalMs)ms)")
-  }
-  }
+    print("[ExpoSettings] Cleanup")
 
-
-
+    rtmpStream?.attachCamera(nil)
+    rtmpStream?.attachAudio(nil)
+    rtmpStream?.close()
+    rtmpStream = nil
 
-
-
+    rtmpConnection?.close()
+    rtmpConnection = nil
   }
-
-    flushTick()
-  }
 }
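
Note on the reworked module surface above: the definition now exposes Events("onStreamStatus", "onStreamTiming") plus Function entries for getStreamStatus, initializePreview, publishStream, stopStream, and forceCleanup. A minimal consumer-side sketch follows; the TypeScript wrapper shipped in build/index.js is not part of this diff, so the import style, the typed payload shapes, and the placeholder URL/stream key below are assumptions inferred from the native definition, not the package's published API.

  import { EventEmitter, requireNativeModule } from "expo-modules-core";

  // Hypothetical wrapper around the native module named "ExpoSettings" above.
  const ExpoSettings = requireNativeModule("ExpoSettings");
  const emitter = new EventEmitter(ExpoSettings);

  // Status strings seen in the native code: idle, previewInitializing, previewReady,
  // connecting, connected, publishing, started, stopping, stopped, error.
  const statusSub = emitter.addListener(
    "onStreamStatus",
    (e: { status: string; timestamp: string }) => {
      console.log("stream status:", e.status, "at", e.timestamp);
    }
  );

  // Timing events emitted natively: previewReady (durationMs),
  // firstDataSent (delayMs), shutdownComplete (totalDurationMs).
  const timingSub = emitter.addListener(
    "onStreamTiming",
    (e: Record<string, unknown>) => {
      console.log("stream timing:", e);
    }
  );

  // Warm up the camera/preview, then publish once status reaches "previewReady".
  ExpoSettings.initializePreview();
  // Placeholder ingest URL and key:
  ExpoSettings.publishStream("rtmp://ingest.example.com/live", "PLACEHOLDER_STREAM_KEY");
  // Later, tear the stream down (forceCleanup is the hard-reset escape hatch):
  ExpoSettings.stopStream();

  statusSub.remove();
  timingSub.remove();
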
package/ios/ExpoSettingsView.swift
CHANGED

@@ -11,9 +11,6 @@ public class ExpoSettingsView: ExpoView {
     return view
   }()
 
-  // Guarda stream para reattach se a view recriar/layout mudar
-  private weak var attachedStream: RTMPStream?
-
   required init(appContext: AppContext? = nil) {
     super.init(appContext: appContext)
     clipsToBounds = true
@@ -27,10 +24,8 @@ public class ExpoSettingsView: ExpoView {
     hkView.frame = bounds
   }
 
-
-
-    attachedStream = stream
-    hkView.attachStream(stream) // normalmente aceita nil
+  public func attachStream(_ stream: RTMPStream) {
+    hkView.attachStream(stream)
   }
 
   deinit {
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "blix-expo-settings",
-  "version": "0.1.12",
+  "version": "0.1.14",
   "description": "LiveStream",
   "main": "build/index.js",
   "types": "build/index.d.ts",
@@ -29,7 +29,6 @@
   "license": "MIT",
   "homepage": "https://github.com/BlixTechnology/expo-settings#readme",
   "devDependencies": {
-    "@react-native-community/cli-server-api": "^20.1.1",
     "@types/react": "~19.0.0",
     "expo": "^50.0.21",
     "expo-module-scripts": "^4.1.7",
@@ -39,8 +38,5 @@
     "expo": "*",
     "react": "*",
     "react-native": "*"
-  },
-  "dependencies": {
-    "expo-dev-client": "~3.3.12"
   }
 }