@trustchex/react-native-sdk 1.381.0 → 1.409.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/java/com/trustchex/reactnativesdk/camera/TrustchexCameraView.kt +1 -12
- package/android/src/main/java/com/trustchex/reactnativesdk/mlkit/MLKitModule.kt +1 -1
- package/ios/Camera/TrustchexCameraView.swift +1 -12
- package/ios/MLKit/MLKitModule.swift +1 -1
- package/lib/module/Screens/Debug/BarcodeTestScreen.js +308 -0
- package/lib/module/Screens/Debug/MRZTestScreen.js +105 -13
- package/lib/module/Screens/Dynamic/ContractAcceptanceScreen.js +49 -29
- package/lib/module/Screens/Dynamic/IdentityDocumentEIDScanningScreen.js +5 -0
- package/lib/module/Screens/Dynamic/IdentityDocumentScanningScreen.js +5 -0
- package/lib/module/Screens/Dynamic/LivenessDetectionScreen.js +26 -6
- package/lib/module/Screens/Dynamic/VideoCallScreen.js +676 -0
- package/lib/module/Screens/Static/OTPVerificationScreen.js +6 -0
- package/lib/module/Screens/Static/QrCodeScanningScreen.js +7 -1
- package/lib/module/Screens/Static/ResultScreen.js +27 -13
- package/lib/module/Screens/Static/VerificationSessionCheckScreen.js +51 -51
- package/lib/module/Shared/Animations/video-call.json +1 -0
- package/lib/module/Shared/Components/DebugNavigationPanel.js +180 -14
- package/lib/module/Shared/Components/EIDScanner.js +1 -4
- package/lib/module/Shared/Components/IdentityDocumentCamera.js +29 -8
- package/lib/module/Shared/Components/NavigationManager.js +15 -3
- package/lib/module/Shared/Contexts/AppContext.js +1 -0
- package/lib/module/Shared/Libs/SignalingClient.js +128 -0
- package/lib/module/Shared/Libs/analytics.utils.js +4 -0
- package/lib/module/Shared/Libs/deeplink.utils.js +9 -1
- package/lib/module/Shared/Libs/http-client.js +9 -0
- package/lib/module/Shared/Libs/promise.utils.js +16 -2
- package/lib/module/Shared/Libs/status-bar.utils.js +21 -0
- package/lib/module/Shared/Services/DataUploadService.js +294 -0
- package/lib/module/Shared/Services/VideoSessionService.js +156 -0
- package/lib/module/Shared/Services/WebRTCService.js +510 -0
- package/lib/module/Shared/Types/analytics.types.js +2 -0
- package/lib/module/Translation/Resources/en.js +20 -0
- package/lib/module/Translation/Resources/tr.js +20 -0
- package/lib/module/Trustchex.js +10 -0
- package/lib/module/version.js +1 -1
- package/lib/typescript/src/Screens/Debug/BarcodeTestScreen.d.ts +3 -0
- package/lib/typescript/src/Screens/Debug/BarcodeTestScreen.d.ts.map +1 -0
- package/lib/typescript/src/Screens/Debug/MRZTestScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/ContractAcceptanceScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/IdentityDocumentScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/LivenessDetectionScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Dynamic/VideoCallScreen.d.ts +3 -0
- package/lib/typescript/src/Screens/Dynamic/VideoCallScreen.d.ts.map +1 -0
- package/lib/typescript/src/Screens/Static/OTPVerificationScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/QrCodeScanningScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/ResultScreen.d.ts.map +1 -1
- package/lib/typescript/src/Screens/Static/VerificationSessionCheckScreen.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/DebugNavigationPanel.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/EIDScanner.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/IdentityDocumentCamera.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Components/NavigationManager.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Contexts/AppContext.d.ts +1 -0
- package/lib/typescript/src/Shared/Contexts/AppContext.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/SignalingClient.d.ts +24 -0
- package/lib/typescript/src/Shared/Libs/SignalingClient.d.ts.map +1 -0
- package/lib/typescript/src/Shared/Libs/analytics.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/deeplink.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/http-client.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/promise.utils.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Libs/status-bar.utils.d.ts +9 -0
- package/lib/typescript/src/Shared/Libs/status-bar.utils.d.ts.map +1 -0
- package/lib/typescript/src/Shared/Services/DataUploadService.d.ts +25 -0
- package/lib/typescript/src/Shared/Services/DataUploadService.d.ts.map +1 -0
- package/lib/typescript/src/Shared/Services/VideoSessionService.d.ts +33 -0
- package/lib/typescript/src/Shared/Services/VideoSessionService.d.ts.map +1 -0
- package/lib/typescript/src/Shared/Services/WebRTCService.d.ts +58 -0
- package/lib/typescript/src/Shared/Services/WebRTCService.d.ts.map +1 -0
- package/lib/typescript/src/Shared/Types/analytics.types.d.ts +2 -0
- package/lib/typescript/src/Shared/Types/analytics.types.d.ts.map +1 -1
- package/lib/typescript/src/Shared/Types/identificationInfo.d.ts +4 -1
- package/lib/typescript/src/Shared/Types/identificationInfo.d.ts.map +1 -1
- package/lib/typescript/src/Translation/Resources/en.d.ts +20 -0
- package/lib/typescript/src/Translation/Resources/en.d.ts.map +1 -1
- package/lib/typescript/src/Translation/Resources/tr.d.ts +20 -0
- package/lib/typescript/src/Translation/Resources/tr.d.ts.map +1 -1
- package/lib/typescript/src/Trustchex.d.ts.map +1 -1
- package/lib/typescript/src/version.d.ts +1 -1
- package/package.json +29 -2
- package/src/Screens/Debug/BarcodeTestScreen.tsx +317 -0
- package/src/Screens/Debug/MRZTestScreen.tsx +107 -13
- package/src/Screens/Dynamic/ContractAcceptanceScreen.tsx +59 -33
- package/src/Screens/Dynamic/IdentityDocumentEIDScanningScreen.tsx +6 -0
- package/src/Screens/Dynamic/IdentityDocumentScanningScreen.tsx +6 -0
- package/src/Screens/Dynamic/LivenessDetectionScreen.tsx +34 -6
- package/src/Screens/Dynamic/VideoCallScreen.tsx +764 -0
- package/src/Screens/Static/OTPVerificationScreen.tsx +6 -0
- package/src/Screens/Static/QrCodeScanningScreen.tsx +7 -1
- package/src/Screens/Static/ResultScreen.tsx +58 -23
- package/src/Screens/Static/VerificationSessionCheckScreen.tsx +58 -72
- package/src/Shared/Animations/video-call.json +1 -0
- package/src/Shared/Components/DebugNavigationPanel.tsx +185 -9
- package/src/Shared/Components/EIDScanner.tsx +1 -5
- package/src/Shared/Components/IdentityDocumentCamera.tsx +29 -8
- package/src/Shared/Components/NavigationManager.tsx +14 -1
- package/src/Shared/Contexts/AppContext.ts +2 -0
- package/src/Shared/Libs/SignalingClient.ts +189 -0
- package/src/Shared/Libs/analytics.utils.ts +4 -0
- package/src/Shared/Libs/deeplink.utils.ts +12 -1
- package/src/Shared/Libs/http-client.ts +10 -0
- package/src/Shared/Libs/promise.utils.ts +16 -2
- package/src/Shared/Libs/status-bar.utils.ts +19 -0
- package/src/Shared/Services/DataUploadService.ts +395 -0
- package/src/Shared/Services/VideoSessionService.ts +190 -0
- package/src/Shared/Services/WebRTCService.ts +636 -0
- package/src/Shared/Types/analytics.types.ts +2 -0
- package/src/Shared/Types/identificationInfo.ts +5 -1
- package/src/Translation/Resources/en.ts +25 -0
- package/src/Translation/Resources/tr.ts +27 -0
- package/src/Trustchex.tsx +12 -2
- package/src/version.ts +1 -1
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
export class VideoSessionService {
|
|
4
|
+
constructor(baseUrl, identificationId) {
|
|
5
|
+
this.baseUrl = baseUrl;
|
|
6
|
+
this.identificationId = identificationId;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Get current active video session for customer
|
|
11
|
+
*/
|
|
12
|
+
async getCurrentSession() {
|
|
13
|
+
const params = new URLSearchParams();
|
|
14
|
+
if (this.identificationId) {
|
|
15
|
+
params.append('identificationId', this.identificationId);
|
|
16
|
+
}
|
|
17
|
+
const url = `${this.baseUrl}/api/app/mobile/video-sessions/current${params.toString() ? '?' + params.toString() : ''}`;
|
|
18
|
+
try {
|
|
19
|
+
const response = await fetch(url, {
|
|
20
|
+
method: 'GET',
|
|
21
|
+
headers: {
|
|
22
|
+
'Content-Type': 'application/json'
|
|
23
|
+
}
|
|
24
|
+
});
|
|
25
|
+
if (!response.ok) {
|
|
26
|
+
throw new Error(`Failed to get current session: ${response.statusText}`);
|
|
27
|
+
}
|
|
28
|
+
return await response.json();
|
|
29
|
+
} catch (error) {
|
|
30
|
+
console.error('[VideoSessionService] Error getting current session:', error);
|
|
31
|
+
throw error;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Join a video session
|
|
37
|
+
*/
|
|
38
|
+
async joinSession(sessionId) {
|
|
39
|
+
const params = new URLSearchParams();
|
|
40
|
+
if (this.identificationId) {
|
|
41
|
+
params.append('identificationId', this.identificationId);
|
|
42
|
+
}
|
|
43
|
+
const url = `${this.baseUrl}/api/app/mobile/video-sessions/${sessionId}/join${params.toString() ? '?' + params.toString() : ''}`;
|
|
44
|
+
try {
|
|
45
|
+
const response = await fetch(url, {
|
|
46
|
+
method: 'POST',
|
|
47
|
+
headers: {
|
|
48
|
+
'Content-Type': 'application/json'
|
|
49
|
+
}
|
|
50
|
+
});
|
|
51
|
+
if (!response.ok) {
|
|
52
|
+
throw new Error(`Failed to join session: ${response.statusText}`);
|
|
53
|
+
}
|
|
54
|
+
return await response.json();
|
|
55
|
+
} catch (error) {
|
|
56
|
+
console.error('[VideoSessionService] Error joining session:', error);
|
|
57
|
+
throw error;
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* Send a heartbeat to keep the queue-status SSE watchdog alive
|
|
63
|
+
*/
|
|
64
|
+
async sendHeartbeat(sessionId) {
|
|
65
|
+
const params = new URLSearchParams();
|
|
66
|
+
if (this.identificationId) {
|
|
67
|
+
params.append('identificationId', this.identificationId);
|
|
68
|
+
}
|
|
69
|
+
const url = `${this.baseUrl}/api/app/mobile/video-sessions/${sessionId}/heartbeat${params.toString() ? '?' + params.toString() : ''}`;
|
|
70
|
+
try {
|
|
71
|
+
await fetch(url, {
|
|
72
|
+
method: 'POST'
|
|
73
|
+
});
|
|
74
|
+
} catch {
|
|
75
|
+
// best-effort, ignore errors
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Leave the session queue (drops customer from WAITING state)
|
|
81
|
+
*/
|
|
82
|
+
async leaveSession(sessionId) {
|
|
83
|
+
const params = new URLSearchParams();
|
|
84
|
+
if (this.identificationId) {
|
|
85
|
+
params.append('identificationId', this.identificationId);
|
|
86
|
+
}
|
|
87
|
+
const url = `${this.baseUrl}/api/app/mobile/video-sessions/${sessionId}/join${params.toString() ? '?' + params.toString() : ''}`;
|
|
88
|
+
try {
|
|
89
|
+
await fetch(url, {
|
|
90
|
+
method: 'DELETE'
|
|
91
|
+
});
|
|
92
|
+
} catch (error) {
|
|
93
|
+
console.error('[VideoSessionService] Error leaving session:', error);
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
/**
|
|
98
|
+
* Subscribe to queue status updates via Server-Sent Events (SSE)
|
|
99
|
+
*/
|
|
100
|
+
subscribeToQueueUpdates(sessionId, onUpdate, onError) {
|
|
101
|
+
try {
|
|
102
|
+
// Use react-native-sse for real SSE support
|
|
103
|
+
const EventSource = require('react-native-sse').default;
|
|
104
|
+
const url = `${this.baseUrl}/api/app/mobile/video-sessions/${sessionId}/queue-status?identificationId=${this.identificationId}`;
|
|
105
|
+
console.log('[VideoSessionService] Creating SSE connection to:', url);
|
|
106
|
+
const es = new EventSource(url);
|
|
107
|
+
let heartbeatInterval = null;
|
|
108
|
+
es.addEventListener('open', () => {
|
|
109
|
+
console.log('[VideoSessionService] Queue SSE connected');
|
|
110
|
+
// Send initial heartbeat
|
|
111
|
+
this.sendHeartbeat(sessionId).catch(() => {});
|
|
112
|
+
// Set up periodic heartbeat (every 20s to stay under 30s watchdog)
|
|
113
|
+
heartbeatInterval = setInterval(() => {
|
|
114
|
+
this.sendHeartbeat(sessionId).catch(() => {});
|
|
115
|
+
}, 20000);
|
|
116
|
+
});
|
|
117
|
+
es.addEventListener('ping', () => {
|
|
118
|
+
// Server sent a ping, respond with heartbeat
|
|
119
|
+
this.sendHeartbeat(sessionId).catch(() => {});
|
|
120
|
+
});
|
|
121
|
+
es.addEventListener('message', event => {
|
|
122
|
+
try {
|
|
123
|
+
const data = JSON.parse(event.data);
|
|
124
|
+
console.log('[VideoSessionService] Queue update:', data);
|
|
125
|
+
if (data.queuePosition !== undefined) {
|
|
126
|
+
onUpdate(data.queuePosition);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// Close connection if no longer waiting
|
|
130
|
+
if (data.callState !== 'WAITING') {
|
|
131
|
+
if (heartbeatInterval) clearInterval(heartbeatInterval);
|
|
132
|
+
es.close();
|
|
133
|
+
}
|
|
134
|
+
} catch (error) {
|
|
135
|
+
console.error('[SSE] Failed to parse queue update:', error);
|
|
136
|
+
}
|
|
137
|
+
});
|
|
138
|
+
es.addEventListener('error', event => {
|
|
139
|
+
console.error('[SSE] Connection error:', event);
|
|
140
|
+
if (heartbeatInterval) clearInterval(heartbeatInterval);
|
|
141
|
+
onError(new Error('SSE connection failed'));
|
|
142
|
+
es.close();
|
|
143
|
+
});
|
|
144
|
+
|
|
145
|
+
// Return cleanup function
|
|
146
|
+
return () => {
|
|
147
|
+
if (heartbeatInterval) clearInterval(heartbeatInterval);
|
|
148
|
+
es.close();
|
|
149
|
+
};
|
|
150
|
+
} catch (error) {
|
|
151
|
+
console.error('[VideoSessionService] Failed to create SSE connection:', error);
|
|
152
|
+
onError(error instanceof Error ? error : new Error('Failed to create SSE connection'));
|
|
153
|
+
return () => {};
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
}
|
|
@@ -0,0 +1,510 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
import { RTCPeerConnection, RTCIceCandidate, RTCSessionDescription, mediaDevices, MediaStream } from 'react-native-webrtc';
|
|
4
|
+
import { SignalingClient } from "../Libs/SignalingClient.js";
|
|
5
|
+
export class WebRTCService {
|
|
6
|
+
// RTCPeerConnection, created in initialize(); null after cleanup().
peerConnection = null;
// Local camera + mic MediaStream from getUserMedia.
localStream = null;
// Remote MediaStream assembled from 'track' events.
remoteStream = null;
// Current torch state, toggled by toggleFlash().
isFlashOn = false;
// ICE candidates received before the remote description was set.
pendingCandidates = [];
// True after setRemoteDescription has succeeded.
hasRemoteDescription = false;
// Guard: true while an incoming offer is being answered.
isProcessingOffer = false;
// SDP of the last answered offer, used to drop duplicate offers.
lastProcessedOfferSdp = null;
// True once local tracks were attached to the peer connection.
tracksAdded = false;
// Offer buffered when it arrives before local tracks are ready.
pendingOffer = null;
// Debounce flag for switchCamera().
isSwitchingCamera = false;
// Timestamp (ms) of the last camera switch (2s cooldown).
lastSwitchTime = 0;
// Active camera facing: 'user' (front) or 'environment' (rear).
facingMode = 'user';
// 'ended' listener re-attached across camera track swaps.
videoTrackEndedHandler = null;
/**
 * @param {object} config
 * @param {(stream) => void} config.onRemoteStream - Called whenever the remote stream is (re)assembled.
 * @param {(state: string) => void} config.onConnectionStateChange - Receives mapped ICE connection states.
 * @param {(payload) => void} [config.onCommand] - Observer for raw agent commands.
 * @param {(requestId: string) => void} [config.onSnapshotRequest] - Delegated snapshot capture (has RTCView access).
 * @param {(stream) => void} [config.onLocalStreamUpdate] - Called when the local stream object is rebuilt.
 * @param {string} config.baseUrl - API origin used for ICE-server fetches.
 * @param {string} config.sessionId - Video session identifier.
 * @param {string} [config.identificationId] - Optional identification scope for API calls.
 * @param {() => void} [config.onSessionEnded] - Forwarded to the SignalingClient.
 */
constructor(config) {
  this.onRemoteStream = config.onRemoteStream;
  this.onConnectionStateChange = config.onConnectionStateChange;
  this.onCommand = config.onCommand;
  this.onSnapshotRequest = config.onSnapshotRequest;
  this.onLocalStreamUpdate = config.onLocalStreamUpdate;
  this.baseUrl = config.baseUrl;
  this.sessionId = config.sessionId;
  this.identificationId = config.identificationId;
  // SignalingClient delivers incoming messages to handleSignalingMessage (bound here).
  this.signalingClient = new SignalingClient(config.baseUrl, config.sessionId, this.handleSignalingMessage.bind(this), config.identificationId, config.onSessionEnded);
}
|
|
31
|
+
async fetchIceServers() {
|
|
32
|
+
try {
|
|
33
|
+
const params = this.identificationId ? `?identificationId=${this.identificationId}` : '';
|
|
34
|
+
const response = await fetch(`${this.baseUrl}/api/app/mobile/video-sessions/${this.sessionId}/ice-servers${params}`);
|
|
35
|
+
if (!response.ok) return [];
|
|
36
|
+
const data = await response.json();
|
|
37
|
+
return Array.isArray(data.iceServers) ? data.iceServers : [];
|
|
38
|
+
} catch {
|
|
39
|
+
return [];
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
/**
 * Acquire local media, create the peer connection, wire its event handlers,
 * and connect signaling.
 *
 * Order matters: the peer connection and signaling are set up BEFORE
 * getUserMedia so the agent's offer is never missed while the camera
 * initializes; any offer arriving early is buffered in pendingOffer and
 * replayed once local tracks are attached.
 *
 * @returns {Promise<MediaStream>} The local (camera + mic) stream.
 * @throws Rethrows any failure after logging it.
 */
async initialize() {
  console.log('[WebRTCService] Initializing...');
  try {
    // Fetch ICE servers before getUserMedia so peer connection is ready sooner
    const iceServers = await this.fetchIceServers();

    // Initialize PeerConnection before getUserMedia so signaling can start
    // immediately — avoids missing the agent's offer while camera init blocks.
    this.peerConnection = new RTCPeerConnection({
      iceServers,
      iceCandidatePoolSize: 2
    });

    // Set up event handlers before signaling connects.
    // Use addEventListener instead of property assignment for reliable
    // delivery in react-native-webrtc (property may be overwritten).
    this.peerConnection.addEventListener('track', event => {
      if (event.streams && event.streams[0]) {
        this.remoteStream = event.streams[0];
        this.onRemoteStream(this.remoteStream);
      } else if (event.track) {
        // Track arrived without a stream: accumulate tracks into one stream.
        const existingTracks = this.remoteStream ? this.remoteStream.getTracks() : [];
        this.remoteStream = new MediaStream([...existingTracks, event.track]);
        this.onRemoteStream(this.remoteStream);
      }
    });
    this.peerConnection.addEventListener('icecandidate', event => {
      if (event.candidate) {
        this.signalingClient.send('ice-candidate', event.candidate.toJSON());
      }
    });

    // react-native-webrtc often does not fire connectionstatechange or sets
    // connectionState to undefined. Use iceconnectionstatechange which is
    // reliably implemented across all versions.
    this.peerConnection.addEventListener('iceconnectionstatechange', () => {
      if (!this.peerConnection) return;
      const iceState = this.peerConnection.iceConnectionState || 'new';
      // Map ICE states to the connection states the UI expects
      const mapped = iceState === 'connected' || iceState === 'completed' ? 'connected' : iceState === 'failed' ? 'failed' : iceState === 'disconnected' ? 'disconnected' : iceState === 'closed' ? 'closed' : iceState;
      this.onConnectionStateChange(mapped);
      if (mapped === 'connected') {
        this.sendTorchAvailability();
      }
    });

    // Connect signaling now — peer connection + handlers are ready.
    // Any offer that arrives before local tracks are added is buffered in
    // pendingOffer and processed once tracks are attached below.
    this.signalingClient.connect();

    // Get local stream. Avoid strict min constraints that hang on some Android
    // devices when the camera cannot satisfy the combination.
    const stream = await mediaDevices.getUserMedia({
      audio: true,
      video: {
        facingMode: 'user',
        frameRate: {
          ideal: 30
        }
      }
    });
    this.localStream = stream;

    // Add local tracks
    stream.getTracks().forEach(track => {
      this.peerConnection?.addTrack(track, stream);
    });
    this.tracksAdded = true;

    // If the agent's offer arrived while getUserMedia was pending, process it now.
    if (this.pendingOffer) {
      const buffered = this.pendingOffer;
      this.pendingOffer = null;
      await this.handleSignalingMessage(buffered);
    }
    this.sendTorchAvailability();
    return stream;
  } catch (error) {
    console.error('[WebRTCService] Initialization failed:', error);
    throw error;
  }
}
|
|
125
|
+
/**
 * Process one message from the signaling channel.
 *
 * Supported types:
 *  - 'offer': answered (with buffering/duplicate/concurrency guards);
 *  - 'answer': applied as the remote description;
 *  - 'ice-candidate': applied immediately, or buffered until a remote
 *    description exists (flushed via addPendingCandidates);
 *  - 'command': delegated to handleCommand.
 *
 * Errors are logged, never rethrown, so one malformed message cannot break
 * the signaling loop. No-op when the peer connection does not exist.
 *
 * @param {{type: string, payload: any}} message
 */
async handleSignalingMessage(message) {
  if (!this.peerConnection) return;
  try {
    switch (message.type) {
      case 'offer':
        {
          // Buffer the offer until local tracks have been added so the answer
          // includes the mobile's audio/video tracks.
          if (!this.tracksAdded) {
            console.log('[WebRTCService] Offer received before tracks ready, buffering');
            this.pendingOffer = message;
            return;
          }

          // Skip if we're already processing an offer
          if (this.isProcessingOffer) {
            console.log('[WebRTCService] Skipping offer - already processing one');
            return;
          }
          const offerSdp = message.payload?.sdp;
          if (offerSdp && this.lastProcessedOfferSdp === offerSdp) {
            console.log('[WebRTCService] Skipping offer - duplicate SDP');
            return;
          }
          this.isProcessingOffer = true;
          console.log('[WebRTCService] Received offer');
          try {
            await this.peerConnection.setRemoteDescription(new RTCSessionDescription(message.payload));
            this.hasRemoteDescription = true;
            // Add any pending candidates
            await this.addPendingCandidates();
            const answer = await this.peerConnection.createAnswer();
            await this.peerConnection.setLocalDescription(answer);
            await this.signalingClient.send('answer', {
              type: answer.type,
              sdp: answer.sdp
            });
            // Record the SDP only after a successful answer, so a failed
            // attempt does not suppress a retried identical offer.
            this.lastProcessedOfferSdp = offerSdp ?? null;
          } finally {
            this.isProcessingOffer = false;
          }
          break;
        }
      case 'answer':
        console.log('[WebRTCService] Received answer');
        await this.peerConnection.setRemoteDescription(new RTCSessionDescription(message.payload));
        this.hasRemoteDescription = true;
        // Add any pending candidates
        await this.addPendingCandidates();
        break;
      case 'ice-candidate':
        if (message.payload) {
          const candidate = new RTCIceCandidate(message.payload);
          if (this.hasRemoteDescription) {
            console.log('[WebRTCService] Adding ICE candidate');
            await this.peerConnection.addIceCandidate(candidate);
          } else {
            // No remote description yet — buffer for addPendingCandidates().
            this.pendingCandidates.push(candidate);
          }
        }
        break;
      case 'command':
        this.handleCommand(message.payload);
        break;
    }
  } catch (error) {
    console.error('[WebRTCService] Error handling signaling:', error);
  }
}
|
|
194
|
+
handleCommand(payload) {
|
|
195
|
+
console.log('[WebRTCService] Received command:', payload);
|
|
196
|
+
if (this.onCommand) {
|
|
197
|
+
this.onCommand(payload);
|
|
198
|
+
}
|
|
199
|
+
switch (payload.type) {
|
|
200
|
+
case 'switchCamera':
|
|
201
|
+
this.switchCamera();
|
|
202
|
+
break;
|
|
203
|
+
case 'toggleFlash':
|
|
204
|
+
this.toggleFlash();
|
|
205
|
+
break;
|
|
206
|
+
case 'captureSnapshot':
|
|
207
|
+
this.captureSnapshot(payload.requestId);
|
|
208
|
+
break;
|
|
209
|
+
case 'takeSnapshot':
|
|
210
|
+
this.captureSnapshot(payload.requestId);
|
|
211
|
+
break;
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
async addPendingCandidates() {
|
|
215
|
+
if (this.pendingCandidates.length > 0 && this.peerConnection) {
|
|
216
|
+
console.log(`[WebRTCService] Adding ${this.pendingCandidates.length} pending ICE candidates`);
|
|
217
|
+
for (const candidate of this.pendingCandidates) {
|
|
218
|
+
try {
|
|
219
|
+
await this.peerConnection.addIceCandidate(candidate);
|
|
220
|
+
} catch (error) {
|
|
221
|
+
console.error('[WebRTCService] Error adding pending candidate:', error);
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
this.pendingCandidates = [];
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
cleanup() {
|
|
228
|
+
this.localStream?.getTracks().forEach(t => t.stop());
|
|
229
|
+
this.peerConnection?.close();
|
|
230
|
+
this.signalingClient.disconnect();
|
|
231
|
+
this.localStream = null;
|
|
232
|
+
this.remoteStream = null;
|
|
233
|
+
this.peerConnection = null;
|
|
234
|
+
this.pendingCandidates = [];
|
|
235
|
+
this.hasRemoteDescription = false;
|
|
236
|
+
this.lastProcessedOfferSdp = null;
|
|
237
|
+
this.isProcessingOffer = false;
|
|
238
|
+
this.tracksAdded = false;
|
|
239
|
+
this.pendingOffer = null;
|
|
240
|
+
}
|
|
241
|
+
/**
 * Toggle between the front ('user') and rear ('environment') cameras.
 *
 * Strategy: detach the video track from its RTCRtpSender, stop it to fully
 * release the camera HAL, wait briefly, acquire the other camera, then swap
 * the new track back into the sender via replaceTrack(). Debounced: calls
 * are ignored while a switch is in flight or within a 2s cooldown.
 *
 * NOTE(review): if getUserMedia fails after the old track is stopped, the
 * call continues with no outgoing video until the next successful switch or
 * reacquireVideoTrack() — confirm this is the intended failure mode.
 */
async switchCamera() {
  // Debounce: ignore if already switching or within 2s cooldown
  const now = Date.now();
  if (this.isSwitchingCamera || now - this.lastSwitchTime < 2000) {
    console.warn('[WebRTCService] switchCamera ignored — cooldown');
    return;
  }
  if (!this.localStream || !this.peerConnection) return;
  const videoTrack = this.localStream.getVideoTracks()[0];
  if (!videoTrack) return;
  this.isSwitchingCamera = true;
  this.lastSwitchTime = now;
  try {
    const newFacing = this.facingMode === 'user' ? 'environment' : 'user';

    // First, remove the video track from the sender (set to null).
    // This signals to the encoder to stop, which releases the camera HAL.
    const senders = this.peerConnection.getSenders?.();
    let videoSender = null;
    if (senders) {
      for (const sender of senders) {
        if (sender.track?.kind === 'video') {
          videoSender = sender;
          await sender.replaceTrack(null);
          break;
        }
      }
    }

    // Stop the old track to fully release the camera hardware.
    videoTrack.stop();

    // Small delay to let the camera HAL fully release on Android.
    await new Promise(res => setTimeout(res, 200));

    // Acquire a new stream from the target camera. Now that the old camera
    // is fully released, there's no concurrent camera conflict.
    const newStream = await mediaDevices.getUserMedia({
      audio: false,
      video: {
        facingMode: newFacing,
        frameRate: {
          ideal: 30
        }
      }
    });
    const newVideoTrack = newStream.getVideoTracks()[0];
    if (!newVideoTrack) {
      console.warn('[WebRTCService] switchCamera: no video track from getUserMedia');
      return;
    }

    // Replace the null track in the sender with the new camera track.
    // This properly notifies the WebRTC encoder to start encoding from
    // the new camera — unlike _switchCamera() which only changes the
    // hardware source without informing the encoder/remote side.
    if (videoSender) {
      await videoSender.replaceTrack(newVideoTrack);
    }

    // Build a new MediaStream with existing audio + new video.
    const audioTracks = this.localStream.getAudioTracks();
    this.localStream = new MediaStream([...audioTracks, newVideoTrack]);
    this.facingMode = newFacing;

    // Listen for the track ending unexpectedly (Android camera HAL power
    // management can kill the rear camera mid-call). Re-acquire on ended.
    if (this.videoTrackEndedHandler) {
      videoTrack.removeEventListener?.('ended', this.videoTrackEndedHandler);
    }
    this.videoTrackEndedHandler = () => {
      console.warn('[WebRTCService] video track ended unexpectedly — re-acquiring');
      this.reacquireVideoTrack();
    };
    newVideoTrack.addEventListener?.('ended', this.videoTrackEndedHandler);

    // Notify the screen to update its local stream reference and remount RTCView
    this.onLocalStreamUpdate?.(this.localStream);
  } catch (error) {
    console.warn('[WebRTCService] switchCamera failed:', error);
  } finally {
    this.isSwitchingCamera = false;
  }
}
|
|
325
|
+
/**
 * Re-acquire a video track for the current facingMode after the active track
 * ended unexpectedly (Android camera HAL power management can kill the rear
 * camera mid-call), swap it into the video sender, and publish the rebuilt
 * local stream via onLocalStreamUpdate. Best-effort: failures are logged and
 * swallowed.
 */
async reacquireVideoTrack() {
  if (!this.localStream || !this.peerConnection) return;
  try {
    const newStream = await mediaDevices.getUserMedia({
      audio: false,
      video: {
        facingMode: this.facingMode,
        frameRate: {
          ideal: 30
        }
      }
    });
    const newVideoTrack = newStream.getVideoTracks()[0];
    if (!newVideoTrack) return;
    const senders = this.peerConnection.getSenders?.();
    if (senders) {
      for (const sender of senders) {
        // The sender's track may be null if replaceTrack(null) ran during a
        // camera switch, so match both the video kind and the null case.
        if (sender.track?.kind === 'video' || sender.track === null) {
          await sender.replaceTrack(newVideoTrack);
          break;
        }
      }
    }
    // Keep watching for unexpected 'ended' on the replacement track too.
    if (this.videoTrackEndedHandler) {
      newVideoTrack.addEventListener?.('ended', this.videoTrackEndedHandler);
    }
    const audioTracks = this.localStream.getAudioTracks();
    this.localStream = new MediaStream([...audioTracks, newVideoTrack]);
    this.onLocalStreamUpdate?.(this.localStream);
  } catch (error) {
    console.warn('[WebRTCService] reacquireVideoTrack failed:', error);
  }
}
|
|
358
|
+
async toggleFlash() {
|
|
359
|
+
const videoTrack = this.localStream?.getVideoTracks()[0];
|
|
360
|
+
if (!videoTrack) {
|
|
361
|
+
console.warn('[WebRTCService] No video track available for flash toggle');
|
|
362
|
+
return;
|
|
363
|
+
}
|
|
364
|
+
this.isFlashOn = !this.isFlashOn;
|
|
365
|
+
|
|
366
|
+
// applyConstraints is the only reliable method for torch on Android.
|
|
367
|
+
// Retry up to 3 times with 300ms delay to handle "Camera switch already in progress" race.
|
|
368
|
+
let success = false;
|
|
369
|
+
for (let attempt = 0; attempt < 3; attempt++) {
|
|
370
|
+
try {
|
|
371
|
+
await videoTrack.applyConstraints({
|
|
372
|
+
advanced: [{
|
|
373
|
+
torch: this.isFlashOn
|
|
374
|
+
}]
|
|
375
|
+
});
|
|
376
|
+
success = true;
|
|
377
|
+
break;
|
|
378
|
+
} catch (error) {
|
|
379
|
+
const msg = error?.message || '';
|
|
380
|
+
if (msg.includes('Camera switch') && attempt < 2) {
|
|
381
|
+
await new Promise(res => setTimeout(res, 300));
|
|
382
|
+
continue;
|
|
383
|
+
}
|
|
384
|
+
console.warn('[WebRTCService] Failed to toggle flash:', error);
|
|
385
|
+
this.isFlashOn = !this.isFlashOn; // revert
|
|
386
|
+
this.signalingClient.send('command', {
|
|
387
|
+
type: 'torch_availability',
|
|
388
|
+
available: false
|
|
389
|
+
});
|
|
390
|
+
return;
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
if (!success) {
|
|
394
|
+
this.isFlashOn = !this.isFlashOn; // revert
|
|
395
|
+
this.signalingClient.send('command', {
|
|
396
|
+
type: 'torch_availability',
|
|
397
|
+
available: false
|
|
398
|
+
});
|
|
399
|
+
return;
|
|
400
|
+
}
|
|
401
|
+
|
|
402
|
+
// Confirm flash is available now that we know it works
|
|
403
|
+
this.signalingClient.send('command', {
|
|
404
|
+
type: 'torch_availability',
|
|
405
|
+
available: true
|
|
406
|
+
});
|
|
407
|
+
this.signalingClient.send('command', {
|
|
408
|
+
type: 'flash_state',
|
|
409
|
+
isFlashOn: this.isFlashOn
|
|
410
|
+
});
|
|
411
|
+
}
|
|
412
|
+
sendTorchAvailability() {
|
|
413
|
+
const videoTrack = this.localStream?.getVideoTracks()[0];
|
|
414
|
+
if (!videoTrack) {
|
|
415
|
+
this.signalingClient.send('command', {
|
|
416
|
+
type: 'torch_availability',
|
|
417
|
+
available: false
|
|
418
|
+
});
|
|
419
|
+
return;
|
|
420
|
+
}
|
|
421
|
+
let capabilities;
|
|
422
|
+
try {
|
|
423
|
+
capabilities = videoTrack.getCapabilities?.();
|
|
424
|
+
} catch {
|
|
425
|
+
capabilities = undefined;
|
|
426
|
+
}
|
|
427
|
+
|
|
428
|
+
// Only trust getCapabilities().torch — _setTorch exists on every Android
|
|
429
|
+
// track regardless of whether the camera actually has a flash.
|
|
430
|
+
const available = !!capabilities?.torch;
|
|
431
|
+
this.signalingClient.send('command', {
|
|
432
|
+
type: 'torch_availability',
|
|
433
|
+
available
|
|
434
|
+
});
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
/**
|
|
438
|
+
* Handle snapshot capture request. Captures a frame from the local video track directly.
|
|
439
|
+
*/
|
|
440
|
+
async captureSnapshot(requestId) {
|
|
441
|
+
console.log('[WebRTCService] Snapshot requested, requestId:', requestId);
|
|
442
|
+
if (this.onSnapshotRequest && requestId) {
|
|
443
|
+
// Delegate to parent component which has access to the RTCView
|
|
444
|
+
this.onSnapshotRequest(requestId);
|
|
445
|
+
return;
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
// Capture directly from local video track via react-native-webrtc captureFrame()
|
|
449
|
+
const videoTrack = this.localStream?.getVideoTracks()[0];
|
|
450
|
+
if (!videoTrack) {
|
|
451
|
+
console.error('[WebRTCService] No local video track for snapshot');
|
|
452
|
+
return;
|
|
453
|
+
}
|
|
454
|
+
try {
|
|
455
|
+
const frame = await videoTrack.captureFrame();
|
|
456
|
+
const base64 = frame?.data;
|
|
457
|
+
if (!base64) {
|
|
458
|
+
console.error('[WebRTCService] captureFrame returned no data');
|
|
459
|
+
return;
|
|
460
|
+
}
|
|
461
|
+
await this.uploadSnapshot(requestId, base64);
|
|
462
|
+
} catch (error) {
|
|
463
|
+
console.error('[WebRTCService] captureFrame failed:', error);
|
|
464
|
+
}
|
|
465
|
+
}
|
|
466
|
+
|
|
467
|
+
/**
|
|
468
|
+
* Upload a captured snapshot image (called by parent component after capturing RTCView).
|
|
469
|
+
*/
|
|
470
|
+
async uploadSnapshot(requestId, base64Image) {
|
|
471
|
+
console.log('[WebRTCService] Uploading snapshot, requestId:', requestId);
|
|
472
|
+
try {
|
|
473
|
+
const formData = new FormData();
|
|
474
|
+
// Convert base64 to Blob via fetch data URL (works in React Native)
|
|
475
|
+
const dataResponse = await fetch(`data:image/jpeg;base64,${base64Image}`);
|
|
476
|
+
const blob = await dataResponse.blob();
|
|
477
|
+
formData.append('image', blob);
|
|
478
|
+
const uploadUrl = `${this.baseUrl}/api/app/mobile/video-sessions/${this.sessionId}/snapshots?identificationId=${this.identificationId}`;
|
|
479
|
+
const response = await fetch(uploadUrl, {
|
|
480
|
+
method: 'POST',
|
|
481
|
+
body: formData
|
|
482
|
+
});
|
|
483
|
+
if (response.ok) {
|
|
484
|
+
const result = await response.json();
|
|
485
|
+
console.log('[WebRTCService] Snapshot uploaded:', result);
|
|
486
|
+
await this.signalingClient.send('command', {
|
|
487
|
+
type: 'snapshotReady',
|
|
488
|
+
snapshotId: result.snapshotId,
|
|
489
|
+
url: result.url,
|
|
490
|
+
createdAt: new Date().toISOString()
|
|
491
|
+
});
|
|
492
|
+
} else {
|
|
493
|
+
const errorText = await response.text();
|
|
494
|
+
console.error('[WebRTCService] Snapshot upload failed:', response.status, errorText);
|
|
495
|
+
await this.signalingClient.send('command', {
|
|
496
|
+
type: 'snapshotError',
|
|
497
|
+
requestId,
|
|
498
|
+
error: `Upload failed: ${response.status}`
|
|
499
|
+
});
|
|
500
|
+
}
|
|
501
|
+
} catch (error) {
|
|
502
|
+
console.error('[WebRTCService] Snapshot upload error:', error);
|
|
503
|
+
await this.signalingClient.send('command', {
|
|
504
|
+
type: 'snapshotError',
|
|
505
|
+
requestId,
|
|
506
|
+
error: String(error)
|
|
507
|
+
});
|
|
508
|
+
}
|
|
509
|
+
}
|
|
510
|
+
}
|
|
@@ -68,6 +68,8 @@ export let AnalyticsEventName = /*#__PURE__*/function (AnalyticsEventName) {
|
|
|
68
68
|
AnalyticsEventName["IDENTITY_DOCUMENT_EID_SCAN_COMPLETED"] = "identity_document_eid_scan_completed";
|
|
69
69
|
AnalyticsEventName["LIVENESS_CHECK_STARTED"] = "liveness_check_started";
|
|
70
70
|
AnalyticsEventName["LIVENESS_CHECK_COMPLETED"] = "liveness_check_completed";
|
|
71
|
+
AnalyticsEventName["VIDEO_CALL_STARTED"] = "video_call_started";
|
|
72
|
+
AnalyticsEventName["VIDEO_CALL_COMPLETED"] = "video_call_completed";
|
|
71
73
|
// NFC Scan Events (used by trackNFCScan* helpers)
|
|
72
74
|
AnalyticsEventName["NFC_SCAN_STARTED"] = "nfc_scan_started";
|
|
73
75
|
AnalyticsEventName["NFC_SCAN_COMPLETED"] = "nfc_scan_completed";
|