astra-sdk-web 1.0.0 → 1.1.0
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
- package/package.json +13 -2
- package/src/App.tsx +12 -8
- package/src/components/KycFlow.tsx +41 -0
- package/src/components/index.ts +3 -0
- package/src/contexts/KycContext.tsx +66 -0
- package/src/features/documentUpload/hooks/useDocumentUpload.ts +226 -0
- package/src/features/documentUpload/index.ts +3 -0
- package/src/features/documentUpload/types.ts +16 -0
- package/src/features/faceScan/hooks/useCamera.ts +62 -0
- package/src/features/faceScan/hooks/useFaceScan.ts +249 -0
- package/src/features/faceScan/index.ts +4 -0
- package/src/features/faceScan/types.ts +29 -0
- package/src/index.css +13 -62
- package/src/pages/DocumentUploadModal.tsx +262 -0
- package/src/pages/FaceScanModal.tsx +207 -0
- package/src/pages/MobileRoute.tsx +42 -0
- package/src/pages/QRCodePage.tsx +125 -0
- package/src/sdk/index.ts +5 -30
- package/src/services/faceMeshService.ts +382 -0
- package/src/services/index.ts +5 -0
- package/src/services/kycApiService.ts +194 -0
- package/src/utils/deviceDetection.ts +28 -0
- package/src/App.css +0 -42
package/src/services/faceMeshService.ts
@@ -0,0 +1,382 @@
+import { FaceMesh, FACEMESH_TESSELATION, FACEMESH_FACE_OVAL, FACEMESH_LEFT_EYE, FACEMESH_RIGHT_EYE, FACEMESH_LIPS } from '@mediapipe/face_mesh';
+import { drawConnectors, drawLandmarks as drawMPLandmarks } from '@mediapipe/drawing_utils';
+
+export type LivenessStage = 'CENTER' | 'LEFT' | 'RIGHT' | 'SNAP' | 'DONE';
+
+export interface FaceMeshServiceCallbacks {
+  onFaceDetected?: (faceOnCanvas: Array<{ x: number; y: number }>) => void;
+  onLivenessUpdate?: (stage: LivenessStage, instruction: string) => void;
+  onModelLoaded?: () => void;
+  onModelFailed?: (error: Error) => void;
+  onCaptureTrigger?: () => void;
+}
+
+export interface LivenessState {
+  centerHold: number;
+  leftHold: number;
+  rightHold: number;
+  snapTriggered: boolean;
+  lastResultsAt: number;
+  stage: LivenessStage;
+  livenessReady: boolean;
+}
+
+export class FaceMeshService {
+  private faceMesh: FaceMesh | null = null;
+  private videoRef: React.RefObject<HTMLVideoElement | null>;
+  private canvasRef: React.RefObject<HTMLCanvasElement | null>;
+  private callbacks: FaceMeshServiceCallbacks;
+  private cameraDriverRef: React.MutableRefObject<number | null>;
+  private livenessStateRef: React.MutableRefObject<LivenessState>;
+  private cancelled = false;
+
+  constructor(
+    videoRef: React.RefObject<HTMLVideoElement | null>,
+    canvasRef: React.RefObject<HTMLCanvasElement | null>,
+    cameraDriverRef: React.MutableRefObject<number | null>,
+    livenessStateRef: React.MutableRefObject<LivenessState>,
+    callbacks: FaceMeshServiceCallbacks
+  ) {
+    this.videoRef = videoRef;
+    this.canvasRef = canvasRef;
+    this.cameraDriverRef = cameraDriverRef;
+    this.livenessStateRef = livenessStateRef;
+    this.callbacks = callbacks;
+  }
+
+  private drawOverlays(ctx: CanvasRenderingContext2D, normalized: Array<{ x: number; y: number }>) {
+    drawConnectors(ctx, normalized as any, FACEMESH_TESSELATION, { color: "#60a5fa", lineWidth: 0.5 });
+    drawConnectors(ctx, normalized as any, FACEMESH_FACE_OVAL, { color: "#f59e0b", lineWidth: 2 });
+    drawConnectors(ctx, normalized as any, FACEMESH_LEFT_EYE, { color: "#10b981", lineWidth: 1.5 });
+    drawConnectors(ctx, normalized as any, FACEMESH_RIGHT_EYE, { color: "#ef4444", lineWidth: 1.5 });
+    drawConnectors(ctx, normalized as any, FACEMESH_LIPS, { color: "#a855f7", lineWidth: 1.5 });
+    drawMPLandmarks(ctx, normalized as any, { color: "#2563eb", lineWidth: 0, radius: 1.5 });
+  }
+
+  private processResults(results: any) {
+    const canvas = this.canvasRef.current;
+    if (!canvas) return;
+    const ctx = canvas.getContext("2d");
+    if (!ctx) return;
+
+    const dpr = Math.max(1, Math.min(3, window.devicePixelRatio || 1));
+    const displayW = (canvas.parentElement as HTMLElement)?.clientWidth || canvas.width;
+    const displayH = (canvas.parentElement as HTMLElement)?.clientHeight || canvas.height;
+    if (canvas.width !== Math.round(displayW * dpr) || canvas.height !== Math.round(displayH * dpr)) {
+      canvas.width = Math.round(displayW * dpr);
+      canvas.height = Math.round(displayH * dpr);
+    }
+    const w = canvas.width, h = canvas.height;
+
+    ctx.fillStyle = 'rgb(0, 0, 0)';
+    ctx.fillRect(0, 0, w, h);
+
+    const faces = results.multiFaceLandmarks as Array<Array<{ x: number; y: number }>> | undefined;
+    const face = faces && faces[0];
+
+    if (face) {
+      const vid = this.videoRef.current as HTMLVideoElement | null;
+      const vidW = Math.max(1, vid?.videoWidth || displayW);
+      const vidH = Math.max(1, vid?.videoHeight || displayH);
+      const scale = Math.max(w / vidW, h / vidH);
+      const offsetX = (w - vidW * scale) / 2;
+      const offsetY = (h - vidH * scale) / 2;
+
+      const faceOnCanvas = face.map(p => {
+        const mappedX = (p.x * vidW * scale + offsetX) / w;
+        return {
+          x: 1 - mappedX,
+          y: (p.y * vidH * scale + offsetY) / h,
+        };
+      });
+
+      const guideCX = w / 2;
+      const guideCY = h / 2;
+      const guideR = Math.min(w, h) * 0.45;
+
+      ctx.save();
+      ctx.beginPath();
+      ctx.arc(guideCX, guideCY, guideR, 0, Math.PI * 2);
+      ctx.clip();
+      ctx.drawImage(vid!, offsetX, offsetY, vidW * scale, vidH * scale);
+      ctx.restore();
+
+      this.drawOverlays(ctx, faceOnCanvas as any);
+
+      this.livenessStateRef.current.lastResultsAt = Date.now();
+
+      if (this.callbacks.onFaceDetected) {
+        this.callbacks.onFaceDetected(faceOnCanvas);
+      }
+
+      this.processLiveness(faceOnCanvas, w, h);
+    } else {
+      const vid = this.videoRef.current as HTMLVideoElement | null;
+      if (vid) {
+        const vidW = Math.max(1, vid?.videoWidth || displayW);
+        const vidH = Math.max(1, vid?.videoHeight || displayH);
+        const scale = Math.max(w / vidW, h / vidH);
+        const offsetX = (w - vidW * scale) / 2;
+        const offsetY = (h - vidH * scale) / 2;
+
+        const guideCX = w / 2;
+        const guideCY = h / 2;
+        const guideR = Math.min(w, h) * 0.45;
+
+        ctx.save();
+        ctx.beginPath();
+        ctx.arc(guideCX, guideCY, guideR, 0, Math.PI * 2);
+        ctx.clip();
+        ctx.drawImage(vid, offsetX, offsetY, vidW * scale, vidH * scale);
+        ctx.restore();
+      }
+
+      if (Date.now() - this.livenessStateRef.current.lastResultsAt > 2000) {
+        if (this.callbacks.onLivenessUpdate) {
+          this.callbacks.onLivenessUpdate(
+            this.livenessStateRef.current.stage,
+            "No face detected. Center your face in frame with good lighting."
+          );
+        }
+      }
+    }
+  }
+
+  private processLiveness(faceOnCanvas: Array<{ x: number; y: number }>, w: number, h: number) {
+    const eyeA = faceOnCanvas[33];
+    const eyeB = faceOnCanvas[263];
+    const flipX = (p: any) => ({ x: 1 - p.x, y: p.y });
+    const eA = flipX(eyeA);
+    const eB = flipX(eyeB);
+    const n1 = faceOnCanvas[1];
+    const n4 = faceOnCanvas[4];
+    const nT = flipX(n1 && n4 ? { x: (n1.x + n4.x) / 2, y: (n1.y + n4.y) / 2 } : (n1 || n4 || faceOnCanvas[197]));
+    const leftEyeOuter = eA.x < eB.x ? eA : eB;
+    const rightEyeOuter = eA.x < eB.x ? eB : eA;
+
+    if (leftEyeOuter && rightEyeOuter && nT) {
+      const faceWidth = Math.abs(rightEyeOuter.x - leftEyeOuter.x);
+      const midX = (leftEyeOuter.x + rightEyeOuter.x) / 2;
+      const yaw = (nT.x - midX) / Math.max(1e-6, faceWidth);
+      const absYaw = Math.abs(yaw);
+
+      const xs = faceOnCanvas.map(p => p.x), ys = faceOnCanvas.map(p => p.y);
+      const minX = Math.min(...xs) * w, maxX = Math.max(...xs) * w;
+      const minY = Math.min(...ys) * h, maxY = Math.max(...ys) * h;
+      const boxCX = (minX + maxX) / 2, boxCY = (minY + maxY) / 2;
+      const guideCX = w / 2;
+      const guideCY = h / 2;
+      const guideR = Math.min(w, h) * 0.45;
+      const dx = boxCX - guideCX;
+      const dy = boxCY - guideCY;
+      const insideGuide = (dx * dx + dy * dy) <= (guideR * guideR)
+        && (maxX - minX) <= guideR * 2 * 1.05 && (maxY - minY) <= guideR * 2 * 1.05;
+
+      if (!this.livenessStateRef.current.livenessReady) {
+        this.livenessStateRef.current.livenessReady = true;
+      }
+
+      const centerThreshold = 0.05;
+      const leftThreshold = 0.08;
+      const rightThreshold = 0.08;
+      const holdFramesCenter = 12;
+      const holdFramesTurn = 12;
+
+      const state = this.livenessStateRef.current;
+
+      if (state.stage === "CENTER") {
+        if (!insideGuide) {
+          if (this.callbacks.onLivenessUpdate) {
+            this.callbacks.onLivenessUpdate(state.stage, "Center your face inside the circle");
+          }
+        } else if (absYaw < centerThreshold) {
+          state.centerHold += 1;
+          if (state.centerHold >= holdFramesCenter) {
+            const newStage: LivenessStage = "LEFT";
+            state.stage = newStage;
+            state.centerHold = 0;
+            if (this.callbacks.onLivenessUpdate) {
+              this.callbacks.onLivenessUpdate(newStage, "Turn your face LEFT");
+            }
+          }
+        } else {
+          state.centerHold = 0;
+          if (this.callbacks.onLivenessUpdate) {
+            this.callbacks.onLivenessUpdate(state.stage, yaw > 0 ? "Move your face slightly LEFT" : "Move your face slightly RIGHT");
+          }
+        }
+      } else if (state.stage === "LEFT") {
+        if (faceWidth < 0.08) {
+          if (this.callbacks.onLivenessUpdate) {
+            this.callbacks.onLivenessUpdate(state.stage, "Move closer to the camera");
+          }
+        } else if (yaw < -leftThreshold) {
+          state.leftHold += 1;
+          if (state.leftHold >= holdFramesTurn) {
+            const newStage: LivenessStage = "RIGHT";
+            state.stage = newStage;
+            state.leftHold = 0;
+            if (this.callbacks.onLivenessUpdate) {
+              this.callbacks.onLivenessUpdate(newStage, "Great! Now turn your face RIGHT");
+            }
+          }
+        } else {
+          state.leftHold = 0;
+          if (this.callbacks.onLivenessUpdate) {
+            this.callbacks.onLivenessUpdate(state.stage, yaw > rightThreshold ? "You're facing right. Turn LEFT" : "Turn a bit more LEFT");
+          }
+        }
+      } else if (state.stage === "RIGHT") {
+        if (faceWidth < 0.08) {
+          if (this.callbacks.onLivenessUpdate) {
+            this.callbacks.onLivenessUpdate(state.stage, "Move closer to the camera");
+          }
+        } else if (yaw > rightThreshold) {
+          state.rightHold += 1;
+          if (state.rightHold >= holdFramesTurn) {
+            state.rightHold = 0;
+            if (!state.snapTriggered) {
+              state.snapTriggered = true;
+              const newStage: LivenessStage = "DONE";
+              state.stage = newStage;
+              if (this.callbacks.onLivenessUpdate) {
+                this.callbacks.onLivenessUpdate(newStage, "Capturing...");
+              }
+              if (this.callbacks.onCaptureTrigger) {
+                this.callbacks.onCaptureTrigger();
+              }
+            }
+          }
+        } else {
+          state.rightHold = 0;
+          if (this.callbacks.onLivenessUpdate) {
+            this.callbacks.onLivenessUpdate(state.stage, yaw < -leftThreshold ? "You're facing left. Turn RIGHT" : "Turn a bit more RIGHT");
+          }
+        }
+      }
+    }
+  }
+
+  private waitForVideoReady(): Promise<void> {
+    return new Promise((resolve, reject) => {
+      let attempts = 0;
+      const maxAttempts = 100;
+
+      const checkReady = () => {
+        if (this.cancelled) {
+          reject(new Error('Cancelled'));
+          return;
+        }
+
+        const video = this.videoRef.current;
+        if (
+          video &&
+          video.readyState >= 2 &&
+          video.videoWidth > 0 &&
+          video.videoHeight > 0 &&
+          !isNaN(video.videoWidth) &&
+          !isNaN(video.videoHeight)
+        ) {
+          resolve();
+        } else if (attempts >= maxAttempts) {
+          if (video) {
+            resolve();
+          } else {
+            reject(new Error('Video not ready'));
+          }
+        } else {
+          attempts++;
+          requestAnimationFrame(checkReady);
+        }
+      };
+      checkReady();
+    });
+  }
+
+  async initialize(): Promise<void> {
+    try {
+      const fm = new FaceMesh({
+        locateFile: (file: string) => `https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh@0.4.1633559619/${file}`
+      });
+      fm.setOptions({
+        selfieMode: true,
+        maxNumFaces: 1,
+        refineLandmarks: true,
+        minDetectionConfidence: 0.5,
+        minTrackingConfidence: 0.5
+      } as any);
+
+      this.faceMesh = fm;
+
+      if (this.cancelled) return;
+
+      if (this.callbacks.onModelLoaded) {
+        this.callbacks.onModelLoaded();
+      }
+
+      fm.onResults((results) => {
+        if (!this.cancelled) {
+          this.processResults(results);
+        }
+      });
+
+      if (this.videoRef.current) {
+        try {
+          await this.waitForVideoReady();
+        } catch (error) {
+          if (!this.cancelled) {
+            console.debug('Video ready check failed, continuing anyway:', error);
+          }
+        }
+
+        const tick = async () => {
+          if (this.cancelled) return;
+          if (!this.videoRef.current || !this.faceMesh) return;
+
+          const video = this.videoRef.current;
+          if (
+            video.readyState >= 2 &&
+            video.videoWidth > 0 &&
+            video.videoHeight > 0 &&
+            !isNaN(video.videoWidth) &&
+            !isNaN(video.videoHeight)
+          ) {
+            try {
+              await this.faceMesh.send({ image: video as HTMLVideoElement });
+            } catch (error) {
+              if (!this.cancelled) {
+                console.debug('MediaPipe send error (non-critical):', error);
+              }
+            }
+          }
+
+          if (!this.cancelled) {
+            this.cameraDriverRef.current = requestAnimationFrame(tick);
+          }
+        };
+
+        this.cameraDriverRef.current = requestAnimationFrame(tick);
+      }
+    } catch (e) {
+      if (!this.cancelled && this.callbacks.onModelFailed) {
+        this.callbacks.onModelFailed(e as Error);
+      }
+      throw e;
+    }
+  }
+
+  cleanup(): void {
+    this.cancelled = true;
+    if (this.cameraDriverRef.current) {
+      cancelAnimationFrame(this.cameraDriverRef.current);
+      this.cameraDriverRef.current = null;
+    }
+    if (this.faceMesh) {
+      try {
+        (this.faceMesh as any).close?.();
+      } catch {}
+      this.faceMesh = null;
+    }
+  }
+}
+
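For orientation, here is a minimal usage sketch of how the new FaceMeshService might be wired into a React component. This is not part of the package diff: the component, ref setup, initial LivenessState values, and callback bodies are illustrative assumptions, and attaching the camera stream to the video element (the package ships a separate useCamera hook for that) is omitted.

// Hypothetical usage sketch, not taken from the package.
import { useEffect, useRef } from 'react';
import { FaceMeshService, type LivenessState } from './services';

export function FaceScanPreview() {
  const videoRef = useRef<HTMLVideoElement | null>(null);
  const canvasRef = useRef<HTMLCanvasElement | null>(null);
  const cameraDriverRef = useRef<number | null>(null); // holds the requestAnimationFrame id
  const livenessStateRef = useRef<LivenessState>({
    centerHold: 0,
    leftHold: 0,
    rightHold: 0,
    snapTriggered: false,
    lastResultsAt: Date.now(),
    stage: 'CENTER',
    livenessReady: false,
  });

  useEffect(() => {
    const service = new FaceMeshService(videoRef, canvasRef, cameraDriverRef, livenessStateRef, {
      onLivenessUpdate: (stage, instruction) => console.log(stage, instruction),
      onCaptureTrigger: () => console.log('liveness passed, capture the frame'),
      onModelFailed: (err) => console.error('FaceMesh failed to load', err),
    });
    // Failures are surfaced through onModelFailed, so a silent catch avoids an unhandled rejection.
    service.initialize().catch(() => {});
    return () => service.cleanup();
  }, []);

  // Attaching a getUserMedia stream to the <video> element is omitted here.
  return (
    <>
      <video ref={videoRef} autoPlay playsInline muted style={{ display: 'none' }} />
      <canvas ref={canvasRef} style={{ width: '100%', height: '100%' }} />
    </>
  );
}

Passing refs rather than raw elements lets the service survive React re-renders, and cleanup() both cancels the requestAnimationFrame loop held in cameraDriverRef and closes the MediaPipe instance.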
package/src/services/index.ts
@@ -0,0 +1,5 @@
+export { FaceMeshService } from './faceMeshService';
+export type { LivenessStage, FaceMeshServiceCallbacks, LivenessState } from './faceMeshService';
+export { KycApiService } from './kycApiService';
+export type { KycApiConfig, SessionStatusResponse, FaceScanResponse, DocumentUploadResponse } from './kycApiService';
+
package/src/services/kycApiService.ts
@@ -0,0 +1,194 @@
+/**
+ * KYC API Service
+ * Handles all KYC-related API calls (face scan, document upload, status check)
+ */
+
+export interface KycApiConfig {
+  apiBaseUrl: string;
+  sessionId: string;
+  serverKey: string;
+  deviceType?: string;
+}
+
+export interface SessionStatusResponse {
+  status: string;
+  message: string;
+  data: {
+    session_id: string;
+    status: 'ACTIVE' | 'INACTIVE' | 'EXPIRED' | 'COMPLETED';
+    completed_steps: string[];
+    next_step: string;
+  };
+}
+
+export interface FaceScanResponse {
+  status: string;
+  message: string;
+  data?: unknown;
+}
+
+export interface DocumentUploadResponse {
+  status: string;
+  message: string;
+  data?: unknown;
+}
+
+export class KycApiService {
+  private config: KycApiConfig;
+
+  constructor(config: KycApiConfig) {
+    this.config = config;
+  }
+
+  /**
+   * Detect device type
+   */
+  private detectDeviceType(): string {
+    const userAgent = navigator.userAgent || navigator.vendor || (window as any).opera;
+    if (/android/i.test(userAgent)) return 'android';
+    if (/iPad|iPhone|iPod/.test(userAgent) && !(window as any).MSStream) return 'ios';
+    if (/Mac|Windows|Linux/.test(userAgent)) return 'desktop';
+    return 'unknown';
+  }
+
+  /**
+   * Get session status
+   */
+  async getSessionStatus(): Promise<SessionStatusResponse> {
+    const deviceType = this.config.deviceType || this.detectDeviceType();
+
+    try {
+      const response = await fetch(
+        `${this.config.apiBaseUrl}/api/v2/dashboard/merchant/onsite/session/${this.config.sessionId}/status`,
+        {
+          method: 'GET',
+          headers: {
+            'x-server-key': this.config.serverKey,
+            'device-type': deviceType,
+            'Content-Type': 'application/json',
+          },
+          credentials: 'include',
+        }
+      );
+
+      if (!response.ok) {
+        const errorData = await response.json().catch(() => ({}));
+        const message = errorData?.message || `Status fetch failed with status ${response.status}`;
+        throw new Error(message);
+      }
+
+      const data = await response.json();
+      return data;
+    } catch (error: any) {
+      const message = error?.message || 'Status fetch failed';
+      throw new Error(`Status fetch failed: ${message}`);
+    }
+  }
+
+  /**
+   * Upload face scan image
+   */
+  async uploadFaceScan(faceBlob: Blob | File): Promise<FaceScanResponse> {
+    // Check session status first
+    await this.checkSessionActive();
+
+    const deviceType = this.config.deviceType || this.detectDeviceType();
+    const formData = new FormData();
+    const faceFileName = (faceBlob as File)?.name || `face-${Date.now()}.jpg`;
+    formData.append('face_scan_img', faceBlob, faceFileName);
+
+    try {
+      const response = await fetch(
+        `${this.config.apiBaseUrl}/api/v2/dashboard/merchant/onsite/session/${this.config.sessionId}/face`,
+        {
+          method: 'POST',
+          headers: {
+            'x-server-key': this.config.serverKey,
+            'device-type': deviceType,
+          },
+          credentials: 'include',
+          body: formData,
+        }
+      );
+
+      if (!response.ok) {
+        const errorData = await response.json().catch(() => ({}));
+        const message = errorData?.message || `Face upload failed with status ${response.status}`;
+        throw new Error(message);
+      }
+
+      const data = await response.json();
+      return data;
+    } catch (error: any) {
+      const message = error?.message || 'Face upload failed';
+      throw new Error(`Face upload failed: ${message}`);
+    }
+  }
+
+  /**
+   * Upload document scan image
+   */
+  async uploadDocument(docBlob: Blob | File, docType: string): Promise<DocumentUploadResponse> {
+    // Check session status first
+    await this.checkSessionActive();
+
+    const deviceType = this.config.deviceType || this.detectDeviceType();
+    const formData = new FormData();
+    const docFileName = (docBlob as File)?.name || `document-${Date.now()}.jpg`;
+    formData.append('docs_scan_img', docBlob, docFileName);
+    formData.append('docType', docType);
+
+    try {
+      const response = await fetch(
+        `${this.config.apiBaseUrl}/api/v2/dashboard/merchant/onsite/session/${this.config.sessionId}/docs`,
+        {
+          method: 'POST',
+          headers: {
+            'x-server-key': this.config.serverKey,
+            'device-type': deviceType,
+          },
+          credentials: 'include',
+          body: formData,
+        }
+      );
+
+      if (!response.ok) {
+        const errorData = await response.json().catch(() => ({}));
+        const message = errorData?.message || `Document upload failed with status ${response.status}`;
+        throw new Error(message);
+      }
+
+      const data = await response.json();
+      return data;
+    } catch (error: any) {
+      const message = error?.message || 'Document upload failed';
+      throw new Error(`Document upload failed: ${message}`);
+    }
+  }
+
+  /**
+   * Check if session is active, throw error if not
+   */
+  async checkSessionActive(): Promise<void> {
+    const status = await this.getSessionStatus();
+
+    if (status.data.status !== 'ACTIVE') {
+      throw new Error('Session expired or inactive. Please start a new session.');
+    }
+  }
+
+  /**
+   * Update configuration
+   */
+  updateConfig(config: Partial<KycApiConfig>): void {
+    this.config = { ...this.config, ...config };
+  }
+
+  /**
+   * Get current configuration
+   */
+  getConfig(): KycApiConfig {
+    return { ...this.config };
+  }
+}
+
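Similarly, a short hypothetical sketch of driving KycApiService from a captured canvas frame; the base URL, session id, and server key below are placeholders, and the JPEG capture settings are assumptions rather than the package's actual call sites.

// Hypothetical usage sketch, not taken from the package.
import { KycApiService } from './services';

async function submitFaceScan(canvas: HTMLCanvasElement): Promise<void> {
  const api = new KycApiService({
    apiBaseUrl: 'https://api.example.com', // placeholder
    sessionId: 'session-id-from-qr',       // placeholder
    serverKey: 'merchant-server-key',      // placeholder
  });

  // Grab the current canvas frame as a JPEG blob.
  const blob = await new Promise<Blob>((resolve, reject) =>
    canvas.toBlob((b) => (b ? resolve(b) : reject(new Error('toBlob returned null'))), 'image/jpeg', 0.92)
  );

  // uploadFaceScan() first re-checks that the session is ACTIVE, then POSTs the multipart form.
  const result = await api.uploadFaceScan(blob);
  console.log(result.status, result.message);
}

The same pattern applies to uploadDocument(blob, docType), which adds a docType field to the form data.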
package/src/utils/deviceDetection.ts
@@ -0,0 +1,28 @@
+/**
+ * Utility functions for device detection
+ */
+
+export function isMobileDevice(): boolean {
+  if (typeof window === 'undefined') {
+    return false;
+  }
+
+  // Check user agent
+  const userAgent = navigator.userAgent || navigator.vendor || (window as any).opera;
+
+  // Common mobile device patterns
+  const mobileRegex = /android|webos|iphone|ipad|ipod|blackberry|iemobile|opera mini/i;
+
+  // Check screen width (mobile devices typically have smaller screens)
+  const isSmallScreen = window.innerWidth <= 768;
+
+  // Check for touch support
+  const hasTouchScreen = 'ontouchstart' in window || navigator.maxTouchPoints > 0;
+
+  return mobileRegex.test(userAgent) || (isSmallScreen && hasTouchScreen);
+}
+
+export function getDeviceType(): 'mobile' | 'desktop' {
+  return isMobileDevice() ? 'mobile' : 'desktop';
+}
+
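Finally, a small illustrative sketch (an assumption, not code from the package) of how the device-detection helpers could choose between the desktop QR hand-off and the mobile capture route implied by the new QRCodePage and MobileRoute pages listed above.

// Hypothetical usage sketch, not taken from the package.
import { getDeviceType } from './utils/deviceDetection';

export function pickEntryRoute(): 'qr-code' | 'mobile-flow' {
  // Desktop sessions show a QR code to continue on a phone; mobile sessions capture directly.
  return getDeviceType() === 'desktop' ? 'qr-code' : 'mobile-flow';
}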
package/src/App.css
DELETED
@@ -1,42 +0,0 @@
-#root {
-  max-width: 1280px;
-  margin: 0 auto;
-  padding: 2rem;
-  text-align: center;
-}
-
-.logo {
-  height: 6em;
-  padding: 1.5em;
-  will-change: filter;
-  transition: filter 300ms;
-}
-.logo:hover {
-  filter: drop-shadow(0 0 2em #646cffaa);
-}
-.logo.react:hover {
-  filter: drop-shadow(0 0 2em #61dafbaa);
-}
-
-@keyframes logo-spin {
-  from {
-    transform: rotate(0deg);
-  }
-  to {
-    transform: rotate(360deg);
-  }
-}
-
-@media (prefers-reduced-motion: no-preference) {
-  a:nth-of-type(2) .logo {
-    animation: logo-spin infinite 20s linear;
-  }
-}
-
-.card {
-  padding: 2em;
-}
-
-.read-the-docs {
-  color: #888;
-}