@hexar/biometric-identity-sdk-core 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/dist/BiometricIdentitySDK.d.ts +111 -0
  2. package/dist/BiometricIdentitySDK.js +395 -0
  3. package/dist/ai-models/FaceDetector.d.ts +59 -0
  4. package/dist/ai-models/FaceDetector.js +167 -0
  5. package/dist/ai-models/LivenessDetector.d.ts +61 -0
  6. package/dist/ai-models/LivenessDetector.js +218 -0
  7. package/dist/api/BackendClient.d.ts +178 -0
  8. package/dist/api/BackendClient.js +199 -0
  9. package/dist/api/index.d.ts +5 -0
  10. package/dist/api/index.js +8 -0
  11. package/dist/encryption/index.d.ts +38 -0
  12. package/dist/encryption/index.js +99 -0
  13. package/dist/i18n/index.d.ts +6 -0
  14. package/dist/i18n/index.js +47 -0
  15. package/dist/i18n/languages/en.d.ts +2 -0
  16. package/dist/i18n/languages/en.js +112 -0
  17. package/dist/i18n/languages/es-AR.d.ts +2 -0
  18. package/dist/i18n/languages/es-AR.js +112 -0
  19. package/dist/i18n/languages/es.d.ts +2 -0
  20. package/dist/i18n/languages/es.js +112 -0
  21. package/dist/i18n/languages/pt-BR.d.ts +2 -0
  22. package/dist/i18n/languages/pt-BR.js +112 -0
  23. package/dist/i18n/types.d.ts +110 -0
  24. package/dist/i18n/types.js +2 -0
  25. package/dist/index.d.ts +20 -0
  26. package/dist/index.js +64 -0
  27. package/dist/services/BackendValidationService.d.ts +84 -0
  28. package/dist/services/BackendValidationService.js +174 -0
  29. package/dist/services/IValidationService.d.ts +132 -0
  30. package/dist/services/IValidationService.js +8 -0
  31. package/dist/services/index.d.ts +8 -0
  32. package/dist/services/index.js +10 -0
  33. package/dist/types/index.d.ts +288 -0
  34. package/dist/types/index.js +34 -0
  35. package/dist/validation/DocumentValidator.d.ts +84 -0
  36. package/dist/validation/DocumentValidator.js +295 -0
  37. package/dist/validation/OCREngine.d.ts +75 -0
  38. package/dist/validation/OCREngine.js +225 -0
  39. package/package.json +24 -0
  40. package/src/BiometricIdentitySDK.ts +493 -0
  41. package/src/ai-models/FaceDetector.ts +200 -0
  42. package/src/ai-models/LivenessDetector.ts +274 -0
  43. package/src/api/BackendClient.ts +395 -0
  44. package/src/api/index.ts +15 -0
  45. package/src/encryption/index.ts +108 -0
  46. package/src/i18n/index.ts +35 -0
  47. package/src/i18n/languages/en.ts +121 -0
  48. package/src/i18n/languages/es-AR.ts +121 -0
  49. package/src/i18n/languages/es.ts +121 -0
  50. package/src/i18n/languages/pt-BR.ts +121 -0
  51. package/src/i18n/types.ts +121 -0
  52. package/src/index.ts +54 -0
  53. package/src/services/BackendValidationService.ts +228 -0
  54. package/src/services/IValidationService.ts +158 -0
  55. package/src/services/index.ts +17 -0
  56. package/src/types/index.ts +380 -0
  57. package/src/validation/DocumentValidator.ts +353 -0
  58. package/src/validation/OCREngine.ts +265 -0
  59. package/tsconfig.json +20 -0
@@ -0,0 +1,274 @@
+ /**
+  * Liveness Detection using video analysis
+  * Detects if the subject is a real person vs photo/video replay
+  */
+
+ import { LivenessValidationResult, VideoResult, LivenessInstruction } from '../types';
+ import { FaceDetector } from './FaceDetector';
+
+ export interface LivenessFrame {
+   imageData: string;
+   timestamp: number;
+   instruction?: LivenessInstruction;
+ }
+
+ export class LivenessDetector {
+   private faceDetector: FaceDetector;
+   private model: any;
+   private isModelLoaded: boolean = false;
+
+   constructor(private modelPath?: string) {
+     this.faceDetector = new FaceDetector();
+   }
+
+   /**
+    * Load the liveness detection model
+    */
+   async loadModel(): Promise<void> {
+     if (this.isModelLoaded) return;
+
+     try {
+       console.log('Loading liveness detection model...');
+
+       // Load face detector
+       await this.faceDetector.loadModel();
+
+       // Simulate liveness model loading
+       await new Promise(resolve => setTimeout(resolve, 500));
+
+       this.isModelLoaded = true;
+       console.log('Liveness detection model loaded successfully');
+     } catch (error) {
+       throw new Error(`Failed to load liveness detection model: ${error}`);
+     }
+   }
+
+   /**
+    * Validate liveness from video frames
+    */
+   async validateLiveness(videoResult: VideoResult): Promise<LivenessValidationResult> {
+     if (!this.isModelLoaded) {
+       await this.loadModel();
+     }
+
+     try {
+       // Extract frames for analysis
+       const frames = videoResult.frames.map((frame, index) => ({
+         imageData: frame,
+         timestamp: (index / videoResult.frames.length) * videoResult.duration,
+       }));
+
+       // Perform multiple liveness checks
+       const motionCheck = await this.checkMotion(frames);
+       const textureCheck = await this.checkTexture(frames);
+       const depthCheck = await this.check3DDepth(frames);
+       const blinkCheck = await this.checkBlink(frames);
+
+       // Get face embedding from best frame
+       const faceEmbedding = await this.extractBestFaceEmbedding(frames);
+
+       // Calculate overall liveness score
+       const livenessScore = this.calculateLivenessScore({
+         motionCheck,
+         textureCheck,
+         depthCheck,
+         blinkCheck,
+       });
+
+       const isLive = livenessScore >= 80 && motionCheck && textureCheck;
+
+       return {
+         isLive,
+         livenessScore,
+         checks: {
+           motionCheck,
+           textureCheck,
+           depthCheck,
+           blinkCheck,
+         },
+         faceEmbedding,
+       };
+     } catch (error) {
+       throw new Error(`Liveness validation failed: ${error}`);
+     }
+   }
+
+   /**
+    * Check for natural motion in video
+    */
+   private async checkMotion(frames: LivenessFrame[]): Promise<boolean> {
+     if (frames.length < 3) return false;
+
+     try {
+       // Detect faces in multiple frames
+       const facePositions = await Promise.all(
+         frames.slice(0, 10).map(async (frame) => {
+           const bbox = await this.faceDetector.getFaceBoundingBox(frame.imageData);
+           return bbox;
+         })
+       );
+
+       // Check if face position changes (indicates real movement)
+       const validPositions = facePositions.filter(pos => pos !== null);
+
+       if (validPositions.length < 3) return false;
+
+       // Calculate movement variance
+       const xPositions = validPositions.map(pos => pos!.x);
+       const yPositions = validPositions.map(pos => pos!.y);
+
+       const xVariance = this.calculateVariance(xPositions);
+       const yVariance = this.calculateVariance(yPositions);
+
+       // Motion should exist but not be too extreme
+       const hasMotion = (xVariance + yVariance) > 10 && (xVariance + yVariance) < 1000;
+
+       return hasMotion;
+     } catch (error) {
+       console.error('Motion check failed:', error);
+       return false;
+     }
+   }
+
+   /**
+    * Check texture patterns to detect printed photos
+    */
+   private async checkTexture(frames: LivenessFrame[]): Promise<boolean> {
+     // In real implementation:
+     // 1. Analyze high-frequency texture patterns
+     // 2. Detect print artifacts (moiré patterns)
+     // 3. Check for screen refresh patterns
+
+     // Mock implementation - simulate texture analysis
+     const randomScore = Math.random();
+     return randomScore > 0.2; // 80% pass rate for valid faces
+   }
+
+   /**
+    * Check 3D depth using parallax or facial structure
+    */
+   private async check3DDepth(frames: LivenessFrame[]): Promise<boolean> {
+     // In real implementation:
+     // 1. Analyze facial landmarks movement
+     // 2. Check for parallax effect when head moves
+     // 3. Validate 3D structure consistency
+
+     // Mock implementation
+     const randomScore = Math.random();
+     return randomScore > 0.15; // 85% pass rate
+   }
+
+   /**
+    * Detect eye blinks in video
+    */
+   private async checkBlink(frames: LivenessFrame[]): Promise<boolean> {
+     // In real implementation:
+     // 1. Detect eyes in each frame
+     // 2. Calculate eye aspect ratio (EAR)
+     // 3. Detect blink events (EAR drops and recovers)
+
+     // Mock implementation - simulate blink detection
+     const randomScore = Math.random();
+     return randomScore > 0.3; // 70% detection rate
+   }
+
+   /**
+    * Extract face embedding from the best quality frame
+    */
+   private async extractBestFaceEmbedding(frames: LivenessFrame[]) {
+     // Select middle frames for best quality
+     const middleFrames = frames.slice(
+       Math.floor(frames.length * 0.3),
+       Math.floor(frames.length * 0.7)
+     );
+
+     // Get face detections for middle frames
+     const detections = await Promise.all(
+       middleFrames.slice(0, 5).map(frame =>
+         this.faceDetector.detectFaces(frame.imageData)
+       )
+     );
+
+     // Find frame with highest confidence
+     let bestEmbedding = null;
+     let bestConfidence = 0;
+
+     for (const detection of detections) {
+       if (detection.faces.length > 0) {
+         const face = detection.faces[0];
+         if (face.confidence > bestConfidence) {
+           bestConfidence = face.confidence;
+           bestEmbedding = face;
+         }
+       }
+     }
+
+     if (!bestEmbedding) {
+       throw new Error('No face detected in video frames');
+     }
+
+     return bestEmbedding;
+   }
+
+   /**
+    * Calculate overall liveness score
+    */
+   private calculateLivenessScore(checks: {
+     motionCheck: boolean;
+     textureCheck: boolean;
+     depthCheck: boolean;
+     blinkCheck: boolean;
+   }): number {
+     const weights = {
+       motionCheck: 0.35,
+       textureCheck: 0.35,
+       depthCheck: 0.20,
+       blinkCheck: 0.10,
+     };
+
+     let score = 0;
+
+     if (checks.motionCheck) score += weights.motionCheck * 100;
+     if (checks.textureCheck) score += weights.textureCheck * 100;
+     if (checks.depthCheck) score += weights.depthCheck * 100;
+     if (checks.blinkCheck) score += weights.blinkCheck * 100;
+
+     return Math.round(score);
+   }
+
+   /**
+    * Calculate variance of an array of numbers
+    */
+   private calculateVariance(values: number[]): number {
+     const mean = values.reduce((a, b) => a + b, 0) / values.length;
+     const squaredDiffs = values.map(value => Math.pow(value - mean, 2));
+     return squaredDiffs.reduce((a, b) => a + b, 0) / values.length;
+   }
+
+   /**
+    * Validate instruction following
+    */
+   async validateInstructions(
+     frames: LivenessFrame[],
+     expectedInstructions: LivenessInstruction[]
+   ): Promise<boolean> {
+     // In real implementation:
+     // 1. Detect head pose for each frame
+     // 2. Detect facial expressions (smile, etc.)
+     // 3. Validate instructions were followed in sequence
+
+     // Mock implementation
+     return expectedInstructions.length > 0 && frames.length >= expectedInstructions.length * 10;
+   }
+
+   /**
+    * Clean up resources
+    */
+   dispose(): void {
+     this.faceDetector.dispose();
+     this.model = null;
+     this.isModelLoaded = false;
+   }
+ }
+
+
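
Editorial note (not part of the package diff): a minimal usage sketch of the LivenessDetector shown above. It assumes the class and the VideoResult type are re-exported from the package entry point, which this diff does not confirm.

// Illustrative sketch only; not part of the published package.
// Assumes these symbols are re-exported from the package root (unconfirmed).
import { LivenessDetector } from '@hexar/biometric-identity-sdk-core';
import type { VideoResult } from '@hexar/biometric-identity-sdk-core';

async function runLivenessCheck(video: VideoResult): Promise<boolean> {
  const detector = new LivenessDetector();
  try {
    // validateLiveness() loads the model lazily, but loading up front
    // surfaces model errors before any frames are analyzed.
    await detector.loadModel();
    const result = await detector.validateLiveness(video);
    // With weights 0.35 / 0.35 / 0.20 / 0.10, passing motion, texture, and
    // depth alone already yields a score of 90; isLive additionally requires
    // the motion and texture checks to pass and the score to reach 80.
    return result.isLive;
  } finally {
    detector.dispose();
  }
}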
@@ -0,0 +1,395 @@
+ /**
+  * Backend API Client for Biometric Identity SDK
+  * Communicates with the Python backend for AI-powered validation
+  */
+
+ import {
+   ValidationResult,
+   VideoResult,
+   LivenessValidationResult,
+   DocumentData,
+   BiometricConfig
+ } from '../types';
+
+ export interface ChallengeAction {
+   action: string;
+   instruction: string;
+   duration_ms: number;
+   order: number;
+ }
+
+ export interface ChallengeResponse {
+   success: boolean;
+   session_id: string;
+   challenges: ChallengeAction[];
+   total_duration_ms: number;
+   expires_at: string;
+ }
+
+ export interface LivenessResponse {
+   success: boolean;
+   is_live: boolean;
+   liveness_score: number;
+   checks: Array<{
+     name: string;
+     passed: boolean;
+     score: number;
+     details?: string;
+   }>;
+   face_detected: boolean;
+   face_count: number;
+   best_frame_index?: number;
+   warnings: string[];
+   session_id?: string;
+ }
+
+ export interface FaceMatchResponse {
+   success: boolean;
+   is_match: boolean;
+   match_score: number;
+   distance: number;
+   document_face_detected: boolean;
+   live_face_detected: boolean;
+   document_face_confidence: number;
+   live_face_confidence: number;
+   warnings: string[];
+ }
+
+ export interface DocumentValidationResponse {
+   success: boolean;
+   is_authentic: boolean;
+   authenticity_score: number;
+   document_type_detected?: string;
+   extracted_data: {
+     first_name?: string;
+     last_name?: string;
+     full_name?: string;
+     document_number?: string;
+     date_of_birth?: string;
+     expiration_date?: string;
+     issue_date?: string;
+     nationality?: string;
+     gender?: string;
+     address?: string;
+     issuing_country?: string;
+     document_type?: string;
+     mrz_line_1?: string;
+     mrz_line_2?: string;
+     mrz_line_3?: string;
+     raw_text?: string;
+   };
+   quality: {
+     overall_score: number;
+     has_glare: boolean;
+     has_blur: boolean;
+     has_cropping: boolean;
+     has_shadows: boolean;
+     is_color: boolean;
+     resolution_adequate: boolean;
+     edge_detection_score: number;
+   };
+   tamper_detection: {
+     is_tampered: boolean;
+     tamper_score: number;
+     suspicious_regions: Array<{ x: number; y: number; width: number; height: number }>;
+     details?: string;
+   };
+   face_detected_in_document: boolean;
+   warnings: string[];
+ }
+
+ export interface FullValidationResponse {
+   success: boolean;
+   is_verified: boolean;
+   verification_score: number;
+   liveness_passed: boolean;
+   face_match_passed: boolean;
+   document_authentic: boolean;
+   match_score: number;
+   liveness_score: number;
+   extracted_data: DocumentValidationResponse['extracted_data'];
+   details: {
+     liveness: LivenessResponse;
+     face_match: FaceMatchResponse;
+     document: DocumentValidationResponse;
+   };
+   warnings: string[];
+   session_id: string;
+   timestamp: string;
+   transaction_id: string;
+ }
+
+ export interface BackendClientConfig {
+   /** Backend API URL */
+   apiEndpoint: string;
+   /** API key for authentication */
+   apiKey: string;
+   /** Request timeout in ms (default: 60000) */
+   timeout?: number;
+ }
+
+ /**
+  * Client for communicating with the Biometric Identity Backend
+  */
+ export class BackendClient {
+   private config: Required<BackendClientConfig>;
+   private currentSessionId: string | null = null;
+
+   constructor(config: BackendClientConfig) {
+     this.config = {
+       apiEndpoint: config.apiEndpoint.replace(/\/$/, ''), // Remove trailing slash
+       apiKey: config.apiKey,
+       timeout: config.timeout || 60000,
+     };
+   }
+
+   /**
+    * Check if the backend is available
+    */
+   async healthCheck(): Promise<boolean> {
+     try {
+       const response = await this.request<{ status: string }>('/api/v1/health', 'GET');
+       return response.status === 'healthy';
+     } catch (error) {
+       console.warn('Backend health check failed:', error);
+       return false;
+     }
+   }
+
+   /**
+    * Generate a liveness challenge
+    */
+   async generateChallenge(
+     challengeType: 'active' | 'passive' = 'active'
+   ): Promise<ChallengeResponse> {
+     const response = await this.request<ChallengeResponse>(
+       '/api/v1/liveness/challenge',
+       'POST',
+       {
+         challenge_type: challengeType,
+         session_id: this.currentSessionId,
+       }
+     );
+
+     this.currentSessionId = response.session_id;
+     return response;
+   }
+
+   /**
+    * Validate liveness from video frames
+    */
+   async validateLiveness(
+     videoFrames: string[],
+     videoDurationMs: number,
+     challengesCompleted: string[] = [],
+     deviceInfo?: Record<string, any>
+   ): Promise<LivenessResponse> {
+     if (!this.currentSessionId) {
+       throw new Error('No active session. Call generateChallenge() first.');
+     }
+
+     const response = await this.request<LivenessResponse>(
+       '/api/v1/liveness/validate',
+       'POST',
+       {
+         session_id: this.currentSessionId,
+         video_frames: videoFrames,
+         video_duration_ms: videoDurationMs,
+         challenges_completed: challengesCompleted,
+         device_info: deviceInfo,
+       }
+     );
+
+     return response;
+   }
+
+   /**
+    * Compare faces between document and live capture
+    */
+   async matchFaces(
+     documentImage: string,
+     liveFaceImage: string
+   ): Promise<FaceMatchResponse> {
+     return this.request<FaceMatchResponse>(
+       '/api/v1/face/match',
+       'POST',
+       {
+         document_image: documentImage,
+         live_face_image: liveFaceImage,
+         session_id: this.currentSessionId,
+       }
+     );
+   }
+
+   /**
+    * Validate document and extract data
+    */
+   async validateDocument(
+     frontImage: string,
+     backImage?: string,
+     documentType?: string,
+     countryCode?: string
+   ): Promise<DocumentValidationResponse> {
+     return this.request<DocumentValidationResponse>(
+       '/api/v1/document/validate',
+       'POST',
+       {
+         front_image: frontImage,
+         back_image: backImage,
+         document_type: documentType,
+         country_code: countryCode,
+       }
+     );
+   }
+
+   /**
+    * Perform full biometric validation
+    */
+   async fullValidation(params: {
+     frontIdImage: string;
+     backIdImage?: string;
+     videoFrames: string[];
+     videoDurationMs: number;
+     challengesCompleted?: string[];
+     documentType?: string;
+     countryCode?: string;
+     deviceInfo?: Record<string, any>;
+   }): Promise<FullValidationResponse> {
+     if (!this.currentSessionId) {
+       // Generate a challenge first if no session exists
+       await this.generateChallenge();
+     }
+
+     return this.request<FullValidationResponse>(
+       '/api/v1/validate',
+       'POST',
+       {
+         front_id_image: params.frontIdImage,
+         back_id_image: params.backIdImage,
+         video_frames: params.videoFrames,
+         video_duration_ms: params.videoDurationMs,
+         session_id: this.currentSessionId,
+         challenges_completed: params.challengesCompleted || [],
+         document_type: params.documentType,
+         country_code: params.countryCode,
+         device_info: params.deviceInfo,
+       }
+     );
+   }
+
+   /**
+    * Convert backend response to SDK ValidationResult format
+    */
+   convertToValidationResult(response: FullValidationResponse): ValidationResult {
+     const extractedData = response.extracted_data;
+
+     return {
+       matchScore: response.match_score,
+       isValidFaceMatch: response.face_match_passed,
+       isDocumentAuthentic: response.document_authentic,
+       livenessScore: response.liveness_score * 100, // Convert 0-1 to 0-100
+       extractedDocumentData: {
+         firstName: extractedData.first_name || '',
+         lastName: extractedData.last_name || '',
+         documentNumber: extractedData.document_number || '',
+         dateOfBirth: extractedData.date_of_birth || '',
+         expirationDate: extractedData.expiration_date || '',
+         nationality: extractedData.nationality || '',
+         documentType: extractedData.document_type,
+         gender: extractedData.gender,
+         address: extractedData.address,
+         issuingCountry: extractedData.issuing_country,
+         rawText: extractedData.raw_text,
+       },
+       warnings: response.warnings,
+       timestamp: new Date(response.timestamp).getTime(),
+       details: {
+         faceDetection: {
+           detected: response.details.face_match.live_face_detected,
+           confidence: response.details.face_match.live_face_confidence,
+         },
+         documentQuality: {
+           hasGlare: response.details.document.quality.has_glare,
+           hasBlur: response.details.document.quality.has_blur,
+           hasCropping: response.details.document.quality.has_cropping,
+           hasReflections: response.details.document.quality.has_shadows,
+           edgeDetectionScore: response.details.document.quality.edge_detection_score,
+         },
+         tamperDetection: {
+           isTampered: response.details.document.tamper_detection.is_tampered,
+           tamperScore: response.details.document.tamper_detection.tamper_score,
+           suspiciousRegions: response.details.document.tamper_detection.suspicious_regions,
+         },
+         livenessChecks: {
+           passedMotionCheck: response.details.liveness.checks.find(c => c.name === 'motion_analysis')?.passed || false,
+           passedTextureCheck: response.details.liveness.checks.find(c => c.name === 'texture_analysis')?.passed || false,
+           passedDepthCheck: response.details.liveness.checks.find(c => c.name === 'face_consistency')?.passed || false,
+         },
+       },
+     };
+   }
+
+   /**
+    * Get current session ID
+    */
+   getSessionId(): string | null {
+     return this.currentSessionId;
+   }
+
+   /**
+    * Reset session
+    */
+   resetSession(): void {
+     this.currentSessionId = null;
+   }
+
+   /**
+    * Make HTTP request to backend
+    */
+   private async request<T>(
+     endpoint: string,
+     method: 'GET' | 'POST' | 'PUT' | 'DELETE',
+     body?: any
+   ): Promise<T> {
+     const url = `${this.config.apiEndpoint}${endpoint}`;
+
+     const controller = new AbortController();
+     const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
+
+     try {
+       const response = await fetch(url, {
+         method,
+         headers: {
+           'Content-Type': 'application/json',
+           'X-API-Key': this.config.apiKey,
+         },
+         body: body ? JSON.stringify(body) : undefined,
+         signal: controller.signal as RequestInit['signal'],
+       });
+
+       clearTimeout(timeoutId);
+
+       if (!response.ok) {
+         const errorData = await response.json().catch(() => ({})) as Record<string, any>;
+         throw new Error(
+           errorData?.error?.message ||
+           errorData?.detail ||
+           `Request failed with status ${response.status}`
+         );
+       }
+
+       return await response.json() as T;
+     } catch (error: any) {
+       clearTimeout(timeoutId);
+
+       if (error.name === 'AbortError') {
+         throw new Error('Request timeout');
+       }
+
+       throw error;
+     }
+   }
+ }
+
+ export default BackendClient;
+
@@ -0,0 +1,15 @@
+ /**
+  * API Client exports
+  */
+
+ export { BackendClient } from './BackendClient';
+ export type {
+   ChallengeAction,
+   ChallengeResponse,
+   LivenessResponse,
+   FaceMatchResponse,
+   DocumentValidationResponse,
+   FullValidationResponse,
+   BackendClientConfig,
+ } from './BackendClient';
+
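
Editorial note (not part of the package diff): a minimal end-to-end sketch using only the BackendClient methods shown above. The endpoint and API key are placeholders, and the import assumes BackendClient is re-exported from the package entry point, which this diff does not confirm.

// Illustrative sketch only; not part of the published package.
import { BackendClient } from '@hexar/biometric-identity-sdk-core'; // assumed re-export

async function verifyIdentity(frontIdImage: string, videoFrames: string[], videoDurationMs: number) {
  const client = new BackendClient({
    apiEndpoint: 'https://backend.example.com', // placeholder
    apiKey: 'YOUR_API_KEY',                     // placeholder
  });

  if (!(await client.healthCheck())) {
    throw new Error('Biometric backend is not reachable');
  }

  // fullValidation() creates a session via generateChallenge() if none exists,
  // then posts the document image and video frames to /api/v1/validate.
  const response = await client.fullValidation({
    frontIdImage,      // base64-encoded document front image
    videoFrames,       // base64-encoded selfie video frames
    videoDurationMs,
  });

  // Map the snake_case backend payload onto the SDK's ValidationResult shape.
  return client.convertToValidationResult(response);
}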