@zeey4d/react-native-gesture-engine 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1438 @@
1
/** Classification of input sources feeding into the pipeline */
declare enum InputType {
    Touch = "touch",
    Sensor = "sensor",
    Hardware = "hardware",
    Camera = "camera"
}
/** Sub-type classification for touch inputs */
declare enum TouchType {
    Pan = "pan",
    Tap = "tap",
    Pinch = "pinch",
    Rotation = "rotation"
}
/** Sub-type classification for sensor inputs */
declare enum SensorType {
    Accelerometer = "accelerometer",
    Gyroscope = "gyroscope"
}
/**
 * Normalized touch data emitted by TouchInputProvider.
 * All fields are always present; gesture-specific fields (scale, rotation)
 * carry neutral values for sub-types they do not apply to.
 */
interface TouchData {
    /** Touch sub-type (pan, tap, pinch, rotation) */
    type: TouchType;
    /** Current X position in screen coordinates */
    x: number;
    /** Current Y position in screen coordinates */
    y: number;
    /** Translation from start point on X axis */
    translationX: number;
    /** Translation from start point on Y axis */
    translationY: number;
    /** Instantaneous velocity on X axis (px/ms) */
    velocityX: number;
    /** Instantaneous velocity on Y axis (px/ms) */
    velocityY: number;
    /** Scale factor for pinch gestures (1.0 = no scale) */
    scale: number;
    /** Rotation angle in radians for rotation gestures */
    rotation: number;
    /** Number of active touch points */
    numberOfPointers: number;
}
/** Sensor data emitted by SensorInputProvider */
interface SensorData {
    /** Sensor sub-type (accelerometer or gyroscope) */
    type: SensorType;
    /** X-axis reading (presumably g for accelerometer — confirm against provider) */
    x: number;
    /** Y-axis reading */
    y: number;
    /** Z-axis reading */
    z: number;
}
/** Hardware input data (e.g., volume button presses) */
interface HardwareData {
    /** Key identifier, e.g. 'volumeUp', 'volumeDown' */
    key: string;
    /** Press action: 'down' or 'up' */
    action: 'down' | 'up';
}
/**
 * Unified input event emitted by all providers on the InputRaw channel.
 * The `data` field is discriminated by `inputType`.
 *
 * NOTE(review): the payload types carry no shared literal tag of their own,
 * so consumers must narrow `data` via the external `inputType` discriminant
 * (or structural checks) rather than a built-in tagged-union switch.
 */
interface InputEvent {
    /** Unique event identifier */
    id: string;
    /** Timestamp in milliseconds (performance.now or Date.now) */
    timestamp: number;
    /** Source classification */
    inputType: InputType;
    /** Payload — shape depends on inputType */
    data: TouchData | SensorData | HardwareData;
}
71
/** Cardinal direction classification (8 sectors plus None) */
declare enum CardinalDirection {
    Up = "up",
    Down = "down",
    Left = "left",
    Right = "right",
    UpLeft = "up-left",
    UpRight = "up-right",
    DownLeft = "down-left",
    DownRight = "down-right",
    None = "none"
}
/**
 * Enriched data produced by the processing layer.
 * Combines raw input with computed velocity, angle, and normalized magnitude.
 */
interface ProcessedSample {
    /** Reference to the original input event */
    inputEvent: InputEvent;
    /** Computed velocity magnitude (px/ms or g/ms, depending on source) */
    velocity: number;
    /** Velocity on X axis */
    velocityX: number;
    /** Velocity on Y axis */
    velocityY: number;
    /** Direction angle in radians */
    angleRadians: number;
    /** Direction angle in degrees */
    angleDegrees: number;
    /** Cardinal direction classification */
    direction: CardinalDirection;
    /** Magnitude normalized to [0, 1] */
    normalizedMagnitude: number;
    /** Filtered data coordinates (noise-reduced) */
    filtered: {
        x: number;
        y: number;
        z: number;
    };
    /** Processing timestamp (milliseconds) */
    timestamp: number;
}
113
/**
 * Recognizer state machine states.
 * Transitions: Idle → Possible → Began → Changed → Ended
 *                               ↘ Failed / Cancelled
 */
declare enum RecognizerState {
    Idle = "idle",
    Possible = "possible",
    Began = "began",
    Changed = "changed",
    Ended = "ended",
    Failed = "failed",
    Cancelled = "cancelled"
}
/**
 * Additional metadata attached to gesture events.
 * All fields are optional; each recognizer populates only the ones
 * relevant to its gesture type.
 */
interface GestureMetadata {
    /** Translation amount for pan/swipe gestures */
    translation?: {
        x: number;
        y: number;
    };
    /** Scale factor for pinch gestures */
    scale?: number;
    /** Rotation angle for rotation gestures */
    rotation?: number;
    /** Velocity at recognition time */
    velocity?: {
        x: number;
        y: number;
    };
    /** Edge identifier for edge-swipe gestures */
    edge?: 'left' | 'right' | 'top' | 'bottom';
    /** Detected symbol name for symbol recognizer */
    symbol?: string;
    /** Confidence score [0, 1] for symbol recognizer */
    confidence?: number;
    /** Sensor magnitude for shake/tilt/flick gestures */
    magnitude?: number;
    /** Tilt angles */
    tilt?: {
        pitch: number;
        roll: number;
    };
    /** Generic key-value bag for custom recognizers */
    [key: string]: unknown;
}
/**
 * Event emitted when a recognizer transitions state.
 * This is the primary output of Layer 3, consumed by conflict resolution and actions.
 */
interface GestureEvent {
    /** Unique event identifier */
    id: string;
    /** Human-readable gesture name, e.g. 'tap', 'shake', 'edge-swipe-left' */
    name: string;
    /** Current recognizer state */
    state: RecognizerState;
    /** Recognizer priority (lower = higher priority) */
    priority: number;
    /** Whether this gesture is exclusive (blocks others) */
    isExclusive: boolean;
    /** Event timestamp (milliseconds) */
    timestamp: number;
    /** Gesture-specific metadata */
    metadata: GestureMetadata;
}
179
/** Named channels for the typed pub/sub EventBus */
declare enum EventChannel {
    /** Raw input data from providers */
    InputRaw = "input:raw",
    /** Processed samples with velocity, angle, normalization */
    ProcessingSample = "processing:sample",
    /** Gesture recognition events (state transitions) */
    RecognitionGesture = "recognition:gesture",
    /** Events that survived conflict resolution */
    ConflictResolved = "conflict:resolved",
    /** Events after action dispatch */
    ActionDispatched = "action:dispatched"
}
/** Compile-time mapping from channel to payload type */
interface EventChannelMap {
    [EventChannel.InputRaw]: InputEvent;
    [EventChannel.ProcessingSample]: ProcessedSample;
    [EventChannel.RecognitionGesture]: GestureEvent;
    [EventChannel.ConflictResolved]: GestureEvent;
    [EventChannel.ActionDispatched]: GestureEvent;
}
/** Typed event handler — payload type is derived from the channel */
type EventHandler<C extends EventChannel> = (data: EventChannelMap[C]) => void;
/** EventBus interface (see EventBus for the concrete implementation) */
interface IEventBus {
    /** Subscribe to a channel. Returns an unsubscribe function. */
    on<C extends EventChannel>(channel: C, handler: EventHandler<C>): () => void;
    /** Emit data on a channel; payload type must match the channel. */
    emit<C extends EventChannel>(channel: C, data: EventChannelMap[C]): void;
    /** Remove a specific handler from a channel. */
    off<C extends EventChannel>(channel: C, handler: EventHandler<C>): void;
    /** Remove all handlers from all channels. */
    clear(): void;
}
209
/** Interface for all input providers (Layer 1) */
interface IInputProvider {
    /** Start emitting input events */
    start(): void;
    /** Stop emitting input events and clean up subscriptions */
    stop(): void;
    /** Whether the provider is currently active */
    readonly isActive: boolean;
}
/** Interface for all gesture recognizers (Layer 3) */
interface IRecognizer {
    /** Unique recognizer identifier */
    readonly id: string;
    /** Human-readable name matching the GestureEvent.name field */
    readonly name: string;
    /** Priority: lower number = higher priority */
    readonly priority: number;
    /** If true, activating this gesture blocks lower-priority gestures */
    readonly isExclusive: boolean;
    /** Current state machine state */
    readonly state: RecognizerState;
    /** Enable/disable the recognizer at runtime */
    enabled: boolean;
    /** Feed a processed sample into the recognizer for evaluation */
    onProcessedSample(sample: ProcessedSample): void;
    /** Reset the recognizer to Idle state */
    reset(): void;
    /** Clean up resources */
    dispose(): void;
}
/** Interface for gesture actions (Layer 5) */
interface IGestureAction {
    /** Unique action identifier */
    readonly actionId: string;
    /** Execute the action in response to a resolved gesture */
    execute(event: GestureEvent): void;
}
/** Interface for feedback providers (Layer 6) */
interface IFeedbackProvider {
    /** Trigger feedback in response to a resolved gesture */
    trigger(event: GestureEvent): void;
    /** Whether this feedback type is supported on the current device */
    readonly isSupported: boolean;
}
253
/** Configuration for the GestureEngine orchestrator */
interface GestureEngineConfig {
    /**
     * Sensor polling interval in ms (default 100 = 10Hz).
     * The effective update rate is capped at ~60Hz, i.e. intervals
     * below 16ms are not honored.
     */
    sensorInterval?: number;
    /** Enable haptic feedback (requires expo-haptics) */
    hapticEnabled?: boolean;
    /** Enable debug logging */
    debug?: boolean;
    /** Screen width in px, used for edge/corner detection */
    screenWidth?: number;
    /** Screen height in px, used for edge/corner detection */
    screenHeight?: number;
}
/** Base configuration shared by all recognizers */
interface RecognizerConfig {
    /** Override the default priority (lower = higher priority) */
    priority?: number;
    /** Override the default exclusive setting */
    isExclusive?: boolean;
    /** Whether the recognizer starts enabled */
    enabled?: boolean;
}
/** Tap recognizer specific config */
interface TapRecognizerConfig extends RecognizerConfig {
    /** Maximum duration for a tap in ms (default 300) */
    maxDuration?: number;
    /** Maximum movement threshold in px (default 10) */
    maxDistance?: number;
}
/** Double-tap recognizer specific config */
interface DoubleTapRecognizerConfig extends RecognizerConfig {
    /** Maximum time between taps in ms (default 300) */
    maxInterval?: number;
    /** Maximum movement between taps in px (default 30) */
    maxDistance?: number;
}
/** Pan recognizer specific config */
interface PanRecognizerConfig extends RecognizerConfig {
    /** Minimum distance to activate in px (default 10) */
    minDistance?: number;
}
/** Pinch recognizer specific config */
interface PinchRecognizerConfig extends RecognizerConfig {
    /** Minimum scale change to activate (default 0.05) */
    minScale?: number;
}
/** Rotation recognizer specific config */
interface RotationRecognizerConfig extends RecognizerConfig {
    /** Minimum rotation in radians to activate (default 0.05) */
    minRotation?: number;
}
/** Edge-swipe recognizer specific config */
interface EdgeSwipeRecognizerConfig extends RecognizerConfig {
    /** Edge to detect: 'left', 'right', 'top', 'bottom' */
    edge: 'left' | 'right' | 'top' | 'bottom';
    /** Width of the edge detection zone in px (default 30) */
    edgeZoneWidth?: number;
    /** Minimum swipe distance in px (default 50) */
    minDistance?: number;
    /** Minimum velocity in px/ms (default 0.3) */
    minVelocity?: number;
    /** Screen width in px */
    screenWidth?: number;
    /** Screen height in px */
    screenHeight?: number;
}
/** Corner recognizer specific config */
interface CornerRecognizerConfig extends RecognizerConfig {
    /** Corner to detect */
    corner: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right';
    /** Size of the corner zone in px (default 50) */
    cornerZoneSize?: number;
    /** Minimum swipe distance in px (default 40) */
    minDistance?: number;
    /** Screen width in px */
    screenWidth?: number;
    /** Screen height in px */
    screenHeight?: number;
}
/** Shake recognizer specific config */
interface ShakeRecognizerConfig extends RecognizerConfig {
    /** Acceleration threshold in g (default 1.5) */
    threshold?: number;
    /** Consecutive samples above threshold to trigger (default 2) */
    consecutiveSamples?: number;
    /** Cooldown period in ms (default 1000) */
    cooldownMs?: number;
}
/** Tilt recognizer specific config */
interface TiltRecognizerConfig extends RecognizerConfig {
    /** Tilt angle threshold in degrees (default 30) */
    tiltThreshold?: number;
    /** Cooldown period in ms (default 500) */
    cooldownMs?: number;
}
/** Wrist-flick recognizer specific config */
interface WristFlickRecognizerConfig extends RecognizerConfig {
    /** Angular velocity threshold in deg/s (default 150) */
    angularVelocityThreshold?: number;
    /** Cooldown in ms (default 800) */
    cooldownMs?: number;
}
/** Sequence recognizer specific config */
interface SequenceRecognizerConfig extends RecognizerConfig {
    /** Ordered gesture names to match */
    sequence: string[];
    /** Max time between steps in ms (default 800) */
    timeoutMs?: number;
}
/** Symbol recognizer specific config */
interface SymbolRecognizerConfig extends RecognizerConfig {
    /** Templates to match against: name → point array */
    templates?: Record<string, Array<{
        x: number;
        y: number;
    }>>;
    /** Minimum confidence score to accept (default 0.7) */
    minConfidence?: number;
}
372
/** Alias for unique identifier strings (produced by generateId) */
type UniqueId = string;
/** Create a new unique ID */
declare function generateId(): UniqueId;
376
+
377
/**
 * Typed EventBus implementation.
 *
 * Design decisions:
 * - Lives outside the React tree to avoid unnecessary re-renders.
 * - Uses Map<channel, Set<handler>> for O(1) subscribe/unsubscribe.
 * - Generic channel parameter ensures type-safe emit/subscribe at compile time.
 * - `on()` returns an unsubscribe function for easy cleanup in useEffect.
 */
declare class EventBus implements IEventBus {
    /** Per-channel handler registry (Map<channel, Set<handler>> per class doc) */
    private listeners;
    /**
     * Subscribe to a channel. Returns an unsubscribe function.
     *
     * @example
     * const unsub = bus.on(EventChannel.InputRaw, (event) => { ... });
     * // later:
     * unsub();
     */
    on<C extends EventChannel>(channel: C, handler: EventHandler<C>): () => void;
    /**
     * Emit data on a channel. All registered handlers are called synchronously.
     * The generic parameter ensures the data type matches the channel.
     */
    emit<C extends EventChannel>(channel: C, data: EventChannelMap[C]): void;
    /**
     * Remove a specific handler from a channel.
     */
    off<C extends EventChannel>(channel: C, handler: EventHandler<C>): void;
    /**
     * Remove all handlers from all channels. Called during engine teardown.
     */
    clear(): void;
}
411
+
412
/**
 * TouchInputProvider wraps RNGH pan/tap/pinch/rotation gestures
 * and normalizes their data into InputEvent objects.
 *
 * Usage:
 * - Call `start()` to enable event emission
 * - Use the gesture handler callbacks (onPan, onTap, etc.) inside
 *   GestureDetector components
 * - Call `stop()` to disable emission
 */
declare class TouchInputProvider implements IInputProvider {
    /** Bus that normalized InputEvents are emitted onto */
    private eventBus;
    /** Backing field for the isActive getter */
    private _isActive;
    constructor(eventBus: IEventBus);
    /** Whether the provider is currently emitting events */
    get isActive(): boolean;
    /** Enable event emission */
    start(): void;
    /** Disable event emission */
    stop(): void;
    /**
     * Called from RNGH Pan gesture callbacks.
     * Emits normalized TouchData with translation and velocity.
     */
    onPan(data: {
        x: number;
        y: number;
        translationX: number;
        translationY: number;
        velocityX: number;
        velocityY: number;
        numberOfPointers: number;
    }): void;
    /**
     * Called from RNGH Tap gesture callbacks.
     */
    onTap(data: {
        x: number;
        y: number;
        numberOfPointers: number;
    }): void;
    /**
     * Called from RNGH Pinch gesture callbacks.
     */
    onPinch(data: {
        scale: number;
        focalX: number;
        focalY: number;
        velocity: number;
        numberOfPointers: number;
    }): void;
    /**
     * Called from RNGH Rotation gesture callbacks.
     */
    onRotation(data: {
        rotation: number;
        anchorX: number;
        anchorY: number;
        velocity: number;
        numberOfPointers: number;
    }): void;
    /** Emit a normalized InputEvent onto the EventBus */
    private emitInput;
}
473
+
474
/**
 * SensorInputProvider subscribes to device accelerometer and gyroscope
 * and emits normalized SensorData events.
 *
 * Performance considerations:
 * - Default update interval is 100ms (10Hz) — configurable, capped at ~60Hz.
 * - Subscriptions are lazily created and cleaned up on stop().
 * - Data is emitted as-is; filtering happens in the Processing layer.
 */
declare class SensorInputProvider implements IInputProvider {
    /** Bus that SensorData InputEvents are emitted onto */
    private eventBus;
    /** Backing field for the isActive getter */
    private _isActive;
    /** Active accelerometer subscription (created in start, released in stop) */
    private accelSubscription;
    /** Active gyroscope subscription (created in start, released in stop) */
    private gyroSubscription;
    /** Polling interval in ms (see class doc for rate cap) */
    private updateIntervalMs;
    constructor(eventBus: IEventBus, updateIntervalMs?: number);
    /** Whether the provider is currently emitting events */
    get isActive(): boolean;
    /** Create sensor subscriptions and begin emitting */
    start(): void;
    /** Tear down sensor subscriptions and stop emitting */
    stop(): void;
}
494
+
495
/**
 * HardwareInputProvider listens for hardware button events on Android
 * via DeviceEventEmitter and emits them as InputEvents.
 *
 * Note: This requires a companion native module to forward volume button
 * events. Without one, this provider is effectively a no-op stub that
 * demonstrates the extensibility of the input layer.
 *
 * To extend for custom hardware events:
 * 1. Create a native module that emits 'onHardwareKey' events
 * 2. The event payload should contain { key: string, action: 'down' | 'up' }
 */
declare class HardwareInputProvider implements IInputProvider {
    /** Bus that HardwareData InputEvents are emitted onto */
    private eventBus;
    /** Backing field for the isActive getter */
    private _isActive;
    /** DeviceEventEmitter subscription (created in start, released in stop) */
    private subscription;
    /** Native event name to listen for (constructor-configurable) */
    private eventName;
    constructor(eventBus: IEventBus, eventName?: string);
    /** Whether the provider is currently emitting events */
    get isActive(): boolean;
    /** Subscribe to the native event and begin emitting */
    start(): void;
    /** Remove the native event subscription */
    stop(): void;
}
517
+
518
/**
 * CameraInputProvider is a future-ready stub for camera-based gesture input.
 *
 * When implemented, this would:
 * - Subscribe to a camera frame processing pipeline
 * - Run hand/pose detection models
 * - Emit InputEvents with detected gesture landmarks
 *
 * Currently a no-op — calling start()/stop() has no effect.
 */
declare class CameraInputProvider implements IInputProvider {
    /** Bus that future camera InputEvents would be emitted onto */
    private eventBus;
    /** Backing field for the isActive getter */
    private _isActive;
    constructor(eventBus: IEventBus);
    /** Whether the provider is currently active (stub — no events emitted) */
    get isActive(): boolean;
    /** No-op in the current stub implementation */
    start(): void;
    /** No-op in the current stub implementation */
    stop(): void;
}
536
+
537
/**
 * First-order IIR noise filter with both low-pass and high-pass modes.
 *
 * Low-pass: output = α * input + (1 - α) * previousOutput
 * - Smooths noisy signals (e.g., touch jitter)
 * - Higher alpha = more responsive but noisier
 *
 * High-pass: output = input - lowPass(input)
 * - Removes slowly-changing components (e.g., gravity from accelerometer)
 * - Isolates sudden movements (shakes, flicks)
 */
declare class NoiseFilter {
    /** Previous low-pass output (the filter's memory; see formula above) */
    private lowPassState;
    /** Filter coefficient in [0, 1] */
    private alpha;
    /**
     * @param alpha - Filter coefficient [0, 1]. Default 0.8.
     *   - For low-pass: 0.1 = very smooth, 0.9 = barely filtered
     *   - For high-pass: same alpha applied to the underlying low-pass
     */
    constructor(alpha?: number);
    /**
     * Apply low-pass filter. Removes high-frequency jitter.
     */
    lowPass(x: number, y: number, z: number): {
        x: number;
        y: number;
        z: number;
    };
    /**
     * Apply high-pass filter. Removes low-frequency components (gravity).
     * Returns only the dynamic/transient part of the signal.
     */
    highPass(x: number, y: number, z: number): {
        x: number;
        y: number;
        z: number;
    };
    /**
     * Reset filter state. Call when starting a new gesture or after a pause.
     */
    reset(): void;
    /**
     * Update alpha dynamically (e.g., for adaptive filtering).
     */
    setAlpha(alpha: number): void;
}
583
+
584
/**
 * Computes velocity from sequential position/time samples.
 * Velocity = (currentPosition - previousPosition) / (currentTime - previousTime)
 */
declare class VelocityCalculator {
    /** Previous sample's X position */
    private prevX;
    /** Previous sample's Y position */
    private prevY;
    /** Previous sample's timestamp in ms */
    private prevTimestamp;
    /**
     * Calculate velocity from a new position sample.
     *
     * @param x - Current X position
     * @param y - Current Y position
     * @param timestamp - Current timestamp in milliseconds
     * @returns Velocity components and magnitude
     */
    calculate(x: number, y: number, timestamp: number): {
        velocityX: number;
        velocityY: number;
        velocity: number;
    };
    /**
     * Reset state. Call when starting a new gesture.
     */
    reset(): void;
}
610
+
611
/**
 * Detects direction angle and classifies into cardinal directions.
 *
 * Coordinate system:
 * - 0° / 0 rad = right
 * - 90° / π/2 rad = down (screen coordinates, Y grows downward)
 * - 180° / π rad = left
 * - -90° / -π/2 rad = up
 */
declare class AngleDetector {
    /**
     * Calculate angle from X/Y deltas.
     *
     * @param dx - Change in X (positive = right)
     * @param dy - Change in Y (positive = down in screen coords)
     * @returns Angle in radians, degrees, and cardinal direction
     */
    calculate(dx: number, dy: number): {
        angleRadians: number;
        angleDegrees: number;
        direction: CardinalDirection;
    };
    /**
     * Classify angle (in degrees) into 8 cardinal directions.
     * Uses 45° sectors centered on each direction.
     *
     * Sectors (in screen coordinates where Y grows down):
     *   Right:     -22.5° to 22.5°
     *   DownRight:  22.5° to 67.5°
     *   Down:       67.5° to 112.5°
     *   DownLeft:  112.5° to 157.5°
     *   Left:      157.5° to 180° or -180° to -157.5°
     *   UpLeft:   -157.5° to -112.5°
     *   Up:       -112.5° to -67.5°
     *   UpRight:   -67.5° to -22.5°
     */
    private classifyDirection;
}
649
+
650
/**
 * Maps raw magnitudes to a normalized [0, 1] range with clamping.
 *
 * Formula: normalized = clamp((value - min) / (max - min), 0, 1)
 *
 * This is useful for turning raw sensor values (e.g., acceleration in g,
 * velocity in px/ms) into intensity values that can be used for feedback
 * or threshold comparison.
 */
declare class ThresholdNormalizer {
    /** Lower threshold — values at or below this map to 0 */
    private min;
    /** Upper threshold — values at or above this map to 1 */
    private max;
    /**
     * @param min - Minimum threshold. Values at or below this map to 0.
     * @param max - Maximum threshold. Values at or above this map to 1.
     */
    constructor(min?: number, max?: number);
    /**
     * Normalize a raw value to [0, 1].
     */
    normalize(value: number): number;
    /**
     * Update thresholds dynamically.
     */
    setRange(min: number, max: number): void;
    /**
     * Get current min threshold.
     */
    getMin(): number;
    /**
     * Get current max threshold.
     */
    getMax(): number;
}
684
+
685
/**
 * Fixed-size ring buffer with time-based eviction.
 *
 * Design decisions:
 * - Uses a pre-allocated array with head/tail pointers for O(1) push.
 * - Evicts entries older than windowMs on each push (amortized O(1)).
 * - Capacity is generously sized (default 64) to handle burst input at 60Hz.
 * - getAll() returns samples in chronological order.
 */
declare class StreamBuffer {
    /** Pre-allocated ring storage */
    private buffer;
    /** Index of the oldest sample in the ring */
    private head;
    /** Number of samples currently stored */
    private count;
    /** Maximum number of samples the ring can hold */
    private capacity;
    /** Time window in ms; older samples are evicted */
    private windowMs;
    /**
     * @param windowMs - Time window in ms. Samples older than this are evicted. Default 400.
     * @param capacity - Maximum buffer size. Default 64 (~1 sec at 60Hz).
     */
    constructor(windowMs?: number, capacity?: number);
    /**
     * Push a new sample. Automatically evicts stale samples.
     * O(1) amortized.
     */
    push(sample: ProcessedSample): void;
    /**
     * Get all non-stale samples in chronological order.
     */
    getAll(): ProcessedSample[];
    /**
     * Get the most recent sample, or null if buffer is empty.
     */
    latest(): ProcessedSample | null;
    /**
     * Get the number of samples currently in the buffer.
     */
    size(): number;
    /**
     * Clear the buffer.
     */
    clear(): void;
    /**
     * Remove samples older than windowMs from the head.
     */
    private evictStale;
}
731
+
732
/**
 * Abstract base class for all gesture recognizers.
 *
 * Subclasses must implement:
 * - `onProcessedSample(sample)`: evaluate the sample and call transition methods
 *
 * The base class provides:
 * - State machine with validated transitions
 * - Automatic GestureEvent emission on state changes
 * - EventBus integration
 * - reset() and dispose() lifecycle methods
 */
declare abstract class BaseRecognizer implements IRecognizer {
    /** Unique recognizer identifier */
    readonly id: string;
    /** Human-readable name used in emitted GestureEvents */
    readonly name: string;
    /** Priority: lower number = higher priority */
    readonly priority: number;
    /** If true, activating this gesture blocks lower-priority gestures */
    readonly isExclusive: boolean;
    /** Enable/disable the recognizer at runtime */
    enabled: boolean;
    /** Backing field for the state getter */
    private _state;
    /** Bus used to emit GestureEvents (shared with subclasses) */
    protected eventBus: IEventBus;
    constructor(name: string, eventBus: IEventBus, options?: {
        priority?: number;
        isExclusive?: boolean;
        enabled?: boolean;
    });
    /** Current state machine state */
    get state(): RecognizerState;
    /**
     * Must be implemented by subclasses.
     * Evaluate the incoming processed sample and trigger state transitions.
     */
    abstract onProcessedSample(sample: ProcessedSample): void;
    /**
     * Reset the recognizer to Idle state.
     */
    reset(): void;
    /**
     * Clean up resources. Override in subclasses for custom cleanup.
     */
    dispose(): void;
    /**
     * Transition to Possible state (gesture might be starting).
     */
    protected transitionToPossible(): void;
    /**
     * Transition to Began state and emit gesture event.
     * Only valid from Possible state.
     */
    protected transitionToBegan(metadata?: GestureMetadata): void;
    /**
     * Transition to Changed state and emit gesture event.
     * Only valid from Began or Changed state (continuous gestures).
     */
    protected transitionToChanged(metadata?: GestureMetadata): void;
    /**
     * Transition to Ended state and emit gesture event.
     * Valid from Began, Changed, or Possible states.
     */
    protected transitionToEnded(metadata?: GestureMetadata): void;
    /**
     * Transition to Failed state (gesture didn't match criteria).
     * Auto-resets to Idle.
     */
    protected transitionToFailed(): void;
    /**
     * Transition to Cancelled state (gesture was interrupted).
     * Auto-resets to Idle.
     */
    protected transitionToCancelled(): void;
    /**
     * Emit a GestureEvent on the RecognitionGesture channel.
     */
    private emitGestureEvent;
}
805
+
806
+ declare class TapRecognizer extends BaseRecognizer {
807
+ private maxDuration;
808
+ private maxDistance;
809
+ private startTime;
810
+ private startX;
811
+ private startY;
812
+ constructor(eventBus: IEventBus, config?: TapRecognizerConfig);
813
+ onProcessedSample(sample: ProcessedSample): void;
814
+ reset(): void;
815
+ private resetState;
816
+ }
817
+
818
+ declare class DoubleTapRecognizer extends BaseRecognizer {
819
+ private maxInterval;
820
+ private maxDistance;
821
+ private firstTapTime;
822
+ private firstTapX;
823
+ private firstTapY;
824
+ private tapCount;
825
+ constructor(eventBus: IEventBus, config?: DoubleTapRecognizerConfig);
826
+ onProcessedSample(sample: ProcessedSample): void;
827
+ reset(): void;
828
+ private resetState;
829
+ }
830
+
831
+ declare class PanRecognizer extends BaseRecognizer {
832
+ private minDistance;
833
+ constructor(eventBus: IEventBus, config?: PanRecognizerConfig);
834
+ onProcessedSample(sample: ProcessedSample): void;
835
+ }
836
+
837
+ declare class PinchRecognizer extends BaseRecognizer {
838
+ private minScale;
839
+ constructor(eventBus: IEventBus, config?: PinchRecognizerConfig);
840
+ onProcessedSample(sample: ProcessedSample): void;
841
+ }
842
+
843
+ declare class RotationRecognizer extends BaseRecognizer {
844
+ private minRotation;
845
+ constructor(eventBus: IEventBus, config?: RotationRecognizerConfig);
846
+ onProcessedSample(sample: ProcessedSample): void;
847
+ }
848
+
849
+ declare class EdgeSwipeRecognizer extends BaseRecognizer {
850
+ private edge;
851
+ private edgeZoneWidth;
852
+ private minDistance;
853
+ private minVelocity;
854
+ private screenWidth;
855
+ private screenHeight;
856
+ private startedInEdge;
857
+ private startX;
858
+ private startY;
859
+ constructor(eventBus: IEventBus, config: EdgeSwipeRecognizerConfig);
860
+ onProcessedSample(sample: ProcessedSample): void;
861
+ reset(): void;
862
+ /**
863
+ * Check if a point is within the configured edge zone.
864
+ */
865
+ private isInEdgeZone;
866
+ /**
867
+ * Get the swipe distance along the expected axis.
868
+ * Left/right edges → horizontal distance, top/bottom → vertical.
869
+ */
870
+ private getSwipeDistance;
871
+ /**
872
+ * Get the swipe velocity along the expected axis.
873
+ */
874
+ private getSwipeVelocity;
875
+ private resetState;
876
+ }
877
+
878
+ declare class CornerRecognizer extends BaseRecognizer {
879
+ private corner;
880
+ private cornerZoneSize;
881
+ private minDistance;
882
+ private screenWidth;
883
+ private screenHeight;
884
+ private startedInCorner;
885
+ private startX;
886
+ private startY;
887
+ constructor(eventBus: IEventBus, config: CornerRecognizerConfig);
888
+ onProcessedSample(sample: ProcessedSample): void;
889
+ reset(): void;
890
+ private isInCornerZone;
891
+ private resetState;
892
+ }
893
+
894
+ declare class ShakeRecognizer extends BaseRecognizer {
895
+ private threshold;
896
+ private consecutiveSamples;
897
+ private cooldownMs;
898
+ private aboveThresholdCount;
899
+ private lastTriggerTime;
900
+ constructor(eventBus: IEventBus, config?: ShakeRecognizerConfig);
901
+ onProcessedSample(sample: ProcessedSample): void;
902
+ reset(): void;
903
+ }
904
+
905
+ declare class TiltRecognizer extends BaseRecognizer {
906
+ private tiltThreshold;
907
+ private cooldownMs;
908
+ private lastTriggerTime;
909
+ constructor(eventBus: IEventBus, config?: TiltRecognizerConfig);
910
+ onProcessedSample(sample: ProcessedSample): void;
911
+ }
912
+
913
+ declare class WristFlickRecognizer extends BaseRecognizer {
914
+ private angularVelocityThreshold;
915
+ private cooldownMs;
916
+ private lastTriggerTime;
917
+ constructor(eventBus: IEventBus, config?: WristFlickRecognizerConfig);
918
+ onProcessedSample(sample: ProcessedSample): void;
919
+ }
920
+
921
/**
 * Matches an ordered sequence of previously-recognized gestures
 * (e.g. tap → tap → swipe). Unlike sample-driven recognizers, it
 * listens for completed GestureEvents on the EventBus and advances a
 * cursor through `sequence`, resetting if `timeoutMs` elapses between
 * steps.
 */
declare class SequenceRecognizer extends BaseRecognizer {
    /** Ordered gesture names that must occur to complete the sequence. */
    private sequence;
    /** Maximum time (ms) allowed between consecutive sequence steps. */
    private timeoutMs;
    /** Index of the next expected gesture in `sequence`. */
    private currentIndex;
    /** Timestamp of the last matched step, used for timeout checks. */
    private lastStepTime;
    /** Teardown function for the EventBus subscription. */
    private unsubscribe;
    constructor(eventBus: IEventBus, config: SequenceRecognizerConfig);
    /**
     * SequenceRecognizer doesn't use ProcessedSample — it listens
     * to GestureEvent objects on the EventBus instead. This override
     * is a no-op required by the BaseRecognizer contract.
     */
    onProcessedSample(_sample: ProcessedSample): void;
    /** Reset sequence progress back to the first step. */
    reset(): void;
    /** Release the EventBus subscription and clean up. */
    dispose(): void;
    /**
     * Subscribe to the RecognitionGesture channel to listen for
     * completed gestures and advance the sequence.
     */
    private subscribeToGestures;
}
941
+
942
/**
 * Recognizes drawn symbols (shapes traced on screen) using the $1
 * gesture recognizer: the collected touch path is resampled to a fixed
 * point count, rotated, scaled into a reference square, and translated
 * to the origin before being compared against stored templates.
 */
declare class SymbolRecognizer extends BaseRecognizer {
    /** Symbol templates the drawn path is matched against. */
    private templates;
    /** Minimum match confidence required to emit a recognition. */
    private minConfidence;
    /** Points collected for the in-progress drawing. */
    private currentPath;
    /** True while a path is actively being drawn. */
    private isDrawing;
    /** Number of points paths are resampled to before comparison. */
    private resampleCount;
    /** Side length of the reference square used for scale normalization. */
    private squareSize;
    constructor(eventBus: IEventBus, config?: SymbolRecognizerConfig);
    /** Consume a processed touch sample and extend the drawn path. */
    onProcessedSample(sample: ProcessedSample): void;
    /** Discard the current path and drawing state. */
    reset(): void;
    /**
     * Run the $1 recognizer against collected path points.
     */
    private recognize;
    /** Resample a path to `resampleCount` evenly spaced points. */
    private resample;
    /** Compute the centroid of a set of points. */
    private getCentroid;
    /** Rotate a path around its centroid by a given angle. */
    private rotateBy;
    /** Scale a path to fit the reference square. */
    private scaleTo;
    /** Translate a path so its centroid sits at the origin. */
    private translateToOrigin;
    /** Compute the axis-aligned bounding box of a path. */
    private boundingBox;
    /** Point-by-point distance between two equal-length paths. */
    private pathDistance;
    /** Total arc length of a path. */
    private pathLength;
    /** Euclidean distance between two points. */
    private distance;
}
966
+
967
/**
 * LockManager tracks which gestures currently hold exclusive locks.
 *
 * When an exclusive gesture fires:
 * 1. It calls acquireLock(name, priority)
 * 2. The ConflictResolver checks isLocked(name, priority) before allowing
 *    other gestures to pass through
 * 3. Once the gesture ends, releaseLock(name) frees the slot
 *
 * Multiple locks can coexist (e.g., a high-priority lock + unrelated gestures).
 * A gesture is blocked if ANY active lock has higher or equal priority.
 * Priority numbers are inverted: lower number = higher priority.
 */
declare class LockManager {
    /** Active locks: gestureName → priority */
    private locks;
    /**
     * Acquire an exclusive lock for a gesture.
     *
     * @param gestureName - The gesture acquiring the lock
     * @param priority - The priority level of the lock (lower = higher priority)
     * @returns true if the lock was acquired
     */
    acquireLock(gestureName: string, priority: number): boolean;
    /**
     * Release the lock held by a gesture. Safe to call for a gesture
     * that holds no lock.
     */
    releaseLock(gestureName: string): void;
    /**
     * Check if a gesture with the given priority is blocked by any active lock.
     * A gesture is blocked if an active lock has higher or equal priority
     * (lower or equal priority number) and is not the same gesture.
     *
     * @param gestureName - The gesture to check
     * @param priority - The priority of the gesture to check
     * @returns true if the gesture is blocked
     */
    isLocked(gestureName: string, priority: number): boolean;
    /**
     * Check if a specific gesture holds a lock.
     */
    hasLock(gestureName: string): boolean;
    /**
     * Clear all locks. Called during engine reset.
     */
    clearAll(): void;
    /**
     * Get the number of active locks.
     */
    get activeLockCount(): number;
}
1017
+
1018
/**
 * ConflictResolver processes gesture events through priority ordering
 * and exclusive locking to determine which gestures should be dispatched.
 *
 * Rules:
 * 1. Events are queued and processed in priority order (lower = first)
 * 2. If an exclusive gesture fires (Began state), it acquires a lock
 * 3. Locked gestures of equal or lower priority are blocked
 * 4. When an exclusive gesture ends, its lock is released
 * 5. Non-exclusive gestures pass through if not blocked
 */
declare class ConflictResolver {
    /** Min-heap ordering pending events by priority. */
    private priorityQueue;
    /** Tracks exclusive locks held by active gestures. */
    private lockManager;
    /** Bus on which gesture events arrive and resolved events are emitted. */
    private eventBus;
    /** Teardown function for the EventBus subscription. */
    private unsubscribe;
    /** Events received this tick, awaiting batch processing. */
    private pendingEvents;
    /** True when a microtask has already been scheduled for this batch. */
    private processingScheduled;
    constructor(eventBus: IEventBus);
    /**
     * Start listening for gesture events and resolving conflicts.
     */
    start(): void;
    /**
     * Stop listening and clear all state.
     */
    stop(): void;
    /**
     * Schedule conflict resolution for the next microtask.
     * This batches events that arrive in the same tick.
     */
    private scheduleProcessing;
    /**
     * Process all pending events through priority queue and lock rules.
     */
    private processEvents;
    /** Expose lock manager for testing */
    getLockManager(): LockManager;
}
1057
+
1058
/**
 * Min-heap priority queue for GestureEvents.
 * Lower priority number = higher priority = processed first.
 *
 * Operations:
 * - insert: O(log n)
 * - extractMin: O(log n)
 * - peek: O(1)
 */
declare class GesturePriorityQueue {
    /** Backing array storing the binary heap. */
    private heap;
    /** Number of events currently in the queue. */
    get size(): number;
    /** True when the queue holds no events. */
    isEmpty(): boolean;
    /** Insert a gesture event into the queue. O(log n). */
    insert(event: GestureEvent): void;
    /** Extract and return the highest-priority (lowest priority number) event. O(log n). Returns null when empty. */
    extractMin(): GestureEvent | null;
    /** Peek at the highest-priority event without removing it. O(1). Returns null when empty. */
    peek(): GestureEvent | null;
    /** Remove all events from the queue. */
    clear(): void;
    /** Drain the queue, returning all events in priority order. */
    drainAll(): GestureEvent[];
    /** Restore the heap invariant upward after an insert. */
    private bubbleUp;
    /** Restore the heap invariant downward after an extract. */
    private bubbleDown;
}
1084
+
1085
/**
 * ActionDispatcher receives resolved gesture events and dispatches them
 * to registered action handlers.
 *
 * Actions are registered by gesture name. Multiple actions can be registered
 * for a single gesture (they all execute in registration order).
 */
declare class ActionDispatcher {
    /** Registered actions keyed by gesture name. */
    private actionMap;
    /** Bus on which resolved gesture events arrive. */
    private eventBus;
    /** Teardown function for the EventBus subscription. */
    private unsubscribe;
    constructor(eventBus: IEventBus);
    /**
     * Start listening for resolved gesture events.
     */
    start(): void;
    /**
     * Stop listening.
     */
    stop(): void;
    /**
     * Register an action for a gesture name. Multiple actions per
     * gesture are allowed; they run in registration order.
     */
    registerAction(gestureName: string, action: IGestureAction): void;
    /**
     * Unregister a specific action from a gesture, identified by its
     * actionId.
     */
    unregisterAction(gestureName: string, actionId: string): void;
    /**
     * Clear all registered actions.
     */
    clearActions(): void;
    /**
     * Dispatch a gesture event to all matching registered actions.
     */
    private dispatch;
}
1122
+
1123
/**
 * NavigationAction triggers navigation when a gesture is recognized.
 * Accepts a callback that receives the gesture event and can perform
 * any navigation logic (e.g., navigation.goBack()).
 */
declare class NavigationAction implements IGestureAction {
    /** Unique identifier used to register/unregister this action. */
    readonly actionId: string;
    /** User-supplied navigation callback invoked on dispatch. */
    private callback;
    constructor(actionId: string, callback: (event: GestureEvent) => void);
    /** Invoke the navigation callback with the gesture event. */
    execute(event: GestureEvent): void;
}
1134
+
1135
/**
 * UITransformAction applies Reanimated transformations when a gesture fires.
 *
 * Accepts a callback where you can update shared values:
 * ```typescript
 * new UITransformAction('scale-on-pinch', (event) => {
 *   scale.value = withSpring(event.metadata.scale ?? 1);
 * });
 * ```
 */
declare class UITransformAction implements IGestureAction {
    /** Unique identifier used to register/unregister this action. */
    readonly actionId: string;
    /** User-supplied transform callback invoked on dispatch. */
    private transform;
    constructor(actionId: string, transform: (event: GestureEvent) => void);
    /** Invoke the transform callback with the gesture event. */
    execute(event: GestureEvent): void;
}
1151
+
1152
/**
 * SystemAction performs system-level operations when a gesture fires
 * (e.g., toggling settings, adjusting volume). The actual operation is
 * supplied by the caller via the callback.
 */
declare class SystemAction implements IGestureAction {
    /** Unique identifier used to register/unregister this action. */
    readonly actionId: string;
    /** User-supplied system callback invoked on dispatch. */
    private callback;
    constructor(actionId: string, callback: (event: GestureEvent) => void);
    /** Invoke the system callback with the gesture event. */
    execute(event: GestureEvent): void;
}
1161
+
1162
/**
 * CustomAction wraps a user-defined callback for arbitrary gesture
 * handling that doesn't fit the other action categories.
 *
 * @example
 * ```typescript
 * const logAction = new CustomAction('log-shake', (event) => {
 *   analytics.track('shake_gesture', { magnitude: event.metadata.magnitude });
 * });
 * ```
 */
declare class CustomAction implements IGestureAction {
    /** Unique identifier used to register/unregister this action. */
    readonly actionId: string;
    /** User-supplied callback invoked on dispatch. */
    private callback;
    constructor(actionId: string, callback: (event: GestureEvent) => void);
    /** Invoke the callback with the gesture event. */
    execute(event: GestureEvent): void;
}
1178
+
1179
/**
 * HapticFeedback triggers haptic responses when gestures are recognized.
 *
 * Supports three haptic types (via expo-haptics):
 * - Impact: for gesture activations (tap, swipe)
 * - Notification: for important events (shake, sequence complete)
 * - Selection: for continuous feedback (pan, rotation)
 *
 * Falls back to react-native Vibration API when expo-haptics is not installed.
 */
declare class HapticFeedback implements IFeedbackProvider {
    /** Backing field for the isSupported getter. */
    private _isSupported;
    /** True when expo-haptics is unavailable and Vibration is used instead. */
    private useVibrationFallback;
    /** Whether haptic output is currently enabled. */
    private enabled;
    constructor(enabled?: boolean);
    /** Whether any haptic mechanism is available on this device. */
    get isSupported(): boolean;
    /** Emit haptic feedback appropriate for the given gesture event. */
    trigger(event: GestureEvent): void;
    /** Enable or disable haptic output at runtime. */
    setEnabled(enabled: boolean): void;
}
1198
+
1199
/**
 * VisualFeedback invokes registered callbacks to trigger visual animations
 * when gestures are recognized.
 *
 * Typical usage: update Reanimated shared values in the callback.
 *
 * @example
 * ```typescript
 * const visual = new VisualFeedback((event) => {
 *   opacity.value = withTiming(0.5, { duration: 100 });
 *   scale.value = withSpring(0.95);
 * });
 * ```
 */
declare class VisualFeedback implements IFeedbackProvider {
    /** Backing field for the isSupported getter. */
    private _isSupported;
    /** User-supplied visual callback; may be absent until setCallback. */
    private callback;
    constructor(callback?: (event: GestureEvent) => void);
    /** Whether visual feedback can be delivered. */
    get isSupported(): boolean;
    /** Invoke the visual callback with the gesture event. */
    trigger(event: GestureEvent): void;
    /**
     * Update the visual feedback callback at runtime.
     */
    setCallback(callback: (event: GestureEvent) => void): void;
}
1224
+
1225
/**
 * AccessibilityFeedback announces gesture events for screen reader users.
 *
 * Automatically generates human-readable announcements based on gesture names.
 * Custom announcement builders can be provided via setAnnouncementBuilder().
 */
declare class AccessibilityFeedback implements IFeedbackProvider {
    /** Backing field for the isSupported getter. */
    private _isSupported;
    /** Optional custom builder that maps a gesture event to announcement text. */
    private announcementBuilder;
    constructor();
    /** Whether accessibility announcements can be delivered. */
    get isSupported(): boolean;
    /** Announce the gesture event to assistive technology. */
    trigger(event: GestureEvent): void;
    /**
     * Set a custom function to build announcement strings.
     */
    setAnnouncementBuilder(builder: (event: GestureEvent) => string): void;
    /**
     * Generate a default human-readable announcement based on gesture name.
     */
    private defaultAnnouncement;
}
1246
+
1247
/**
 * GestureEngine is the main orchestrator for the gesture pipeline.
 *
 * It connects all 6 layers:
 * Input → Processing → Recognition → Conflict → Action → Feedback
 *
 * All communication flows through the typed EventBus.
 */
declare class GestureEngine {
    /** Shared typed event bus that all layers communicate over. */
    readonly eventBus: IEventBus;
    /** Engine configuration captured at construction time. */
    private config;
    /** Layer 1: touch input provider. */
    readonly touchInput: TouchInputProvider;
    /** Layer 1: device sensor input provider. */
    readonly sensorInput: SensorInputProvider;
    /** Layer 2: noise filter for touch samples. */
    private noiseFilter;
    /** Layer 2: noise filter for sensor samples. */
    private sensorNoiseFilter;
    /** Layer 2: derives velocities from successive samples. */
    private velocityCalc;
    /** Layer 2: detects angles/directions from motion. */
    private angleDetector;
    /** Layer 2: threshold normalizer for processed values. */
    private normalizer;
    /** Layer 2: rolling buffer of recent samples. */
    private streamBuffer;
    /** Layer 3: registered gesture recognizers. */
    private recognizers;
    /** Layer 4: priority/lock-based conflict resolution. */
    private conflictResolver;
    /** Layer 5: maps resolved gestures to registered actions. */
    private actionDispatcher;
    /** Layer 6: registered feedback providers (haptic/visual/accessibility). */
    private feedbackProviders;
    /** Backing field for the isRunning getter. */
    private _isRunning;
    /** Teardown function for the input-layer subscription. */
    private inputUnsubscribe;
    /** Teardown function for the feedback-layer subscription. */
    private feedbackUnsubscribe;
    constructor(config?: GestureEngineConfig);
    /** Whether the engine has been started and is currently active. */
    get isRunning(): boolean;
    /**
     * Start the gesture engine. Activates all providers and wires the pipeline.
     */
    start(): void;
    /**
     * Stop the gesture engine. Cleans up all subscriptions and providers.
     */
    stop(): void;
    /**
     * Register a gesture recognizer with the engine.
     */
    registerRecognizer(recognizer: IRecognizer): void;
    /**
     * Unregister a recognizer by its ID.
     */
    unregisterRecognizer(recognizerId: string): void;
    /**
     * Register an action for a gesture name.
     */
    registerAction(gestureName: string, action: IGestureAction): void;
    /**
     * Register a feedback provider.
     */
    registerFeedback(provider: IFeedbackProvider): void;
    /**
     * Get all registered recognizers.
     */
    getRecognizers(): IRecognizer[];
    /**
     * Dispose the engine and clean up all resources.
     */
    dispose(): void;
    /**
     * Process a raw input event through Layer 2 (processing) and feed
     * the resulting ProcessedSample into Layer 3 (recognition).
     */
    private processInput;
}
1313
+
1314
/** Configuration for the useGestureEngine hook: engine config plus wiring. */
interface UseGestureEngineConfig extends GestureEngineConfig {
    /**
     * Recognizer instances to register with the engine.
     * These are created outside the hook and passed in.
     */
    recognizers?: IRecognizer[];
    /**
     * Action mappings: gestureName → array of actions
     */
    actions?: Record<string, IGestureAction[]>;
    /**
     * Feedback providers to register
     */
    feedback?: IFeedbackProvider[];
}
/** Return shape of the useGestureEngine hook. */
interface UseGestureEngineResult {
    /** The GestureEngine instance (null before initialization). */
    engine: GestureEngine | null;
    /** Whether the engine is initialized and running */
    isReady: boolean;
}
/**
 * React hook that creates, configures, and manages a GestureEngine lifecycle.
 *
 * The engine is created once on mount and disposed on unmount.
 * Uses useRef to keep the engine outside the React render cycle.
 *
 * @example
 * ```tsx
 * const { engine, isReady } = useGestureEngine({
 *   sensorInterval: 100,
 *   hapticEnabled: true,
 *   recognizers: [shakeRecognizer, edgeSwipeRecognizer],
 *   actions: { 'shake': [myShakeAction] },
 * });
 * ```
 */
declare function useGestureEngine(config?: UseGestureEngineConfig): UseGestureEngineResult;
1352
+
1353
/** Configuration for the useShakeGesture hook. */
interface UseShakeGestureConfig {
    /** Acceleration threshold in g (default 1.5) */
    threshold?: number;
    /** Cooldown in ms (default 1000) */
    cooldownMs?: number;
    /** Callback when shake is detected */
    onShake: () => void;
    /** Enable haptic feedback (default true) */
    hapticEnabled?: boolean;
    /** Sensor polling interval in ms (default 100) */
    sensorInterval?: number;
}
/**
 * Convenience hook for shake gesture detection.
 *
 * Creates a minimal GestureEngine with just a ShakeRecognizer.
 * Ideal for simple use cases where you just need shake detection.
 *
 * @example
 * ```tsx
 * useShakeGesture({
 *   threshold: 1.5,
 *   cooldownMs: 1000,
 *   onShake: () => console.log('Device shaken!'),
 * });
 * ```
 */
declare function useShakeGesture(config: UseShakeGestureConfig): void;
1381
+
1382
/** Configuration for the useEdgeSwipe hook. */
interface UseEdgeSwipeConfig {
    /** Edge to detect: 'left', 'right', 'top', 'bottom' */
    edge: 'left' | 'right' | 'top' | 'bottom';
    /** Minimum swipe distance in px (default 50) */
    minDistance?: number;
    /** Edge zone width in px (default 30) */
    edgeZoneWidth?: number;
    /** Minimum velocity in px/ms (default 0.3) */
    minVelocity?: number;
    /** Screen width in px */
    screenWidth?: number;
    /** Screen height in px */
    screenHeight?: number;
    /** Callback when edge swipe is detected */
    onSwipe: (event: GestureEvent) => void;
    /** Enable haptic feedback (default true) */
    hapticEnabled?: boolean;
}
/**
 * Convenience hook for edge swipe detection.
 * Creates a minimal GestureEngine with just an EdgeSwipeRecognizer.
 *
 * @example
 * ```tsx
 * useEdgeSwipe({
 *   edge: 'left',
 *   minDistance: 50,
 *   onSwipe: (event) => navigation.goBack(),
 * });
 * ```
 */
declare function useEdgeSwipe(config: UseEdgeSwipeConfig): void;
1413
+
1414
/** Configuration for the useGestureSequence hook. */
interface UseGestureSequenceConfig {
    /** Ordered gesture names to match */
    sequence: string[];
    /** Max time between steps in ms (default 800) */
    timeoutMs?: number;
    /** Callback when sequence is completed */
    onComplete: () => void;
    /** Enable haptic feedback (default true) */
    hapticEnabled?: boolean;
}
/**
 * Convenience hook for gesture sequence detection.
 * Fires onComplete when the named gestures occur in order, each within
 * timeoutMs of the previous one.
 *
 * @example
 * ```tsx
 * useGestureSequence({
 *   sequence: ['tap', 'tap', 'edge-swipe-right'],
 *   timeoutMs: 800,
 *   onComplete: () => console.log('Secret gesture unlocked!'),
 * });
 * ```
 */
declare function useGestureSequence(config: UseGestureSequenceConfig): void;
1437
+
1438
+ export { AccessibilityFeedback, ActionDispatcher, AngleDetector, BaseRecognizer, CameraInputProvider, CardinalDirection, ConflictResolver, CornerRecognizer, type CornerRecognizerConfig, CustomAction, DoubleTapRecognizer, type DoubleTapRecognizerConfig, EdgeSwipeRecognizer, type EdgeSwipeRecognizerConfig, EventBus, EventChannel, type EventChannelMap, type EventHandler, GestureEngine, type GestureEngineConfig, type GestureEvent, type GestureMetadata, GesturePriorityQueue, HapticFeedback, type HardwareData, HardwareInputProvider, type IEventBus, type IFeedbackProvider, type IGestureAction, type IInputProvider, type IRecognizer, type InputEvent, InputType, LockManager, NavigationAction, NoiseFilter, PanRecognizer, type PanRecognizerConfig, PinchRecognizer, type PinchRecognizerConfig, type ProcessedSample, type RecognizerConfig, RecognizerState, RotationRecognizer, type RotationRecognizerConfig, type SensorData, SensorInputProvider, SensorType, SequenceRecognizer, type SequenceRecognizerConfig, ShakeRecognizer, type ShakeRecognizerConfig, StreamBuffer, SymbolRecognizer, type SymbolRecognizerConfig, SystemAction, TapRecognizer, type TapRecognizerConfig, ThresholdNormalizer, TiltRecognizer, type TiltRecognizerConfig, type TouchData, TouchInputProvider, TouchType, UITransformAction, type UseEdgeSwipeConfig, type UseGestureEngineConfig, type UseGestureEngineResult, type UseGestureSequenceConfig, type UseShakeGestureConfig, VelocityCalculator, VisualFeedback, WristFlickRecognizer, type WristFlickRecognizerConfig, generateId, useEdgeSwipe, useGestureEngine, useGestureSequence, useShakeGesture };