@iam-protocol/pulse-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +59 -0
- package/dist/index.d.mts +376 -0
- package/dist/index.d.ts +376 -0
- package/dist/index.js +14316 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +14238 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +51 -0
- package/src/challenge/lissajous.ts +56 -0
- package/src/challenge/phrase.ts +29 -0
- package/src/config.ts +40 -0
- package/src/extraction/kinematic.ts +101 -0
- package/src/extraction/mfcc.ts +93 -0
- package/src/extraction/statistics.ts +59 -0
- package/src/extraction/types.ts +17 -0
- package/src/hashing/poseidon.ts +92 -0
- package/src/hashing/simhash.ts +87 -0
- package/src/hashing/types.ts +16 -0
- package/src/identity/anchor.ts +75 -0
- package/src/identity/types.ts +18 -0
- package/src/index.ts +43 -0
- package/src/proof/prover.ts +87 -0
- package/src/proof/serializer.ts +79 -0
- package/src/proof/types.ts +31 -0
- package/src/pulse.ts +397 -0
- package/src/sensor/audio.ts +94 -0
- package/src/sensor/motion.ts +83 -0
- package/src/sensor/touch.ts +65 -0
- package/src/sensor/types.ts +55 -0
- package/src/submit/relayer.ts +58 -0
- package/src/submit/types.ts +15 -0
- package/src/submit/wallet.ts +167 -0
- package/src/types.d.ts +14 -0
- package/test/integration.test.ts +102 -0
- package/test/poseidon.test.ts +81 -0
- package/test/serializer.test.ts +86 -0
- package/test/simhash.test.ts +57 -0
- package/test/statistics.test.ts +51 -0
- package/tsconfig.json +21 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +8 -0
package/src/pulse.ts
ADDED
|
@@ -0,0 +1,397 @@
|
|
|
1
|
+
import type { PulseConfig } from "./config";
|
|
2
|
+
import { DEFAULT_THRESHOLD, DEFAULT_CAPTURE_MS } from "./config";
|
|
3
|
+
import type { SensorData, AudioCapture, MotionSample, TouchSample, StageState } from "./sensor/types";
|
|
4
|
+
import type { TBH } from "./hashing/types";
|
|
5
|
+
import type { SolanaProof } from "./proof/types";
|
|
6
|
+
import type { VerificationResult } from "./submit/types";
|
|
7
|
+
import type { StoredVerificationData } from "./identity/types";
|
|
8
|
+
|
|
9
|
+
import { captureAudio } from "./sensor/audio";
|
|
10
|
+
import { captureMotion } from "./sensor/motion";
|
|
11
|
+
import { captureTouch } from "./sensor/touch";
|
|
12
|
+
import { extractMFCC } from "./extraction/mfcc";
|
|
13
|
+
import {
|
|
14
|
+
extractMotionFeatures,
|
|
15
|
+
extractTouchFeatures,
|
|
16
|
+
} from "./extraction/kinematic";
|
|
17
|
+
import { fuseFeatures } from "./extraction/statistics";
|
|
18
|
+
import { simhash } from "./hashing/simhash";
|
|
19
|
+
import { generateTBH } from "./hashing/poseidon";
|
|
20
|
+
import { prepareCircuitInput, generateProof } from "./proof/prover";
|
|
21
|
+
import { serializeProof } from "./proof/serializer";
|
|
22
|
+
import { submitViaWallet } from "./submit/wallet";
|
|
23
|
+
import { submitViaRelayer } from "./submit/relayer";
|
|
24
|
+
import {
|
|
25
|
+
storeVerificationData,
|
|
26
|
+
loadVerificationData,
|
|
27
|
+
} from "./identity/anchor";
|
|
28
|
+
|
|
29
|
+
/**
 * PulseConfig with `cluster` and `threshold` guaranteed present.
 *
 * NOTE(review): the PulseSDK constructor only defaults `threshold`
 * (DEFAULT_THRESHOLD); `cluster` is never defaulted there, so it must
 * already be required (or always supplied) in PulseConfig — confirm
 * against ./config.ts.
 */
type ResolvedConfig = Required<Pick<PulseConfig, "cluster" | "threshold">> &
  PulseConfig;
|
|
31
|
+
|
|
32
|
+
/**
|
|
33
|
+
* Extract features from sensor data and fuse into a single vector.
|
|
34
|
+
*/
|
|
35
|
+
function extractFeatures(data: SensorData): number[] {
|
|
36
|
+
const audioFeatures = data.audio
|
|
37
|
+
? extractMFCC(data.audio)
|
|
38
|
+
: new Array(156).fill(0);
|
|
39
|
+
const motionFeatures = extractMotionFeatures(data.motion);
|
|
40
|
+
const touchFeatures = extractTouchFeatures(data.touch);
|
|
41
|
+
return fuseFeatures(audioFeatures, motionFeatures, touchFeatures);
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/**
 * Shared pipeline: features → simhash → TBH → proof → submit.
 * Used by both PulseSDK.verify() and PulseSession.complete().
 *
 * Flow:
 *   1. Fuse sensor data into a feature vector.
 *   2. SimHash the vector into a fingerprint.
 *   3. Derive the TBH (Poseidon commitment) from the fingerprint.
 *   4. If a previous verification exists locally, generate a ZK proof
 *      relating the new TBH to the stored one.
 *   5. Submit via wallet (if provided) or relayer (if configured);
 *      otherwise fail with an explanatory error.
 *   6. On successful submission, persist the new TBH locally for the
 *      next re-verification.
 */
async function processSensorData(
  sensorData: SensorData,
  config: ResolvedConfig,
  wallet?: any,
  connection?: any
): Promise<VerificationResult> {
  // Extract features
  const features = extractFeatures(sensorData);

  // Generate fingerprint via SimHash
  const fingerprint = simhash(features);

  // Generate TBH (Poseidon commitment)
  const tbh = await generateTBH(fingerprint);

  // Check for previous verification data (persisted by a prior
  // successful run via storeVerificationData below).
  const previousData = loadVerificationData();
  const isFirstVerification = !previousData;

  let solanaProof: SolanaProof | null = null;

  // Re-verification path: prove the new TBH against the stored one.
  // (`previousData` re-checked so TypeScript narrows it to non-null.)
  if (!isFirstVerification && previousData) {
    const previousTBH: TBH = {
      fingerprint: previousData.fingerprint,
      salt: BigInt(previousData.salt),
      commitment: BigInt(previousData.commitment),
      // NOTE(review): zero-filled placeholder — presumably
      // prepareCircuitInput only uses the bigint `commitment`, not the
      // byte form; confirm against proof/prover.ts.
      commitmentBytes: new Uint8Array(32),
    };

    const circuitInput = prepareCircuitInput(
      tbh,
      previousTBH,
      config.threshold
    );

    // NOTE(review): empty-string fallbacks will likely make
    // generateProof fail with an obscure load error when the URLs are
    // not configured — consider validating config up front.
    const wasmPath = config.wasmUrl ?? "";
    const zkeyPath = config.zkeyUrl ?? "";

    const { proof, publicSignals } = await generateProof(
      circuitInput,
      wasmPath,
      zkeyPath
    );

    solanaProof = serializeProof(proof, publicSignals);
  }

  // Submit. Wallet takes precedence over relayer when both are available.
  let submission;

  if (wallet && connection) {
    if (isFirstVerification) {
      // First verification has no prior commitment to prove against, so
      // an empty placeholder proof is submitted.
      submission = await submitViaWallet(
        solanaProof ?? { proofBytes: new Uint8Array(0), publicInputs: [] },
        tbh.commitmentBytes,
        { wallet, connection, isFirstVerification: true }
      );
    } else {
      // Non-null assertion is safe: solanaProof was assigned in the
      // re-verification branch above.
      submission = await submitViaWallet(solanaProof!, tbh.commitmentBytes, {
        wallet,
        connection,
        isFirstVerification: false,
      });
    }
  } else if (config.relayerUrl) {
    submission = await submitViaRelayer(
      solanaProof ?? { proofBytes: new Uint8Array(0), publicInputs: [] },
      tbh.commitmentBytes,
      { relayerUrl: config.relayerUrl, isFirstVerification }
    );
  } else {
    // No submission channel at all — report the commitment but fail.
    return {
      success: false,
      commitment: tbh.commitmentBytes,
      isFirstVerification,
      error: "No wallet or relayer configured",
    };
  }

  // Store verification data locally for next re-verification
  if (submission.success) {
    storeVerificationData({
      fingerprint: tbh.fingerprint,
      salt: tbh.salt.toString(),
      commitment: tbh.commitment.toString(),
      timestamp: Date.now(),
    });
  }

  return {
    success: submission.success,
    commitment: tbh.commitmentBytes,
    txSignature: submission.txSignature,
    isFirstVerification,
    error: submission.error,
  };
}
|
|
145
|
+
|
|
146
|
+
/**
 * PulseSession — event-driven staged capture session.
 *
 * Gives the caller control over when each sensor stage starts and stops.
 * After all stages complete, call complete() to run the processing pipeline.
 *
 * Each stage follows the same lifecycle:
 *   "idle" → startX() → "capturing" → stopX() → "captured"
 *   "idle" → skipX() → "skipped"
 * startX() kicks off the capture promise with an AbortController; stopX()
 * aborts the controller and awaits the promise to collect the data.
 *
 * Usage:
 *   const session = pulse.createSession(touchElement);
 *   await session.startAudio();
 *   // ... user speaks ...
 *   await session.stopAudio();
 *   await session.startMotion();
 *   // ... user holds device ...
 *   await session.stopMotion();
 *   await session.startTouch();
 *   // ... user traces curve ...
 *   await session.stopTouch();
 *   const result = await session.complete(wallet, connection);
 */
export class PulseSession {
  private config: ResolvedConfig;
  // Element that receives pointer events for the touch stage; required
  // only if startTouch() is used.
  private touchElement: HTMLElement | undefined;

  // Per-stage lifecycle state (see class doc).
  private audioStageState: StageState = "idle";
  private motionStageState: StageState = "idle";
  private touchStageState: StageState = "idle";

  // Controllers used to signal the capture functions to stop.
  private audioController: AbortController | null = null;
  private motionController: AbortController | null = null;
  private touchController: AbortController | null = null;

  // In-flight capture promises, resolved when the capture actually stops.
  private audioPromise: Promise<AudioCapture | null> | null = null;
  private motionPromise: Promise<MotionSample[]> | null = null;
  private touchPromise: Promise<TouchSample[]> | null = null;

  // Collected results, populated by the stopX() methods.
  private audioData: AudioCapture | null = null;
  private motionData: MotionSample[] = [];
  private touchData: TouchSample[] = [];

  constructor(config: ResolvedConfig, touchElement?: HTMLElement) {
    this.config = config;
    this.touchElement = touchElement;
  }

  // --- Audio ---

  /** Begin microphone capture. Throws if the stage already started. */
  async startAudio(): Promise<void> {
    if (this.audioStageState !== "idle")
      throw new Error("Audio capture already started");
    this.audioStageState = "capturing";
    this.audioController = new AbortController();
    // NOTE(review): any capture error (e.g. permission denied) is
    // silently converted to `null` audio — complete() then treats the
    // modality as absent rather than failing.
    this.audioPromise = captureAudio({
      signal: this.audioController.signal,
    }).catch(() => null);
  }

  /**
   * Stop microphone capture and return the captured audio (or null on
   * capture failure). Throws if the stage is not currently capturing.
   * Note: the underlying capture may keep running briefly to honor its
   * minimum duration; this await covers that.
   */
  async stopAudio(): Promise<AudioCapture | null> {
    if (this.audioStageState !== "capturing")
      throw new Error("Audio capture not active");
    // Non-null assertions safe: the state guard guarantees startAudio()
    // assigned the controller and promise.
    this.audioController!.abort();
    this.audioData = await this.audioPromise!;
    this.audioStageState = "captured";
    return this.audioData;
  }

  /** Mark the audio stage as intentionally skipped (never started). */
  skipAudio(): void {
    if (this.audioStageState !== "idle")
      throw new Error("Audio capture already started");
    this.audioStageState = "skipped";
  }

  // --- Motion ---

  /** Begin IMU capture. Throws if the stage already started. */
  async startMotion(): Promise<void> {
    if (this.motionStageState !== "idle")
      throw new Error("Motion capture already started");
    this.motionStageState = "capturing";
    this.motionController = new AbortController();
    // Capture errors degrade to an empty sample list (modality absent).
    this.motionPromise = captureMotion({
      signal: this.motionController.signal,
    }).catch(() => []);
  }

  /**
   * Stop IMU capture and return the samples. Throws if the stage is not
   * currently capturing.
   */
  async stopMotion(): Promise<MotionSample[]> {
    if (this.motionStageState !== "capturing")
      throw new Error("Motion capture not active");
    this.motionController!.abort();
    this.motionData = await this.motionPromise!;
    this.motionStageState = "captured";
    return this.motionData;
  }

  /** Mark the motion stage as intentionally skipped (never started). */
  skipMotion(): void {
    if (this.motionStageState !== "idle")
      throw new Error("Motion capture already started");
    this.motionStageState = "skipped";
  }

  // --- Touch ---

  /**
   * Begin pointer capture on the session's touch element. Throws if the
   * stage already started or no element was provided at construction.
   */
  async startTouch(): Promise<void> {
    if (this.touchStageState !== "idle")
      throw new Error("Touch capture already started");
    if (!this.touchElement)
      throw new Error("No touch element provided to session");
    this.touchStageState = "capturing";
    this.touchController = new AbortController();
    // Capture errors degrade to an empty sample list (modality absent).
    this.touchPromise = captureTouch(this.touchElement, {
      signal: this.touchController.signal,
    }).catch(() => []);
  }

  /**
   * Stop pointer capture and return the samples. Throws if the stage is
   * not currently capturing.
   */
  async stopTouch(): Promise<TouchSample[]> {
    if (this.touchStageState !== "capturing")
      throw new Error("Touch capture not active");
    this.touchController!.abort();
    this.touchData = await this.touchPromise!;
    this.touchStageState = "captured";
    return this.touchData;
  }

  /** Mark the touch stage as intentionally skipped (never started). */
  skipTouch(): void {
    if (this.touchStageState !== "idle")
      throw new Error("Touch capture already started");
    this.touchStageState = "skipped";
  }

  // --- Complete ---

  /**
   * Run the processing pipeline over whatever data was captured.
   * Throws if any stage is still mid-capture (stop or skip them first).
   * Stages left "idle" are treated the same as skipped: their data is
   * simply absent/empty.
   */
  async complete(wallet?: any, connection?: any): Promise<VerificationResult> {
    const active: string[] = [];
    if (this.audioStageState === "capturing") active.push("audio");
    if (this.motionStageState === "capturing") active.push("motion");
    if (this.touchStageState === "capturing") active.push("touch");
    if (active.length > 0) {
      throw new Error(
        `Cannot complete: stages still capturing: ${active.join(", ")}`
      );
    }

    const sensorData: SensorData = {
      audio: this.audioData,
      motion: this.motionData,
      touch: this.touchData,
      // A modality counts as present only if it actually produced data.
      modalities: {
        audio: this.audioData !== null,
        motion: this.motionData.length > 0,
        touch: this.touchData.length > 0,
      },
    };

    return processSensorData(sensorData, this.config, wallet, connection);
  }
}
|
|
300
|
+
|
|
301
|
+
/**
|
|
302
|
+
* PulseSDK — main entry point for IAM Protocol verification.
|
|
303
|
+
*
|
|
304
|
+
* Two usage modes:
|
|
305
|
+
* 1. Simple (backward-compatible): pulse.verify(touchElement) — captures all sensors
|
|
306
|
+
* for DEFAULT_CAPTURE_MS in parallel, then processes.
|
|
307
|
+
* 2. Staged (event-driven): pulse.createSession(touchElement) — caller controls
|
|
308
|
+
* when each sensor stage starts and stops.
|
|
309
|
+
*/
|
|
310
|
+
export class PulseSDK {
|
|
311
|
+
private config: ResolvedConfig;
|
|
312
|
+
|
|
313
|
+
constructor(config: PulseConfig) {
|
|
314
|
+
this.config = {
|
|
315
|
+
threshold: DEFAULT_THRESHOLD,
|
|
316
|
+
...config,
|
|
317
|
+
};
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
/**
|
|
321
|
+
* Create a staged capture session for event-driven control.
|
|
322
|
+
*/
|
|
323
|
+
createSession(touchElement?: HTMLElement): PulseSession {
|
|
324
|
+
return new PulseSession(this.config, touchElement);
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
/**
|
|
328
|
+
* Run a full verification with automatic timed capture (backward-compatible).
|
|
329
|
+
* Captures all sensors in parallel for DEFAULT_CAPTURE_MS, then processes.
|
|
330
|
+
*/
|
|
331
|
+
async verify(
|
|
332
|
+
touchElement?: HTMLElement,
|
|
333
|
+
wallet?: any,
|
|
334
|
+
connection?: any
|
|
335
|
+
): Promise<VerificationResult> {
|
|
336
|
+
try {
|
|
337
|
+
const session = this.createSession(touchElement);
|
|
338
|
+
const stopPromises: Promise<void>[] = [];
|
|
339
|
+
|
|
340
|
+
// Audio
|
|
341
|
+
try {
|
|
342
|
+
await session.startAudio();
|
|
343
|
+
stopPromises.push(
|
|
344
|
+
new Promise<void>((r) => setTimeout(r, DEFAULT_CAPTURE_MS)).then(
|
|
345
|
+
() => {
|
|
346
|
+
session.stopAudio();
|
|
347
|
+
}
|
|
348
|
+
)
|
|
349
|
+
);
|
|
350
|
+
} catch {
|
|
351
|
+
session.skipAudio();
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
// Motion
|
|
355
|
+
try {
|
|
356
|
+
await session.startMotion();
|
|
357
|
+
stopPromises.push(
|
|
358
|
+
new Promise<void>((r) => setTimeout(r, DEFAULT_CAPTURE_MS)).then(
|
|
359
|
+
() => {
|
|
360
|
+
session.stopMotion();
|
|
361
|
+
}
|
|
362
|
+
)
|
|
363
|
+
);
|
|
364
|
+
} catch {
|
|
365
|
+
session.skipMotion();
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
// Touch
|
|
369
|
+
if (touchElement) {
|
|
370
|
+
try {
|
|
371
|
+
await session.startTouch();
|
|
372
|
+
stopPromises.push(
|
|
373
|
+
new Promise<void>((r) => setTimeout(r, DEFAULT_CAPTURE_MS)).then(
|
|
374
|
+
() => {
|
|
375
|
+
session.stopTouch();
|
|
376
|
+
}
|
|
377
|
+
)
|
|
378
|
+
);
|
|
379
|
+
} catch {
|
|
380
|
+
session.skipTouch();
|
|
381
|
+
}
|
|
382
|
+
} else {
|
|
383
|
+
session.skipTouch();
|
|
384
|
+
}
|
|
385
|
+
|
|
386
|
+
await Promise.all(stopPromises);
|
|
387
|
+
return session.complete(wallet, connection);
|
|
388
|
+
} catch (err: any) {
|
|
389
|
+
return {
|
|
390
|
+
success: false,
|
|
391
|
+
commitment: new Uint8Array(32),
|
|
392
|
+
isFirstVerification: true,
|
|
393
|
+
error: err.message ?? String(err),
|
|
394
|
+
};
|
|
395
|
+
}
|
|
396
|
+
}
|
|
397
|
+
}
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
import type { AudioCapture, CaptureOptions } from "./types";
|
|
2
|
+
import { MIN_CAPTURE_MS, MAX_CAPTURE_MS } from "../config";
|
|
3
|
+
|
|
4
|
+
// Desired PCM sample rate requested from both getUserMedia and AudioContext.
const TARGET_SAMPLE_RATE = 16000;

/**
 * Capture audio at 16kHz until signaled to stop.
 * Uses ScriptProcessorNode for raw PCM sample access.
 *
 * Stop behavior:
 * - If signal fires before minDurationMs, capture continues until minimum is reached.
 * - If signal never fires, capture auto-stops at maxDurationMs.
 * - If no signal provided, captures for maxDurationMs.
 *
 * NOTE(review): ScriptProcessorNode is deprecated in the Web Audio spec;
 * AudioWorklet is the modern replacement — consider migrating.
 * NOTE(review): if the signal is ALREADY aborted when this is called, the
 * stop timer is scheduled for a full minDurationMs from now (not from
 * capture start) — confirm that is intended.
 *
 * Rejects only if getUserMedia itself fails (e.g. permission denied);
 * once capture starts, the returned promise always resolves.
 */
export async function captureAudio(
  options: CaptureOptions = {}
): Promise<AudioCapture> {
  const {
    signal,
    minDurationMs = MIN_CAPTURE_MS,
    maxDurationMs = MAX_CAPTURE_MS,
  } = options;

  // Mono, unprocessed audio: echo cancellation / noise suppression / AGC
  // are disabled so the raw signal characteristics are preserved.
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: {
      sampleRate: TARGET_SAMPLE_RATE,
      channelCount: 1,
      echoCancellation: false,
      noiseSuppression: false,
      autoGainControl: false,
    },
  });

  const ctx = new AudioContext({ sampleRate: TARGET_SAMPLE_RATE });
  const source = ctx.createMediaStreamSource(stream);
  const chunks: Float32Array[] = [];
  const startTime = performance.now();

  return new Promise((resolve) => {
    let stopped = false;
    const bufferSize = 4096;
    const processor = ctx.createScriptProcessor(bufferSize, 1, 1);

    // Copy each buffer: the browser reuses the underlying channel data.
    processor.onaudioprocess = (e: AudioProcessingEvent) => {
      chunks.push(new Float32Array(e.inputBuffer.getChannelData(0)));
    };

    source.connect(processor);
    // ScriptProcessorNode must be connected to a destination to fire events.
    processor.connect(ctx.destination);

    // Idempotent teardown + result assembly. `stopped` guards against the
    // max timer and the abort path both firing.
    function stopCapture() {
      if (stopped) return;
      stopped = true;
      clearTimeout(maxTimer);

      // Teardown order: disconnect graph, stop tracks, then close context.
      processor.disconnect();
      source.disconnect();
      stream.getTracks().forEach((t: MediaStreamTrack) => t.stop());
      ctx.close().catch(() => {});

      // Concatenate all captured chunks into one contiguous buffer.
      const totalLength = chunks.reduce((sum, c) => sum + c.length, 0);
      const samples = new Float32Array(totalLength);
      let offset = 0;
      for (const chunk of chunks) {
        samples.set(chunk, offset);
        offset += chunk.length;
      }

      resolve({
        samples,
        // Report the context's actual rate — it may differ from
        // TARGET_SAMPLE_RATE if the platform did not honor the request.
        sampleRate: ctx.sampleRate,
        duration: totalLength / ctx.sampleRate,
      });
    }

    // Hard cap: auto-stop if the caller never aborts.
    const maxTimer = setTimeout(stopCapture, maxDurationMs);

    if (signal) {
      if (signal.aborted) {
        setTimeout(stopCapture, minDurationMs);
      } else {
        signal.addEventListener(
          "abort",
          () => {
            // Honor the minimum duration: delay the stop by whatever
            // portion of minDurationMs has not yet elapsed.
            const elapsed = performance.now() - startTime;
            const remaining = Math.max(0, minDurationMs - elapsed);
            setTimeout(stopCapture, remaining);
          },
          { once: true }
        );
      }
    }
  });
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import type { MotionSample, CaptureOptions } from "./types";
|
|
2
|
+
import { MIN_CAPTURE_MS, MAX_CAPTURE_MS } from "../config";
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Request motion sensor permission (required on iOS 13+).
|
|
6
|
+
* No-op on Android/Chrome where permission is implicit.
|
|
7
|
+
*/
|
|
8
|
+
export async function requestMotionPermission(): Promise<boolean> {
|
|
9
|
+
const DME = (globalThis as any).DeviceMotionEvent;
|
|
10
|
+
if (!DME) return false;
|
|
11
|
+
|
|
12
|
+
if (typeof DME.requestPermission === "function") {
|
|
13
|
+
const permission = await DME.requestPermission();
|
|
14
|
+
return permission === "granted";
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
// Android/Chrome: permission is implicit
|
|
18
|
+
return true;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/**
 * Capture accelerometer + gyroscope data until signaled to stop.
 * Samples at the device's native rate (typically ~60-100Hz).
 *
 * Stop behavior mirrors captureAudio:
 * - If signal fires before minDurationMs, capture continues until minimum is reached.
 * - If signal never fires, capture auto-stops at maxDurationMs.
 * - If no signal provided, captures for maxDurationMs.
 *
 * Resolves with an empty array when motion permission is unavailable or
 * denied; never rejects once listening has started.
 */
export async function captureMotion(
  options: CaptureOptions = {}
): Promise<MotionSample[]> {
  const {
    signal,
    minDurationMs = MIN_CAPTURE_MS,
    maxDurationMs = MAX_CAPTURE_MS,
  } = options;

  // iOS 13+ requires an explicit permission prompt; elsewhere this is a
  // no-op returning true (or false when DeviceMotionEvent is missing).
  const hasPermission = await requestMotionPermission();
  if (!hasPermission) return [];

  const samples: MotionSample[] = [];
  const startTime = performance.now();

  return new Promise((resolve) => {
    let stopped = false;

    // Missing axes (some devices omit acceleration or rotationRate)
    // are recorded as 0 so every sample has a uniform shape.
    const handler = (e: DeviceMotionEvent) => {
      samples.push({
        timestamp: performance.now(),
        ax: e.acceleration?.x ?? 0,
        ay: e.acceleration?.y ?? 0,
        az: e.acceleration?.z ?? 0,
        gx: e.rotationRate?.alpha ?? 0,
        gy: e.rotationRate?.beta ?? 0,
        gz: e.rotationRate?.gamma ?? 0,
      });
    };

    // Idempotent: guards against max timer and abort path both firing.
    function stopCapture() {
      if (stopped) return;
      stopped = true;
      clearTimeout(maxTimer);
      window.removeEventListener("devicemotion", handler);
      resolve(samples);
    }

    window.addEventListener("devicemotion", handler);

    // Hard cap: auto-stop if the caller never aborts.
    const maxTimer = setTimeout(stopCapture, maxDurationMs);

    if (signal) {
      if (signal.aborted) {
        // NOTE(review): a pre-aborted signal waits a full minDurationMs
        // from now rather than from capture start — confirm intended.
        setTimeout(stopCapture, minDurationMs);
      } else {
        signal.addEventListener(
          "abort",
          () => {
            // Honor the minimum duration before actually stopping.
            const elapsed = performance.now() - startTime;
            const remaining = Math.max(0, minDurationMs - elapsed);
            setTimeout(stopCapture, remaining);
          },
          { once: true }
        );
      }
    }
  });
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import type { TouchSample, CaptureOptions } from "./types";
|
|
2
|
+
import { MIN_CAPTURE_MS, MAX_CAPTURE_MS } from "../config";
|
|
3
|
+
|
|
4
|
+
/**
 * Capture touch/pointer data (position, pressure, contact area) until signaled to stop.
 * Uses PointerEvent for cross-platform support (touch, pen, mouse).
 *
 * Stop behavior mirrors captureAudio/captureMotion:
 * - If signal fires before minDurationMs, capture continues until minimum is reached.
 * - If signal never fires, capture auto-stops at maxDurationMs.
 * - If no signal provided, captures for maxDurationMs.
 *
 * NOTE(review): only "pointerdown" and "pointermove" are observed — lift
 * events ("pointerup"/"pointercancel") are not recorded; confirm the
 * downstream feature extraction does not need stroke-end markers.
 */
export function captureTouch(
  element: HTMLElement,
  options: CaptureOptions = {}
): Promise<TouchSample[]> {
  const {
    signal,
    minDurationMs = MIN_CAPTURE_MS,
    maxDurationMs = MAX_CAPTURE_MS,
  } = options;

  const samples: TouchSample[] = [];
  const startTime = performance.now();

  return new Promise((resolve) => {
    let stopped = false;

    // One sample per pointer event; width/height describe the contact
    // geometry reported by the PointerEvent.
    const handler = (e: PointerEvent) => {
      samples.push({
        timestamp: performance.now(),
        x: e.clientX,
        y: e.clientY,
        pressure: e.pressure,
        width: e.width,
        height: e.height,
      });
    };

    // Idempotent: guards against max timer and abort path both firing.
    function stopCapture() {
      if (stopped) return;
      stopped = true;
      clearTimeout(maxTimer);
      element.removeEventListener("pointermove", handler);
      element.removeEventListener("pointerdown", handler);
      resolve(samples);
    }

    element.addEventListener("pointermove", handler);
    element.addEventListener("pointerdown", handler);

    // Hard cap: auto-stop if the caller never aborts.
    const maxTimer = setTimeout(stopCapture, maxDurationMs);

    if (signal) {
      if (signal.aborted) {
        setTimeout(stopCapture, minDurationMs);
      } else {
        signal.addEventListener(
          "abort",
          () => {
            // Honor the minimum duration before actually stopping.
            const elapsed = performance.now() - startTime;
            const remaining = Math.max(0, minDurationMs - elapsed);
            setTimeout(stopCapture, remaining);
          },
          { once: true }
        );
      }
    }
  });
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
/** Raw audio samples captured during the Pulse challenge */
export interface AudioCapture {
  /** Mono PCM samples concatenated across the capture. */
  samples: Float32Array;
  /** Sample rate of `samples` in Hz (the AudioContext's actual rate). */
  sampleRate: number;
  /** Capture length in seconds (samples.length / sampleRate). */
  duration: number;
}

/** Single IMU reading */
export interface MotionSample {
  /** performance.now() timestamp at which the reading was recorded. */
  timestamp: number;
  /** Acceleration x/y/z (DeviceMotionEvent.acceleration; 0 when absent). */
  ax: number;
  ay: number;
  az: number;
  /** Rotation rate alpha/beta/gamma (DeviceMotionEvent.rotationRate; 0 when absent). */
  gx: number;
  gy: number;
  gz: number;
}

/** Single touch reading */
export interface TouchSample {
  /** performance.now() timestamp at which the event was recorded. */
  timestamp: number;
  /** Pointer position in viewport coordinates (PointerEvent.clientX/Y). */
  x: number;
  y: number;
  /** Normalized contact pressure (PointerEvent.pressure). */
  pressure: number;
  /** Contact geometry (PointerEvent.width/height). */
  width: number;
  height: number;
}

/** Options for event-driven sensor capture */
export interface CaptureOptions {
  /** AbortSignal to stop capture. If omitted, captures for maxDurationMs. */
  signal?: AbortSignal;
  /** Minimum capture duration in ms. Capture continues until this even if signal fires early. Default: 2000 */
  minDurationMs?: number;
  /** Maximum capture duration in ms. Auto-stops if signal hasn't fired. Default: 60000 */
  maxDurationMs?: number;
}

/** Stage of a capture session */
export type CaptureStage = "audio" | "motion" | "touch";

/** State of an individual capture stage */
export type StageState = "idle" | "capturing" | "captured" | "skipped";

/** Combined sensor data from a Pulse capture session */
export interface SensorData {
  /** Captured audio, or null when the modality was skipped or failed. */
  audio: AudioCapture | null;
  /** IMU samples; empty when the modality was skipped or failed. */
  motion: MotionSample[];
  /** Pointer samples; empty when the modality was skipped or failed. */
  touch: TouchSample[];
  /** Which modalities actually produced data for this session. */
  modalities: {
    audio: boolean;
    motion: boolean;
    touch: boolean;
  };
}
|