@archetypeai/ds-cli 0.3.9 → 0.3.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29) hide show
  1. package/bin.js +4 -0
  2. package/commands/create.js +76 -33
  3. package/commands/init.js +49 -19
  4. package/files/AGENTS.md +19 -3
  5. package/files/CLAUDE.md +21 -3
  6. package/files/rules/accessibility.md +49 -0
  7. package/files/rules/frontend-architecture.md +77 -0
  8. package/files/skills/apply-ds/SKILL.md +86 -82
  9. package/files/skills/apply-ds/scripts/audit.sh +169 -0
  10. package/files/skills/apply-ds/scripts/setup.sh +48 -166
  11. package/files/skills/create-dashboard/SKILL.md +12 -0
  12. package/files/skills/embedding-from-file/SKILL.md +415 -0
  13. package/files/skills/embedding-from-sensor/SKILL.md +406 -0
  14. package/files/skills/embedding-upload/SKILL.md +414 -0
  15. package/files/skills/fix-accessibility/SKILL.md +57 -9
  16. package/files/skills/newton-activity-monitor-lens-on-video/SKILL.md +817 -0
  17. package/files/skills/newton-camera-frame-analysis/SKILL.md +611 -0
  18. package/files/skills/newton-camera-frame-analysis/scripts/activity-monitor-frame.py +165 -0
  19. package/files/skills/newton-camera-frame-analysis/scripts/captures/logs/api_responses_20260206_105610.json +62 -0
  20. package/files/skills/newton-camera-frame-analysis/scripts/continuous_monitor.py +119 -0
  21. package/files/skills/newton-direct-query/SKILL.md +212 -0
  22. package/files/skills/newton-direct-query/scripts/direct_query.py +129 -0
  23. package/files/skills/newton-machine-state-from-file/SKILL.md +545 -0
  24. package/files/skills/newton-machine-state-from-sensor/SKILL.md +707 -0
  25. package/files/skills/newton-machine-state-upload/SKILL.md +986 -0
  26. package/lib/add-ds-config-codeagent.js +5 -1
  27. package/lib/is-interactive.js +19 -0
  28. package/lib/scaffold-ds-svelte-project.js +117 -0
  29. package/package.json +1 -1
@@ -0,0 +1,707 @@
1
+ ---
2
+ name: newton-machine-state-from-sensor
3
+ description: Run a Machine State Lens by streaming real-time data from a physical sensor (BLE, USB, UDP, or recording playback). Use when doing real-time machine state classification from live sensor hardware.
4
+ argument-hint: [source-type]
5
+ ---
6
+
7
+ # Newton Machine State Lens — Stream from Sensor
8
+
9
+ Generate a script that streams real-time IMU sensor data to the Archetype AI Machine State Lens for live n-shot state classification. Supports both Python and JavaScript/Web.
10
+
11
+ > **Frontend architecture:** When building a web UI for this skill, decompose into components (sensor connection, status display, results view) rather than a monolithic page. Extract sensor and API logic into `$lib/api/`. See `@rules/frontend-architecture` for conventions and `@skills/create-dashboard` / `@skills/build-pattern` for layout and component patterns.
12
+
13
+ ---
14
+
15
+ ## Python Implementation
16
+
17
+ ### Requirements
18
+
19
+ - `archetypeai` Python package
20
+ - `numpy`
21
+ - `bleak` (for BLE sources)
22
+ - `pyserial` (for USB sources)
23
+ - Environment variables: `ATAI_API_KEY`, optionally `ATAI_API_ENDPOINT`
24
+
25
+ ### Supported Source Types
26
+
27
+ | Source | Description | Extra args |
28
+ |--------|-------------|------------|
29
+ | `ble` | Bluetooth Low Energy IMU device | None (auto-discovers) |
30
+ | `usb` | USB serial IMU device | `--sensor-port` (default `/dev/tty.usbmodem1101`) |
31
+ | `udp` | UDP relay (from BLE relay server) | `--udp-port` (default `5556`) |
32
+ | `recording` | Replay a CSV recording | `--file-path` |
33
+
34
+ ### Architecture
35
+
36
+ #### 1. API Client & N-Shot Setup
37
+
38
+ ```python
39
+ from archetypeai.api_client import ArchetypeAI
40
+
41
+ client = ArchetypeAI(api_key, api_endpoint=api_endpoint)
42
+
43
+ # Upload n-shot files, derive class names from filenames
44
+ n_shot_files = {}
45
+ for file_path in args.n_shot_files:
46
+ class_name = Path(file_path).stem.upper()
47
+ resp = client.files.local.upload(file_path)
48
+ n_shot_files[class_name] = resp["file_id"]
49
+ ```
50
+
51
+ #### 2. Lens YAML Config
52
+
53
+ Same YAML structure as file-based, with dynamic `input_n_shot` built from uploaded files:
54
+
55
+ ```python
56
+ n_shot_yaml_lines = []
57
+ for class_name, file_id in n_shot_files.items():
58
+ n_shot_yaml_lines.append(f" {class_name}: {file_id}")
59
+ n_shot_yaml = "\n".join(n_shot_yaml_lines)
60
+ ```
61
+
62
+ Insert into the YAML template under `model_parameters.input_n_shot`.
63
+
64
+ #### 3. ImuReceiver — Multi-Source Data Acquisition
65
+
66
+ Create an `ImuReceiver` class that handles all source types with a unified interface:
67
+
68
+ ```python
69
+ class ImuReceiver:
70
+ def __init__(self, incoming_data, num_samples_per_packet=10,
71
+ num_sensor_packets_per_packets_out=10):
72
+ self.packet_queue = queue.Queue()
73
+
74
+ if 'recording' in incoming_data:
75
+ self.source = 'recording'
76
+ self.recording = incoming_data['recording']
77
+ elif 'sensor' in incoming_data:
78
+ self.source = 'sensor'
79
+ self.port = incoming_data['sensor']
80
+ elif 'ble' in incoming_data:
81
+ self.source = 'ble'
82
+
83
+ def get_data(self):
84
+ """Returns (packet_out, timestamp) or (None, None)"""
85
+ if self.packet_queue.qsize() >= self.num_sensor_packets_per_packets_out:
86
+ packets = [self.packet_queue.get()
87
+ for _ in range(self.num_sensor_packets_per_packets_out)]
88
+ packet_out = np.vstack([p['data'] for p in packets]).tolist()
89
+ return packet_out, packets[-1]['sensor_timestamp']
90
+ return None, None
91
+ ```
92
+
93
+ ##### BLE Acquisition (async)
94
+
95
+ ```python
96
+ async def acquire_ble(self, exception_holder):
97
+ scanner = bleak.BleakScanner(
98
+ detection_callback=self.detection_callback,
99
+ service_uuids=[IMU_SERVICE_UUID]
100
+ )
101
+ await scanner.start()
102
+ await asyncio.sleep(5)
103
+ await scanner.stop()
104
+
105
+ async with bleak.BleakClient(self.the_device) as client:
106
+ await client.start_notify(IMU_CHARACTERISTIC_UUID, self.notify_callback)
107
+ while client.is_connected:
108
+ await asyncio.sleep(1)
109
+
110
+ def notify_callback(self, handle, data):
111
+ samples = np.frombuffer(data, dtype=np.int16)
112
+ header = samples[0]
113
+ payload = samples[1:]
114
+ imu = payload.reshape(-1, 3) # (n, 3) — ax, ay, az
115
+ self.packet_queue.put({"data": imu, "sensor_timestamp": time.time()})
116
+ ```
117
+
118
+ ##### USB Acquisition (threaded)
119
+
120
+ ```python
121
+ def acquire_usb(self, exception_holder):
122
+ port = serial.Serial(self.port, 115200, timeout=5.0)
123
+ port.read(2) # device ID
124
+ port.write(bytearray([0x47])) # Go signal
125
+
126
+ while True:
127
+ raw = port.read(self.num_samples_per_packet * 2)
128
+ samples = list(array.array('h', raw))
129
+ self.packet_queue.put({
130
+ "data": np.array(samples),
131
+ "sensor_timestamp": time.time()
132
+ })
133
+ ```
134
+
135
+ ##### UDP Acquisition (threaded)
136
+
137
+ ```python
138
+ class UdpImuReceiver:
139
+ """Drop-in replacement for ImuReceiver using UDP relay data"""
140
+ def __init__(self, port=5556):
141
+ self.port = port
142
+ self.packet_queue = queue.Queue()
143
+
144
+ def _receive_loop(self):
145
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
146
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
147
+ sock.bind(('', self.port))
148
+ sock.settimeout(0.5)
149
+
150
+ while self.running:
151
+ try:
152
+ data, _ = sock.recvfrom(4096)
153
+ packet = json.loads(data.decode('utf-8'))
154
+ imu = np.array(packet['data'], dtype=np.int16)
155
+ self.packet_queue.put({
156
+ "data": imu,
157
+ "sensor_timestamp": packet['timestamp']
158
+ })
159
+ except socket.timeout:
160
+ continue
161
+ ```
162
+
163
+ #### 4. Real-Time Streaming with Buffering
164
+
165
+ Use a `deque` to buffer incoming sensor data and stream windows to the API:
166
+
167
+ ```python
168
+ from collections import deque
169
+
170
+ data_buffer = deque(maxlen=window_size * 2)
171
+
172
+ while not stop_event.is_set():
173
+ packet_out, packet_timestamp = imu_receiver.get_data()
174
+
175
+ if packet_out is not None:
176
+ for row in packet_out:
177
+ data_buffer.append((int(row[0]), int(row[1]), int(row[2])))
178
+
179
+ if len(data_buffer) >= window_size:
180
+ window_rows = list(data_buffer)[-window_size:]
181
+
182
+ a1 = [r[0] for r in window_rows]
183
+ a2 = [r[1] for r in window_rows]
184
+ a3 = [r[2] for r in window_rows]
185
+ a4 = [int((ax**2 + ay**2 + az**2) ** 0.5)
186
+ for ax, ay, az in window_rows]
187
+
188
+ payload = {
189
+ "type": "session.update",
190
+ "event_data": {
191
+ "type": "data.json",
192
+ "event_data": {
193
+ "sensor_data": [a1, a2, a3, a4],
194
+ "sensor_metadata": {
195
+ "sensor_timestamp": packet_timestamp,
196
+ "sensor_id": f"imu_sensor_{counter}"
197
+ }
198
+ }
199
+ }
200
+ }
201
+ client.lens.sessions.process_event(session_id, payload)
202
+
203
+ # Advance by step_size
204
+ for _ in range(min(step_size, len(data_buffer))):
205
+ data_buffer.popleft()
206
+ ```
207
+
208
+ #### 5. SSE Event Listening
209
+
210
+ ```python
211
+ sse_reader = client.lens.sessions.create_sse_consumer(
212
+ session_id, max_read_time_sec=max_run_time_sec
213
+ )
214
+
215
+ for event in sse_reader.read(block=True):
216
+ if event.get("type") == "inference.result":
217
+ result = event["event_data"].get("response")
218
+ print(f"Predicted: {result}")
219
+ ```
220
+
221
+ #### 6. Threading Model
222
+
223
+ ```
224
+ Main Thread: session_callback → starts SSE listener
225
+ Thread 1: ImuReceiver (BLE async / USB serial / UDP socket)
226
+ Thread 2: Streaming loop (buffer → API)
227
+ Optional: CSV recording of session data
228
+ ```
229
+
230
+ - BLE uses `asyncio.run()` in a daemon thread
231
+ - USB/recording use `threading.Thread(target=..., daemon=True)`
232
+ - Graceful shutdown via `signal.SIGINT` → `stop_event.set()`
233
+
234
+ #### 7. Optional: Record Session Data
235
+
236
+ Save streamed data to CSV for later replay or analysis:
237
+
238
+ ```python
239
+ csv_filename = f"sessions/session_data_{timestamp}.csv"
240
+ writer.writerow(['timestamp', 'a1', 'a2', 'a3', 'a4'])
241
+
242
+ for ax, ay, az in window_rows:
243
+ mag = int((ax**2 + ay**2 + az**2) ** 0.5)
244
+ writer.writerow([time.time(), ax, ay, az, mag])
245
+ ```
246
+
247
+ ### CLI Arguments to Include
248
+
249
+ ```
250
+ --api-key API key (fallback to ATAI_API_KEY env var)
251
+ --api-endpoint API endpoint (default from SDK)
252
+ --source-type {ble, usb, recording, udp} (required)
253
+ --file-path Recording file path (for recording mode)
254
+ --sensor-port USB serial port (default: /dev/tty.usbmodem1101)
255
+ --udp-port UDP relay port (default: 5556)
256
+ --n-shot-files Paths to n-shot example CSVs (required, nargs='+')
257
+ --window-size Window size in samples (default: 100)
258
+ --step-size-n-shot Training step size (default: 100)
259
+ --step-size-inference Inference step size (default: 100)
260
+ --max-run-time-sec Max runtime (default: 500)
261
+ ```
262
+
263
+ ### Example Usage
264
+
265
+ ```bash
266
+ # From UDP relay
267
+ python stream_from_sensor.py --source-type udp \
268
+ --n-shot-files healthy.csv broken.csv
269
+
270
+ # From BLE device
271
+ python stream_from_sensor.py --source-type ble \
272
+ --n-shot-files holding.csv walking.csv sitting.csv
273
+
274
+ # Replay a recording
275
+ python stream_from_sensor.py --source-type recording \
276
+ --file-path data.csv --n-shot-files healthy.csv broken.csv
277
+ ```
278
+
279
+ ---
280
+
281
+ ## Web / JavaScript Implementation
282
+
283
+ Uses direct `fetch` calls to the Archetype AI REST API with Web Bluetooth API or WebSocket for sensor data. Based on the working pattern from `test-stream/src/lib/atai-client.ts`.
284
+
285
+ ### Requirements
286
+
287
+ - `@microsoft/fetch-event-source` for SSE consumption
288
+ - Web Bluetooth API (Chrome/Edge) for BLE sensors
289
+ - WebSocket support for UDP relay via a UDP-to-WebSocket bridge
290
+
291
+ ### Supported Web Source Types
292
+
293
+ | Source | Web API | Notes |
294
+ |--------|---------|-------|
295
+ | `ble` | Web Bluetooth API | Chrome/Edge only, requires HTTPS |
296
+ | `websocket` | WebSocket | Connect to a UDP-to-WebSocket bridge |
297
+ | `file` | File API | Replay a CSV recording from file input |
298
+
299
+ ### API Reference
300
+
301
+ | Operation | Method | Endpoint | Body |
302
+ |-----------|--------|----------|------|
303
+ | Upload file | POST | `/files` | `FormData` |
304
+ | Register lens | POST | `/lens/register` | `{ lens_config: config }` |
305
+ | Delete lens | POST | `/lens/delete` | `{ lens_id }` |
306
+ | Create session | POST | `/lens/sessions/create` | `{ lens_id }` |
307
+ | Process event | POST | `/lens/sessions/events/process` | `{ session_id, event }` |
308
+ | Destroy session | POST | `/lens/sessions/destroy` | `{ session_id }` |
309
+ | SSE consumer | GET | `/lens/sessions/consumer/{sessionId}` | — |
310
+
311
+ ### Helper: API fetch wrapper
312
+
313
+ ```typescript
314
+ const API_ENDPOINT = 'https://api.u1.archetypeai.app/v0.5'
315
+
316
+ async function apiPost<T>(path: string, apiKey: string, body: unknown, timeoutMs = 5000): Promise<T> {
317
+ const controller = new AbortController()
318
+ const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
319
+
320
+ try {
321
+ const response = await fetch(`${API_ENDPOINT}${path}`, {
322
+ method: 'POST',
323
+ headers: {
324
+ Authorization: `Bearer ${apiKey}`,
325
+ 'Content-Type': 'application/json',
326
+ },
327
+ body: JSON.stringify(body),
328
+ signal: controller.signal,
329
+ })
330
+
331
+ if (!response.ok) {
332
+ const errorBody = await response.json().catch(() => ({}))
333
+ throw new Error(`API POST ${path} failed: ${response.status} - ${JSON.stringify(errorBody)}`)
334
+ }
335
+
336
+ return response.json()
337
+ } finally {
338
+ clearTimeout(timeoutId)
339
+ }
340
+ }
341
+ ```
342
+
343
+ ### Step 1: Upload n-shot CSV files
344
+
345
+ ```typescript
346
+ const nShotMap: Record<string, string> = {}
347
+
348
+ for (const { file, className } of nShotFiles) {
349
+ const formData = new FormData()
350
+ formData.append('file', file)
351
+
352
+ const response = await fetch(`${API_ENDPOINT}/files`, {
353
+ method: 'POST',
354
+ headers: { Authorization: `Bearer ${apiKey}` },
355
+ body: formData,
356
+ })
357
+ const result = await response.json()
358
+ nShotMap[className.toUpperCase()] = result.file_id
359
+ }
360
+ ```
361
+
362
+ ### Step 2: Register lens, create session, wait for ready
363
+
364
+ ```typescript
365
+ const windowSize = 100
366
+ const stepSize = 100
367
+
368
+ const lensConfig = {
369
+ lens_name: 'machine_state_lens',
370
+ lens_config: {
371
+ model_pipeline: [
372
+ { processor_name: 'lens_timeseries_state_processor', processor_config: {} },
373
+ ],
374
+ model_parameters: {
375
+ model_name: 'OmegaEncoder',
376
+ model_version: 'OmegaEncoder::omega_embeddings_01',
377
+ normalize_input: true,
378
+ buffer_size: windowSize,
379
+ input_n_shot: nShotMap,
380
+ csv_configs: {
381
+ timestamp_column: 'timestamp',
382
+ data_columns: ['a1', 'a2', 'a3', 'a4'],
383
+ window_size: windowSize,
384
+ step_size: stepSize,
385
+ },
386
+ knn_configs: {
387
+ n_neighbors: 5,
388
+ metric: 'manhattan',
389
+ weights: 'distance',
390
+ algorithm: 'ball_tree',
391
+ normalize_embeddings: false,
392
+ },
393
+ },
394
+ output_streams: [
395
+ { stream_type: 'server_sent_events_writer' },
396
+ ],
397
+ },
398
+ }
399
+
400
+ // Register lens — NOTE: body wraps config as { lens_config: config }
401
+ const registeredLens = await apiPost<{ lens_id: string }>(
402
+ '/lens/register', apiKey, { lens_config: lensConfig }
403
+ )
404
+ const lensId = registeredLens.lens_id
405
+
406
+ // Create session
407
+ const session = await apiPost<{ session_id: string; session_endpoint: string }>(
408
+ '/lens/sessions/create', apiKey, { lens_id: lensId }
409
+ )
410
+ const sessionId = session.session_id
411
+
412
+ await apiPost('/lens/delete', apiKey, { lens_id: lensId }) // optional cleanup: the lens can be deleted once the session exists
413
+
414
+ // Wait for session to be ready
415
+ async function waitForSessionReady(sessionId: string, maxWaitMs = 30000): Promise<boolean> {
416
+ const start = Date.now()
417
+ while (Date.now() - start < maxWaitMs) {
418
+ const status = await apiPost<{ session_status: string }>(
419
+ '/lens/sessions/events/process', apiKey,
420
+ { session_id: sessionId, event: { type: 'session.status' } },
421
+ 10000
422
+ )
423
+ if (status.session_status === 'LensSessionStatus.SESSION_STATUS_RUNNING' ||
424
+ status.session_status === '3') {
425
+ return true
426
+ }
427
+ if (status.session_status === 'LensSessionStatus.SESSION_STATUS_FAILED' ||
428
+ status.session_status === '6') {
429
+ return false
430
+ }
431
+ await new Promise(r => setTimeout(r, 500))
432
+ }
433
+ return false
434
+ }
435
+
436
+ const isReady = await waitForSessionReady(sessionId)
437
+ if (!isReady) throw new Error('Session failed to start')
438
+ ```
439
+
440
+ ### Step 3: Acquire sensor data (Web Bluetooth)
441
+
442
+ ```typescript
443
+ const IMU_SERVICE = '0000fff0-0000-1000-8000-00805f9b34fb'
444
+ const IMU_CHARACTERISTIC = '0000fff1-0000-1000-8000-00805f9b34fb'
445
+
446
+ // Request BLE device (requires user gesture)
447
+ const device = await navigator.bluetooth.requestDevice({
448
+ filters: [{ services: [IMU_SERVICE] }],
449
+ })
450
+ const server = await device.gatt.connect()
451
+ const service = await server.getPrimaryService(IMU_SERVICE)
452
+ const characteristic = await service.getCharacteristic(IMU_CHARACTERISTIC)
453
+
454
+ // Buffer for incoming samples
455
+ const dataBuffer: [number, number, number][] = []
456
+
457
+ characteristic.addEventListener('characteristicvaluechanged', (event) => {
458
+ const value = (event.target as BluetoothRemoteGATTCharacteristic).value!
459
+ const samples = new Int16Array(value.buffer)
460
+
461
+ // Skip the leading header sample (one Int16), parse (ax, ay, az) triplets
462
+ const payload = samples.slice(1)
463
+ for (let i = 0; i + 2 < payload.length; i += 3) {
464
+ dataBuffer.push([payload[i], payload[i + 1], payload[i + 2]])
465
+ }
466
+ })
467
+
468
+ await characteristic.startNotifications()
469
+ ```
470
+
471
+ ### Step 4: Stream buffered data in windows
472
+
473
+ ```typescript
474
+ let counter = 0
475
+
476
+ const streamLoop = setInterval(async () => {
477
+ if (dataBuffer.length < windowSize) return
478
+
479
+ const window = dataBuffer.splice(0, windowSize)
480
+
481
+ const a1 = window.map(r => r[0])
482
+ const a2 = window.map(r => r[1])
483
+ const a3 = window.map(r => r[2])
484
+ const a4 = window.map(([ax, ay, az]) =>
485
+ Math.floor(Math.sqrt(ax * ax + ay * ay + az * az))
486
+ )
487
+
488
+ await apiPost('/lens/sessions/events/process', apiKey, {
489
+ session_id: sessionId,
490
+ event: {
491
+ type: 'session.update',
492
+ event_data: {
493
+ type: 'data.json',
494
+ event_data: {
495
+ sensor_data: [a1, a2, a3, a4],
496
+ sensor_metadata: {
497
+ sensor_timestamp: Date.now() / 1000,
498
+ sensor_id: `web_ble_sensor_${counter++}`,
499
+ },
500
+ },
501
+ },
502
+ },
503
+ }, 10000)
504
+ }, 200) // check every 200ms
505
+ ```
506
+
507
+ ### Step 5: Acquire sensor data (WebSocket bridge)
508
+
509
+ Alternative for UDP relay — connect to a WebSocket bridge that forwards UDP packets:
510
+
511
+ ```typescript
512
+ const ws = new WebSocket('ws://localhost:8765')
513
+
514
+ ws.onmessage = (event) => {
515
+ const packet = JSON.parse(event.data)
516
+ // packet.data is [[ax, ay, az], ...] from UDP relay
517
+ for (const [ax, ay, az] of packet.data) {
518
+ dataBuffer.push([ax, ay, az])
519
+ }
520
+ }
521
+ ```
522
+
523
+ ### Step 6: Consume SSE results
524
+
525
+ ```typescript
526
+ import { fetchEventSource } from '@microsoft/fetch-event-source'
527
+
528
+ const abortController = new AbortController()
529
+
530
+ fetchEventSource(`${API_ENDPOINT}/lens/sessions/consumer/${sessionId}`, {
531
+ headers: { Authorization: `Bearer ${apiKey}` },
532
+ signal: abortController.signal,
533
+ onmessage(event) {
534
+ const parsed = JSON.parse(event.data)
535
+
536
+ if (parsed.type === 'inference.result') {
537
+ const result = parsed.event_data.response
538
+ const meta = parsed.event_data.query_metadata
539
+ console.log(`Predicted: ${result}`)
540
+ }
541
+
542
+ if (parsed.type === 'sse.stream.end') {
543
+ console.log('Stream complete')
544
+ }
545
+ },
546
+ })
547
+ ```
548
+
549
+ ### Step 7: Cleanup
550
+
551
+ ```typescript
552
+ clearInterval(streamLoop)
553
+ abortController.abort()
554
+ await device.gatt.disconnect()
555
+ await apiPost('/lens/sessions/destroy', apiKey, { session_id: sessionId })
556
+ ```
557
+
558
+ ### Web Lifecycle Summary
559
+
560
+ ```
561
+ 1. Upload n-shot CSVs -> POST /files (FormData, one per class)
562
+ 2. Register lens -> POST /lens/register { lens_config: config }
563
+ 3. Create session -> POST /lens/sessions/create { lens_id }
564
+ 4. Wait for ready -> POST /lens/sessions/events/process { session_id, event: { type: 'session.status' } }
565
+ 5. (Optional) Delete lens -> POST /lens/delete { lens_id } (may run any time after step 3; the Step 2 example deletes before waiting)
566
+ 6. Connect sensor (BLE / WS) -> Web Bluetooth API or WebSocket
567
+ 7. Buffer + stream windows -> POST /lens/sessions/events/process { session_id, event } (loop)
568
+ 8. Consume SSE results -> GET /lens/sessions/consumer/{sessionId}
569
+ 9. Disconnect + destroy -> POST /lens/sessions/destroy { session_id }
570
+ ```
571
+
572
+ ---
573
+
574
+ ## CSV Format Expected
575
+
576
+ ```csv
577
+ timestamp,a1,a2,a3,a4
578
+ 1700000000.0,100,200,300,374
579
+ ```
580
+
581
+ - `timestamp`: UNIX epoch float
582
+ - `a1, a2, a3`: Sensor axes (e.g., accelerometer x, y, z)
583
+ - `a4`: Magnitude (sqrt(a1² + a2² + a3²))
584
+
585
+ ## Optional: Results Logging
586
+
587
+ Save predictions to a timestamped CSV for analysis or visualization.
588
+
589
+ ### Python — Results CSV
590
+
591
+ ```python
592
+ import csv
593
+ from pathlib import Path
594
+ from datetime import datetime
595
+
596
+ # Create results directory and timestamped filename
597
+ results_dir = Path("results")
598
+ results_dir.mkdir(exist_ok=True)
599
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
600
+ results_file = results_dir / f"sensor_{args.source_type}_{timestamp}.csv"
601
+
602
+ # Write CSV header
603
+ with open(results_file, 'w', newline='', encoding='utf-8') as f:
604
+ writer = csv.writer(f)
605
+ writer.writerow(['read_index', 'predicted_class', 'confidence_scores',
606
+ 'file_id', 'window_size', 'total_rows'])
607
+
608
+ # Inside the SSE event loop, when handling inference.result:
609
+ if event.get("type") == "inference.result":
610
+ ed = event.get("event_data", {})
611
+ result = ed.get("response")
612
+ meta = ed.get("query_metadata", {})
613
+ query_meta = meta.get("query_metadata", {})
614
+
615
+ predicted_class = result[0] if isinstance(result, list) and len(result) > 0 else "unknown"
616
+ confidence_scores = result[1] if isinstance(result, list) and len(result) > 1 else {}
617
+ read_index = query_meta.get("read_index", "N/A")
618
+ file_id = query_meta.get("file_id", "N/A")
619
+ window_size_val = query_meta.get("window_size", "N/A")
620
+ total_rows = query_meta.get("total_rows", "N/A")
621
+
622
+ print(f"[{read_index}] Predicted: {predicted_class} | Scores: {confidence_scores}")
623
+
624
+ with open(results_file, 'a', newline='', encoding='utf-8') as f:
625
+ writer = csv.writer(f)
626
+ writer.writerow([read_index, predicted_class, str(confidence_scores),
627
+ file_id, window_size_val, total_rows])
628
+ ```
629
+
630
+ ### Response Structure
631
+
632
+ The `inference.result` response contains:
633
+ - `response[0]`: predicted class name (string, e.g. `"HEALTHY"`)
634
+ - `response[1]`: confidence scores dict (e.g. `{"HEALTHY": 0.95, "BROKEN": 0.05}`)
635
+ - `query_metadata.query_metadata.read_index`: window position in the data
636
+ - `query_metadata.query_metadata.file_id`: reference file ID
637
+ - `query_metadata.query_metadata.window_size`: window size used
638
+ - `query_metadata.query_metadata.total_rows`: total rows processed
639
+
640
+ ### Web/JS — Results Array + CSV Download
641
+
642
+ ```typescript
643
+ interface PredictionResult {
644
+ readIndex: number | string
645
+ predictedClass: string
646
+ confidenceScores: Record<string, number>
647
+ fileId: string
648
+ windowSize: number
649
+ totalRows: number
650
+ }
651
+
652
+ const results: PredictionResult[] = []
653
+
654
+ // Inside the SSE onmessage handler:
655
+ if (parsed.type === 'inference.result') {
656
+ const result = parsed.event_data.response
657
+ const meta = parsed.event_data.query_metadata
658
+ const queryMeta = meta?.query_metadata ?? {}
659
+
660
+ const prediction: PredictionResult = {
661
+ readIndex: queryMeta.read_index ?? 'N/A',
662
+ predictedClass: Array.isArray(result) && result.length > 0 ? result[0] : 'unknown',
663
+ confidenceScores: Array.isArray(result) && result.length > 1 ? result[1] : {},
664
+ fileId: queryMeta.file_id ?? 'N/A',
665
+ windowSize: queryMeta.window_size ?? 0,
666
+ totalRows: queryMeta.total_rows ?? 0,
667
+ }
668
+
669
+ results.push(prediction)
670
+ console.log(`[${prediction.readIndex}] ${prediction.predictedClass}`, prediction.confidenceScores)
671
+ }
672
+
673
+ // Download results as CSV
674
+ function downloadResultsCsv(results: PredictionResult[], filename: string) {
675
+ const header = 'read_index,predicted_class,confidence_scores,file_id,window_size,total_rows\n'
676
+ const rows = results.map(r =>
677
+ `${r.readIndex},${r.predictedClass},"${JSON.stringify(r.confidenceScores)}",${r.fileId},${r.windowSize},${r.totalRows}`
678
+ ).join('\n')
679
+
680
+ const blob = new Blob([header + rows], { type: 'text/csv' })
681
+ const url = URL.createObjectURL(blob)
682
+ const a = document.createElement('a')
683
+ a.href = url
684
+ a.download = filename
685
+ a.click()
686
+ URL.revokeObjectURL(url)
687
+ }
688
+ ```
689
+
690
+ ### CLI Flag
691
+
692
+ Add `--save-results` flag (default: `True`) to enable/disable results logging:
693
+
694
+ ```
695
+ --save-results Save predictions to CSV in results/ directory (default: True)
696
+ ```
697
+
698
+ ---
699
+
700
+ ## Key Implementation Notes
701
+
702
+ - Default `window_size` and `step_size`: **100**
703
+ - N-shot class names derived from filename stems (e.g., `healthy.csv` → `HEALTHY`)
704
+ - Python: `signal.SIGINT` for graceful shutdown
705
+ - Web: `AbortController` for SSE, `clearInterval` for stream loop, `gatt.disconnect()` for BLE
706
+ - Web Bluetooth requires HTTPS and a user gesture to initiate pairing
707
+ - For WebSocket bridge, run a small relay server that forwards UDP broadcast to WebSocket clients