@blueharford/scrypted-spatial-awareness 0.6.25 → 0.6.27

This diff shows the published contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
package/out/plugin.zip CHANGED
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@blueharford/scrypted-spatial-awareness",
-  "version": "0.6.25",
+  "version": "0.6.27",
   "description": "Cross-camera object tracking for Scrypted NVR with spatial awareness",
   "author": "Joshua Seidel <blueharford>",
   "license": "Apache-2.0",
@@ -33,6 +33,8 @@ const { systemManager, mediaManager } = sdk;
 export interface SpatialReasoningConfig {
   /** Enable LLM-based descriptions */
   enableLlm: boolean;
+  /** Specific LLM device ID to use (if not set, auto-discovers) */
+  llmDeviceId?: string;
   /** Enable landmark learning/suggestions */
   enableLandmarkLearning: boolean;
   /** Minimum confidence for landmark suggestions */
@@ -436,69 +438,158 @@ export class SpatialReasoningEngine {
   private llmProvider: string | null = null;
   private llmProviderType: LlmProvider = 'unknown';
 
-  /** Find or initialize LLM device - looks for ChatCompletion interface from @scrypted/llm plugin */
-  private async findLlmDevice(): Promise<ChatCompletionDevice | null> {
-    if (this.llmDevice) return this.llmDevice;
-    if (this.llmSearched) return null; // Already searched and found nothing
-
+  // Load balancing for multiple LLMs
+  private llmDevices: Array<{
+    device: ChatCompletionDevice;
+    id: string;
+    name: string;
+    providerType: LlmProvider;
+    lastUsed: number;
+    errorCount: number;
+  }> = [];
+  private llmIndex: number = 0;
+
+  /** Find LLM devices - uses configured device or auto-discovers all for load balancing */
+  private async findAllLlmDevices(): Promise<void> {
+    if (this.llmSearched) return;
     this.llmSearched = true;
 
     try {
-      // Look for devices with ChatCompletion interface (the correct interface for @scrypted/llm)
+      // If a specific LLM device is configured, use only that one
+      if (this.config.llmDeviceId) {
+        const device = systemManager.getDeviceById(this.config.llmDeviceId);
+        if (device?.interfaces?.includes('ChatCompletion')) {
+          const providerTypeEnum = this.detectProviderType(device);
+          this.llmDevices.push({
+            device: device as unknown as ChatCompletionDevice,
+            id: this.config.llmDeviceId,
+            name: device.name || this.config.llmDeviceId,
+            providerType: providerTypeEnum,
+            lastUsed: 0,
+            errorCount: 0,
+          });
+          this.console.log(`[LLM] Using configured LLM: ${device.name}`);
+          return;
+        } else {
+          this.console.warn(`[LLM] Configured device ${this.config.llmDeviceId} not found or doesn't support ChatCompletion`);
+        }
+      }
+
+      // Auto-discover all LLM devices for load balancing
       for (const id of Object.keys(systemManager.getSystemState())) {
        const device = systemManager.getDeviceById(id);
        if (!device) continue;
 
-        // Check if this device has ChatCompletion interface
-        // The @scrypted/llm plugin exposes ChatCompletion, not ObjectDetection
        if (device.interfaces?.includes('ChatCompletion')) {
-          const deviceName = device.name?.toLowerCase() || '';
-          const pluginId = (device as any).pluginId?.toLowerCase() || '';
-
-          // Identify the provider type for logging and image format selection
-          let providerType = 'Unknown';
-          let providerTypeEnum: LlmProvider = 'unknown';
-
-          if (deviceName.includes('openai') || deviceName.includes('gpt')) {
-            providerType = 'OpenAI';
-            providerTypeEnum = 'openai';
-          } else if (deviceName.includes('anthropic') || deviceName.includes('claude')) {
-            providerType = 'Anthropic';
-            providerTypeEnum = 'anthropic';
-          } else if (deviceName.includes('ollama')) {
-            providerType = 'Ollama';
-            providerTypeEnum = 'openai'; // Ollama uses OpenAI-compatible format
-          } else if (deviceName.includes('gemini') || deviceName.includes('google')) {
-            providerType = 'Google';
-            providerTypeEnum = 'openai'; // Google uses OpenAI-compatible format
-          } else if (deviceName.includes('llama')) {
-            providerType = 'llama.cpp';
-            providerTypeEnum = 'openai'; // llama.cpp uses OpenAI-compatible format
-          } else if (pluginId.includes('@scrypted/llm') || pluginId.includes('llm')) {
-            providerType = 'Scrypted LLM';
-            providerTypeEnum = 'unknown';
-          }
+          const providerTypeEnum = this.detectProviderType(device);
+
+          this.llmDevices.push({
+            device: device as unknown as ChatCompletionDevice,
+            id,
+            name: device.name || id,
+            providerType: providerTypeEnum,
+            lastUsed: 0,
+            errorCount: 0,
+          });
 
-          this.llmDevice = device as unknown as ChatCompletionDevice;
-          this.llmProvider = `${providerType} (${device.name})`;
-          this.llmProviderType = providerTypeEnum;
-          this.console.log(`[LLM] Connected to ${providerType}: ${device.name}`);
-          this.console.log(`[LLM] Plugin: ${pluginId || 'N/A'}`);
-          this.console.log(`[LLM] Image format: ${providerTypeEnum}`);
-          this.console.log(`[LLM] Interfaces: ${device.interfaces?.join(', ')}`);
-          return this.llmDevice;
+          this.console.log(`[LLM] Found: ${device.name}`);
        }
       }
 
-      // If we get here, no LLM plugin found
-      this.console.warn('[LLM] No ChatCompletion device found. Install @scrypted/llm for enhanced descriptions.');
-      this.console.warn('[LLM] Falling back to rule-based descriptions using topology data.');
-
+      if (this.llmDevices.length === 0) {
+        this.console.warn('[LLM] No ChatCompletion devices found. Install @scrypted/llm for enhanced descriptions.');
+      } else {
+        this.console.log(`[LLM] Load balancing across ${this.llmDevices.length} LLM device(s)`);
+      }
     } catch (e) {
-      this.console.error('[LLM] Error searching for LLM device:', e);
+      this.console.error('[LLM] Error searching for LLM devices:', e);
     }
+  }
 
-    return null;
+  /** Detect the provider type from device name */
+  private detectProviderType(device: ScryptedDevice): LlmProvider {
+    const deviceName = device.name?.toLowerCase() || '';
+    const pluginId = (device as any).pluginId?.toLowerCase() || '';
+
+    if (deviceName.includes('openai') || deviceName.includes('gpt')) {
+      return 'openai';
+    } else if (deviceName.includes('anthropic') || deviceName.includes('claude')) {
+      return 'anthropic';
+    } else if (deviceName.includes('ollama')) {
+      return 'openai'; // Ollama uses OpenAI-compatible format
+    } else if (deviceName.includes('gemini') || deviceName.includes('google')) {
+      return 'openai'; // Google uses OpenAI-compatible format
+    } else if (deviceName.includes('llama')) {
+      return 'openai'; // llama.cpp uses OpenAI-compatible format
+    } else if (pluginId.includes('@scrypted/llm') || pluginId.includes('llm')) {
+      return 'unknown';
+    }
+    return 'unknown';
+  }
+
+  /** Get the next available LLM using round-robin with least-recently-used preference */
+  private async findLlmDevice(): Promise<ChatCompletionDevice | null> {
+    await this.findAllLlmDevices();
+
+    if (this.llmDevices.length === 0) return null;
+
+    // If only one LLM, just use it
+    if (this.llmDevices.length === 1) {
+      const llm = this.llmDevices[0];
+      this.llmDevice = llm.device;
+      this.llmProvider = llm.name;
+      this.llmProviderType = llm.providerType;
+      return llm.device;
+    }
+
+    // Find the LLM with the oldest lastUsed time (least recently used)
+    // Also prefer LLMs with fewer errors
+    let bestIndex = 0;
+    let bestScore = Infinity;
+
+    for (let i = 0; i < this.llmDevices.length; i++) {
+      const llm = this.llmDevices[i];
+      // Score = lastUsed time + (errorCount * 60 seconds penalty)
+      const score = llm.lastUsed + (llm.errorCount * 60000);
+      if (score < bestScore) {
+        bestScore = score;
+        bestIndex = i;
+      }
+    }
+
+    const selected = this.llmDevices[bestIndex];
+    this.llmDevice = selected.device;
+    this.llmProvider = selected.name;
+    this.llmProviderType = selected.providerType;
+
+    this.console.log(`[LLM] Selected: ${selected.name} (last used ${Math.round((Date.now() - selected.lastUsed) / 1000)}s ago, errors: ${selected.errorCount})`);
+
+    return selected.device;
+  }
+
+  /** Mark an LLM as used (for load balancing) */
+  private markLlmUsed(device: ChatCompletionDevice): void {
+    const llm = this.llmDevices.find(l => l.device === device);
+    if (llm) {
+      llm.lastUsed = Date.now();
+    }
+  }
+
+  /** Mark an LLM as having an error (for load balancing - will be deprioritized) */
+  private markLlmError(device: ChatCompletionDevice): void {
+    const llm = this.llmDevices.find(l => l.device === device);
+    if (llm) {
+      llm.errorCount++;
+      this.console.log(`[LLM] ${llm.name} error count: ${llm.errorCount}`);
+    }
+  }
+
+  /** Reset error count for an LLM after successful call */
+  private markLlmSuccess(device: ChatCompletionDevice): void {
+    const llm = this.llmDevices.find(l => l.device === device);
+    if (llm && llm.errorCount > 0) {
+      llm.errorCount = Math.max(0, llm.errorCount - 1); // Gradually reduce error count
+    }
   }
 
   /** Get the current LLM provider name */
@@ -946,6 +1037,9 @@ export class SpatialReasoningEngine {
         messageContent = prompt;
       }
 
+      // Mark LLM as used for load balancing
+      this.markLlmUsed(llm);
+
       // Call LLM using ChatCompletion interface
       const result = await llm.getChatCompletion({
         messages: [
@@ -961,12 +1055,14 @@ export class SpatialReasoningEngine {
       // Extract description from ChatCompletion result
       const content = result?.choices?.[0]?.message?.content;
       if (content && typeof content === 'string') {
+        this.markLlmSuccess(llm);
         return content.trim();
       }
 
       return null;
     } catch (e) {
       this.console.warn('LLM description generation failed:', e);
+      this.markLlmError(llm);
       return null;
     }
   }
@@ -1049,6 +1145,9 @@ Examples of good descriptions:
 
 Generate ONLY the description, nothing else:`;
 
+      // Mark LLM as used for load balancing
+      this.markLlmUsed(llm);
+
       // Try multimodal format first, fall back to text-only if it fails
       let result: any;
       let usedVision = false;
@@ -1101,6 +1200,7 @@ Generate ONLY the description, nothing else:`;
       const content = result?.choices?.[0]?.message?.content;
       if (content && typeof content === 'string') {
         this.console.log(`[LLM] Got ${eventType} description (vision=${usedVision}): ${content.trim().substring(0, 50)}...`);
+        this.markLlmSuccess(llm);
         return content.trim();
       }
 
@@ -1108,6 +1208,7 @@ Generate ONLY the description, nothing else:`;
       return null;
     } catch (e) {
       this.console.warn(`[LLM] ${eventType} description generation failed:`, e);
+      this.markLlmError(llm);
       return null;
     }
   }
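
The hunks above wrap each getChatCompletion call with the new bookkeeping: markLlmUsed before the call, markLlmSuccess when a string response comes back, and markLlmError on failure; those counters feed the load-balancing score. A minimal standalone sketch of that call pattern, assuming a simplified device shape (the interface and function names below are illustrative, not the package's exports):

// Sketch of the use/success/error bookkeeping around a chat call.
// ChatDevice is a simplified stand-in based on what the diff shows
// (getChatCompletion returning OpenAI-style choices), not the real @scrypted/llm typing.
interface ChatDevice {
    getChatCompletion(body: { messages: { role: string; content: string }[] }): Promise<{
        choices?: { message?: { content?: string } }[];
    }>;
}

interface LlmStats {
    device: ChatDevice;
    lastUsed: number;
    errorCount: number;
}

async function describeWithBookkeeping(llm: LlmStats, prompt: string): Promise<string | null> {
    llm.lastUsed = Date.now(); // markLlmUsed: record dispatch time before the call
    try {
        const result = await llm.device.getChatCompletion({
            messages: [{ role: 'user', content: prompt }],
        });
        const content = result?.choices?.[0]?.message?.content;
        if (content && typeof content === 'string') {
            // markLlmSuccess: let the error count decay after a good response
            llm.errorCount = Math.max(0, llm.errorCount - 1);
            return content.trim();
        }
        return null;
    } catch (e) {
        // markLlmError: failed calls push this device down the rotation
        llm.errorCount++;
        return null;
    }
}
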
@@ -254,11 +254,19 @@ export class TopologyDiscoveryEngine {
     return this.config.discoveryIntervalHours > 0;
   }
 
-  /** Find LLM device with ChatCompletion interface */
-  private async findLlmDevice(): Promise<ChatCompletionDevice | null> {
-    if (this.llmDevice) return this.llmDevice;
-    if (this.llmSearched) return null;
-
+  // Load balancing for multiple LLMs
+  private llmDevices: Array<{
+    device: ChatCompletionDevice;
+    id: string;
+    name: string;
+    providerType: LlmProvider;
+    lastUsed: number;
+    errorCount: number;
+  }> = [];
+
+  /** Find ALL LLM devices for load balancing */
+  private async findAllLlmDevices(): Promise<void> {
+    if (this.llmSearched) return;
     this.llmSearched = true;
 
     try {
@@ -269,32 +277,84 @@ export class TopologyDiscoveryEngine {
         if (device.interfaces?.includes('ChatCompletion')) {
           const deviceName = device.name?.toLowerCase() || '';
 
-          // Detect provider type for image format selection
+          let providerType: LlmProvider = 'unknown';
           if (deviceName.includes('openai') || deviceName.includes('gpt')) {
-            this.llmProviderType = 'openai';
+            providerType = 'openai';
           } else if (deviceName.includes('anthropic') || deviceName.includes('claude')) {
-            this.llmProviderType = 'anthropic';
+            providerType = 'anthropic';
           } else if (deviceName.includes('ollama') || deviceName.includes('gemini') ||
                      deviceName.includes('google') || deviceName.includes('llama')) {
-            // These providers use OpenAI-compatible format
-            this.llmProviderType = 'openai';
-          } else {
-            this.llmProviderType = 'unknown';
+            providerType = 'openai';
           }
 
-          this.llmDevice = device as unknown as ChatCompletionDevice;
-          this.console.log(`[Discovery] Connected to LLM: ${device.name}`);
-          this.console.log(`[Discovery] Image format: ${this.llmProviderType}`);
-          return this.llmDevice;
+          this.llmDevices.push({
+            device: device as unknown as ChatCompletionDevice,
+            id,
+            name: device.name || id,
+            providerType,
+            lastUsed: 0,
+            errorCount: 0,
+          });
+
+          this.console.log(`[Discovery] Found LLM: ${device.name}`);
         }
       }
 
-      this.console.warn('[Discovery] No ChatCompletion device found. Vision-based discovery unavailable.');
+      if (this.llmDevices.length === 0) {
+        this.console.warn('[Discovery] No ChatCompletion devices found. Vision-based discovery unavailable.');
+      } else {
+        this.console.log(`[Discovery] Load balancing across ${this.llmDevices.length} LLM device(s)`);
+      }
     } catch (e) {
-      this.console.error('[Discovery] Error finding LLM device:', e);
+      this.console.error('[Discovery] Error finding LLM devices:', e);
+    }
+  }
+
+  /** Find LLM device with ChatCompletion interface - uses load balancing */
+  private async findLlmDevice(): Promise<ChatCompletionDevice | null> {
+    await this.findAllLlmDevices();
+
+    if (this.llmDevices.length === 0) return null;
+
+    // If only one LLM, just use it
+    if (this.llmDevices.length === 1) {
+      const llm = this.llmDevices[0];
+      this.llmDevice = llm.device;
+      this.llmProviderType = llm.providerType;
+      return llm.device;
     }
 
-    return null;
+    // Find the LLM with oldest lastUsed time (least recently used)
+    let bestIndex = 0;
+    let bestScore = Infinity;
+
+    for (let i = 0; i < this.llmDevices.length; i++) {
+      const llm = this.llmDevices[i];
+      const score = llm.lastUsed + (llm.errorCount * 60000);
+      if (score < bestScore) {
+        bestScore = score;
+        bestIndex = i;
+      }
+    }
+
+    const selected = this.llmDevices[bestIndex];
+    this.llmDevice = selected.device;
+    this.llmProviderType = selected.providerType;
+
+    // Mark as used
+    selected.lastUsed = Date.now();
+
+    this.console.log(`[Discovery] Selected LLM: ${selected.name}`);
+    return selected.device;
+  }
+
+  /** Mark an LLM as having an error */
+  private markLlmError(device: ChatCompletionDevice): void {
+    const llm = this.llmDevices.find(l => l.device === device);
+    if (llm) {
+      llm.errorCount++;
+      this.console.log(`[Discovery] ${llm.name} error count: ${llm.errorCount}`);
+    }
   }
 
   /** Get camera snapshot as ImageData */
@@ -495,6 +555,11 @@ Use the mount height to help estimate distances - objects at ground level will a
 
     // All formats failed
     if (lastError) {
+      // Track error for load balancing
+      if (llm) {
+        this.markLlmError(llm);
+      }
+
       const errorStr = String(lastError);
       if (isVisionFormatError(lastError)) {
         analysis.error = 'Vision/image analysis failed with all formats. Ensure you have a vision-capable model (e.g., gpt-4o, gpt-4-turbo, claude-3-sonnet) configured and the @scrypted/llm plugin supports vision.';
@@ -63,6 +63,8 @@ export interface TrackingEngineConfig {
   objectAlertCooldown: number;
   /** Use LLM for enhanced descriptions */
   useLlmDescriptions: boolean;
+  /** Specific LLM device ID to use (if not set, auto-discovers all for load balancing) */
+  llmDeviceId?: string;
   /** LLM rate limit interval (ms) - minimum time between LLM calls */
   llmDebounceInterval?: number;
   /** Whether to fall back to basic notifications when LLM is unavailable or slow */
@@ -161,6 +163,7 @@ export class TrackingEngine {
     // Initialize spatial reasoning engine
     const spatialConfig: SpatialReasoningConfig = {
       enableLlm: config.useLlmDescriptions,
+      llmDeviceId: config.llmDeviceId,
       enableLandmarkLearning: config.enableLandmarkLearning ?? true,
       landmarkConfidenceThreshold: config.landmarkConfidenceThreshold ?? 0.7,
       contextCacheTtl: 60000, // 1 minute cache
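
Both the spatial reasoning and topology discovery engines select the next LLM with the same policy shown above: score each candidate as lastUsed + errorCount * 60000 and take the lowest score, so the least recently used device wins and each recorded error ages a device's last use forward by one minute. A minimal standalone sketch of that policy (types and names here are illustrative, not the package's own):

// Illustrative sketch of least-recently-used selection with an error penalty,
// mirroring the llmDevices scoring in the hunks above. Names are hypothetical.
interface LlmEntry {
    name: string;
    lastUsed: number;   // epoch ms of the last dispatch, 0 = never used
    errorCount: number; // incremented on failure, decremented on success
}

const ERROR_PENALTY_MS = 60_000; // each error counts as one extra minute of recency

function pickLlm(pool: LlmEntry[]): LlmEntry | null {
    if (!pool.length) return null;
    let best = pool[0];
    let bestScore = best.lastUsed + best.errorCount * ERROR_PENALTY_MS;
    for (const entry of pool) {
        const score = entry.lastUsed + entry.errorCount * ERROR_PENALTY_MS;
        if (score < bestScore) {
            best = entry;
            bestScore = score;
        }
    }
    best.lastUsed = Date.now(); // mark as used so the next call rotates elsewhere
    return best;
}

// Example: one error outweighs 40 extra seconds of idle time.
const now = Date.now();
const pool: LlmEntry[] = [
    { name: 'gpt-4o', lastUsed: now - 10_000, errorCount: 0 }, // used 10s ago, healthy
    { name: 'ollama', lastUsed: now - 50_000, errorCount: 1 }, // idle longer, one failure
];
// Scores: gpt-4o = now - 10s; ollama = (now - 50s) + 60s = now + 10s.
console.log(pickLlm(pool)?.name); // "gpt-4o"

One consequence of scoring with absolute timestamps: a device that has never been used (lastUsed of 0) beats any previously used device regardless of its error count, since each error adds only 60 seconds.
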
package/src/main.ts CHANGED
@@ -237,20 +237,21 @@ export class SpatialAwarenessPlugin extends ScryptedDeviceBase
       group: 'MQTT Integration',
     },
 
-    // Alert Settings
-    enableAlerts: {
-      title: 'Enable Alerts',
-      type: 'boolean',
-      defaultValue: true,
-      group: 'Alerts',
+    // Integrations
+    llmDevice: {
+      title: 'LLM Provider',
+      type: 'device',
+      deviceFilter: `interfaces.includes('ChatCompletion')`,
+      description: 'Select the LLM plugin to use for smart descriptions (e.g., OpenAI, Anthropic, Ollama)',
+      group: 'Integrations',
     },
     defaultNotifiers: {
-      title: 'Notifiers',
+      title: 'Notification Service',
       type: 'device',
       multiple: true,
       deviceFilter: `interfaces.includes('${ScryptedInterface.Notifier}')`,
-      description: 'Select one or more notifiers to receive alerts',
-      group: 'Alerts',
+      description: 'Select one or more notifiers to receive alerts (e.g., Pushover, Home Assistant)',
+      group: 'Integrations',
     },
 
     // Tracked Cameras
@@ -262,13 +263,6 @@ export class SpatialAwarenessPlugin extends ScryptedDeviceBase
       group: 'Cameras',
       description: 'Select cameras with object detection to track',
     },
-
-    // Alert Rules (stored as JSON)
-    alertRules: {
-      title: 'Alert Rules',
-      type: 'string',
-      hide: true,
-    },
   });
 
   constructor(nativeId?: ScryptedNativeId) {
@@ -368,7 +362,8 @@ export class SpatialAwarenessPlugin extends ScryptedDeviceBase
       loiteringThreshold: (this.storageSettings.values.loiteringThreshold as number || 3) * 1000,
       objectAlertCooldown: (this.storageSettings.values.objectAlertCooldown as number || 30) * 1000,
       useLlmDescriptions: this.storageSettings.values.useLlmDescriptions as boolean ?? true,
-      llmDebounceInterval: (this.storageSettings.values.llmDebounceInterval as number || 10) * 1000,
+      llmDeviceId: this.storageSettings.values.llmDevice as string || undefined,
+      llmDebounceInterval: (this.storageSettings.values.llmDebounceInterval as number || 30) * 1000,
       llmFallbackEnabled: this.storageSettings.values.llmFallbackEnabled as boolean ?? true,
       llmFallbackTimeout: (this.storageSettings.values.llmFallbackTimeout as number || 3) * 1000,
       enableTransitTimeLearning: this.storageSettings.values.enableTransitTimeLearning as boolean ?? true,
@@ -697,95 +692,21 @@ export class SpatialAwarenessPlugin extends ScryptedDeviceBase
     // ==================== 5. Tracking ====================
     addGroup('Tracking');
 
-    // ==================== 6. AI & Spatial Reasoning ====================
+    // ==================== 6. Integrations ====================
+    addGroup('Integrations');
+
+    // ==================== 7. AI & Spatial Reasoning ====================
     addGroup('AI & Spatial Reasoning');
 
-    // ==================== 7. Auto-Topology Discovery ====================
+    // ==================== 8. Auto-Topology Discovery ====================
     addGroup('Auto-Topology Discovery');
 
-    // ==================== 8. Alerts ====================
-    addGroup('Alerts');
-
-    // Add alert rules configuration UI
-    const alertRules = this.alertManager.getRules();
-    const rulesHtml = this.generateAlertRulesHtml(alertRules);
-    settings.push({
-      key: 'alertRulesEditor',
-      title: 'Alert Rules',
-      type: 'html' as any,
-      value: rulesHtml,
-      group: 'Alerts',
-    });
-
     // ==================== 9. MQTT Integration ====================
     addGroup('MQTT Integration');
 
     return settings;
   }
 
-  private generateAlertRulesHtml(rules: any[]): string {
-    const ruleRows = rules.map(rule => `
-      <tr data-rule-id="${rule.id}">
-        <td style="padding:8px;border-bottom:1px solid #333;">
-          <input type="checkbox" ${rule.enabled ? 'checked' : ''}
-            onchange="(function(el){var rules=JSON.parse(localStorage.getItem('sa-temp-rules')||'[]');var r=rules.find(x=>x.id==='${rule.id}');if(r)r.enabled=el.checked;localStorage.setItem('sa-temp-rules',JSON.stringify(rules));})(this)" />
-        </td>
-        <td style="padding:8px;border-bottom:1px solid #333;color:#fff;">${rule.name}</td>
-        <td style="padding:8px;border-bottom:1px solid #333;color:#888;">${rule.type}</td>
-        <td style="padding:8px;border-bottom:1px solid #333;">
-          <span style="padding:2px 8px;border-radius:4px;font-size:12px;background:${
-            rule.severity === 'critical' ? '#e94560' :
-            rule.severity === 'warning' ? '#f39c12' : '#3498db'
-          };color:white;">${rule.severity}</span>
-        </td>
-        <td style="padding:8px;border-bottom:1px solid #333;color:#888;">${Math.round(rule.cooldown / 1000)}s</td>
-      </tr>
-    `).join('');
-
-    const initCode = `localStorage.setItem('sa-temp-rules',JSON.stringify(${JSON.stringify(rules)}))`;
-    const saveCode = `(function(){var rules=JSON.parse(localStorage.getItem('sa-temp-rules')||'[]');fetch('/endpoint/@blueharford/scrypted-spatial-awareness/api/alert-rules',{method:'PUT',headers:{'Content-Type':'application/json'},body:JSON.stringify(rules)}).then(r=>r.json()).then(d=>{if(d.success)alert('Alert rules saved!');else alert('Error: '+d.error);}).catch(e=>alert('Error: '+e));})()`;
-
-    return `
-      <style>
-        .sa-rules-table { width:100%; border-collapse:collapse; margin-top:10px; }
-        .sa-rules-table th { text-align:left; padding:10px 8px; border-bottom:2px solid #e94560; color:#e94560; font-size:13px; }
-        .sa-save-rules-btn {
-          background: linear-gradient(135deg, #27ae60 0%, #2ecc71 100%);
-          color: white;
-          border: none;
-          padding: 10px 20px;
-          border-radius: 6px;
-          font-size: 14px;
-          font-weight: 600;
-          cursor: pointer;
-          margin-top: 15px;
-        }
-        .sa-save-rules-btn:hover { opacity: 0.9; }
-        .sa-rules-container { background:#16213e; border-radius:8px; padding:15px; }
-        .sa-rules-desc { color:#888; font-size:13px; margin-bottom:10px; }
-      </style>
-      <div class="sa-rules-container">
-        <p class="sa-rules-desc">Enable or disable alert types. Movement alerts notify you when someone moves between cameras.</p>
-        <table class="sa-rules-table">
-          <thead>
-            <tr>
-              <th style="width:40px;">On</th>
-              <th>Alert Type</th>
-              <th>Event</th>
-              <th>Severity</th>
-              <th>Cooldown</th>
-            </tr>
-          </thead>
-          <tbody>
-            ${ruleRows}
-          </tbody>
-        </table>
-        <button class="sa-save-rules-btn" onclick="${saveCode}">Save Alert Rules</button>
-        <script>(function(){${initCode}})();</script>
-      </div>
-    `;
-  }
-
   async putSetting(key: string, value: SettingValue): Promise<void> {
     await this.storageSettings.putSetting(key, value);
 
@@ -802,6 +723,7 @@ export class SpatialAwarenessPlugin extends ScryptedDeviceBase
       key === 'llmDebounceInterval' ||
       key === 'llmFallbackEnabled' ||
       key === 'llmFallbackTimeout' ||
+      key === 'llmDevice' ||
       key === 'enableTransitTimeLearning' ||
       key === 'enableConnectionSuggestions' ||
       key === 'enableLandmarkLearning' ||
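
The main.ts changes above add an 'LLM Provider' device picker filtered with interfaces.includes('ChatCompletion'), pass its value through as llmDeviceId, and reinitialize the engines when it changes; the engines then resolve that id before falling back to auto-discovery. A hedged sketch of that resolution step, using stand-in types rather than the Scrypted SDK's actual systemManager typings:

// Sketch of resolving a configured LLM device id with auto-discovery fallback,
// following the findAllLlmDevices branch in the diff. SystemLike and allDevices
// are illustrative stand-ins, not real SDK APIs.
interface DeviceLike {
    id: string;
    name?: string;
    interfaces?: string[];
}

interface SystemLike {
    getDeviceById(id: string): DeviceLike | undefined;
    allDevices(): DeviceLike[];
}

function resolveLlmDevices(system: SystemLike, configuredId?: string): DeviceLike[] {
    // Prefer the explicitly configured device when it exists and speaks ChatCompletion.
    if (configuredId) {
        const device = system.getDeviceById(configuredId);
        if (device?.interfaces?.includes('ChatCompletion')) return [device];
        console.warn(`Configured device ${configuredId} not found or doesn't support ChatCompletion`);
    }
    // Otherwise collect every ChatCompletion device for load balancing.
    return system.allDevices().filter(d => d.interfaces?.includes('ChatCompletion'));
}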