@blueharford/scrypted-spatial-awareness 0.2.1 → 0.4.0
This diff shows the changes between these publicly released package versions as they appear in their public registry and is provided for informational purposes only.
- package/README.md +175 -10
- package/dist/main.nodejs.js +1 -1
- package/dist/main.nodejs.js.map +1 -1
- package/dist/plugin.zip +0 -0
- package/out/main.nodejs.js +2585 -60
- package/out/main.nodejs.js.map +1 -1
- package/out/plugin.zip +0 -0
- package/package.json +1 -1
- package/src/core/tracking-engine.ts +963 -10
- package/src/main.ts +492 -19
- package/src/models/training.ts +300 -0
- package/src/ui/editor-html.ts +256 -0
- package/src/ui/training-html.ts +1007 -0
package/src/models/training.ts
ADDED
@@ -0,0 +1,300 @@
/**
 * Training Mode Types
 *
 * These types support the guided training system where a user physically
 * walks around their property to train the system on camera positions,
 * transit times, overlaps, landmarks, and structures.
 */

/** Unique identifier for a training session */
export type TrainingSessionId = string;

/** Current state of the training session */
export type TrainingSessionState = 'idle' | 'active' | 'paused' | 'completed';

/** Type of training action being performed */
export type TrainingActionType =
  | 'camera_visit'      // User arrived at a camera
  | 'transit_start'     // User started walking to another camera
  | 'transit_end'       // User arrived at destination camera
  | 'mark_landmark'     // User marked a landmark location
  | 'mark_overlap'      // User marked camera overlap zone
  | 'mark_structure'    // User marked a structure (wall, fence, etc.)
  | 'confirm_position'  // User confirmed camera position on floor plan
  | 'adjust_fov';       // User adjusted camera field of view

/** A single camera visit during training */
export interface TrainingCameraVisit {
  /** Camera device ID */
  cameraId: string;
  /** Camera name for display */
  cameraName: string;
  /** When the trainer was first detected on this camera */
  arrivedAt: number;
  /** When the trainer left this camera (null if still there) */
  departedAt: number | null;
  /** Visual embedding captured for the trainer */
  trainerEmbedding?: string;
  /** Confidence of trainer detection (0-1) */
  detectionConfidence: number;
  /** Bounding box of trainer in frame [x, y, width, height] */
  boundingBox?: [number, number, number, number];
  /** Position on floor plan if confirmed */
  floorPlanPosition?: { x: number; y: number };
  /** Entry zone detected (if any) */
  entryZone?: string;
  /** Exit zone detected (if any) */
  exitZone?: string;
}

/** A recorded transit between two cameras */
export interface TrainingTransit {
  /** Unique ID for this transit */
  id: string;
  /** Source camera ID */
  fromCameraId: string;
  /** Destination camera ID */
  toCameraId: string;
  /** Transit start time */
  startTime: number;
  /** Transit end time */
  endTime: number;
  /** Calculated transit duration in seconds */
  transitSeconds: number;
  /** Whether there was direct overlap (both cameras saw trainer simultaneously) */
  hasOverlap: boolean;
  /** Duration of overlap in seconds (if any) */
  overlapDuration?: number;
  /** Exit zone from source camera */
  exitZone?: string;
  /** Entry zone to destination camera */
  entryZone?: string;
  /** Path description entered by user (optional) */
  pathDescription?: string;
}

/** A landmark marked during training */
export interface TrainingLandmark {
  /** Unique ID for this landmark */
  id: string;
  /** Name given by user */
  name: string;
  /** Type of landmark */
  type: 'mailbox' | 'garage' | 'shed' | 'tree' | 'gate' | 'door' | 'driveway' | 'pathway' | 'garden' | 'pool' | 'deck' | 'patio' | 'other';
  /** Position on floor plan */
  position: { x: number; y: number };
  /** Which camera(s) can see this landmark */
  visibleFromCameras: string[];
  /** When this was marked */
  markedAt: number;
  /** Optional description */
  description?: string;
}

/** A camera overlap zone marked during training */
export interface TrainingOverlap {
  /** Unique ID for this overlap */
  id: string;
  /** First camera in overlap */
  camera1Id: string;
  /** Second camera in overlap */
  camera2Id: string;
  /** Position on floor plan where overlap was confirmed */
  position: { x: number; y: number };
  /** Approximate radius of overlap zone */
  radius: number;
  /** When this was marked */
  markedAt: number;
}

/** A structure marked during training (walls, fences, etc.) */
export interface TrainingStructure {
  /** Unique ID for this structure */
  id: string;
  /** Type of structure */
  type: 'wall' | 'fence' | 'hedge' | 'building' | 'path' | 'road' | 'other';
  /** Name/description */
  name: string;
  /** Points defining the structure (line or polygon) */
  points: Array<{ x: number; y: number }>;
  /** When this was marked */
  markedAt: number;
}

/** Summary statistics for a training session */
export interface TrainingSessionStats {
  /** Total duration of training in seconds */
  totalDuration: number;
  /** Number of cameras visited */
  camerasVisited: number;
  /** Number of transits recorded */
  transitsRecorded: number;
  /** Number of landmarks marked */
  landmarksMarked: number;
  /** Number of overlaps detected */
  overlapsDetected: number;
  /** Number of structures marked */
  structuresMarked: number;
  /** Average transit time in seconds */
  averageTransitTime: number;
  /** Coverage percentage (cameras visited / total cameras) */
  coveragePercentage: number;
}

/** A training session */
export interface TrainingSession {
  /** Unique session ID */
  id: TrainingSessionId;
  /** Current state */
  state: TrainingSessionState;
  /** When the session started */
  startedAt: number;
  /** When the session was last updated */
  updatedAt: number;
  /** When the session ended (if completed) */
  completedAt?: number;
  /** Visual embedding of the trainer (captured at start) */
  trainerEmbedding?: string;
  /** Name of the trainer (for display) */
  trainerName?: string;
  /** All camera visits during this session */
  visits: TrainingCameraVisit[];
  /** All transits recorded during this session */
  transits: TrainingTransit[];
  /** All landmarks marked during this session */
  landmarks: TrainingLandmark[];
  /** All overlaps detected during this session */
  overlaps: TrainingOverlap[];
  /** All structures marked during this session */
  structures: TrainingStructure[];
  /** Current camera where trainer is detected (if any) */
  currentCameraId?: string;
  /** Previous camera (for transit tracking) */
  previousCameraId?: string;
  /** Time when trainer left previous camera */
  transitStartTime?: number;
  /** Session statistics */
  stats: TrainingSessionStats;
}

/** Configuration for training mode */
export interface TrainingConfig {
  /** Minimum confidence for trainer detection */
  minDetectionConfidence: number;
  /** Maximum time (seconds) to wait for trainer at next camera */
  maxTransitWait: number;
  /** Whether to auto-detect overlaps */
  autoDetectOverlaps: boolean;
  /** Whether to auto-suggest landmarks based on AI */
  autoSuggestLandmarks: boolean;
  /** Minimum overlap duration (seconds) to count as overlap */
  minOverlapDuration: number;
}

/** Real-time training status update sent to UI */
export interface TrainingStatusUpdate {
  /** Session ID */
  sessionId: TrainingSessionId;
  /** Current state */
  state: TrainingSessionState;
  /** Current camera (if detected) */
  currentCamera?: {
    id: string;
    name: string;
    detectedAt: number;
    confidence: number;
  };
  /** Active transit (if in transit) */
  activeTransit?: {
    fromCameraId: string;
    fromCameraName: string;
    startTime: number;
    elapsedSeconds: number;
  };
  /** Recent action */
  lastAction?: {
    type: TrainingActionType;
    description: string;
    timestamp: number;
  };
  /** Session stats */
  stats: TrainingSessionStats;
  /** Suggestions for next actions */
  suggestions: string[];
}

/** Result of applying training to topology */
export interface TrainingApplicationResult {
  /** Number of cameras added to topology */
  camerasAdded: number;
  /** Number of connections created */
  connectionsCreated: number;
  /** Number of connections updated */
  connectionsUpdated: number;
  /** Number of landmarks added */
  landmarksAdded: number;
  /** Number of zones created */
  zonesCreated: number;
  /** Any warnings or issues */
  warnings: string[];
  /** Whether the application was successful */
  success: boolean;
}

/** Default training configuration */
export const DEFAULT_TRAINING_CONFIG: TrainingConfig = {
  minDetectionConfidence: 0.7,
  maxTransitWait: 120, // 2 minutes
  autoDetectOverlaps: true,
  autoSuggestLandmarks: true,
  minOverlapDuration: 2, // 2 seconds
};

/** Create a new empty training session */
export function createTrainingSession(trainerName?: string): TrainingSession {
  const now = Date.now();
  return {
    id: `training-${now}-${Math.random().toString(36).substr(2, 9)}`,
    state: 'idle',
    startedAt: now,
    updatedAt: now,
    trainerName,
    visits: [],
    transits: [],
    landmarks: [],
    overlaps: [],
    structures: [],
    stats: {
      totalDuration: 0,
      camerasVisited: 0,
      transitsRecorded: 0,
      landmarksMarked: 0,
      overlapsDetected: 0,
      structuresMarked: 0,
      averageTransitTime: 0,
      coveragePercentage: 0,
    },
  };
}

/** Calculate session statistics */
export function calculateTrainingStats(session: TrainingSession, totalCameras: number): TrainingSessionStats {
  const uniqueCameras = new Set(session.visits.map(v => v.cameraId));
  const transitTimes = session.transits.map(t => t.transitSeconds);
  const avgTransit = transitTimes.length > 0
    ? transitTimes.reduce((a, b) => a + b, 0) / transitTimes.length
    : 0;

  return {
    totalDuration: (session.completedAt || Date.now()) - session.startedAt,
    camerasVisited: uniqueCameras.size,
    transitsRecorded: session.transits.length,
    landmarksMarked: session.landmarks.length,
    overlapsDetected: session.overlaps.length,
    structuresMarked: session.structures.length,
    averageTransitTime: Math.round(avgTransit),
    coveragePercentage: totalCameras > 0
      ? Math.round((uniqueCameras.size / totalCameras) * 100)
      : 0,
  };
}
package/src/ui/editor-html.ts
CHANGED
@@ -105,6 +105,23 @@ export const EDITOR_HTML = `<!DOCTYPE html>
      </div>
      <div id="suggestions-list"></div>
    </div>
+   <div class="section" id="connection-suggestions-section" style="display: none;">
+     <div class="section-title">
+       <span>Connection Suggestions</span>
+       <button class="btn btn-small" onclick="loadConnectionSuggestions()">Refresh</button>
+     </div>
+     <div id="connection-suggestions-list"></div>
+   </div>
+   <div class="section" id="live-tracking-section">
+     <div class="section-title">
+       <span>Live Tracking</span>
+       <label class="checkbox-group" style="font-size: 11px; font-weight: normal; text-transform: none;">
+         <input type="checkbox" id="live-tracking-toggle" onchange="toggleLiveTracking(this.checked)">
+         Enable
+       </label>
+     </div>
+     <div id="live-tracking-list" style="max-height: 150px; overflow-y: auto;"></div>
+   </div>
  </div>
</div>
<div class="editor">

@@ -295,6 +312,12 @@ export const EDITOR_HTML = `<!DOCTYPE html>
let availableCameras = [];
let landmarkTemplates = [];
let pendingSuggestions = [];
+ let connectionSuggestions = [];
+ let liveTrackingData = { objects: [], timestamp: 0 };
+ let liveTrackingEnabled = false;
+ let liveTrackingInterval = null;
+ let selectedJourneyId = null;
+ let journeyPath = null;
let isDrawing = false;
let drawStart = null;
let currentDrawing = null;

@@ -307,6 +330,7 @@ export const EDITOR_HTML = `<!DOCTYPE html>
  await loadAvailableCameras();
  await loadLandmarkTemplates();
  await loadSuggestions();
+ await loadConnectionSuggestions();
  resizeCanvas();
  render();
  updateUI();

@@ -406,6 +430,132 @@ export const EDITOR_HTML = `<!DOCTYPE html>
  } catch (e) { console.error('Failed to reject suggestion:', e); }
}

+ // ==================== Connection Suggestions ====================
+ async function loadConnectionSuggestions() {
+   try {
+     const response = await fetch('../api/connection-suggestions');
+     if (response.ok) {
+       const data = await response.json();
+       connectionSuggestions = data.suggestions || [];
+       updateConnectionSuggestionsUI();
+     }
+   } catch (e) { console.error('Failed to load connection suggestions:', e); }
+ }
+
+ function updateConnectionSuggestionsUI() {
+   const section = document.getElementById('connection-suggestions-section');
+   const list = document.getElementById('connection-suggestions-list');
+   if (connectionSuggestions.length === 0) {
+     section.style.display = 'none';
+     return;
+   }
+   section.style.display = 'block';
+   list.innerHTML = connectionSuggestions.map(s =>
+     '<div class="camera-item" style="display: flex; justify-content: space-between; align-items: center;">' +
+       '<div><div class="camera-name">' + s.fromCameraName + ' → ' + s.toCameraName + '</div>' +
+       '<div class="camera-info">' + Math.round(s.suggestedTransitTime.typical / 1000) + 's typical, ' +
+       Math.round(s.confidence * 100) + '% confidence</div></div>' +
+       '<div style="display: flex; gap: 5px;">' +
+         '<button class="btn btn-small btn-primary" onclick="acceptConnectionSuggestion(\\'' + s.id + '\\')">Accept</button>' +
+         '<button class="btn btn-small" onclick="rejectConnectionSuggestion(\\'' + s.id + '\\')">Reject</button>' +
+       '</div></div>'
+   ).join('');
+ }
+
+ async function acceptConnectionSuggestion(id) {
+   try {
+     const response = await fetch('../api/connection-suggestions/' + encodeURIComponent(id) + '/accept', { method: 'POST' });
+     if (response.ok) {
+       const data = await response.json();
+       if (data.connection) {
+         topology.connections.push(data.connection);
+         updateUI();
+         render();
+       }
+       await loadConnectionSuggestions();
+       setStatus('Connection accepted', 'success');
+     }
+   } catch (e) { console.error('Failed to accept connection suggestion:', e); }
+ }
+
+ async function rejectConnectionSuggestion(id) {
+   try {
+     await fetch('../api/connection-suggestions/' + encodeURIComponent(id) + '/reject', { method: 'POST' });
+     await loadConnectionSuggestions();
+     setStatus('Connection suggestion rejected', 'success');
+   } catch (e) { console.error('Failed to reject connection suggestion:', e); }
+ }
+
+ // ==================== Live Tracking ====================
+ function toggleLiveTracking(enabled) {
+   liveTrackingEnabled = enabled;
+   if (enabled) {
+     loadLiveTracking();
+     liveTrackingInterval = setInterval(loadLiveTracking, 2000); // Poll every 2 seconds
+   } else {
+     if (liveTrackingInterval) {
+       clearInterval(liveTrackingInterval);
+       liveTrackingInterval = null;
+     }
+     liveTrackingData = { objects: [], timestamp: 0 };
+     selectedJourneyId = null;
+     journeyPath = null;
+     updateLiveTrackingUI();
+     render();
+   }
+ }
+
+ async function loadLiveTracking() {
+   try {
+     const response = await fetch('../api/live-tracking');
+     if (response.ok) {
+       liveTrackingData = await response.json();
+       updateLiveTrackingUI();
+       render();
+     }
+   } catch (e) { console.error('Failed to load live tracking:', e); }
+ }
+
+ function updateLiveTrackingUI() {
+   const list = document.getElementById('live-tracking-list');
+   if (liveTrackingData.objects.length === 0) {
+     list.innerHTML = '<div style="color: #666; font-size: 12px; text-align: center; padding: 10px;">No active objects</div>';
+     return;
+   }
+   list.innerHTML = liveTrackingData.objects.map(obj => {
+     const isSelected = selectedJourneyId === obj.globalId;
+     const ageSeconds = Math.round((Date.now() - obj.lastSeen) / 1000);
+     const ageStr = ageSeconds < 60 ? ageSeconds + 's ago' : Math.round(ageSeconds / 60) + 'm ago';
+     return '<div class="camera-item' + (isSelected ? ' selected' : '') + '" ' +
+       'onclick="selectTrackedObject(\\'' + obj.globalId + '\\')" ' +
+       'style="padding: 8px; cursor: pointer;">' +
+       '<div class="camera-name" style="font-size: 12px;">' +
+       (obj.className.charAt(0).toUpperCase() + obj.className.slice(1)) +
+       (obj.label ? ' (' + obj.label + ')' : '') + '</div>' +
+       '<div class="camera-info">' + obj.lastCameraName + ' • ' + ageStr + '</div>' +
+       '</div>';
+   }).join('');
+ }
+
+ async function selectTrackedObject(globalId) {
+   if (selectedJourneyId === globalId) {
+     // Deselect
+     selectedJourneyId = null;
+     journeyPath = null;
+   } else {
+     selectedJourneyId = globalId;
+     // Load journey path
+     try {
+       const response = await fetch('../api/journey-path/' + globalId);
+       if (response.ok) {
+         journeyPath = await response.json();
+       }
+     } catch (e) { console.error('Failed to load journey path:', e); }
+   }
+   updateLiveTrackingUI();
+   render();
+ }
+
function openAddLandmarkModal() {
  updateLandmarkSuggestions();
  document.getElementById('add-landmark-modal').classList.add('active');
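These additions drive three plugin endpoints: ../api/connection-suggestions (with /accept and /reject actions), ../api/live-tracking polled every 2 seconds while the toggle is on, and ../api/journey-path/<globalId> when a tracked object is selected. The TypeScript shapes below are inferred from the fields this UI code reads and are a sketch for orientation only, not declarations copied from the plugin source.

// Inferred response shapes (assumptions, not the plugin's own types).
interface ConnectionSuggestion {
  id: string;
  fromCameraName: string;
  toCameraName: string;
  suggestedTransitTime: { typical: number };   // milliseconds
  confidence: number;                          // 0-1
}

interface LiveTrackedObject {
  globalId: string;
  className: string;                           // 'person' | 'car' | 'animal' | ...
  label?: string;
  lastSeen: number;                            // epoch ms, drives the "Xs ago" text
  lastCameraName: string;
  cameraPosition?: { x: number; y: number };   // floor-plan coordinates
}

interface LiveTrackingResponse {
  objects: LiveTrackedObject[];
  timestamp: number;
}

interface JourneyPathResponse {
  segments: Array<{
    fromCamera: { position?: { x: number; y: number } };
    toCamera: { position?: { x: number; y: number } };
  }>;
  currentLocation?: { position?: { x: number; y: number } };
}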
@@ -629,6 +779,112 @@ export const EDITOR_HTML = `<!DOCTYPE html>
  for (const camera of topology.cameras) {
    if (camera.floorPlanPosition) { drawCamera(camera); }
  }
+
+   // Draw journey path if selected
+   if (journeyPath && journeyPath.segments.length > 0) {
+     drawJourneyPath();
+   }
+
+   // Draw live tracking objects
+   if (liveTrackingEnabled && liveTrackingData.objects.length > 0) {
+     drawLiveTrackingObjects();
+   }
+ }
+
+ function drawJourneyPath() {
+   if (!journeyPath) return;
+
+   ctx.strokeStyle = '#ff6b6b';
+   ctx.lineWidth = 3;
+   ctx.setLineDash([8, 4]);
+
+   // Draw path segments
+   for (const segment of journeyPath.segments) {
+     if (segment.fromCamera.position && segment.toCamera.position) {
+       ctx.beginPath();
+       ctx.moveTo(segment.fromCamera.position.x, segment.fromCamera.position.y);
+       ctx.lineTo(segment.toCamera.position.x, segment.toCamera.position.y);
+       ctx.stroke();
+
+       // Draw timestamp indicator
+       const midX = (segment.fromCamera.position.x + segment.toCamera.position.x) / 2;
+       const midY = (segment.fromCamera.position.y + segment.toCamera.position.y) / 2;
+       ctx.fillStyle = 'rgba(255, 107, 107, 0.9)';
+       ctx.beginPath();
+       ctx.arc(midX, midY, 4, 0, Math.PI * 2);
+       ctx.fill();
+     }
+   }
+
+   ctx.setLineDash([]);
+
+   // Draw current location indicator
+   if (journeyPath.currentLocation?.position) {
+     const pos = journeyPath.currentLocation.position;
+     // Pulsing dot effect
+     const pulse = (Date.now() % 1000) / 1000;
+     const radius = 10 + pulse * 5;
+     const alpha = 1 - pulse * 0.5;
+
+     ctx.beginPath();
+     ctx.arc(pos.x, pos.y, radius, 0, Math.PI * 2);
+     ctx.fillStyle = 'rgba(255, 107, 107, ' + alpha + ')';
+     ctx.fill();
+
+     ctx.beginPath();
+     ctx.arc(pos.x, pos.y, 6, 0, Math.PI * 2);
+     ctx.fillStyle = '#ff6b6b';
+     ctx.fill();
+     ctx.strokeStyle = '#fff';
+     ctx.lineWidth = 2;
+     ctx.stroke();
+   }
+ }
+
+ function drawLiveTrackingObjects() {
+   const objectColors = {
+     person: '#4caf50',
+     car: '#2196f3',
+     animal: '#ff9800',
+     default: '#9c27b0'
+   };
+
+   for (const obj of liveTrackingData.objects) {
+     if (!obj.cameraPosition) continue;
+
+     // Skip if this is the selected journey object (drawn separately with path)
+     if (obj.globalId === selectedJourneyId) continue;
+
+     const pos = obj.cameraPosition;
+     const color = objectColors[obj.className] || objectColors.default;
+     const ageSeconds = (Date.now() - obj.lastSeen) / 1000;
+
+     // Fade old objects
+     const alpha = Math.max(0.3, 1 - ageSeconds / 60);
+
+     // Draw object indicator
+     ctx.beginPath();
+     ctx.arc(pos.x, pos.y, 12, 0, Math.PI * 2);
+     ctx.fillStyle = color.replace(')', ', ' + alpha + ')').replace('rgb', 'rgba');
+     ctx.fill();
+     ctx.strokeStyle = 'rgba(255, 255, 255, ' + alpha + ')';
+     ctx.lineWidth = 2;
+     ctx.stroke();
+
+     // Draw class icon
+     ctx.fillStyle = 'rgba(255, 255, 255, ' + alpha + ')';
+     ctx.font = 'bold 10px sans-serif';
+     ctx.textAlign = 'center';
+     ctx.textBaseline = 'middle';
+     const icon = obj.className === 'person' ? 'P' : obj.className === 'car' ? 'C' : obj.className === 'animal' ? 'A' : '?';
+     ctx.fillText(icon, pos.x, pos.y);
+
+     // Draw label below
+     if (obj.label) {
+       ctx.font = '9px sans-serif';
+       ctx.fillText(obj.label.slice(0, 10), pos.x, pos.y + 20);
+     }
+   }
}

function drawLandmark(landmark) {