@siftd/connect-agent 0.2.9 → 0.2.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core/file-watcher.d.ts +109 -0
- package/dist/core/file-watcher.js +275 -0
- package/dist/core/memory-interface.d.ts +66 -0
- package/dist/core/memory-interface.js +6 -0
- package/dist/core/memory-postgres.d.ts +109 -0
- package/dist/core/memory-postgres.js +374 -0
- package/dist/heartbeat.js +1 -1
- package/dist/orchestrator.d.ts +21 -0
- package/dist/orchestrator.js +208 -0
- package/package.json +3 -1
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
/**
 * File Watcher for Reactive Workflows
 *
 * Enables the orchestrator to:
 * - Watch directories/files for changes
 * - Trigger callbacks on file events
 * - Support glob patterns for filtering
 * - Debounce rapid changes
 */
import { EventEmitter } from 'events';
/** Kinds of filesystem events a watcher can report. */
export type FileEventType = 'add' | 'change' | 'unlink' | 'addDir' | 'unlinkDir';
/** A single observed filesystem event. */
export interface FileEvent {
    /** What happened to the path. */
    type: FileEventType;
    /** Path the event refers to. */
    path: string;
    /** ISO-8601 time the event was observed. */
    timestamp: string;
}
/** A persisted rule describing what to watch and what to do on a match. */
export interface WatchRule {
    /** Generated unique id for the rule. */
    id: string;
    /** Path/pattern to watch. */
    pattern: string;
    /** Event types this rule reacts to. */
    events: FileEventType[];
    /** Free-text action description emitted with the trigger. */
    action: string;
    /** Per-rule debounce window in milliseconds (falls back to the default). */
    debounceMs?: number;
    /** Whether the rule is currently active. */
    enabled: boolean;
    /** ISO-8601 time the rule last fired, if ever. */
    lastTriggered?: string;
}
/** Watcher-wide configuration. */
export interface WatcherConfig {
    /** JSON file where rules are persisted. */
    rulesFile: string;
    /** Default debounce window (ms) for rules that don't set one. */
    defaultDebounce: number;
}
export declare class FileWatcher extends EventEmitter {
    private watchers;
    private rules;
    private debounceTimers;
    private config;
    private rulesFile;
    constructor(workspaceDir: string, config?: Partial<WatcherConfig>);
    /**
     * Add a watch rule
     */
    addRule(rule: Omit<WatchRule, 'id' | 'enabled'>): string;
    /**
     * Remove a watch rule
     */
    removeRule(id: string): boolean;
    /**
     * Enable/disable a rule
     */
    toggleRule(id: string, enabled: boolean): boolean;
    /**
     * List all rules
     */
    listRules(): WatchRule[];
    /**
     * Get a specific rule
     */
    getRule(id: string): WatchRule | undefined;
    /**
     * Watch a specific path
     */
    watch(targetPath: string, callback: (event: FileEvent) => void): string;
    /**
     * Stop watching a specific watcher
     */
    unwatch(watcherId: string): void;
    /**
     * Stop all watchers
     */
    stopAll(): void;
    /**
     * Get watcher status
     */
    status(): {
        activeWatchers: number;
        rules: number;
        enabledRules: number;
    };
    private watchDirectory;
    private watchFile;
    private determineEventType;
    private startWatching;
    private stopWatching;
    private loadRules;
    private saveRules;
}
/**
 * Common watch patterns for development workflows
 */
export declare const COMMON_PATTERNS: {
    typescript: {
        pattern: string;
        events: FileEventType[];
        action: string;
    };
    tests: {
        pattern: string;
        events: FileEventType[];
        action: string;
    };
    config: {
        pattern: string;
        events: FileEventType[];
        action: string;
    };
    markdown: {
        pattern: string;
        events: FileEventType[];
        action: string;
    };
};
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File Watcher for Reactive Workflows
|
|
3
|
+
*
|
|
4
|
+
* Enables the orchestrator to:
|
|
5
|
+
* - Watch directories/files for changes
|
|
6
|
+
* - Trigger callbacks on file events
|
|
7
|
+
* - Support glob patterns for filtering
|
|
8
|
+
* - Debounce rapid changes
|
|
9
|
+
*/
|
|
10
|
+
import * as fs from 'fs';
|
|
11
|
+
import * as path from 'path';
|
|
12
|
+
import { EventEmitter } from 'events';
|
|
13
|
+
export class FileWatcher extends EventEmitter {
    /** Active fs.FSWatcher instances keyed by watcher id. */
    watchers = new Map();
    /** WatchRule objects keyed by rule id. */
    rules = new Map();
    /** Pending debounce timers keyed by `${ruleId}_${path}`. */
    debounceTimers = new Map();
    /**
     * Maps rule id -> watcher id. FIX: previously the watcher id returned by
     * watch() was discarded, so stopWatching() (which matched on
     * id.includes(rule.id) against `direct_<timestamp>` ids) never found and
     * never closed a rule's watcher — disabling/removing a rule leaked the
     * fs.FSWatcher, and re-enabling opened duplicates.
     */
    ruleWatchers = new Map();
    config;
    rulesFile;
    /**
     * @param workspaceDir Directory used for the default rules-file location.
     * @param config Optional overrides for the rules file path and the default debounce.
     */
    constructor(workspaceDir, config) {
        super();
        this.rulesFile = config?.rulesFile || path.join(workspaceDir, '.watch-rules.json');
        this.config = {
            rulesFile: this.rulesFile,
            defaultDebounce: config?.defaultDebounce || 500
        };
        // Restore persisted rules and start watchers for the enabled ones.
        this.loadRules();
    }
    /**
     * Add a watch rule, persist it, and start watching immediately.
     * @returns the generated rule id.
     */
    addRule(rule) {
        const id = `watch_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`;
        const fullRule = {
            ...rule,
            id,
            enabled: true,
            // ?? (not ||) so an explicit debounce of 0 is honored
            debounceMs: rule.debounceMs ?? this.config.defaultDebounce
        };
        this.rules.set(id, fullRule);
        this.saveRules();
        this.startWatching(fullRule);
        return id;
    }
    /**
     * Remove a watch rule, closing its watcher and persisting the change.
     * @returns false when the id is unknown.
     */
    removeRule(id) {
        const rule = this.rules.get(id);
        if (!rule)
            return false;
        this.stopWatching(rule);
        this.rules.delete(id);
        this.saveRules();
        return true;
    }
    /**
     * Enable/disable a rule. Enabling an already-watching rule is a no-op.
     * @returns false when the id is unknown.
     */
    toggleRule(id, enabled) {
        const rule = this.rules.get(id);
        if (!rule)
            return false;
        rule.enabled = enabled;
        if (enabled) {
            this.startWatching(rule);
        }
        else {
            this.stopWatching(rule);
        }
        this.saveRules();
        return true;
    }
    /** List all rules, enabled and disabled. */
    listRules() {
        return Array.from(this.rules.values());
    }
    /** Get a specific rule by id, or undefined. */
    getRule(id) {
        return this.rules.get(id);
    }
    /**
     * Watch a specific path (file or directory) directly.
     * @returns a watcher id usable with unwatch().
     * @throws when the path cannot be stat'ed/watched.
     */
    watch(targetPath, callback) {
        // Random suffix avoids id collisions when called twice in the same millisecond.
        const id = `direct_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`;
        try {
            const resolvedPath = path.resolve(targetPath);
            const stat = fs.statSync(resolvedPath);
            if (stat.isDirectory()) {
                this.watchDirectory(resolvedPath, callback, id);
            }
            else {
                this.watchFile(resolvedPath, callback, id);
            }
            return id;
        }
        catch (error) {
            console.error(`[WATCHER] Failed to watch ${targetPath}:`, error);
            throw error;
        }
    }
    /** Stop a specific watcher by id (unknown ids are ignored). */
    unwatch(watcherId) {
        const watcher = this.watchers.get(watcherId);
        if (watcher) {
            watcher.close();
            this.watchers.delete(watcherId);
        }
    }
    /** Stop every watcher and cancel all pending debounce timers. */
    stopAll() {
        for (const watcher of this.watchers.values()) {
            watcher.close();
        }
        this.watchers.clear();
        this.ruleWatchers.clear();
        for (const timer of this.debounceTimers.values()) {
            clearTimeout(timer);
        }
        this.debounceTimers.clear();
    }
    /** Snapshot counters for diagnostics. */
    status() {
        return {
            activeWatchers: this.watchers.size,
            rules: this.rules.size,
            enabledRules: Array.from(this.rules.values()).filter(r => r.enabled).length
        };
    }
    watchDirectory(dirPath, callback, id) {
        const watcher = fs.watch(dirPath, { recursive: true }, (eventType, filename) => {
            if (!filename)
                return;
            const fullPath = path.join(dirPath, filename);
            callback({
                // 'rename' can mean created OR deleted; stat the path to disambiguate.
                type: eventType === 'rename' ? this.determineEventType(fullPath) : 'change',
                path: fullPath,
                timestamp: new Date().toISOString()
            });
        });
        this.watchers.set(id, watcher);
    }
    watchFile(filePath, callback, id) {
        const watcher = fs.watch(filePath, (eventType) => {
            callback({
                type: eventType === 'rename' ? 'unlink' : 'change',
                path: filePath,
                timestamp: new Date().toISOString()
            });
        });
        this.watchers.set(id, watcher);
    }
    /** Classify a 'rename': path still exists -> add/addDir, gone -> unlink. */
    determineEventType(filePath) {
        try {
            const stat = fs.statSync(filePath);
            return stat.isDirectory() ? 'addDir' : 'add';
        }
        catch {
            return 'unlink';
        }
    }
    startWatching(rule) {
        if (!rule.enabled)
            return;
        // Already watching this rule? Don't open a duplicate watcher.
        if (this.ruleWatchers.has(rule.id))
            return;
        try {
            // NOTE(review): despite the name, `pattern` is resolved as a literal
            // path here (no glob expansion) — confirm against callers before
            // relying on glob semantics.
            const targetPath = path.resolve(rule.pattern);
            // Check if path exists
            if (!fs.existsSync(targetPath)) {
                console.log(`[WATCHER] Path does not exist yet: ${targetPath}`);
                return;
            }
            const callback = (event) => {
                if (!rule.events.includes(event.type))
                    return;
                // Debounce per (rule, path) so bursts collapse into one trigger.
                const debounceKey = `${rule.id}_${event.path}`;
                const existing = this.debounceTimers.get(debounceKey);
                if (existing) {
                    clearTimeout(existing);
                }
                const timer = setTimeout(() => {
                    this.debounceTimers.delete(debounceKey);
                    rule.lastTriggered = new Date().toISOString();
                    this.saveRules();
                    // Emit event for orchestrator to handle
                    this.emit('trigger', {
                        rule,
                        event,
                        action: rule.action
                    });
                }, rule.debounceMs ?? this.config.defaultDebounce);
                this.debounceTimers.set(debounceKey, timer);
            };
            // Record the watcher id so stopWatching() can actually close it.
            const watcherId = this.watch(targetPath, callback);
            this.ruleWatchers.set(rule.id, watcherId);
            console.log(`[WATCHER] Started watching: ${rule.pattern}`);
        }
        catch (error) {
            console.error(`[WATCHER] Failed to start watching ${rule.pattern}:`, error);
        }
    }
    stopWatching(rule) {
        // Close the watcher registered for this rule.
        const watcherId = this.ruleWatchers.get(rule.id);
        if (watcherId) {
            this.unwatch(watcherId);
            this.ruleWatchers.delete(rule.id);
        }
        // Defensive sweep for any watcher whose id embeds the rule id.
        for (const [id, watcher] of this.watchers) {
            if (id.includes(rule.id)) {
                watcher.close();
                this.watchers.delete(id);
            }
        }
        // Cancel pending debounce timers so a disabled rule cannot still fire.
        for (const [key, timer] of this.debounceTimers) {
            if (key.startsWith(`${rule.id}_`)) {
                clearTimeout(timer);
                this.debounceTimers.delete(key);
            }
        }
    }
    loadRules() {
        try {
            if (fs.existsSync(this.rulesFile)) {
                const data = JSON.parse(fs.readFileSync(this.rulesFile, 'utf8'));
                for (const rule of data.rules || []) {
                    this.rules.set(rule.id, rule);
                    if (rule.enabled) {
                        this.startWatching(rule);
                    }
                }
                console.log(`[WATCHER] Loaded ${this.rules.size} watch rules`);
            }
        }
        catch (error) {
            console.error('[WATCHER] Failed to load rules:', error);
        }
    }
    saveRules() {
        try {
            const data = {
                rules: Array.from(this.rules.values()),
                savedAt: new Date().toISOString()
            };
            fs.writeFileSync(this.rulesFile, JSON.stringify(data, null, 2));
        }
        catch (error) {
            console.error('[WATCHER] Failed to save rules:', error);
        }
    }
}
|
|
251
|
+
/**
 * Common watch patterns for development workflows
 *
 * Ready-made rule templates (pattern + events + action text) intended to be
 * passed to FileWatcher.addRule(). Action strings are free-text instructions
 * consumed downstream; do not edit them casually.
 */
export const COMMON_PATTERNS = {
    // Source changes: re-check types and re-run tests.
    typescript: {
        pattern: '**/*.ts',
        events: ['change', 'add'],
        action: 'TypeScript file changed - run type check and tests'
    },
    // Test-file changes: re-run tests only.
    tests: {
        pattern: '**/*.test.ts',
        events: ['change', 'add'],
        action: 'Test file changed - run tests'
    },
    // Manifest changes: dependency review.
    config: {
        pattern: '**/package.json',
        events: ['change'],
        action: 'Package.json changed - check for dependency updates'
    },
    // Docs changes: lint links/formatting.
    markdown: {
        pattern: '**/*.md',
        events: ['change', 'add'],
        action: 'Documentation changed - validate links and formatting'
    }
};
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
/**
 * Memory Store Interface
 *
 * Common interface for memory backends (SQLite, Postgres, etc.)
 */
/** Cognitive category of a stored memory. */
export type MemoryType = 'episodic' | 'semantic' | 'procedural' | 'working';
/** A stored memory record as returned by a MemoryStore backend. */
export interface Memory {
    id: string;
    type: MemoryType;
    /** Full memory text. */
    content: string;
    /** Optional condensed form of the content. */
    summary?: string;
    /** Origin of the memory (e.g. which subsystem or user stored it). */
    source: string;
    /** ISO-8601 creation time. */
    timestamp: string;
    /** ISO-8601 time of last retrieval. */
    lastAccessed: string;
    /** Relevance weight — presumably in [0, 1]; verify per backend. */
    importance: number;
    /** Number of times this memory has been retrieved. */
    accessCount: number;
    /** Per-memory factor used by temporal-decay passes. */
    decayRate: number;
    /** IDs of associated memories. */
    associations: string[];
    tags: string[];
    /** True when a vector embedding is stored for this memory. */
    hasEmbedding: boolean;
}
/** Filters for search(). */
export interface MemorySearchOptions {
    /** Restrict results to a single memory type. */
    type?: MemoryType;
    /** Maximum number of results to return. */
    limit?: number;
    /** Drop results below this importance. */
    minImportance?: number;
}
export interface MemoryStore {
    /**
     * Store a new memory
     */
    remember(content: string, options?: {
        type?: MemoryType;
        source?: string;
        importance?: number;
        tags?: string[];
    }): Promise<string>;
    /**
     * Search memories using semantic similarity
     */
    search(query: string, options?: MemorySearchOptions): Promise<Memory[]>;
    /**
     * Get a specific memory by ID
     */
    get(id: string): Promise<Memory | null>;
    /**
     * Delete a memory
     */
    forget(id: string): Promise<boolean>;
    /**
     * Boost importance of a memory
     */
    reinforce(id: string, boost?: number): Promise<void>;
    /**
     * Get memory statistics
     *
     * Synchronous by contract; async backends may serve cached values.
     */
    stats(): {
        total: number;
        byType: Record<string, number>;
        avgImportance: number;
        totalAssociations: number;
    };
    /**
     * Close the store and release resources
     */
    close(): void | Promise<void>;
}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
/**
 * PostgreSQL Memory Store with pgvector
 *
 * Cloud-native memory backend for Railway deployments.
 * Uses pgvector extension for efficient vector similarity search.
 *
 * Features:
 * - All AdvancedMemoryStore features
 * - Scalable Postgres backend
 * - pgvector for vector similarity
 * - Shared across multiple agent instances
 */
/** Cognitive category of a stored memory. */
export type MemoryType = 'episodic' | 'semantic' | 'procedural' | 'working';
/** A stored memory record (mirrors the shape in memory-interface). */
export interface Memory {
    id: string;
    type: MemoryType;
    content: string;
    summary?: string;
    source: string;
    /** ISO-8601 creation time. */
    timestamp: string;
    /** ISO-8601 time of last retrieval. */
    lastAccessed: string;
    importance: number;
    accessCount: number;
    decayRate: number;
    associations: string[];
    tags: string[];
    hasEmbedding: boolean;
}
/** Construction options for PostgresMemoryStore (not exported). */
interface PostgresMemoryConfig {
    /** Postgres connection string (e.g. DATABASE_URL). */
    connectionString: string;
    /** Row namespace; defaults to 'default'. */
    userId?: string;
    /** Embedding width; must match the vector column. Defaults to 384. */
    dimensions?: number;
}
export declare class PostgresMemoryStore {
    private pool;
    private userId;
    private dimensions;
    private initialized;
    constructor(config: PostgresMemoryConfig);
    /**
     * Initialize database schema
     */
    initialize(): Promise<void>;
    /**
     * Store a memory
     */
    remember(content: string, options?: {
        type?: MemoryType;
        source?: string;
        importance?: number;
        tags?: string[];
    }): Promise<string>;
    /**
     * Search memories using vector similarity
     */
    search(query: string, options?: {
        type?: MemoryType;
        limit?: number;
        minImportance?: number;
    }): Promise<Memory[]>;
    /**
     * Get memory by ID
     */
    get(id: string): Promise<Memory | null>;
    /**
     * Delete a memory
     */
    forget(id: string): Promise<boolean>;
    /**
     * Update memory importance
     */
    reinforce(id: string, boost?: number): Promise<void>;
    /**
     * Get memory statistics
     *
     * Synchronous: returns cached values; refresh via updateStatsCache().
     */
    stats(): {
        total: number;
        byType: Record<string, number>;
        avgImportance: number;
        totalAssociations: number;
    };
    private _cachedStats;
    /**
     * Update stats cache (call periodically)
     */
    updateStatsCache(): Promise<void>;
    /**
     * Apply temporal decay to all memories
     */
    applyDecay(): Promise<void>;
    /**
     * Cleanup old low-importance memories
     */
    cleanup(options?: {
        maxAge?: number;
        minImportance?: number;
    }): Promise<number>;
    /**
     * Close the connection pool
     */
    close(): Promise<void>;
    private inferType;
    private calculateImportance;
}
/**
 * Check if Postgres is available and configured
 */
export declare function isPostgresConfigured(): boolean;
export {};
|
|
@@ -0,0 +1,374 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* PostgreSQL Memory Store with pgvector
|
|
3
|
+
*
|
|
4
|
+
* Cloud-native memory backend for Railway deployments.
|
|
5
|
+
* Uses pgvector extension for efficient vector similarity search.
|
|
6
|
+
*
|
|
7
|
+
* Features:
|
|
8
|
+
* - All AdvancedMemoryStore features
|
|
9
|
+
* - Scalable Postgres backend
|
|
10
|
+
* - pgvector for vector similarity
|
|
11
|
+
* - Shared across multiple agent instances
|
|
12
|
+
*/
|
|
13
|
+
import pg from 'pg';
|
|
14
|
+
const { Pool } = pg;
|
|
15
|
+
// Simple local embedding built from hashed character trigrams plus hashed
// words (same scheme as the SQLite version): each token bumps one bucket of a
// fixed-width vector, which is then L2-normalized. Deterministic, no model.
function computeEmbedding(text, dimensions = 384) {
    const vec = Array.from({ length: dimensions }, () => 0);
    const lowered = text.toLowerCase();
    // Fold one token into the vector with the given weight.
    const bump = (token, weight) => {
        vec[Math.abs(hashString(token)) % dimensions] += weight;
    };
    // Character trigrams (weight 1).
    for (let pos = 0; pos + 3 <= lowered.length; pos++) {
        bump(lowered.slice(pos, pos + 3), 1);
    }
    // Whole words (weight 2).
    for (const word of lowered.split(/\s+/)) {
        bump(word, 2);
    }
    // L2-normalize unless the vector is all zeros.
    let sumSq = 0;
    for (const v of vec) {
        sumSq += v * v;
    }
    const norm = Math.sqrt(sumSq);
    return norm > 0 ? vec.map((v) => v / norm) : vec;
}
// 32-bit string hash (Java String.hashCode style): h = h*31 + charCode,
// truncated to a signed 32-bit integer each step.
function hashString(str) {
    let acc = 0;
    for (const ch of str.split('')) {
        acc = ((acc << 5) - acc) + ch.charCodeAt(0);
        acc |= 0;
    }
    return acc;
}
|
|
51
|
+
export class PostgresMemoryStore {
    // pg.Pool created eagerly in the constructor; shared by all queries.
    pool;
    // Row namespace: every query filters on user_id = this.userId.
    userId;
    // Embedding width; must match the vector(...) column created in initialize().
    dimensions;
    // Guards initialize() so schema setup runs at most once per instance.
    initialized = false;
    constructor(config) {
        this.pool = new Pool({
            connectionString: config.connectionString,
            ssl: { rejectUnauthorized: false } // Required for Railway
        });
        this.userId = config.userId || 'default';
        this.dimensions = config.dimensions || 384;
    }
    /**
     * Initialize database schema
     *
     * Idempotent (CREATE ... IF NOT EXISTS everywhere); called lazily by
     * every public data method. NOTE(review): if `dimensions` differs from an
     * existing table's vector width, the table is NOT migrated — confirm
     * deployment keeps dimensions stable.
     */
    async initialize() {
        if (this.initialized)
            return;
        const client = await this.pool.connect();
        try {
            // Enable pgvector extension
            await client.query('CREATE EXTENSION IF NOT EXISTS vector');
            // Create memories table
            await client.query(`
        CREATE TABLE IF NOT EXISTS memories (
          id TEXT PRIMARY KEY,
          user_id TEXT NOT NULL,
          type TEXT NOT NULL,
          content TEXT NOT NULL,
          summary TEXT,
          source TEXT,
          timestamp TIMESTAMPTZ DEFAULT NOW(),
          last_accessed TIMESTAMPTZ DEFAULT NOW(),
          importance REAL DEFAULT 0.5,
          access_count INTEGER DEFAULT 0,
          decay_rate REAL DEFAULT 0.01,
          associations TEXT[] DEFAULT '{}',
          tags TEXT[] DEFAULT '{}',
          embedding vector(${this.dimensions}),
          created_at TIMESTAMPTZ DEFAULT NOW()
        )
      `);
            // Create indexes
            await client.query(`
        CREATE INDEX IF NOT EXISTS idx_memories_user_id ON memories(user_id)
      `);
            await client.query(`
        CREATE INDEX IF NOT EXISTS idx_memories_type ON memories(type)
      `);
            await client.query(`
        CREATE INDEX IF NOT EXISTS idx_memories_importance ON memories(importance DESC)
      `);
            // Create vector similarity index (IVFFlat for performance)
            await client.query(`
        CREATE INDEX IF NOT EXISTS idx_memories_embedding
        ON memories USING ivfflat (embedding vector_cosine_ops)
        WITH (lists = 100)
      `).catch(() => {
                // IVFFlat requires at least 100 rows, fall back to exact search
                console.log('[POSTGRES] IVFFlat index skipped (will use exact search)');
            });
            this.initialized = true;
            console.log('[POSTGRES] Memory store initialized');
        }
        finally {
            client.release();
        }
    }
    /**
     * Store a memory
     *
     * Type and importance are inferred from the content when not supplied.
     * The embedding is computed locally and stored as a pgvector literal.
     * @returns the generated memory id.
     */
    async remember(content, options) {
        await this.initialize();
        const id = `mem_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
        const type = options?.type || this.inferType(content);
        // ?? so an explicit importance of 0 is respected.
        const importance = options?.importance ?? this.calculateImportance(content);
        const embedding = computeEmbedding(content, this.dimensions);
        await this.pool.query(`
      INSERT INTO memories (id, user_id, type, content, source, importance, tags, embedding)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
    `, [
            id,
            this.userId,
            type,
            content,
            options?.source || 'user',
            importance,
            options?.tags || [],
            `[${embedding.join(',')}]`
        ]);
        console.log(`[POSTGRES] Stored memory ${id} (${type}, importance=${importance.toFixed(2)})`);
        return id;
    }
    /**
     * Search memories using vector similarity
     *
     * Cosine similarity via pgvector's <=> (cosine distance) operator;
     * similarity = 1 - distance. Also bumps access_count / last_accessed on
     * every returned row as a side effect.
     */
    async search(query, options) {
        await this.initialize();
        const embedding = computeEmbedding(query, this.dimensions);
        const limit = options?.limit || 10;
        const minImportance = options?.minImportance || 0;
        let sql = `
      SELECT
        id, type, content, summary, source, timestamp, last_accessed,
        importance, access_count, decay_rate, associations, tags,
        1 - (embedding <=> $1::vector) as similarity
      FROM memories
      WHERE user_id = $2
        AND importance >= $3
    `;
        const params = [
            `[${embedding.join(',')}]`,
            this.userId,
            minImportance
        ];
        if (options?.type) {
            sql += ` AND type = $${params.length + 1}`;
            params.push(options.type);
        }
        sql += ` ORDER BY similarity DESC LIMIT $${params.length + 1}`;
        params.push(limit);
        const result = await this.pool.query(sql, params);
        // Update access counts for retrieved memories
        if (result.rows.length > 0) {
            const ids = result.rows.map(r => r.id);
            await this.pool.query(`
        UPDATE memories
        SET access_count = access_count + 1,
            last_accessed = NOW()
        WHERE id = ANY($1)
      `, [ids]);
        }
        return result.rows.map(row => ({
            id: row.id,
            type: row.type,
            content: row.content,
            summary: row.summary,
            source: row.source,
            timestamp: row.timestamp.toISOString(),
            lastAccessed: row.last_accessed.toISOString(),
            importance: row.importance,
            accessCount: row.access_count,
            decayRate: row.decay_rate,
            associations: row.associations || [],
            tags: row.tags || [],
            hasEmbedding: true
        }));
    }
    /**
     * Get memory by ID
     *
     * Scoped to this.userId; returns null when not found.
     */
    async get(id) {
        await this.initialize();
        const result = await this.pool.query(`
      SELECT * FROM memories WHERE id = $1 AND user_id = $2
    `, [id, this.userId]);
        if (result.rows.length === 0)
            return null;
        const row = result.rows[0];
        return {
            id: row.id,
            type: row.type,
            content: row.content,
            summary: row.summary,
            source: row.source,
            timestamp: row.timestamp.toISOString(),
            lastAccessed: row.last_accessed.toISOString(),
            importance: row.importance,
            accessCount: row.access_count,
            decayRate: row.decay_rate,
            associations: row.associations || [],
            tags: row.tags || [],
            hasEmbedding: true
        };
    }
    /**
     * Delete a memory
     *
     * @returns true when a row was actually removed.
     */
    async forget(id) {
        await this.initialize();
        const result = await this.pool.query(`
      DELETE FROM memories WHERE id = $1 AND user_id = $2
    `, [id, this.userId]);
        return (result.rowCount ?? 0) > 0;
    }
    /**
     * Update memory importance
     *
     * Adds `boost` to importance, clamped to 1.0 via LEAST.
     */
    async reinforce(id, boost = 0.1) {
        await this.initialize();
        await this.pool.query(`
      UPDATE memories
      SET importance = LEAST(1.0, importance + $1)
      WHERE id = $2 AND user_id = $3
    `, [boost, id, this.userId]);
    }
    /**
     * Get memory statistics
     *
     * Synchronous to satisfy the MemoryStore interface; serves the cache
     * filled by updateStatsCache(), or zeroed defaults before the first
     * refresh.
     */
    stats() {
        // This is synchronous in the interface, but we need async for Postgres
        // Return cached stats or default values
        return this._cachedStats || {
            total: 0,
            byType: { episodic: 0, semantic: 0, procedural: 0, working: 0 },
            avgImportance: 0.5,
            totalAssociations: 0
        };
    }
    // Cache backing stats(); null until updateStatsCache() has run.
    _cachedStats = null;
    /**
     * Update stats cache (call periodically)
     */
    async updateStatsCache() {
        await this.initialize();
        const result = await this.pool.query(`
      SELECT
        COUNT(*) as total,
        AVG(importance) as avg_importance,
        SUM(array_length(associations, 1)) as total_associations
      FROM memories
      WHERE user_id = $1
    `, [this.userId]);
        const typeResult = await this.pool.query(`
      SELECT type, COUNT(*) as count
      FROM memories
      WHERE user_id = $1
      GROUP BY type
    `, [this.userId]);
        const byType = {
            episodic: 0, semantic: 0, procedural: 0, working: 0
        };
        for (const row of typeResult.rows) {
            byType[row.type] = parseInt(row.count);
        }
        this._cachedStats = {
            total: parseInt(result.rows[0].total) || 0,
            byType,
            avgImportance: parseFloat(result.rows[0].avg_importance) || 0.5,
            totalAssociations: parseInt(result.rows[0].total_associations) || 0
        };
    }
    /**
     * Apply temporal decay to all memories
     *
     * Multiplies importance by (1 - decay_rate) for rows not accessed within
     * a day, flooring at 0.1 via GREATEST.
     */
    async applyDecay() {
        await this.initialize();
        await this.pool.query(`
      UPDATE memories
      SET importance = GREATEST(0.1, importance * (1 - decay_rate))
      WHERE user_id = $1
        AND importance > 0.1
        AND last_accessed < NOW() - INTERVAL '1 day'
    `, [this.userId]);
    }
    /**
     * Cleanup old low-importance memories
     *
     * maxAge is given in milliseconds and converted to days for the SQL
     * interval. Defaults: 30 days / importance < 0.2.
     * @returns number of rows deleted.
     */
    async cleanup(options) {
        await this.initialize();
        const maxAgeDays = (options?.maxAge || 30 * 24 * 60 * 60 * 1000) / (1000 * 60 * 60 * 24);
        const minImportance = options?.minImportance || 0.2;
        const result = await this.pool.query(`
      DELETE FROM memories
      WHERE user_id = $1
        AND importance < $2
        AND last_accessed < NOW() - INTERVAL '1 day' * $3
    `, [this.userId, minImportance, maxAgeDays]);
        const count = result.rowCount ?? 0;
        if (count > 0) {
            console.log(`[POSTGRES] Cleaned up ${count} old memories`);
        }
        return count;
    }
    /**
     * Close the connection pool
     */
    async close() {
        await this.pool.end();
        console.log('[POSTGRES] Connection pool closed');
    }
    // Helper methods
    // Heuristic classification of content into a MemoryType by keyword.
    inferType(content) {
        const lower = content.toLowerCase();
        // Procedural indicators
        if (lower.includes('how to') || lower.includes('steps to') ||
            lower.includes('process for') || lower.includes('when doing')) {
            return 'procedural';
        }
        // Episodic indicators (past tense, specific events)
        if (lower.includes('yesterday') || lower.includes('last week') ||
            lower.includes('we did') || lower.includes('happened')) {
            return 'episodic';
        }
        // Default to semantic (facts)
        return 'semantic';
    }
    // Heuristic importance score in [0.5, 1.0]: base 0.5 plus boosts for
    // length, structure markers, and preference keywords, capped at 1.0.
    calculateImportance(content) {
        let importance = 0.5;
        // Boost for longer, detailed content
        if (content.length > 200)
            importance += 0.1;
        if (content.length > 500)
            importance += 0.1;
        // Boost for structured content
        if (content.includes(':') || content.includes('-') || content.includes('•')) {
            importance += 0.1;
        }
        // Boost for preference indicators
        const lower = content.toLowerCase();
        if (lower.includes('prefer') || lower.includes('always') ||
            lower.includes('never') || lower.includes('important')) {
            importance += 0.15;
        }
        return Math.min(1.0, importance);
    }
}
|
|
369
|
+
/**
|
|
370
|
+
* Check if Postgres is available and configured
|
|
371
|
+
*/
|
|
372
|
+
export function isPostgresConfigured() {
|
|
373
|
+
return !!process.env.DATABASE_URL;
|
|
374
|
+
}
|
package/dist/heartbeat.js
CHANGED
|
@@ -10,7 +10,7 @@ import { hostname } from 'os';
|
|
|
10
10
|
import { createHash } from 'crypto';
|
|
11
11
|
import { getServerUrl, getAgentToken, getUserId, isCloudMode } from './config.js';
|
|
12
12
|
const HEARTBEAT_INTERVAL = 10000; // 10 seconds
|
|
13
|
-
const VERSION = '0.2.
|
|
13
|
+
const VERSION = '0.2.11'; // Should match package.json
|
|
14
14
|
const state = {
|
|
15
15
|
intervalId: null,
|
|
16
16
|
runnerId: null,
|
package/dist/orchestrator.d.ts
CHANGED
|
@@ -23,6 +23,7 @@ export declare class MasterOrchestrator {
|
|
|
23
23
|
private webTools;
|
|
24
24
|
private workerTools;
|
|
25
25
|
private sharedState;
|
|
26
|
+
private fileWatcher;
|
|
26
27
|
private verboseMode;
|
|
27
28
|
constructor(options: {
|
|
28
29
|
apiKey: string;
|
|
@@ -119,6 +120,26 @@ export declare class MasterOrchestrator {
|
|
|
119
120
|
* Stop a running server on a port
|
|
120
121
|
*/
|
|
121
122
|
private executeStopServer;
|
|
123
|
+
/**
|
|
124
|
+
* Add a file watch rule
|
|
125
|
+
*/
|
|
126
|
+
private executeAddWatchRule;
|
|
127
|
+
/**
|
|
128
|
+
* Remove a file watch rule
|
|
129
|
+
*/
|
|
130
|
+
private executeRemoveWatchRule;
|
|
131
|
+
/**
|
|
132
|
+
* List all file watch rules
|
|
133
|
+
*/
|
|
134
|
+
private executeListWatchRules;
|
|
135
|
+
/**
|
|
136
|
+
* Toggle a file watch rule
|
|
137
|
+
*/
|
|
138
|
+
private executeToggleWatchRule;
|
|
139
|
+
/**
|
|
140
|
+
* Get file watcher status
|
|
141
|
+
*/
|
|
142
|
+
private executeWatchStatus;
|
|
122
143
|
/**
|
|
123
144
|
* Format tool preview for user
|
|
124
145
|
*/
|
package/dist/orchestrator.js
CHANGED
|
@@ -10,6 +10,7 @@ import { existsSync } from 'fs';
|
|
|
10
10
|
import { AdvancedMemoryStore } from './core/memory-advanced.js';
|
|
11
11
|
import { TaskScheduler } from './core/scheduler.js';
|
|
12
12
|
import { SystemIndexer } from './core/system-indexer.js';
|
|
13
|
+
import { FileWatcher } from './core/file-watcher.js';
|
|
13
14
|
import { BashTool } from './tools/bash.js';
|
|
14
15
|
import { WebTools } from './tools/web.js';
|
|
15
16
|
import { WorkerTools } from './tools/worker.js';
|
|
@@ -87,6 +88,7 @@ export class MasterOrchestrator {
|
|
|
87
88
|
webTools;
|
|
88
89
|
workerTools;
|
|
89
90
|
sharedState;
|
|
91
|
+
fileWatcher;
|
|
90
92
|
verboseMode = new Map(); // per-user verbose mode
|
|
91
93
|
constructor(options) {
|
|
92
94
|
this.client = new Anthropic({ apiKey: options.apiKey });
|
|
@@ -111,6 +113,14 @@ export class MasterOrchestrator {
|
|
|
111
113
|
this.webTools = new WebTools();
|
|
112
114
|
this.workerTools = new WorkerTools(this.workspaceDir);
|
|
113
115
|
this.sharedState = new SharedState(this.workspaceDir);
|
|
116
|
+
this.fileWatcher = new FileWatcher(this.workspaceDir);
|
|
117
|
+
// Set up file watcher trigger handler
|
|
118
|
+
this.fileWatcher.on('trigger', async ({ rule, event, action }) => {
|
|
119
|
+
console.log(`[WATCHER] Triggered: ${rule.pattern} (${event.type}: ${event.path})`);
|
|
120
|
+
// Auto-delegate to worker when file changes
|
|
121
|
+
const task = `${action}\n\nTriggered by file change:\n- File: ${event.path}\n- Event: ${event.type}\n- Rule: ${rule.pattern}`;
|
|
122
|
+
this.delegateToWorker(task, undefined, this.workspaceDir);
|
|
123
|
+
});
|
|
114
124
|
}
|
|
115
125
|
/**
|
|
116
126
|
* Initialize the orchestrator - indexes filesystem on first call
|
|
@@ -593,6 +603,84 @@ Be specific about what you want done.`,
|
|
|
593
603
|
},
|
|
594
604
|
required: ['port']
|
|
595
605
|
}
|
|
606
|
+
},
|
|
607
|
+
// File watching tools for reactive workflows
|
|
608
|
+
{
|
|
609
|
+
name: 'add_watch_rule',
|
|
610
|
+
description: 'Add a file watch rule to automatically trigger actions when files change. Useful for reactive workflows like auto-running tests when code changes.',
|
|
611
|
+
input_schema: {
|
|
612
|
+
type: 'object',
|
|
613
|
+
properties: {
|
|
614
|
+
pattern: {
|
|
615
|
+
type: 'string',
|
|
616
|
+
description: 'File path or directory to watch (e.g., "./src", "/path/to/file.ts")'
|
|
617
|
+
},
|
|
618
|
+
events: {
|
|
619
|
+
type: 'array',
|
|
620
|
+
items: { type: 'string', enum: ['add', 'change', 'unlink', 'addDir', 'unlinkDir'] },
|
|
621
|
+
description: 'Events to watch for (default: ["change", "add"])'
|
|
622
|
+
},
|
|
623
|
+
action: {
|
|
624
|
+
type: 'string',
|
|
625
|
+
description: 'Task description to execute when triggered (sent to a worker)'
|
|
626
|
+
},
|
|
627
|
+
debounce_ms: {
|
|
628
|
+
type: 'number',
|
|
629
|
+
description: 'Debounce delay in milliseconds (default: 500)'
|
|
630
|
+
}
|
|
631
|
+
},
|
|
632
|
+
required: ['pattern', 'action']
|
|
633
|
+
}
|
|
634
|
+
},
|
|
635
|
+
{
|
|
636
|
+
name: 'remove_watch_rule',
|
|
637
|
+
description: 'Remove a file watch rule by its ID.',
|
|
638
|
+
input_schema: {
|
|
639
|
+
type: 'object',
|
|
640
|
+
properties: {
|
|
641
|
+
rule_id: {
|
|
642
|
+
type: 'string',
|
|
643
|
+
description: 'The ID of the watch rule to remove'
|
|
644
|
+
}
|
|
645
|
+
},
|
|
646
|
+
required: ['rule_id']
|
|
647
|
+
}
|
|
648
|
+
},
|
|
649
|
+
{
|
|
650
|
+
name: 'list_watch_rules',
|
|
651
|
+
description: 'List all active file watch rules.',
|
|
652
|
+
input_schema: {
|
|
653
|
+
type: 'object',
|
|
654
|
+
properties: {},
|
|
655
|
+
required: []
|
|
656
|
+
}
|
|
657
|
+
},
|
|
658
|
+
{
|
|
659
|
+
name: 'toggle_watch_rule',
|
|
660
|
+
description: 'Enable or disable a file watch rule.',
|
|
661
|
+
input_schema: {
|
|
662
|
+
type: 'object',
|
|
663
|
+
properties: {
|
|
664
|
+
rule_id: {
|
|
665
|
+
type: 'string',
|
|
666
|
+
description: 'The ID of the watch rule to toggle'
|
|
667
|
+
},
|
|
668
|
+
enabled: {
|
|
669
|
+
type: 'boolean',
|
|
670
|
+
description: 'Whether to enable (true) or disable (false) the rule'
|
|
671
|
+
}
|
|
672
|
+
},
|
|
673
|
+
required: ['rule_id', 'enabled']
|
|
674
|
+
}
|
|
675
|
+
},
|
|
676
|
+
{
|
|
677
|
+
name: 'watch_status',
|
|
678
|
+
description: 'Get the status of the file watcher system.',
|
|
679
|
+
input_schema: {
|
|
680
|
+
type: 'object',
|
|
681
|
+
properties: {},
|
|
682
|
+
required: []
|
|
683
|
+
}
|
|
596
684
|
}
|
|
597
685
|
];
|
|
598
686
|
}
|
|
@@ -672,6 +760,22 @@ Be specific about what you want done.`,
|
|
|
672
760
|
case 'stop_local_server':
|
|
673
761
|
result = await this.executeStopServer(input.port);
|
|
674
762
|
break;
|
|
763
|
+
// File watching tools
|
|
764
|
+
case 'add_watch_rule':
|
|
765
|
+
result = await this.executeAddWatchRule(input.pattern, input.action, input.events, input.debounce_ms);
|
|
766
|
+
break;
|
|
767
|
+
case 'remove_watch_rule':
|
|
768
|
+
result = await this.executeRemoveWatchRule(input.rule_id);
|
|
769
|
+
break;
|
|
770
|
+
case 'list_watch_rules':
|
|
771
|
+
result = await this.executeListWatchRules();
|
|
772
|
+
break;
|
|
773
|
+
case 'toggle_watch_rule':
|
|
774
|
+
result = await this.executeToggleWatchRule(input.rule_id, input.enabled);
|
|
775
|
+
break;
|
|
776
|
+
case 'watch_status':
|
|
777
|
+
result = await this.executeWatchStatus();
|
|
778
|
+
break;
|
|
675
779
|
default:
|
|
676
780
|
result = { success: false, output: `Unknown tool: ${toolUse.name}` };
|
|
677
781
|
}
|
|
@@ -1068,6 +1172,99 @@ This enables parallel workers to coordinate.`;
|
|
|
1068
1172
|
return { success: false, output: `No server running on port ${port}` };
|
|
1069
1173
|
}
|
|
1070
1174
|
}
|
|
1175
|
+
/**
|
|
1176
|
+
* Add a file watch rule
|
|
1177
|
+
*/
|
|
1178
|
+
async executeAddWatchRule(pattern, action, events, debounceMs) {
|
|
1179
|
+
try {
|
|
1180
|
+
const validEvents = ['add', 'change', 'unlink', 'addDir', 'unlinkDir'];
|
|
1181
|
+
const eventList = events
|
|
1182
|
+
? events.filter((e) => validEvents.includes(e))
|
|
1183
|
+
: ['change', 'add'];
|
|
1184
|
+
const ruleId = this.fileWatcher.addRule({
|
|
1185
|
+
pattern,
|
|
1186
|
+
events: eventList,
|
|
1187
|
+
action,
|
|
1188
|
+
debounceMs
|
|
1189
|
+
});
|
|
1190
|
+
console.log(`[ORCHESTRATOR] Added watch rule ${ruleId}: ${pattern}`);
|
|
1191
|
+
return {
|
|
1192
|
+
success: true,
|
|
1193
|
+
output: `Watch rule added:\n- ID: ${ruleId}\n- Pattern: ${pattern}\n- Events: ${eventList.join(', ')}\n- Action: ${action}\n- Debounce: ${debounceMs || 500}ms`
|
|
1194
|
+
};
|
|
1195
|
+
}
|
|
1196
|
+
catch (error) {
|
|
1197
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
1198
|
+
return { success: false, output: `Failed to add watch rule: ${msg}` };
|
|
1199
|
+
}
|
|
1200
|
+
}
|
|
1201
|
+
/**
|
|
1202
|
+
* Remove a file watch rule
|
|
1203
|
+
*/
|
|
1204
|
+
async executeRemoveWatchRule(ruleId) {
|
|
1205
|
+
try {
|
|
1206
|
+
const removed = this.fileWatcher.removeRule(ruleId);
|
|
1207
|
+
if (removed) {
|
|
1208
|
+
console.log(`[ORCHESTRATOR] Removed watch rule ${ruleId}`);
|
|
1209
|
+
return { success: true, output: `Watch rule ${ruleId} removed` };
|
|
1210
|
+
}
|
|
1211
|
+
return { success: false, output: `Watch rule ${ruleId} not found` };
|
|
1212
|
+
}
|
|
1213
|
+
catch (error) {
|
|
1214
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
1215
|
+
return { success: false, output: `Failed to remove watch rule: ${msg}` };
|
|
1216
|
+
}
|
|
1217
|
+
}
|
|
1218
|
+
/**
|
|
1219
|
+
* List all file watch rules
|
|
1220
|
+
*/
|
|
1221
|
+
async executeListWatchRules() {
|
|
1222
|
+
try {
|
|
1223
|
+
const rules = this.fileWatcher.listRules();
|
|
1224
|
+
if (rules.length === 0) {
|
|
1225
|
+
return { success: true, output: 'No watch rules configured.' };
|
|
1226
|
+
}
|
|
1227
|
+
const output = rules.map(r => `${r.enabled ? '✓' : '⏸'} ${r.id}\n Pattern: ${r.pattern}\n Events: ${r.events.join(', ')}\n Action: ${r.action.slice(0, 60)}...${r.lastTriggered ? `\n Last triggered: ${r.lastTriggered}` : ''}`).join('\n\n');
|
|
1228
|
+
return { success: true, output: `Watch Rules (${rules.length}):\n\n${output}` };
|
|
1229
|
+
}
|
|
1230
|
+
catch (error) {
|
|
1231
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
1232
|
+
return { success: false, output: `Failed to list watch rules: ${msg}` };
|
|
1233
|
+
}
|
|
1234
|
+
}
|
|
1235
|
+
/**
|
|
1236
|
+
* Toggle a file watch rule
|
|
1237
|
+
*/
|
|
1238
|
+
async executeToggleWatchRule(ruleId, enabled) {
|
|
1239
|
+
try {
|
|
1240
|
+
const toggled = this.fileWatcher.toggleRule(ruleId, enabled);
|
|
1241
|
+
if (toggled) {
|
|
1242
|
+
console.log(`[ORCHESTRATOR] ${enabled ? 'Enabled' : 'Disabled'} watch rule ${ruleId}`);
|
|
1243
|
+
return { success: true, output: `Watch rule ${ruleId} ${enabled ? 'enabled' : 'disabled'}` };
|
|
1244
|
+
}
|
|
1245
|
+
return { success: false, output: `Watch rule ${ruleId} not found` };
|
|
1246
|
+
}
|
|
1247
|
+
catch (error) {
|
|
1248
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
1249
|
+
return { success: false, output: `Failed to toggle watch rule: ${msg}` };
|
|
1250
|
+
}
|
|
1251
|
+
}
|
|
1252
|
+
/**
|
|
1253
|
+
* Get file watcher status
|
|
1254
|
+
*/
|
|
1255
|
+
async executeWatchStatus() {
|
|
1256
|
+
try {
|
|
1257
|
+
const status = this.fileWatcher.status();
|
|
1258
|
+
return {
|
|
1259
|
+
success: true,
|
|
1260
|
+
output: `File Watcher Status:\n- Active watchers: ${status.activeWatchers}\n- Total rules: ${status.rules}\n- Enabled rules: ${status.enabledRules}`
|
|
1261
|
+
};
|
|
1262
|
+
}
|
|
1263
|
+
catch (error) {
|
|
1264
|
+
const msg = error instanceof Error ? error.message : String(error);
|
|
1265
|
+
return { success: false, output: `Failed to get watcher status: ${msg}` };
|
|
1266
|
+
}
|
|
1267
|
+
}
|
|
1071
1268
|
/**
|
|
1072
1269
|
* Format tool preview for user
|
|
1073
1270
|
*/
|
|
@@ -1111,6 +1308,16 @@ This enables parallel workers to coordinate.`;
|
|
|
1111
1308
|
return `Starting server on port ${input.port || 8080}...`;
|
|
1112
1309
|
case 'stop_local_server':
|
|
1113
1310
|
return `Stopping server on port ${input.port}...`;
|
|
1311
|
+
case 'add_watch_rule':
|
|
1312
|
+
return `Adding watch rule for ${input.pattern}...`;
|
|
1313
|
+
case 'remove_watch_rule':
|
|
1314
|
+
return `Removing watch rule ${input.rule_id}...`;
|
|
1315
|
+
case 'list_watch_rules':
|
|
1316
|
+
return 'Listing watch rules...';
|
|
1317
|
+
case 'toggle_watch_rule':
|
|
1318
|
+
return `${input.enabled ? 'Enabling' : 'Disabling'} watch rule ${input.rule_id}...`;
|
|
1319
|
+
case 'watch_status':
|
|
1320
|
+
return 'Getting watcher status...';
|
|
1114
1321
|
default:
|
|
1115
1322
|
return null;
|
|
1116
1323
|
}
|
|
@@ -1131,6 +1338,7 @@ This enables parallel workers to coordinate.`;
|
|
|
1131
1338
|
* Shutdown cleanly
|
|
1132
1339
|
*/
|
|
1133
1340
|
shutdown() {
|
|
1341
|
+
this.fileWatcher.stopAll();
|
|
1134
1342
|
this.scheduler.shutdown();
|
|
1135
1343
|
this.memory.close();
|
|
1136
1344
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@siftd/connect-agent",
|
|
3
|
-
"version": "0.2.
|
|
3
|
+
"version": "0.2.11",
|
|
4
4
|
"description": "Master orchestrator agent - control Claude Code remotely via web",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -43,6 +43,7 @@
|
|
|
43
43
|
"conf": "^13.0.1",
|
|
44
44
|
"node-cron": "^3.0.3",
|
|
45
45
|
"ora": "^8.1.1",
|
|
46
|
+
"pg": "^8.13.1",
|
|
46
47
|
"vectra": "^0.9.0",
|
|
47
48
|
"ws": "^8.18.3"
|
|
48
49
|
},
|
|
@@ -50,6 +51,7 @@
|
|
|
50
51
|
"@types/better-sqlite3": "^7.6.12",
|
|
51
52
|
"@types/node": "^22.10.2",
|
|
52
53
|
"@types/node-cron": "^3.0.11",
|
|
54
|
+
"@types/pg": "^8.11.10",
|
|
53
55
|
"@types/ws": "^8.18.1",
|
|
54
56
|
"typescript": "^5.7.2"
|
|
55
57
|
}
|