@qwickapps/qwickbrain-proxy 1.0.2 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/CHANGELOG.md +17 -0
  2. package/dist/db/schema.d.ts +63 -6
  3. package/dist/db/schema.d.ts.map +1 -1
  4. package/dist/db/schema.js +17 -2
  5. package/dist/db/schema.js.map +1 -1
  6. package/dist/lib/__tests__/cache-manager.test.js +146 -83
  7. package/dist/lib/__tests__/cache-manager.test.js.map +1 -1
  8. package/dist/lib/__tests__/connection-manager.test.js +2 -2
  9. package/dist/lib/__tests__/connection-manager.test.js.map +1 -1
  10. package/dist/lib/__tests__/proxy-server.test.js +16 -44
  11. package/dist/lib/__tests__/proxy-server.test.js.map +1 -1
  12. package/dist/lib/__tests__/qwickbrain-client.test.js +3 -1
  13. package/dist/lib/__tests__/qwickbrain-client.test.js.map +1 -1
  14. package/dist/lib/__tests__/sse-invalidation-listener.test.d.ts +2 -0
  15. package/dist/lib/__tests__/sse-invalidation-listener.test.d.ts.map +1 -0
  16. package/dist/lib/__tests__/sse-invalidation-listener.test.js +245 -0
  17. package/dist/lib/__tests__/sse-invalidation-listener.test.js.map +1 -0
  18. package/dist/lib/__tests__/write-queue-manager.test.d.ts +2 -0
  19. package/dist/lib/__tests__/write-queue-manager.test.d.ts.map +1 -0
  20. package/dist/lib/__tests__/write-queue-manager.test.js +291 -0
  21. package/dist/lib/__tests__/write-queue-manager.test.js.map +1 -0
  22. package/dist/lib/cache-manager.d.ts +35 -6
  23. package/dist/lib/cache-manager.d.ts.map +1 -1
  24. package/dist/lib/cache-manager.js +154 -41
  25. package/dist/lib/cache-manager.js.map +1 -1
  26. package/dist/lib/connection-manager.d.ts +7 -0
  27. package/dist/lib/connection-manager.d.ts.map +1 -1
  28. package/dist/lib/connection-manager.js +57 -8
  29. package/dist/lib/connection-manager.js.map +1 -1
  30. package/dist/lib/proxy-server.d.ts +12 -0
  31. package/dist/lib/proxy-server.d.ts.map +1 -1
  32. package/dist/lib/proxy-server.js +184 -87
  33. package/dist/lib/proxy-server.js.map +1 -1
  34. package/dist/lib/qwickbrain-client.d.ts +4 -0
  35. package/dist/lib/qwickbrain-client.d.ts.map +1 -1
  36. package/dist/lib/qwickbrain-client.js +152 -13
  37. package/dist/lib/qwickbrain-client.js.map +1 -1
  38. package/dist/lib/sse-invalidation-listener.d.ts +31 -0
  39. package/dist/lib/sse-invalidation-listener.d.ts.map +1 -0
  40. package/dist/lib/sse-invalidation-listener.js +151 -0
  41. package/dist/lib/sse-invalidation-listener.js.map +1 -0
  42. package/dist/lib/tools.d.ts +21 -0
  43. package/dist/lib/tools.d.ts.map +1 -0
  44. package/dist/lib/tools.js +513 -0
  45. package/dist/lib/tools.js.map +1 -0
  46. package/dist/lib/write-queue-manager.d.ts +88 -0
  47. package/dist/lib/write-queue-manager.d.ts.map +1 -0
  48. package/dist/lib/write-queue-manager.js +191 -0
  49. package/dist/lib/write-queue-manager.js.map +1 -0
  50. package/dist/types/config.d.ts +7 -42
  51. package/dist/types/config.d.ts.map +1 -1
  52. package/dist/types/config.js +1 -6
  53. package/dist/types/config.js.map +1 -1
  54. package/drizzle/0002_lru_cache_migration.sql +94 -0
  55. package/drizzle/meta/_journal.json +7 -0
  56. package/package.json +6 -2
  57. package/scripts/rebuild-sqlite.sh +26 -0
  58. package/src/db/schema.ts +17 -2
  59. package/src/lib/__tests__/cache-manager.test.ts +180 -90
  60. package/src/lib/__tests__/connection-manager.test.ts +2 -2
  61. package/src/lib/__tests__/proxy-server.test.ts +16 -51
  62. package/src/lib/__tests__/qwickbrain-client.test.ts +3 -1
  63. package/src/lib/__tests__/sse-invalidation-listener.test.ts +326 -0
  64. package/src/lib/__tests__/write-queue-manager.test.ts +383 -0
  65. package/src/lib/cache-manager.ts +198 -46
  66. package/src/lib/connection-manager.ts +67 -8
  67. package/src/lib/proxy-server.ts +231 -90
  68. package/src/lib/qwickbrain-client.ts +166 -12
  69. package/src/lib/sse-invalidation-listener.ts +185 -0
  70. package/src/lib/tools.ts +525 -0
  71. package/src/lib/write-queue-manager.ts +271 -0
  72. package/src/types/config.ts +1 -6
  73. package/.github/workflows/publish.yml +0 -92
@@ -0,0 +1,326 @@
1
+ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
2
+ import { mkdtempSync, rmSync } from 'fs';
3
+ import { join } from 'path';
4
+ import { tmpdir } from 'os';
5
+ import { createDatabase, runMigrations } from '../../db/client.js';
6
+ import { CacheManager } from '../cache-manager.js';
7
+ import { SSEInvalidationListener } from '../sse-invalidation-listener.js';
8
+ import type { Config } from '../../types/config.js';
9
+ import { EventSource } from 'eventsource';
10
+
11
+ // Mock EventSource
12
+ vi.mock('eventsource', () => {
13
+ const EventSourceMock = vi.fn();
14
+ return { EventSource: EventSourceMock };
15
+ });
16
+
17
+ describe('SSEInvalidationListener', () => {
18
+ let tmpDir: string;
19
+ let cacheManager: CacheManager;
20
+ let listener: SSEInvalidationListener;
21
+ let db: ReturnType<typeof createDatabase>['db'];
22
+ let mockEventSource: any;
23
+ let eventListeners: Map<string, Function[]>;
24
+
25
+ beforeEach(() => {
26
+ // Create temporary directory for test database
27
+ tmpDir = mkdtempSync(join(tmpdir(), 'sse-test-'));
28
+ const dbResult = createDatabase(tmpDir);
29
+ db = dbResult.db;
30
+
31
+ // Run migrations to create tables
32
+ runMigrations(db);
33
+
34
+ const config: Config['cache'] = {
35
+ dir: tmpDir,
36
+ maxCacheSizeBytes: 100 * 1024 * 1024,
37
+ preload: [],
38
+ };
39
+
40
+ cacheManager = new CacheManager(db, config);
41
+
42
+ // Setup EventSource mock
43
+ eventListeners = new Map();
44
+
45
+ mockEventSource = {
46
+ readyState: 1, // OPEN
47
+ addEventListener: vi.fn((event: string, handler: Function) => {
48
+ if (!eventListeners.has(event)) {
49
+ eventListeners.set(event, []);
50
+ }
51
+ eventListeners.get(event)!.push(handler);
52
+ }),
53
+ close: vi.fn(),
54
+ onopen: null,
55
+ onerror: null,
56
+ };
57
+
58
+ (EventSource as any).mockImplementation(() => mockEventSource);
59
+ (EventSource as any).OPEN = 1;
60
+ (EventSource as any).CLOSED = 2;
61
+
62
+ listener = new SSEInvalidationListener(
63
+ 'http://test.local:3000',
64
+ cacheManager,
65
+ 'test-api-key'
66
+ );
67
+ });
68
+
69
+ afterEach(() => {
70
+ listener.stop();
71
+ rmSync(tmpDir, { recursive: true, force: true });
72
+ vi.clearAllMocks();
73
+ });
74
+
75
+ describe('start', () => {
76
+ it('should connect to SSE endpoint', async () => {
77
+ await listener.start();
78
+
79
+ expect(EventSource).toHaveBeenCalledWith(
80
+ 'http://test.local:3000/sse/cache-invalidation',
81
+ expect.objectContaining({
82
+ headers: expect.objectContaining({
83
+ Authorization: 'Bearer test-api-key',
84
+ }),
85
+ })
86
+ );
87
+ });
88
+
89
+ it('should register event listeners', async () => {
90
+ await listener.start();
91
+
92
+ expect(mockEventSource.addEventListener).toHaveBeenCalledWith(
93
+ 'document:invalidate',
94
+ expect.any(Function)
95
+ );
96
+ expect(mockEventSource.addEventListener).toHaveBeenCalledWith(
97
+ 'memory:invalidate',
98
+ expect.any(Function)
99
+ );
100
+ expect(mockEventSource.addEventListener).toHaveBeenCalledWith(
101
+ 'cache:invalidate:batch',
102
+ expect.any(Function)
103
+ );
104
+ });
105
+
106
+ it('should call onopen handler', async () => {
107
+ await listener.start();
108
+
109
+ // Trigger onopen
110
+ mockEventSource.onopen?.();
111
+
112
+ // Should be listening
113
+ expect(listener.isListening()).toBe(true);
114
+ });
115
+ });
116
+
117
+ describe('document invalidation', () => {
118
+ it('should invalidate document cache on event', async () => {
119
+ // Pre-populate cache
120
+ await cacheManager.setDocument('workflow', 'test-workflow', 'content');
121
+
122
+ const cached1 = await cacheManager.getDocument('workflow', 'test-workflow');
123
+ expect(cached1).not.toBeNull();
124
+
125
+ // Start listener
126
+ await listener.start();
127
+
128
+ // Trigger invalidation event
129
+ const handlers = eventListeners.get('document:invalidate') || [];
130
+ const invalidationEvent = {
131
+ data: JSON.stringify({
132
+ type: 'document',
133
+ docType: 'workflow',
134
+ name: 'test-workflow',
135
+ }),
136
+ };
137
+
138
+ for (const handler of handlers) {
139
+ await handler(invalidationEvent);
140
+ }
141
+
142
+ // Cache should be invalidated
143
+ const cached2 = await cacheManager.getDocument('workflow', 'test-workflow');
144
+ expect(cached2).toBeNull();
145
+ });
146
+
147
+ it('should invalidate project-scoped document', async () => {
148
+ await cacheManager.setDocument('rule', 'test-rule', 'content', 'my-project');
149
+
150
+ const cached1 = await cacheManager.getDocument('rule', 'test-rule', 'my-project');
151
+ expect(cached1).not.toBeNull();
152
+
153
+ await listener.start();
154
+
155
+ const handlers = eventListeners.get('document:invalidate') || [];
156
+ const invalidationEvent = {
157
+ data: JSON.stringify({
158
+ type: 'document',
159
+ docType: 'rule',
160
+ name: 'test-rule',
161
+ project: 'my-project',
162
+ }),
163
+ };
164
+
165
+ for (const handler of handlers) {
166
+ await handler(invalidationEvent);
167
+ }
168
+
169
+ const cached2 = await cacheManager.getDocument('rule', 'test-rule', 'my-project');
170
+ expect(cached2).toBeNull();
171
+ });
172
+ });
173
+
174
+ describe('memory invalidation', () => {
175
+ it('should invalidate memory cache on event', async () => {
176
+ await cacheManager.setMemory('test-memory', 'content');
177
+
178
+ const cached1 = await cacheManager.getMemory('test-memory');
179
+ expect(cached1).not.toBeNull();
180
+
181
+ await listener.start();
182
+
183
+ const handlers = eventListeners.get('memory:invalidate') || [];
184
+ const invalidationEvent = {
185
+ data: JSON.stringify({
186
+ type: 'memory',
187
+ name: 'test-memory',
188
+ }),
189
+ };
190
+
191
+ for (const handler of handlers) {
192
+ await handler(invalidationEvent);
193
+ }
194
+
195
+ const cached2 = await cacheManager.getMemory('test-memory');
196
+ expect(cached2).toBeNull();
197
+ });
198
+
199
+ it('should invalidate project-scoped memory', async () => {
200
+ await cacheManager.setMemory('test-memory', 'content', 'my-project');
201
+
202
+ const cached1 = await cacheManager.getMemory('test-memory', 'my-project');
203
+ expect(cached1).not.toBeNull();
204
+
205
+ await listener.start();
206
+
207
+ const handlers = eventListeners.get('memory:invalidate') || [];
208
+ const invalidationEvent = {
209
+ data: JSON.stringify({
210
+ type: 'memory',
211
+ name: 'test-memory',
212
+ project: 'my-project',
213
+ }),
214
+ };
215
+
216
+ for (const handler of handlers) {
217
+ await handler(invalidationEvent);
218
+ }
219
+
220
+ const cached2 = await cacheManager.getMemory('test-memory', 'my-project');
221
+ expect(cached2).toBeNull();
222
+ });
223
+ });
224
+
225
+ describe('batch invalidation', () => {
226
+ it('should invalidate multiple items in batch', async () => {
227
+ // Pre-populate cache
228
+ await cacheManager.setDocument('workflow', 'wf1', 'content');
229
+ await cacheManager.setDocument('rule', 'rule1', 'content');
230
+ await cacheManager.setMemory('mem1', 'content');
231
+
232
+ await listener.start();
233
+
234
+ const handlers = eventListeners.get('cache:invalidate:batch') || [];
235
+ const batchEvent = {
236
+ data: JSON.stringify([
237
+ { type: 'document', docType: 'workflow', name: 'wf1' },
238
+ { type: 'document', docType: 'rule', name: 'rule1' },
239
+ { type: 'memory', name: 'mem1' },
240
+ ]),
241
+ };
242
+
243
+ for (const handler of handlers) {
244
+ await handler(batchEvent);
245
+ }
246
+
247
+ // All should be invalidated
248
+ const wf1 = await cacheManager.getDocument('workflow', 'wf1');
249
+ const rule1 = await cacheManager.getDocument('rule', 'rule1');
250
+ const mem1 = await cacheManager.getMemory('mem1');
251
+
252
+ expect(wf1).toBeNull();
253
+ expect(rule1).toBeNull();
254
+ expect(mem1).toBeNull();
255
+ });
256
+ });
257
+
258
+ describe('error handling', () => {
259
+ it('should handle malformed invalidation events', async () => {
260
+ await listener.start();
261
+
262
+ const handlers = eventListeners.get('document:invalidate') || [];
263
+
264
+ // Should not throw on malformed JSON
265
+ expect(() => {
266
+ for (const handler of handlers) {
267
+ handler({ data: 'invalid json' });
268
+ }
269
+ }).not.toThrow();
270
+ });
271
+
272
+ it('should handle missing docType in document event', async () => {
273
+ await cacheManager.setDocument('workflow', 'test', 'content');
274
+
275
+ await listener.start();
276
+
277
+ const handlers = eventListeners.get('document:invalidate') || [];
278
+ const invalidEvent = {
279
+ data: JSON.stringify({
280
+ type: 'document',
281
+ name: 'test',
282
+ // Missing docType
283
+ }),
284
+ };
285
+
286
+ // Should not throw
287
+ expect(() => {
288
+ for (const handler of handlers) {
289
+ handler(invalidEvent);
290
+ }
291
+ }).not.toThrow();
292
+
293
+ // Cache should still be present (invalidation skipped)
294
+ const cached = await cacheManager.getDocument('workflow', 'test');
295
+ expect(cached).not.toBeNull();
296
+ });
297
+ });
298
+
299
+ describe('stop', () => {
300
+ it('should close EventSource and stop listening', async () => {
301
+ await listener.start();
302
+
303
+ expect(listener.isListening()).toBe(true);
304
+
305
+ listener.stop();
306
+
307
+ expect(mockEventSource.close).toHaveBeenCalled();
308
+ expect(listener.isListening()).toBe(false);
309
+ });
310
+
311
+ it('should prevent reconnection after stop', async () => {
312
+ await listener.start();
313
+
314
+ listener.stop();
315
+
316
+ // Trigger error (would normally trigger reconnect)
317
+ mockEventSource.onerror?.(new Error('Connection lost'));
318
+
319
+ // Wait a bit
320
+ await new Promise(resolve => setTimeout(resolve, 100));
321
+
322
+ // Should not have tried to reconnect
323
+ expect(EventSource).toHaveBeenCalledTimes(1);
324
+ });
325
+ });
326
+ });
@@ -0,0 +1,383 @@
1
+ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
2
+ import { mkdtempSync, rmSync } from 'fs';
3
+ import { join } from 'path';
4
+ import { tmpdir } from 'os';
5
+ import { createDatabase, runMigrations } from '../../db/client.js';
6
+ import { WriteQueueManager } from '../write-queue-manager.js';
7
+ import { QwickBrainClient } from '../qwickbrain-client.js';
8
+ import type { Config } from '../../types/config.js';
9
+
10
+ describe('WriteQueueManager', () => {
11
+ let tmpDir: string;
12
+ let writeQueueManager: WriteQueueManager;
13
+ let qwickbrainClient: QwickBrainClient;
14
+ let db: ReturnType<typeof createDatabase>['db'];
15
+
16
+ beforeEach(() => {
17
+ // Create temporary directory for test database
18
+ tmpDir = mkdtempSync(join(tmpdir(), 'queue-test-'));
19
+ const dbResult = createDatabase(tmpDir);
20
+ db = dbResult.db;
21
+
22
+ // Run migrations to create tables
23
+ runMigrations(db);
24
+
25
+ // Create mock client
26
+ const config: Config['qwickbrain'] = {
27
+ mode: 'sse',
28
+ url: 'http://test.local:3000',
29
+ };
30
+
31
+ qwickbrainClient = new QwickBrainClient(config);
32
+
33
+ // Mock the write methods
34
+ vi.spyOn(qwickbrainClient, 'createDocument').mockResolvedValue();
35
+ vi.spyOn(qwickbrainClient, 'setMemory').mockResolvedValue();
36
+ vi.spyOn(qwickbrainClient, 'deleteDocument').mockResolvedValue();
37
+ vi.spyOn(qwickbrainClient, 'deleteMemory').mockResolvedValue();
38
+
39
+ writeQueueManager = new WriteQueueManager(db, qwickbrainClient);
40
+ });
41
+
42
+ afterEach(() => {
43
+ // Clean up temporary directory
44
+ rmSync(tmpDir, { recursive: true, force: true });
45
+ vi.restoreAllMocks();
46
+ });
47
+
48
+ describe('queueOperation', () => {
49
+ it('should queue a create_document operation', async () => {
50
+ await writeQueueManager.queueOperation('create_document', {
51
+ docType: 'workflow',
52
+ name: 'test-workflow',
53
+ content: 'workflow content',
54
+ });
55
+
56
+ const stats = await writeQueueManager.getQueueStats();
57
+ expect(stats.pending).toBe(1);
58
+ expect(stats.total).toBe(1);
59
+ });
60
+
61
+ it('should queue a set_memory operation', async () => {
62
+ await writeQueueManager.queueOperation('set_memory', {
63
+ name: 'test-memory',
64
+ content: 'memory content',
65
+ });
66
+
67
+ const stats = await writeQueueManager.getQueueStats();
68
+ expect(stats.pending).toBe(1);
69
+ });
70
+
71
+ it('should queue multiple operations', async () => {
72
+ await writeQueueManager.queueOperation('create_document', {
73
+ docType: 'workflow',
74
+ name: 'wf1',
75
+ content: 'content1',
76
+ });
77
+
78
+ await writeQueueManager.queueOperation('set_memory', {
79
+ name: 'mem1',
80
+ content: 'content2',
81
+ });
82
+
83
+ await writeQueueManager.queueOperation('create_document', {
84
+ docType: 'rule',
85
+ name: 'rule1',
86
+ content: 'content3',
87
+ });
88
+
89
+ const stats = await writeQueueManager.getQueueStats();
90
+ expect(stats.pending).toBe(3);
91
+ expect(stats.total).toBe(3);
92
+ });
93
+ });
94
+
95
+ describe('syncPendingOperations', () => {
96
+ it('should sync pending create_document operations', async () => {
97
+ await writeQueueManager.queueOperation('create_document', {
98
+ docType: 'workflow',
99
+ name: 'test-workflow',
100
+ content: 'workflow content',
101
+ project: 'my-project',
102
+ metadata: { author: 'test' },
103
+ });
104
+
105
+ const { synced, failed } = await writeQueueManager.syncPendingOperations();
106
+
107
+ expect(synced).toBe(1);
108
+ expect(failed).toBe(0);
109
+ expect(qwickbrainClient.createDocument).toHaveBeenCalledWith(
110
+ 'workflow',
111
+ 'test-workflow',
112
+ 'workflow content',
113
+ 'my-project',
114
+ { author: 'test' }
115
+ );
116
+
117
+ const stats = await writeQueueManager.getQueueStats();
118
+ expect(stats.pending).toBe(0);
119
+ expect(stats.total).toBe(0); // Completed operations are cleaned up
120
+ });
121
+
122
+ it('should sync pending set_memory operations', async () => {
123
+ await writeQueueManager.queueOperation('set_memory', {
124
+ name: 'test-memory',
125
+ content: 'memory content',
126
+ project: 'my-project',
127
+ });
128
+
129
+ const { synced, failed } = await writeQueueManager.syncPendingOperations();
130
+
131
+ expect(synced).toBe(1);
132
+ expect(failed).toBe(0);
133
+ expect(qwickbrainClient.setMemory).toHaveBeenCalledWith(
134
+ 'test-memory',
135
+ 'memory content',
136
+ 'my-project',
137
+ undefined
138
+ );
139
+ });
140
+
141
+ it('should sync multiple operations in order (FIFO)', async () => {
142
+ const callOrder: string[] = [];
143
+
144
+ vi.spyOn(qwickbrainClient, 'createDocument').mockImplementation(async (docType, name) => {
145
+ callOrder.push(`doc:${name}`);
146
+ });
147
+
148
+ vi.spyOn(qwickbrainClient, 'setMemory').mockImplementation(async (name) => {
149
+ callOrder.push(`mem:${name}`);
150
+ });
151
+
152
+ await writeQueueManager.queueOperation('create_document', {
153
+ docType: 'workflow',
154
+ name: 'first',
155
+ content: 'content',
156
+ });
157
+
158
+ await writeQueueManager.queueOperation('set_memory', {
159
+ name: 'second',
160
+ content: 'content',
161
+ });
162
+
163
+ await writeQueueManager.queueOperation('create_document', {
164
+ docType: 'rule',
165
+ name: 'third',
166
+ content: 'content',
167
+ });
168
+
169
+ const { synced } = await writeQueueManager.syncPendingOperations();
170
+
171
+ expect(synced).toBe(3);
172
+ expect(callOrder).toEqual(['doc:first', 'mem:second', 'doc:third']);
173
+ });
174
+
175
+ it('should handle operation failures and retry', async () => {
176
+ let callCount = 0;
177
+ vi.spyOn(qwickbrainClient, 'createDocument').mockImplementation(async () => {
178
+ callCount++;
179
+ if (callCount < 3) {
180
+ throw new Error('Network error');
181
+ }
182
+ });
183
+
184
+ await writeQueueManager.queueOperation('create_document', {
185
+ docType: 'workflow',
186
+ name: 'test',
187
+ content: 'content',
188
+ });
189
+
190
+ // First sync - should fail (attempt 1)
191
+ let result = await writeQueueManager.syncPendingOperations();
192
+ expect(result.synced).toBe(0);
193
+ expect(result.failed).toBe(0);
194
+
195
+ let stats = await writeQueueManager.getQueueStats();
196
+ expect(stats.pending).toBe(1); // Still pending
197
+
198
+ // Second sync - should fail (attempt 2)
199
+ result = await writeQueueManager.syncPendingOperations();
200
+ expect(result.synced).toBe(0);
201
+ expect(result.failed).toBe(0);
202
+
203
+ // Third sync - should succeed (attempt 3)
204
+ result = await writeQueueManager.syncPendingOperations();
205
+ expect(result.synced).toBe(1);
206
+ expect(result.failed).toBe(0);
207
+
208
+ stats = await writeQueueManager.getQueueStats();
209
+ expect(stats.pending).toBe(0);
210
+ });
211
+
212
+ it('should mark operation as failed after max attempts', async () => {
213
+ vi.spyOn(qwickbrainClient, 'createDocument').mockRejectedValue(new Error('Permanent error'));
214
+
215
+ await writeQueueManager.queueOperation('create_document', {
216
+ docType: 'workflow',
217
+ name: 'test',
218
+ content: 'content',
219
+ });
220
+
221
+ // Attempt 1
222
+ let result = await writeQueueManager.syncPendingOperations();
223
+ expect(result.failed).toBe(0);
224
+
225
+ // Attempt 2
226
+ result = await writeQueueManager.syncPendingOperations();
227
+ expect(result.failed).toBe(0);
228
+
229
+ // Attempt 3 - max reached, marked as failed
230
+ result = await writeQueueManager.syncPendingOperations();
231
+ expect(result.synced).toBe(0);
232
+ expect(result.failed).toBe(1);
233
+
234
+ const stats = await writeQueueManager.getQueueStats();
235
+ expect(stats.pending).toBe(0);
236
+ expect(stats.failed).toBe(1);
237
+
238
+ const failedOps = await writeQueueManager.getFailedOperations();
239
+ expect(failedOps.length).toBe(1);
240
+ expect(failedOps[0].error).toContain('Permanent error');
241
+ expect(failedOps[0].attempts).toBe(3);
242
+ });
243
+
244
+ it('should skip sync if already syncing', async () => {
245
+ // Queue an operation that takes time
246
+ let resolveSync: () => void;
247
+ const syncPromise = new Promise<void>((resolve) => {
248
+ resolveSync = resolve;
249
+ });
250
+
251
+ vi.spyOn(qwickbrainClient, 'createDocument').mockImplementation(async () => {
252
+ await syncPromise;
253
+ });
254
+
255
+ await writeQueueManager.queueOperation('create_document', {
256
+ docType: 'workflow',
257
+ name: 'test',
258
+ content: 'content',
259
+ });
260
+
261
+ // Start first sync (won't complete)
262
+ const sync1 = writeQueueManager.syncPendingOperations();
263
+
264
+ // Start second sync while first is running
265
+ const sync2 = writeQueueManager.syncPendingOperations();
266
+
267
+ // Second should skip
268
+ const result2 = await sync2;
269
+ expect(result2.synced).toBe(0);
270
+ expect(result2.failed).toBe(0);
271
+
272
+ // Complete first sync
273
+ resolveSync!();
274
+ const result1 = await sync1;
275
+ expect(result1.synced).toBe(1);
276
+ });
277
+ });
278
+
279
+ describe('retryOperation', () => {
280
+ it('should reset a failed operation for retry', async () => {
281
+ vi.spyOn(qwickbrainClient, 'createDocument').mockRejectedValue(new Error('Error'));
282
+
283
+ await writeQueueManager.queueOperation('create_document', {
284
+ docType: 'workflow',
285
+ name: 'test',
286
+ content: 'content',
287
+ });
288
+
289
+ // Fail 3 times to mark as failed
290
+ await writeQueueManager.syncPendingOperations();
291
+ await writeQueueManager.syncPendingOperations();
292
+ await writeQueueManager.syncPendingOperations();
293
+
294
+ let stats = await writeQueueManager.getQueueStats();
295
+ expect(stats.failed).toBe(1);
296
+
297
+ const failedOps = await writeQueueManager.getFailedOperations();
298
+ const opId = failedOps[0].id;
299
+
300
+ // Fix the mock
301
+ vi.spyOn(qwickbrainClient, 'createDocument').mockResolvedValue();
302
+
303
+ // Retry the operation
304
+ await writeQueueManager.retryOperation(opId);
305
+
306
+ stats = await writeQueueManager.getQueueStats();
307
+ expect(stats.pending).toBe(1);
308
+ expect(stats.failed).toBe(0);
309
+
310
+ // Sync should now succeed
311
+ const result = await writeQueueManager.syncPendingOperations();
312
+ expect(result.synced).toBe(1);
313
+ });
314
+ });
315
+
316
+ describe('clearFailed', () => {
317
+ it('should clear all failed operations', async () => {
318
+ vi.spyOn(qwickbrainClient, 'createDocument').mockRejectedValue(new Error('Error'));
319
+
320
+ // Queue and fail 2 operations
321
+ await writeQueueManager.queueOperation('create_document', {
322
+ docType: 'workflow',
323
+ name: 'test1',
324
+ content: 'content',
325
+ });
326
+
327
+ await writeQueueManager.queueOperation('create_document', {
328
+ docType: 'rule',
329
+ name: 'test2',
330
+ content: 'content',
331
+ });
332
+
333
+ // Fail them
334
+ for (let i = 0; i < 3; i++) {
335
+ await writeQueueManager.syncPendingOperations();
336
+ }
337
+
338
+ let stats = await writeQueueManager.getQueueStats();
339
+ expect(stats.failed).toBe(2);
340
+
341
+ const cleared = await writeQueueManager.clearFailed();
342
+ expect(cleared).toBe(2);
343
+
344
+ stats = await writeQueueManager.getQueueStats();
345
+ expect(stats.failed).toBe(0);
346
+ expect(stats.total).toBe(0);
347
+ });
348
+ });
349
+
350
+ describe('delete operations', () => {
351
+ it('should sync delete_document operations', async () => {
352
+ await writeQueueManager.queueOperation('delete_document', {
353
+ docType: 'workflow',
354
+ name: 'test-workflow',
355
+ project: 'my-project',
356
+ });
357
+
358
+ const { synced } = await writeQueueManager.syncPendingOperations();
359
+
360
+ expect(synced).toBe(1);
361
+ expect(qwickbrainClient.deleteDocument).toHaveBeenCalledWith(
362
+ 'workflow',
363
+ 'test-workflow',
364
+ 'my-project'
365
+ );
366
+ });
367
+
368
+ it('should sync delete_memory operations', async () => {
369
+ await writeQueueManager.queueOperation('delete_memory', {
370
+ name: 'test-memory',
371
+ project: 'my-project',
372
+ });
373
+
374
+ const { synced } = await writeQueueManager.syncPendingOperations();
375
+
376
+ expect(synced).toBe(1);
377
+ expect(qwickbrainClient.deleteMemory).toHaveBeenCalledWith(
378
+ 'test-memory',
379
+ 'my-project'
380
+ );
381
+ });
382
+ });
383
+ });