@zooid/server 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/package.json +37 -0
- package/src/cloudflare-test.d.ts +4 -0
- package/src/db/queries.test.ts +501 -0
- package/src/db/queries.ts +450 -0
- package/src/db/schema.sql +56 -0
- package/src/do/channel.ts +69 -0
- package/src/index.ts +88 -0
- package/src/lib/jwt.test.ts +89 -0
- package/src/lib/jwt.ts +28 -0
- package/src/lib/schema-validator.test.ts +101 -0
- package/src/lib/schema-validator.ts +64 -0
- package/src/lib/signing.test.ts +73 -0
- package/src/lib/signing.ts +60 -0
- package/src/lib/ulid.test.ts +25 -0
- package/src/lib/ulid.ts +8 -0
- package/src/lib/validation.test.ts +35 -0
- package/src/lib/validation.ts +8 -0
- package/src/lib/xml.ts +13 -0
- package/src/middleware/auth.test.ts +125 -0
- package/src/middleware/auth.ts +103 -0
- package/src/routes/channels.test.ts +335 -0
- package/src/routes/channels.ts +220 -0
- package/src/routes/directory.test.ts +223 -0
- package/src/routes/directory.ts +109 -0
- package/src/routes/events.test.ts +477 -0
- package/src/routes/events.ts +315 -0
- package/src/routes/feed.test.ts +238 -0
- package/src/routes/feed.ts +101 -0
- package/src/routes/opml.test.ts +131 -0
- package/src/routes/opml.ts +41 -0
- package/src/routes/rss.test.ts +224 -0
- package/src/routes/rss.ts +91 -0
- package/src/routes/server-meta.test.ts +157 -0
- package/src/routes/server-meta.ts +100 -0
- package/src/routes/webhooks.test.ts +238 -0
- package/src/routes/webhooks.ts +111 -0
- package/src/routes/well-known.test.ts +34 -0
- package/src/routes/well-known.ts +58 -0
- package/src/routes/ws.test.ts +503 -0
- package/src/routes/ws.ts +25 -0
- package/src/test-utils.ts +79 -0
- package/src/types.ts +63 -0
- package/wrangler.toml +26 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2026 zooid-ai
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/package.json
ADDED
@@ -0,0 +1,37 @@
+{
+  "name": "@zooid/server",
+  "version": "0.0.1",
+  "type": "module",
+  "license": "MIT",
+  "author": "Ori Ben",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/zooid-ai/zooid",
+    "directory": "packages/server"
+  },
+  "files": [
+    "src",
+    "wrangler.toml"
+  ],
+  "dependencies": {
+    "@cfworker/json-schema": "^4.1.1",
+    "chanfana": "^3.0.0",
+    "fast-xml-parser": "^5.3.6",
+    "hono": "^4.11.9",
+    "ulidx": "^2.4.1",
+    "yaml": "^2.8.2",
+    "zod": "^4.3.6",
+    "@zooid/web": "0.0.1",
+    "@zooid/types": "0.0.1"
+  },
+  "devDependencies": {
+    "@cloudflare/vitest-pool-workers": "^0.12.13",
+    "@cloudflare/workers-types": "^4.20260217.0",
+    "wrangler": "^4.66.0"
+  },
+  "scripts": {
+    "dev": "wrangler dev",
+    "deploy": "wrangler deploy",
+    "test": "vitest run"
+  }
+}
package/src/db/queries.test.ts
ADDED
@@ -0,0 +1,501 @@
+import { describe, it, expect, beforeAll, beforeEach } from 'vitest';
+import { env } from 'cloudflare:test';
+import { setupTestDb, cleanTestDb } from '../test-utils';
+import {
+  createChannel,
+  listChannels,
+  createEvent,
+  createEvents,
+  pollEvents,
+  cleanupExpiredEvents,
+  createWebhook,
+  deleteWebhook,
+  getWebhooksForChannel,
+  getServerMeta,
+  upsertServerMeta,
+} from './queries';
+
+describe('Event queries', () => {
+  beforeAll(async () => {
+    await setupTestDb();
+  });
+
+  beforeEach(async () => {
+    await cleanTestDb();
+    await createChannel(env.DB, {
+      id: 'test-channel',
+      name: 'Test Channel',
+    });
+  });
+
+  describe('createEvent', () => {
+    it('creates an event with ULID and returns it', async () => {
+      const event = await createEvent(env.DB, {
+        channelId: 'test-channel',
+        publisherId: 'pub-1',
+        type: 'signal',
+        data: { message: 'hello' },
+      });
+
+      expect(event.id).toHaveLength(26); // ULID
+      expect(event.channel_id).toBe('test-channel');
+      expect(event.publisher_id).toBe('pub-1');
+      expect(event.type).toBe('signal');
+      expect(event.data).toBe(JSON.stringify({ message: 'hello' }));
+      expect(event.created_at).toBeTruthy();
+    });
+
+    it('rejects payloads over 64KB', async () => {
+      const bigData = { content: 'x'.repeat(65 * 1024) };
+      await expect(
+        createEvent(env.DB, {
+          channelId: 'test-channel',
+          data: bigData,
+        }),
+      ).rejects.toThrow(/64KB/);
+    });
+  });
+
+  describe('createEvents (batch)', () => {
+    it('creates multiple events atomically', async () => {
+      const events = await createEvents(env.DB, 'test-channel', 'pub-1', [
+        { type: 'a', data: { v: 1 } },
+        { type: 'b', data: { v: 2 } },
+      ]);
+
+      expect(events).toHaveLength(2);
+      expect(events[0].type).toBe('a');
+      expect(events[1].type).toBe('b');
+      // ULIDs should be monotonically ordered
+      expect(events[1].id > events[0].id).toBe(true);
+    });
+
+    it('rejects batches over 100 events', async () => {
+      const tooMany = Array.from({ length: 101 }, (_, i) => ({
+        type: 'x',
+        data: { i },
+      }));
+      await expect(
+        createEvents(env.DB, 'test-channel', null, tooMany),
+      ).rejects.toThrow(/100/);
+    });
+  });
+
+  describe('pollEvents', () => {
+    it('returns events for a channel ordered by creation', async () => {
+      await createEvent(env.DB, {
+        channelId: 'test-channel',
+        type: 'first',
+        data: { n: 1 },
+      });
+      await createEvent(env.DB, {
+        channelId: 'test-channel',
+        type: 'second',
+        data: { n: 2 },
+      });
+
+      const result = await pollEvents(env.DB, 'test-channel', {});
+      expect(result.events).toHaveLength(2);
+      expect(result.events[0].type).toBe('first');
+      expect(result.events[1].type).toBe('second');
+      expect(result.has_more).toBe(false);
+    });
+
+    it('filters by type', async () => {
+      await createEvent(env.DB, {
+        channelId: 'test-channel',
+        type: 'signal',
+        data: {},
+      });
+      await createEvent(env.DB, {
+        channelId: 'test-channel',
+        type: 'alert',
+        data: {},
+      });
+
+      const result = await pollEvents(env.DB, 'test-channel', {
+        type: 'signal',
+      });
+      expect(result.events).toHaveLength(1);
+      expect(result.events[0].type).toBe('signal');
+    });
+
+    it('supports cursor-based pagination', async () => {
+      for (let i = 0; i < 5; i++) {
+        await createEvent(env.DB, {
+          channelId: 'test-channel',
+          type: 'evt',
+          data: { i },
+        });
+      }
+
+      const page1 = await pollEvents(env.DB, 'test-channel', { limit: 2 });
+      expect(page1.events).toHaveLength(2);
+      expect(page1.has_more).toBe(true);
+      expect(page1.cursor).toBeTruthy();
+
+      const page2 = await pollEvents(env.DB, 'test-channel', {
+        limit: 2,
+        cursor: page1.cursor!,
+      });
+      expect(page2.events).toHaveLength(2);
+      expect(page2.has_more).toBe(true);
+
+      const page3 = await pollEvents(env.DB, 'test-channel', {
+        limit: 2,
+        cursor: page2.cursor!,
+      });
+      expect(page3.events).toHaveLength(1);
+      expect(page3.has_more).toBe(false);
+    });
+
+    it('supports since parameter (ISO timestamp)', async () => {
+      await env.DB.prepare(
+        `INSERT INTO events (id, channel_id, type, data, created_at) VALUES (?, ?, ?, ?, ?)`,
+      )
+        .bind(
+          '00000000000000000000000001',
+          'test-channel',
+          'old',
+          '{}',
+          '2026-02-01T00:00:00Z',
+        )
+        .run();
+      await env.DB.prepare(
+        `INSERT INTO events (id, channel_id, type, data, created_at) VALUES (?, ?, ?, ?, ?)`,
+      )
+        .bind(
+          '00000000000000000000000002',
+          'test-channel',
+          'new',
+          '{}',
+          '2026-02-17T00:00:00Z',
+        )
+        .run();
+
+      const result = await pollEvents(env.DB, 'test-channel', {
+        since: '2026-02-10T00:00:00Z',
+      });
+      expect(result.events).toHaveLength(1);
+      expect(result.events[0].type).toBe('new');
+    });
+
+    it('defaults to limit of 50', async () => {
+      const result = await pollEvents(env.DB, 'test-channel', {});
+      expect(result.events).toBeInstanceOf(Array);
+    });
+  });
+
+  describe('cleanupExpiredEvents', () => {
+    it('deletes events older than 7 days', async () => {
+      await env.DB.prepare(
+        `INSERT INTO events (id, channel_id, type, data, created_at) VALUES (?, ?, ?, ?, ?)`,
+      )
+        .bind(
+          '00000000000000000000000001',
+          'test-channel',
+          'old',
+          '{}',
+          '2020-01-01T00:00:00Z',
+        )
+        .run();
+      await createEvent(env.DB, {
+        channelId: 'test-channel',
+        type: 'fresh',
+        data: {},
+      });
+
+      const deleted = await cleanupExpiredEvents(env.DB, 'test-channel');
+      expect(deleted).toBe(1);
+
+      const result = await pollEvents(env.DB, 'test-channel', {});
+      expect(result.events).toHaveLength(1);
+      expect(result.events[0].type).toBe('fresh');
+    });
+  });
+});
+
+describe('Webhook queries', () => {
+  beforeAll(async () => {
+    await setupTestDb();
+  });
+
+  beforeEach(async () => {
+    await cleanTestDb();
+    await createChannel(env.DB, { id: 'wh-channel', name: 'WH Channel' });
+  });
+
+  describe('createWebhook', () => {
+    it('creates a webhook for a channel with default 3-day TTL', async () => {
+      const webhook = await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://example.com/hook',
+        eventTypes: ['signal'],
+      });
+
+      expect(webhook.id).toBeTruthy();
+      expect(webhook.channel_id).toBe('wh-channel');
+      expect(webhook.url).toBe('https://example.com/hook');
+      expect(webhook.expires_at).toBeTruthy();
+      const expiresAt = new Date(webhook.expires_at).getTime();
+      const threeDaysFromNow = Date.now() + 3 * 24 * 60 * 60 * 1000;
+      expect(Math.abs(expiresAt - threeDaysFromNow)).toBeLessThan(5000);
+    });
+
+    it('creates a webhook with custom TTL', async () => {
+      const webhook = await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://example.com/hook',
+        ttlSeconds: 3600,
+      });
+
+      expect(webhook.id).toBeTruthy();
+      const expiresAt = new Date(webhook.expires_at).getTime();
+      const oneHourFromNow = Date.now() + 3600 * 1000;
+      expect(Math.abs(expiresAt - oneHourFromNow)).toBeLessThan(5000);
+    });
+
+    it('creates a webhook with no event type filter', async () => {
+      const webhook = await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://example.com/hook',
+      });
+
+      expect(webhook.id).toBeTruthy();
+    });
+
+    it('re-registration of same URL extends expires_at (upsert)', async () => {
+      const wh1 = await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://example.com/hook',
+        ttlSeconds: 3600,
+      });
+
+      const wh2 = await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://example.com/hook',
+        ttlSeconds: 86400,
+      });
+
+      expect(wh2.id).toBe(wh1.id);
+      const expires1 = new Date(wh1.expires_at).getTime();
+      const expires2 = new Date(wh2.expires_at).getTime();
+      expect(expires2).toBeGreaterThan(expires1);
+
+      const webhooks = await getWebhooksForChannel(env.DB, 'wh-channel');
+      expect(webhooks).toHaveLength(1);
+    });
+  });
+
+  describe('deleteWebhook', () => {
+    it('deletes a webhook by ID', async () => {
+      const webhook = await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://example.com/hook',
+      });
+
+      const deleted = await deleteWebhook(env.DB, webhook.id, 'wh-channel');
+      expect(deleted).toBe(true);
+
+      const webhooks = await getWebhooksForChannel(env.DB, 'wh-channel');
+      expect(webhooks).toHaveLength(0);
+    });
+
+    it('returns false for non-existent webhook', async () => {
+      const deleted = await deleteWebhook(env.DB, 'nonexistent', 'wh-channel');
+      expect(deleted).toBe(false);
+    });
+  });
+
+  describe('getWebhooksForChannel', () => {
+    it('returns all non-expired webhooks for a channel', async () => {
+      await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://a.com/hook',
+      });
+      await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://b.com/hook',
+        eventTypes: ['alert'],
+      });
+
+      const webhooks = await getWebhooksForChannel(env.DB, 'wh-channel');
+      expect(webhooks).toHaveLength(2);
+    });
+
+    it('filters by event type when provided', async () => {
+      await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://a.com/hook',
+        eventTypes: ['signal'],
+      });
+      await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://b.com/hook',
+      });
+
+      const webhooks = await getWebhooksForChannel(
+        env.DB,
+        'wh-channel',
+        'signal',
+      );
+      expect(webhooks).toHaveLength(2);
+    });
+
+    it('excludes expired webhooks', async () => {
+      await env.DB.prepare(
+        `INSERT INTO webhooks (id, channel_id, url, expires_at) VALUES (?, ?, ?, ?)`,
+      )
+        .bind(
+          'expired-wh',
+          'wh-channel',
+          'https://expired.com/hook',
+          '2020-01-01T00:00:00Z',
+        )
+        .run();
+
+      await createWebhook(env.DB, {
+        channelId: 'wh-channel',
+        url: 'https://valid.com/hook',
+      });
+
+      const webhooks = await getWebhooksForChannel(env.DB, 'wh-channel');
+      expect(webhooks).toHaveLength(1);
+      expect(webhooks[0].url).toBe('https://valid.com/hook');
+    });
+  });
+});
+
+describe('Channel tags', () => {
+  beforeAll(async () => {
+    await setupTestDb();
+  });
+
+  beforeEach(async () => {
+    await cleanTestDb();
+  });
+
+  it('creates a channel with tags', async () => {
+    const channel = await createChannel(env.DB, {
+      id: 'tagged-channel',
+      name: 'Tagged Channel',
+      tags: ['ai', 'crypto'],
+    });
+
+    expect(channel.tags).toBe(JSON.stringify(['ai', 'crypto']));
+  });
+
+  it('creates a channel without tags (null)', async () => {
+    const channel = await createChannel(env.DB, {
+      id: 'no-tags',
+      name: 'No Tags',
+    });
+
+    expect(channel.tags).toBeNull();
+  });
+
+  it('lists channels with parsed tags', async () => {
+    await createChannel(env.DB, {
+      id: 'tagged',
+      name: 'Tagged',
+      tags: ['ai', 'agents'],
+    });
+    await createChannel(env.DB, {
+      id: 'untagged',
+      name: 'Untagged',
+    });
+
+    const channels = await listChannels(env.DB);
+    const tagged = channels.find((c) => c.id === 'tagged')!;
+    const untagged = channels.find((c) => c.id === 'untagged')!;
+
+    expect(tagged.tags).toEqual(['ai', 'agents']);
+    expect(untagged.tags).toEqual([]);
+  });
+});
+
+describe('Server meta queries', () => {
+  beforeAll(async () => {
+    await setupTestDb();
+  });
+
+  beforeEach(async () => {
+    await cleanTestDb();
+  });
+
+  describe('getServerMeta', () => {
+    it('returns null when no row exists', async () => {
+      const meta = await getServerMeta(env.DB);
+      expect(meta).toBeNull();
+    });
+
+    it('returns the stored metadata', async () => {
+      await upsertServerMeta(env.DB, {
+        name: 'Test Server',
+        description: 'A test',
+        tags: ['test'],
+      });
+
+      const meta = await getServerMeta(env.DB);
+      expect(meta).not.toBeNull();
+      expect(meta!.name).toBe('Test Server');
+      expect(meta!.description).toBe('A test');
+      expect(meta!.tags).toEqual(['test']);
+    });
+  });
+
+  describe('upsertServerMeta', () => {
+    it('inserts a new row with defaults', async () => {
+      const meta = await upsertServerMeta(env.DB, {
+        name: 'My Server',
+      });
+
+      expect(meta.name).toBe('My Server');
+      expect(meta.description).toBeNull();
+      expect(meta.tags).toEqual([]);
+      expect(meta.owner).toBeNull();
+      expect(meta.company).toBeNull();
+      expect(meta.email).toBeNull();
+      expect(meta.updated_at).toBeTruthy();
+    });
+
+    it('inserts with all fields', async () => {
+      const meta = await upsertServerMeta(env.DB, {
+        name: 'Full Server',
+        description: 'All fields set',
+        tags: ['a', 'b'],
+        owner: 'alice',
+        company: 'Acme',
+        email: 'alice@acme.com',
+      });
+
+      expect(meta.name).toBe('Full Server');
+      expect(meta.description).toBe('All fields set');
+      expect(meta.tags).toEqual(['a', 'b']);
+      expect(meta.owner).toBe('alice');
+      expect(meta.company).toBe('Acme');
+      expect(meta.email).toBe('alice@acme.com');
+    });
+
+    it('updates existing row on second call', async () => {
+      await upsertServerMeta(env.DB, {
+        name: 'First',
+        owner: 'alice',
+      });
+
+      const meta = await upsertServerMeta(env.DB, {
+        name: 'Second',
+        owner: 'bob',
+      });
+
+      expect(meta.name).toBe('Second');
+      expect(meta.owner).toBe('bob');
+
+      // Verify only one row exists
+      const result = await env.DB
+        .prepare('SELECT COUNT(*) as count FROM server_meta')
+        .first<{ count: number }>();
+      expect(result!.count).toBe(1);
+    });
+  });
+});