orez 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/README.md +116 -0
  2. package/dist/config.d.ts +15 -0
  3. package/dist/config.d.ts.map +1 -0
  4. package/dist/config.js +20 -0
  5. package/dist/config.js.map +1 -0
  6. package/dist/index.d.ts +15 -0
  7. package/dist/index.d.ts.map +1 -0
  8. package/dist/index.js +195 -0
  9. package/dist/index.js.map +1 -0
  10. package/dist/pg-proxy.d.ts +14 -0
  11. package/dist/pg-proxy.d.ts.map +1 -0
  12. package/dist/pg-proxy.js +385 -0
  13. package/dist/pg-proxy.js.map +1 -0
  14. package/dist/pglite-manager.d.ts +5 -0
  15. package/dist/pglite-manager.d.ts.map +1 -0
  16. package/dist/pglite-manager.js +71 -0
  17. package/dist/pglite-manager.js.map +1 -0
  18. package/dist/replication/change-tracker.d.ts +14 -0
  19. package/dist/replication/change-tracker.d.ts.map +1 -0
  20. package/dist/replication/change-tracker.js +86 -0
  21. package/dist/replication/change-tracker.js.map +1 -0
  22. package/dist/replication/handler.d.ts +24 -0
  23. package/dist/replication/handler.d.ts.map +1 -0
  24. package/dist/replication/handler.js +300 -0
  25. package/dist/replication/handler.js.map +1 -0
  26. package/dist/replication/pgoutput-encoder.d.ts +26 -0
  27. package/dist/replication/pgoutput-encoder.d.ts.map +1 -0
  28. package/dist/replication/pgoutput-encoder.js +204 -0
  29. package/dist/replication/pgoutput-encoder.js.map +1 -0
  30. package/dist/s3-local.d.ts +8 -0
  31. package/dist/s3-local.d.ts.map +1 -0
  32. package/dist/s3-local.js +131 -0
  33. package/dist/s3-local.js.map +1 -0
  34. package/package.json +56 -0
  35. package/src/config.ts +40 -0
  36. package/src/index.ts +255 -0
  37. package/src/pg-proxy.ts +474 -0
  38. package/src/pglite-manager.ts +105 -0
  39. package/src/replication/change-tracker.test.ts +179 -0
  40. package/src/replication/change-tracker.ts +115 -0
  41. package/src/replication/handler.test.ts +331 -0
  42. package/src/replication/handler.ts +378 -0
  43. package/src/replication/pgoutput-encoder.test.ts +381 -0
  44. package/src/replication/pgoutput-encoder.ts +252 -0
  45. package/src/replication/tcp-replication.test.ts +824 -0
  46. package/src/replication/zero-compat.test.ts +882 -0
  47. package/src/s3-local.ts +179 -0
@@ -0,0 +1,179 @@
1
+ import { describe, it, expect, beforeEach, afterEach } from 'vitest'
2
+ import { PGlite } from '@electric-sql/pglite'
3
+ import { installChangeTracking, getChangesSince, getCurrentWatermark } from './change-tracker'
4
+
5
// End-to-end tests for the trigger-based change tracker: each test runs
// against a fresh in-memory PGlite database with tracking installed on
// public.items, then inspects the rows captured in _zero_changes.
describe('change-tracker', () => {
  let db: PGlite

  beforeEach(async () => {
    db = new PGlite()
    await db.waitReady
    await db.exec(`
      CREATE TABLE public.items (
        id SERIAL PRIMARY KEY,
        name TEXT NOT NULL,
        value INTEGER
      )
    `)
    await installChangeTracking(db)
  })

  afterEach(async () => {
    await db.close()
  })

  it('captures INSERT', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)

    const changes = await getChangesSince(db, 0)
    expect(changes).toHaveLength(1)
    expect(changes[0].op).toBe('INSERT')
    expect(changes[0].table_name).toBe('items')
    expect(changes[0].row_data).toMatchObject({ name: 'a', value: 1 })
    // INSERT has no previous row image
    expect(changes[0].old_data).toBeNull()
  })

  it('captures UPDATE with old + new data', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)
    await db.exec(`UPDATE public.items SET value = 99 WHERE name = 'a'`)

    const changes = await getChangesSince(db, 0)
    expect(changes).toHaveLength(2)
    expect(changes[1].op).toBe('UPDATE')
    expect(changes[1].row_data).toMatchObject({ value: 99 })
    expect(changes[1].old_data).toMatchObject({ value: 1 })
  })

  it('captures DELETE with old data', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)
    await db.exec(`DELETE FROM public.items WHERE name = 'a'`)

    const changes = await getChangesSince(db, 0)
    expect(changes).toHaveLength(2)
    expect(changes[1].op).toBe('DELETE')
    expect(changes[1].old_data).toMatchObject({ name: 'a', value: 1 })
    // DELETE has no new row image
    expect(changes[1].row_data).toBeNull()
  })

  it('watermarks increase monotonically', async () => {
    for (let i = 0; i < 5; i++) {
      await db.exec(`INSERT INTO public.items (name, value) VALUES ('item${i}', ${i})`)
    }

    const changes = await getChangesSince(db, 0)
    expect(changes).toHaveLength(5)
    for (let i = 1; i < changes.length; i++) {
      expect(changes[i].watermark).toBeGreaterThan(changes[i - 1].watermark)
    }
  })

  it('getChangesSince filters by watermark', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('b', 2)`)

    const all = await getChangesSince(db, 0)
    // the filter is exclusive: changes strictly after the given watermark
    const afterFirst = await getChangesSince(db, all[0].watermark)

    expect(afterFirst).toHaveLength(1)
    expect(afterFirst[0].row_data).toMatchObject({ name: 'b' })
  })

  it('respects limit', async () => {
    for (let i = 0; i < 10; i++) {
      await db.exec(`INSERT INTO public.items (name, value) VALUES ('x', ${i})`)
    }

    const limited = await getChangesSince(db, 0, 3)
    expect(limited).toHaveLength(3)
  })

  it('getCurrentWatermark returns 0 before any inserts', async () => {
    const wm = await getCurrentWatermark(db)
    expect(wm).toBe(0)
  })

  it('getCurrentWatermark advances', async () => {
    // first insert consumes the initial sequence value
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('x', 1)`)
    const before = await getCurrentWatermark(db)
    expect(before).toBeGreaterThan(0)
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('y', 2)`)
    const after = await getCurrentWatermark(db)
    expect(after).toBeGreaterThan(before)
  })

  it('tracks multiple tables', async () => {
    await db.exec(`CREATE TABLE public.other (id SERIAL PRIMARY KEY, label TEXT)`)
    await installChangeTracking(db) // reinstall picks up new table

    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)
    await db.exec(`INSERT INTO public.other (label) VALUES ('b')`)

    const changes = await getChangesSince(db, 0)
    const tables = new Set(changes.map((c) => c.table_name))
    expect(tables).toContain('items')
    expect(tables).toContain('other')
  })

  it('handles rapid inserts (50 rows)', async () => {
    for (let i = 0; i < 50; i++) {
      await db.exec(`INSERT INTO public.items (name, value) VALUES ('r${i}', ${i})`)
    }

    const changes = await getChangesSince(db, 0)
    expect(changes).toHaveLength(50)

    for (let i = 1; i < changes.length; i++) {
      expect(changes[i].watermark).toBeGreaterThan(changes[i - 1].watermark)
    }
  })

  it('does not track internal _zero_ tables', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('x', 1)`)

    const changes = await getChangesSince(db, 0)
    // only the items insert, not the _zero_changes insert that the trigger itself caused
    const internalChanges = changes.filter((c) => c.table_name.startsWith('_zero_'))
    expect(internalChanges).toHaveLength(0)
  })

  it('handles NULL column values', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('nulltest', NULL)`)

    const changes = await getChangesSince(db, 0)
    expect(changes[0].row_data).toMatchObject({ name: 'nulltest', value: null })
  })

  it('handles multi-row update', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1), ('b', 2), ('c', 3)`)
    await db.exec(`UPDATE public.items SET value = value * 10`)

    const changes = await getChangesSince(db, 0)
    const updates = changes.filter((c) => c.op === 'UPDATE')
    // the row-level trigger fires once per affected row
    expect(updates).toHaveLength(3)
  })

  it('preserves change ordering across mixed operations', async () => {
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)
    await db.exec(`UPDATE public.items SET value = 2 WHERE name = 'a'`)
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('b', 3)`)
    await db.exec(`DELETE FROM public.items WHERE name = 'a'`)

    const changes = await getChangesSince(db, 0)
    const ops = changes.map((c) => c.op)
    expect(ops).toEqual(['INSERT', 'UPDATE', 'INSERT', 'DELETE'])
  })

  it('tracks tables with special characters in names', async () => {
    // "" inside a quoted identifier is an escaped double quote: table my"table
    await db.exec(`CREATE TABLE public."my""table" (id SERIAL PRIMARY KEY, val TEXT)`)
    await installChangeTracking(db)

    await db.exec(`INSERT INTO public."my""table" (val) VALUES ('works')`)

    const changes = await getChangesSince(db, 0)
    const special = changes.filter((c) => c.table_name === 'my"table')
    expect(special).toHaveLength(1)
    expect(special[0].op).toBe('INSERT')
    expect(special[0].row_data).toMatchObject({ val: 'works' })
  })
})
@@ -0,0 +1,115 @@
1
+ import type { PGlite } from '@electric-sql/pglite'
2
+
3
/**
 * One row of the public._zero_changes log table, as returned by
 * getChangesSince().
 */
export interface ChangeRecord {
  id: number
  // monotonically increasing position drawn from the _zero_watermark sequence
  watermark: number
  table_name: string
  op: 'INSERT' | 'UPDATE' | 'DELETE'
  // new row image; null for DELETE (trigger only records old_data)
  row_data: Record<string, unknown> | null
  // previous row image; null for INSERT (trigger only records row_data)
  old_data: Record<string, unknown> | null
  changed_at: string
}
12
+
13
/**
 * Install trigger-based change tracking on the database:
 *  - public._zero_watermark: sequence supplying monotonically increasing positions
 *  - public._zero_changes: append-only log of row images per INSERT/UPDATE/DELETE
 *  - public._zero_replication_slots: bookkeeping table for emulated replication slots
 *  - public._zero_track_change(): row-level trigger function that writes the log
 *
 * All DDL uses IF NOT EXISTS / CREATE OR REPLACE, so this is safe to call
 * again (e.g. after creating new tables, so they get triggers too).
 */
export async function installChangeTracking(db: PGlite): Promise<void> {
  // create changes table and watermark sequence
  await db.exec(`
    CREATE SEQUENCE IF NOT EXISTS public._zero_watermark;

    CREATE TABLE IF NOT EXISTS public._zero_changes (
      id BIGSERIAL PRIMARY KEY,
      watermark BIGINT NOT NULL DEFAULT nextval('public._zero_watermark'),
      table_name TEXT NOT NULL,
      op TEXT NOT NULL,
      row_data JSONB,
      old_data JSONB,
      changed_at TIMESTAMPTZ DEFAULT NOW()
    );

    CREATE INDEX IF NOT EXISTS _zero_changes_watermark_idx ON public._zero_changes (watermark);

    CREATE TABLE IF NOT EXISTS public._zero_replication_slots (
      slot_name TEXT PRIMARY KEY,
      restart_lsn TEXT NOT NULL DEFAULT '0/1000000',
      confirmed_flush_lsn TEXT NOT NULL DEFAULT '0/1000000',
      wal_status TEXT NOT NULL DEFAULT 'reserved',
      plugin TEXT NOT NULL DEFAULT 'pgoutput',
      slot_type TEXT NOT NULL DEFAULT 'logical',
      active BOOLEAN NOT NULL DEFAULT false,
      active_pid INTEGER DEFAULT NULL,
      created_at TIMESTAMPTZ DEFAULT NOW()
    );
  `)

  // create trigger function: records JSONB row image(s) for each DML operation
  await db.exec(`
    CREATE OR REPLACE FUNCTION public._zero_track_change() RETURNS TRIGGER AS $$
    BEGIN
      IF TG_OP = 'DELETE' THEN
        INSERT INTO public._zero_changes (table_name, op, old_data)
        VALUES (TG_TABLE_NAME, 'DELETE', row_to_json(OLD)::jsonb);
        RETURN OLD;
      ELSIF TG_OP = 'UPDATE' THEN
        INSERT INTO public._zero_changes (table_name, op, row_data, old_data)
        VALUES (TG_TABLE_NAME, 'UPDATE', row_to_json(NEW)::jsonb, row_to_json(OLD)::jsonb);
        RETURN NEW;
      ELSIF TG_OP = 'INSERT' THEN
        INSERT INTO public._zero_changes (table_name, op, row_data)
        VALUES (TG_TABLE_NAME, 'INSERT', row_to_json(NEW)::jsonb);
        RETURN NEW;
      END IF;
      RETURN NULL;
    END;
    $$ LANGUAGE plpgsql;
  `)

  // install triggers on all public tables
  await installTriggersOnAllTables(db)
}
68
+
69
+ function quoteIdent(name: string): string {
70
+ return '"' + name.replace(/"/g, '""') + '"'
71
+ }
72
+
73
+ async function installTriggersOnAllTables(db: PGlite): Promise<void> {
74
+ const tables = await db.query<{ tablename: string }>(
75
+ `SELECT tablename FROM pg_tables
76
+ WHERE schemaname = 'public'
77
+ AND tablename NOT IN ('migrations', '_zero_changes')
78
+ AND tablename NOT LIKE '_zero_%'`
79
+ )
80
+
81
+ let count = 0
82
+ for (const { tablename } of tables.rows) {
83
+ const quoted = quoteIdent(tablename)
84
+ await db.exec(`
85
+ DROP TRIGGER IF EXISTS _zero_change_trigger ON public.${quoted};
86
+ CREATE TRIGGER _zero_change_trigger
87
+ AFTER INSERT OR UPDATE OR DELETE ON public.${quoted}
88
+ FOR EACH ROW EXECUTE FUNCTION public._zero_track_change();
89
+ `)
90
+ count++
91
+ }
92
+
93
+ console.info(`[orez] installed change tracking triggers on ${count} tables`)
94
+ }
95
+
96
+ export async function getChangesSince(
97
+ db: PGlite,
98
+ watermark: number,
99
+ limit = 1000
100
+ ): Promise<ChangeRecord[]> {
101
+ const result = await db.query<ChangeRecord>(
102
+ 'SELECT * FROM public._zero_changes WHERE watermark > $1 ORDER BY watermark LIMIT $2',
103
+ [watermark, limit]
104
+ )
105
+ return result.rows
106
+ }
107
+
108
+ export async function getCurrentWatermark(db: PGlite): Promise<number> {
109
+ const result = await db.query<{ last_value: string; is_called: boolean }>(
110
+ 'SELECT last_value, is_called FROM public._zero_watermark'
111
+ )
112
+ const { last_value, is_called } = result.rows[0]
113
+ if (!is_called) return 0
114
+ return Number(last_value)
115
+ }
@@ -0,0 +1,331 @@
1
+ import { describe, it, expect, beforeEach, afterEach } from 'vitest'
2
+ import { PGlite } from '@electric-sql/pglite'
3
+ import {
4
+ handleReplicationQuery,
5
+ handleStartReplication,
6
+ type ReplicationWriter,
7
+ } from './handler'
8
+ import { installChangeTracking } from './change-tracker'
9
+
10
+ // parse wire protocol RowDescription+DataRow response into columns/values
11
+ function parseResponse(buf: Uint8Array): { columns: string[]; values: string[] } | null {
12
+ if (buf[0] !== 0x54) return null // RowDescription
13
+
14
+ const dv = new DataView(buf.buffer, buf.byteOffset)
15
+ let pos = 7
16
+ const numFields = dv.getInt16(5)
17
+ const columns: string[] = []
18
+ for (let i = 0; i < numFields; i++) {
19
+ let end = pos
20
+ while (buf[end] !== 0) end++
21
+ columns.push(new TextDecoder().decode(buf.subarray(pos, end)))
22
+ pos = end + 1 + 4 + 2 + 4 + 2 + 4 + 2
23
+ }
24
+
25
+ if (buf[pos] !== 0x44) return { columns, values: [] }
26
+ pos += 7
27
+ const values: string[] = []
28
+ for (let i = 0; i < numFields; i++) {
29
+ const len = dv.getInt32(pos)
30
+ pos += 4
31
+ values.push(new TextDecoder().decode(buf.subarray(pos, pos + len)))
32
+ pos += len
33
+ }
34
+
35
+ return { columns, values }
36
+ }
37
+
38
// Tests for the simple-query replication command handler. Responses are raw
// Postgres wire-protocol bytes, decoded with parseResponse above.
describe('handleReplicationQuery', () => {
  let db: PGlite

  beforeEach(async () => {
    db = new PGlite()
    await db.waitReady
    await installChangeTracking(db)
  })

  afterEach(async () => {
    await db.close()
  })

  it('IDENTIFY_SYSTEM returns system info', async () => {
    const res = await handleReplicationQuery('IDENTIFY_SYSTEM', db)
    expect(res).not.toBeNull()

    const parsed = parseResponse(res!)
    expect(parsed!.columns).toEqual(['systemid', 'timeline', 'xlogpos', 'dbname'])
    expect(parsed!.values[0]).toBe('1234567890')
    expect(parsed!.values[1]).toBe('1')
    expect(parsed!.values[3]).toBe('postgres')
    // xlogpos should be a valid LSN format
    expect(parsed!.values[2]).toMatch(/^[0-9A-F]+\/[0-9A-F]+$/)
  })

  it('CREATE_REPLICATION_SLOT persists and returns slot info', async () => {
    const res = await handleReplicationQuery(
      'CREATE_REPLICATION_SLOT "test_slot" TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT',
      db
    )

    const parsed = parseResponse(res!)
    expect(parsed!.values[0]).toBe('test_slot')
    expect(parsed!.values[3]).toBe('pgoutput')

    // slot must also be recorded in the bookkeeping table
    const slots = await db.query<{ slot_name: string }>(
      `SELECT slot_name FROM public._zero_replication_slots WHERE slot_name = 'test_slot'`
    )
    expect(slots.rows).toHaveLength(1)
  })

  it('DROP_REPLICATION_SLOT removes slot', async () => {
    await handleReplicationQuery('CREATE_REPLICATION_SLOT "drop_me" TEMPORARY LOGICAL pgoutput', db)
    await handleReplicationQuery('DROP_REPLICATION_SLOT "drop_me"', db)

    const slots = await db.query<{ count: string }>(
      `SELECT count(*) as count FROM public._zero_replication_slots WHERE slot_name = 'drop_me'`
    )
    expect(Number(slots.rows[0].count)).toBe(0)
  })

  it('wal_level query returns logical', async () => {
    const res = await handleReplicationQuery(
      "SELECT current_setting('wal_level'), version()",
      db
    )
    expect(res).not.toBeNull()
    const parsed = parseResponse(res!)
    expect(parsed!.values[0]).toBe('logical')
  })

  it('ALTER ROLE returns success', async () => {
    const res = await handleReplicationQuery('ALTER ROLE user REPLICATION', db)
    expect(res).not.toBeNull()
    // should contain CommandComplete
    expect(res![0]).toBe(0x43) // 'C'
  })

  it('returns null for unknown queries', async () => {
    // non-replication statements fall through to the regular query path
    expect(await handleReplicationQuery('SELECT 1', db)).toBeNull()
  })
})
111
+
112
// Tests for the streaming START_REPLICATION path. A fake writer collects the
// CopyData frames the handler emits; fixed sleeps give the internal poll loop
// time to pick up changes (hence the generous per-test timeouts).
describe('handleStartReplication', () => {
  let db: PGlite
  let replicationPromise: Promise<void>

  beforeEach(async () => {
    db = new PGlite()
    await db.waitReady
    await db.exec(`
      CREATE TABLE public.items (
        id SERIAL PRIMARY KEY,
        name TEXT NOT NULL,
        value INTEGER
      )
    `)
    await installChangeTracking(db)
  })

  afterEach(async () => {
    // closing db causes poll loop to exit with 'closed' error
    await db.close()
    // wait for the replication promise to settle
    await replicationPromise?.catch(() => {})
  })

  // collect every frame the handler writes; copy each buffer since the
  // handler may reuse it between writes
  function createWriter() {
    const written: Uint8Array[] = []
    const writer: ReplicationWriter = {
      write(data: Uint8Array) {
        written.push(new Uint8Array(data))
      },
    }
    return { written, writer }
  }

  // extract the pgoutput message type from a CopyData(XLogData(msg)) frame
  function payloadType(msg: Uint8Array): number | null {
    if (msg[0] !== 0x64) return null // CopyData
    if (msg[5] !== 0x77) return null // XLogData
    // CopyData header (5 bytes) + XLogData header (25 bytes) => offset 30
    return msg[30] // actual message type byte
  }

  it('sends CopyBothResponse first', async () => {
    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 200))

    expect(written.length).toBeGreaterThan(0)
    expect(written[0][0]).toBe(0x57) // 'W' CopyBothResponse
  })

  it('sends keepalives', async () => {
    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 700))

    // 'd' CopyData frames wrapping a 'k' keepalive message
    const keepalives = written.filter(
      (msg) => msg[0] === 0x64 && msg[5] === 0x6b
    )
    expect(keepalives.length).toBeGreaterThan(0)
  })

  it('streams INSERT as BEGIN+RELATION+INSERT+COMMIT', async () => {
    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 100))
    await db.exec(`INSERT INTO public.items (name, value) VALUES ('streamed', 123)`)
    await new Promise((r) => setTimeout(r, 700))

    const types = written.map(payloadType).filter((t): t is number => t !== null)

    expect(types).toContain(0x42) // BEGIN
    expect(types).toContain(0x52) // RELATION
    expect(types).toContain(0x49) // INSERT
    expect(types).toContain(0x43) // COMMIT

    // order: BEGIN before RELATION before INSERT before COMMIT
    const beginIdx = types.indexOf(0x42)
    const relIdx = types.indexOf(0x52)
    const insIdx = types.indexOf(0x49)
    const comIdx = types.indexOf(0x43)
    expect(beginIdx).toBeLessThan(relIdx)
    expect(relIdx).toBeLessThan(insIdx)
    expect(insIdx).toBeLessThan(comIdx)
  })

  it('streams UPDATE and DELETE operations', async () => {
    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 100))

    await db.exec(`INSERT INTO public.items (name, value) VALUES ('mut', 1)`)
    await new Promise((r) => setTimeout(r, 700))

    await db.exec(`UPDATE public.items SET value = 2 WHERE name = 'mut'`)
    await new Promise((r) => setTimeout(r, 700))

    await db.exec(`DELETE FROM public.items WHERE name = 'mut'`)
    await new Promise((r) => setTimeout(r, 700))

    const types = written.map(payloadType).filter((t): t is number => t !== null)
    expect(types).toContain(0x49) // INSERT
    expect(types).toContain(0x55) // UPDATE
    expect(types).toContain(0x44) // DELETE
  }, 10_000)

  it('only sends RELATION once per table', async () => {
    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 100))

    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)
    await new Promise((r) => setTimeout(r, 700))

    await db.exec(`INSERT INTO public.items (name, value) VALUES ('b', 2)`)
    await new Promise((r) => setTimeout(r, 700))

    // second insert to the same table must not re-send relation metadata
    const types = written.map(payloadType).filter((t): t is number => t !== null)
    const relationCount = types.filter((t) => t === 0x52).length
    expect(relationCount).toBe(1)
  }, 10_000)

  it('sends RELATION for each distinct table', async () => {
    await db.exec(`CREATE TABLE public.other (id SERIAL PRIMARY KEY, label TEXT)`)
    await installChangeTracking(db)

    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 100))

    await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1)`)
    await db.exec(`INSERT INTO public.other (label) VALUES ('b')`)
    await new Promise((r) => setTimeout(r, 700))

    const types = written.map(payloadType).filter((t): t is number => t !== null)
    const relationCount = types.filter((t) => t === 0x52).length
    expect(relationCount).toBe(2)
  })

  it('handles rapid sequential inserts', async () => {
    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 100))

    for (let i = 0; i < 20; i++) {
      await db.exec(`INSERT INTO public.items (name, value) VALUES ('r${i}', ${i})`)
    }

    // wait multiple poll cycles
    await new Promise((r) => setTimeout(r, 1500))

    const inserts = written.map(payloadType).filter((t) => t === 0x49)
    expect(inserts.length).toBe(20)
  }, 10_000)

  it('each transaction has matching BEGIN and COMMIT', async () => {
    const { written, writer } = createWriter()

    replicationPromise = handleStartReplication(
      'START_REPLICATION SLOT "s" LOGICAL 0/0',
      writer,
      db
    )

    await new Promise((r) => setTimeout(r, 100))

    await db.exec(`INSERT INTO public.items (name, value) VALUES ('tx1', 1)`)
    await new Promise((r) => setTimeout(r, 700))

    await db.exec(`INSERT INTO public.items (name, value) VALUES ('tx2', 2)`)
    await new Promise((r) => setTimeout(r, 700))

    const types = written.map(payloadType).filter((t): t is number => t !== null)
    const begins = types.filter((t) => t === 0x42).length
    const commits = types.filter((t) => t === 0x43).length
    expect(begins).toBe(commits)
    expect(begins).toBeGreaterThanOrEqual(1)
  }, 10_000)
})