orez 0.0.2 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/LICENSE +21 -0
  2. package/README.md +47 -12
  3. package/dist/cli.d.ts +3 -0
  4. package/dist/cli.d.ts.map +1 -0
  5. package/dist/cli.js +120 -0
  6. package/dist/cli.js.map +1 -0
  7. package/dist/config.d.ts +0 -1
  8. package/dist/config.d.ts.map +1 -1
  9. package/dist/config.js +1 -4
  10. package/dist/config.js.map +1 -1
  11. package/dist/index.d.ts +3 -3
  12. package/dist/index.d.ts.map +1 -1
  13. package/dist/index.js +7 -14
  14. package/dist/index.js.map +1 -1
  15. package/dist/pg-proxy.d.ts +1 -1
  16. package/dist/pg-proxy.d.ts.map +1 -1
  17. package/dist/pg-proxy.js +6 -15
  18. package/dist/pg-proxy.js.map +1 -1
  19. package/dist/pglite-manager.d.ts.map +1 -1
  20. package/dist/pglite-manager.js.map +1 -1
  21. package/dist/replication/handler.d.ts.map +1 -1
  22. package/dist/replication/handler.js +4 -2
  23. package/dist/replication/handler.js.map +1 -1
  24. package/dist/replication/pgoutput-encoder.d.ts.map +1 -1
  25. package/dist/replication/pgoutput-encoder.js.map +1 -1
  26. package/dist/s3-local.d.ts +6 -3
  27. package/dist/s3-local.d.ts.map +1 -1
  28. package/dist/s3-local.js +23 -20
  29. package/dist/s3-local.js.map +1 -1
  30. package/package.json +18 -2
  31. package/src/cli.test.ts +38 -0
  32. package/src/cli.ts +128 -0
  33. package/src/config.ts +4 -15
  34. package/src/index.ts +14 -41
  35. package/src/pg-proxy.ts +21 -73
  36. package/src/pglite-manager.ts +5 -16
  37. package/src/replication/change-tracker.test.ts +10 -3
  38. package/src/replication/handler.test.ts +8 -6
  39. package/src/replication/handler.ts +10 -4
  40. package/src/replication/pgoutput-encoder.test.ts +13 -2
  41. package/src/replication/pgoutput-encoder.ts +28 -6
  42. package/src/replication/tcp-replication.test.ts +33 -15
  43. package/src/replication/zero-compat.test.ts +78 -25
  44. package/src/s3-local.ts +102 -118
package/src/pg-proxy.ts CHANGED
@@ -12,13 +12,10 @@ import { createServer, type Server, type Socket } from 'node:net'
12
12
 
13
13
  import { fromNodeSocket } from 'pg-gateway/node'
14
14
 
15
- import type { PGlite } from '@electric-sql/pglite'
15
+ import { handleReplicationQuery, handleStartReplication } from './replication/handler'
16
16
 
17
17
  import type { ZeroLiteConfig } from './config'
18
- import {
19
- handleReplicationQuery,
20
- handleStartReplication,
21
- } from './replication/handler'
18
+ import type { PGlite } from '@electric-sql/pglite'
22
19
 
23
20
  // database name -> search_path mapping
24
21
  const DB_SCHEMA_MAP: Record<string, string> = {
@@ -47,9 +44,7 @@ const QUERY_REWRITES: Array<{ match: RegExp; replace: string }> = [
47
44
  ]
48
45
 
49
46
  // queries to intercept and return no-op success
50
- const NOOP_QUERY_PATTERNS = [
51
- /^\s*SET\s+TRANSACTION\s+SNAPSHOT\s+/i,
52
- ]
47
+ const NOOP_QUERY_PATTERNS = [/^\s*SET\s+TRANSACTION\s+SNAPSHOT\s+/i]
53
48
 
54
49
  /**
55
50
  * extract query text from a Parse message (0x50).
@@ -67,10 +62,7 @@ function extractParseQuery(data: Uint8Array): string | null {
67
62
  /**
68
63
  * rebuild a Parse message with a modified query string.
69
64
  */
70
- function rebuildParseMessage(
71
- data: Uint8Array,
72
- newQuery: string
73
- ): Uint8Array {
65
+ function rebuildParseMessage(data: Uint8Array, newQuery: string): Uint8Array {
74
66
  let offset = 5
75
67
  while (offset < data.length && data[offset] !== 0) offset++
76
68
  const nameEnd = offset + 1
@@ -84,8 +76,7 @@ function rebuildParseMessage(
84
76
  const encoder = new TextEncoder()
85
77
  const queryBytes = encoder.encode(newQuery)
86
78
 
87
- const totalLen =
88
- 4 + nameBytes.length + queryBytes.length + 1 + suffix.length
79
+ const totalLen = 4 + nameBytes.length + queryBytes.length + 1 + suffix.length
89
80
  const result = new Uint8Array(1 + totalLen)
90
81
  const dv = new DataView(result.buffer)
91
82
  result[0] = 0x50
@@ -120,15 +111,9 @@ function interceptQuery(data: Uint8Array): Uint8Array {
120
111
  const msgType = data[0]
121
112
 
122
113
  if (msgType === 0x51) {
123
- const view = new DataView(
124
- data.buffer,
125
- data.byteOffset,
126
- data.byteLength
127
- )
114
+ const view = new DataView(data.buffer, data.byteOffset, data.byteLength)
128
115
  const len = view.getInt32(1)
129
- let query = new TextDecoder()
130
- .decode(data.subarray(5, 1 + len - 1))
131
- .replace(/\0$/, '')
116
+ let query = new TextDecoder().decode(data.subarray(5, 1 + len - 1)).replace(/\0$/, '')
132
117
 
133
118
  let modified = false
134
119
  for (const rw of QUERY_REWRITES) {
@@ -171,15 +156,9 @@ function interceptQuery(data: Uint8Array): Uint8Array {
171
156
  function isNoopQuery(data: Uint8Array): boolean {
172
157
  let query: string | null = null
173
158
  if (data[0] === 0x51) {
174
- const view = new DataView(
175
- data.buffer,
176
- data.byteOffset,
177
- data.byteLength
178
- )
159
+ const view = new DataView(data.buffer, data.byteOffset, data.byteLength)
179
160
  const len = view.getInt32(1)
180
- query = new TextDecoder()
181
- .decode(data.subarray(5, 1 + len - 1))
182
- .replace(/\0$/, '')
161
+ query = new TextDecoder().decode(data.subarray(5, 1 + len - 1)).replace(/\0$/, '')
183
162
  } else if (data[0] === 0x50) {
184
163
  query = extractParseQuery(data)
185
164
  }
@@ -230,10 +209,7 @@ function stripReadyForQuery(data: Uint8Array): Uint8Array {
230
209
  while (offset < data.length) {
231
210
  const msgType = data[offset]
232
211
  if (offset + 5 > data.length) break
233
- const msgLen = new DataView(
234
- data.buffer,
235
- data.byteOffset + offset + 1
236
- ).getInt32(0)
212
+ const msgLen = new DataView(data.buffer, data.byteOffset + offset + 1).getInt32(0)
237
213
  const totalLen = 1 + msgLen
238
214
 
239
215
  if (msgType !== 0x5a) {
@@ -286,10 +262,7 @@ const mutex = new Mutex()
286
262
  // module-level search_path tracking
287
263
  let currentSearchPath = 'public'
288
264
 
289
- export async function startPgProxy(
290
- db: PGlite,
291
- config: ZeroLiteConfig
292
- ): Promise<Server> {
265
+ export async function startPgProxy(db: PGlite, config: ZeroLiteConfig): Promise<Server> {
293
266
  const server = createServer(async (socket: Socket) => {
294
267
  let dbName = 'postgres'
295
268
  let isReplicationConnection = false
@@ -307,8 +280,7 @@ export async function startPgProxy(
307
280
  clearTextPassword: string
308
281
  }) {
309
282
  return (
310
- credentials.password ===
311
- credentials.clearTextPassword &&
283
+ credentials.password === credentials.clearTextPassword &&
312
284
  credentials.username === config.pgUser
313
285
  )
314
286
  },
@@ -332,25 +304,14 @@ export async function startPgProxy(
332
304
  // handle replication connections
333
305
  if (isReplicationConnection) {
334
306
  if (data[0] === 0x51) {
335
- const view = new DataView(
336
- data.buffer,
337
- data.byteOffset,
338
- data.byteLength
339
- )
307
+ const view = new DataView(data.buffer, data.byteOffset, data.byteLength)
340
308
  const len = view.getInt32(1)
341
309
  const query = new TextDecoder()
342
310
  .decode(data.subarray(5, 1 + len - 1))
343
311
  .replace(/\0$/, '')
344
- console.info(
345
- `[orez] repl query: ${query.slice(0, 200)}`
346
- )
312
+ console.info(`[orez] repl query: ${query.slice(0, 200)}`)
347
313
  }
348
- return handleReplicationMessage(
349
- data,
350
- socket,
351
- db,
352
- connection
353
- )
314
+ return handleReplicationMessage(data, socket, db, connection)
354
315
  }
355
316
 
356
317
  // check for no-op queries
@@ -368,12 +329,9 @@ export async function startPgProxy(
368
329
  // regular query: set search_path based on database name, then forward
369
330
  await mutex.acquire()
370
331
  try {
371
- const searchPath =
372
- DB_SCHEMA_MAP[dbName] || 'public'
332
+ const searchPath = DB_SCHEMA_MAP[dbName] || 'public'
373
333
  if (currentSearchPath !== searchPath) {
374
- await db.exec(
375
- `SET search_path TO ${searchPath}`
376
- )
334
+ await db.exec(`SET search_path TO ${searchPath}`)
377
335
  currentSearchPath = searchPath
378
336
  }
379
337
  let result = await db.execProtocolRaw(data, {
@@ -398,9 +356,7 @@ export async function startPgProxy(
398
356
 
399
357
  return new Promise((resolve, reject) => {
400
358
  server.listen(config.pgPort, '127.0.0.1', () => {
401
- console.info(
402
- `[orez] pg proxy listening on port ${config.pgPort}`
403
- )
359
+ console.info(`[orez] pg proxy listening on port ${config.pgPort}`)
404
360
  resolve(server)
405
361
  })
406
362
  server.on('error', reject)
@@ -415,15 +371,9 @@ async function handleReplicationMessage(
415
371
  ): Promise<Uint8Array | undefined> {
416
372
  if (data[0] !== 0x51) return undefined
417
373
 
418
- const view = new DataView(
419
- data.buffer,
420
- data.byteOffset,
421
- data.byteLength
422
- )
374
+ const view = new DataView(data.buffer, data.byteOffset, data.byteLength)
423
375
  const len = view.getInt32(1)
424
- const query = new TextDecoder()
425
- .decode(data.subarray(5, 1 + len - 1))
426
- .replace(/\0$/, '')
376
+ const query = new TextDecoder().decode(data.subarray(5, 1 + len - 1)).replace(/\0$/, '')
427
377
  const upper = query.trim().toUpperCase()
428
378
 
429
379
  // check if this is a START_REPLICATION command
@@ -446,9 +396,7 @@ async function handleReplicationMessage(
446
396
  })
447
397
 
448
398
  handleStartReplication(query, writer, db).catch((err) => {
449
- console.info(
450
- `[orez] replication stream ended: ${err}`
451
- )
399
+ console.info(`[orez] replication stream ended: ${err}`)
452
400
  })
453
401
  return undefined
454
402
  }
@@ -5,9 +5,7 @@ import { PGlite } from '@electric-sql/pglite'
5
5
 
6
6
  import type { ZeroLiteConfig } from './config'
7
7
 
8
- export async function createPGliteInstance(
9
- config: ZeroLiteConfig
10
- ): Promise<PGlite> {
8
+ export async function createPGliteInstance(config: ZeroLiteConfig): Promise<PGlite> {
11
9
  const dataPath = resolve(config.dataDir, 'pgdata')
12
10
  mkdirSync(dataPath, { recursive: true })
13
11
 
@@ -22,8 +20,7 @@ export async function createPGliteInstance(
22
20
  await db.exec('CREATE SCHEMA IF NOT EXISTS zero_cdb')
23
21
 
24
22
  // create publication for zero-cache
25
- const pubName =
26
- process.env.ZERO_APP_PUBLICATIONS || 'zero_pub'
23
+ const pubName = process.env.ZERO_APP_PUBLICATIONS || 'zero_pub'
27
24
  const pubs = await db.query<{ count: string }>(
28
25
  `SELECT count(*) as count FROM pg_publication WHERE pubname = $1`,
29
26
  [pubName]
@@ -36,10 +33,7 @@ export async function createPGliteInstance(
36
33
  return db
37
34
  }
38
35
 
39
- export async function runMigrations(
40
- db: PGlite,
41
- config: ZeroLiteConfig
42
- ): Promise<void> {
36
+ export async function runMigrations(db: PGlite, config: ZeroLiteConfig): Promise<void> {
43
37
  const migrationsDir = resolve(config.migrationsDir)
44
38
  if (!existsSync(migrationsDir)) {
45
39
  console.info('[orez] no migrations directory found, skipping')
@@ -60,9 +54,7 @@ export async function runMigrations(
60
54
  let files: string[]
61
55
  if (existsSync(journalPath)) {
62
56
  const journal = JSON.parse(readFileSync(journalPath, 'utf-8'))
63
- files = journal.entries.map(
64
- (e: { tag: string }) => `${e.tag}.sql`
65
- )
57
+ files = journal.entries.map((e: { tag: string }) => `${e.tag}.sql`)
66
58
  } else {
67
59
  files = readdirSync(migrationsDir)
68
60
  .filter((f) => f.endsWith('.sql'))
@@ -94,10 +86,7 @@ export async function runMigrations(
94
86
  await db.exec(stmt)
95
87
  }
96
88
 
97
- await db.query(
98
- 'INSERT INTO public.migrations (name) VALUES ($1)',
99
- [name]
100
- )
89
+ await db.query('INSERT INTO public.migrations (name) VALUES ($1)', [name])
101
90
  console.info(`[orez] applied migration: ${name}`)
102
91
  }
103
92
 
@@ -1,6 +1,11 @@
1
- import { describe, it, expect, beforeEach, afterEach } from 'vitest'
2
1
  import { PGlite } from '@electric-sql/pglite'
3
- import { installChangeTracking, getChangesSince, getCurrentWatermark } from './change-tracker'
2
+ import { describe, it, expect, beforeEach, afterEach } from 'vitest'
3
+
4
+ import {
5
+ installChangeTracking,
6
+ getChangesSince,
7
+ getCurrentWatermark,
8
+ } from './change-tracker'
4
9
 
5
10
  describe('change-tracker', () => {
6
11
  let db: PGlite
@@ -145,7 +150,9 @@ describe('change-tracker', () => {
145
150
  })
146
151
 
147
152
  it('handles multi-row update', async () => {
148
- await db.exec(`INSERT INTO public.items (name, value) VALUES ('a', 1), ('b', 2), ('c', 3)`)
153
+ await db.exec(
154
+ `INSERT INTO public.items (name, value) VALUES ('a', 1), ('b', 2), ('c', 3)`
155
+ )
149
156
  await db.exec(`UPDATE public.items SET value = value * 10`)
150
157
 
151
158
  const changes = await getChangesSince(db, 0)
@@ -1,11 +1,12 @@
1
- import { describe, it, expect, beforeEach, afterEach } from 'vitest'
2
1
  import { PGlite } from '@electric-sql/pglite'
2
+ import { describe, it, expect, beforeEach, afterEach } from 'vitest'
3
+
4
+ import { installChangeTracking } from './change-tracker'
3
5
  import {
4
6
  handleReplicationQuery,
5
7
  handleStartReplication,
6
8
  type ReplicationWriter,
7
9
  } from './handler'
8
- import { installChangeTracking } from './change-tracker'
9
10
 
10
11
  // parse wire protocol RowDescription+DataRow response into columns/values
11
12
  function parseResponse(buf: Uint8Array): { columns: string[]; values: string[] } | null {
@@ -78,7 +79,10 @@ describe('handleReplicationQuery', () => {
78
79
  })
79
80
 
80
81
  it('DROP_REPLICATION_SLOT removes slot', async () => {
81
- await handleReplicationQuery('CREATE_REPLICATION_SLOT "drop_me" TEMPORARY LOGICAL pgoutput', db)
82
+ await handleReplicationQuery(
83
+ 'CREATE_REPLICATION_SLOT "drop_me" TEMPORARY LOGICAL pgoutput',
84
+ db
85
+ )
82
86
  await handleReplicationQuery('DROP_REPLICATION_SLOT "drop_me"', db)
83
87
 
84
88
  const slots = await db.query<{ count: string }>(
@@ -176,9 +180,7 @@ describe('handleStartReplication', () => {
176
180
 
177
181
  await new Promise((r) => setTimeout(r, 700))
178
182
 
179
- const keepalives = written.filter(
180
- (msg) => msg[0] === 0x64 && msg[5] === 0x6b
181
- )
183
+ const keepalives = written.filter((msg) => msg[0] === 0x64 && msg[5] === 0x6b)
182
184
  expect(keepalives.length).toBeGreaterThan(0)
183
185
  })
184
186
 
@@ -6,7 +6,7 @@
6
6
  * it's talking to a real postgres with logical replication.
7
7
  */
8
8
 
9
- import type { PGlite } from '@electric-sql/pglite'
9
+ import { getChangesSince, getCurrentWatermark, type ChangeRecord } from './change-tracker'
10
10
  import {
11
11
  encodeBegin,
12
12
  encodeCommit,
@@ -21,7 +21,8 @@ import {
21
21
  inferColumns,
22
22
  type ColumnInfo,
23
23
  } from './pgoutput-encoder'
24
- import { getChangesSince, getCurrentWatermark, type ChangeRecord } from './change-tracker'
24
+
25
+ import type { PGlite } from '@electric-sql/pglite'
25
26
 
26
27
  export interface ReplicationWriter {
27
28
  write(data: Uint8Array): void
@@ -176,7 +177,10 @@ function buildErrorResponse(message: string): Uint8Array {
176
177
  * handle a replication query. returns response bytes or null if not handled.
177
178
  * async because slot operations need to write to pglite.
178
179
  */
179
- export async function handleReplicationQuery(query: string, db: PGlite): Promise<Uint8Array | null> {
180
+ export async function handleReplicationQuery(
181
+ query: string,
182
+ db: PGlite
183
+ ): Promise<Uint8Array | null> {
180
184
  const trimmed = query.trim().replace(/;$/, '').trim()
181
185
  const upper = trimmed.toUpperCase()
182
186
 
@@ -212,7 +216,9 @@ export async function handleReplicationQuery(query: string, db: PGlite): Promise
212
216
  const match = trimmed.match(/DROP_REPLICATION_SLOT\s+"?(\w[^"\s]*)"?/i)
213
217
  const slotName = match?.[1]
214
218
  if (slotName) {
215
- await db.query(`DELETE FROM public._zero_replication_slots WHERE slot_name = $1`, [slotName])
219
+ await db.query(`DELETE FROM public._zero_replication_slots WHERE slot_name = $1`, [
220
+ slotName,
221
+ ])
216
222
  }
217
223
  return buildCommandComplete('DROP_REPLICATION_SLOT')
218
224
  }
@@ -1,4 +1,5 @@
1
1
  import { describe, it, expect } from 'vitest'
2
+
2
3
  import {
3
4
  encodeBegin,
4
5
  encodeCommit,
@@ -226,7 +227,12 @@ describe('pgoutput-encoder', () => {
226
227
  ]
227
228
 
228
229
  it('includes old tuple when provided', () => {
229
- const buf = encodeUpdate(16384, { id: '1', val: 'new' }, { id: '1', val: 'old' }, cols)
230
+ const buf = encodeUpdate(
231
+ 16384,
232
+ { id: '1', val: 'new' },
233
+ { id: '1', val: 'old' },
234
+ cols
235
+ )
230
236
 
231
237
  expect(buf[0]).toBe(0x55) // 'U'
232
238
  expect(r32(buf, 1)).toBe(16384)
@@ -241,7 +247,12 @@ describe('pgoutput-encoder', () => {
241
247
  })
242
248
 
243
249
  it('old tuple precedes new tuple', () => {
244
- const buf = encodeUpdate(16384, { id: '1', val: 'new' }, { id: '1', val: 'old' }, cols)
250
+ const buf = encodeUpdate(
251
+ 16384,
252
+ { id: '1', val: 'new' },
253
+ { id: '1', val: 'old' },
254
+ cols
255
+ )
245
256
 
246
257
  // 'O' at offset 5, then old tuple, then 'N', then new tuple
247
258
  expect(buf[5]).toBe(0x4f)
@@ -65,7 +65,12 @@ export function encodeBegin(lsn: bigint, timestamp: bigint, xid: number): Uint8A
65
65
  }
66
66
 
67
67
  // encode a COMMIT message
68
- export function encodeCommit(flags: number, lsn: bigint, endLsn: bigint, timestamp: bigint): Uint8Array {
68
+ export function encodeCommit(
69
+ flags: number,
70
+ lsn: bigint,
71
+ endLsn: bigint,
72
+ timestamp: bigint
73
+ ): Uint8Array {
69
74
  const buf = new Uint8Array(1 + 1 + 8 + 8 + 8)
70
75
  buf[0] = 0x43 // 'C'
71
76
  buf[1] = flags
@@ -95,7 +100,8 @@ export function encodeRelation(
95
100
  columnsSize += 1 + nb.length + 1 + 4 + 4 // flags + name + null + typeOid + typeMod
96
101
  }
97
102
 
98
- const total = 1 + 4 + schemaBytes.length + 1 + nameBytes.length + 1 + 1 + 2 + columnsSize
103
+ const total =
104
+ 1 + 4 + schemaBytes.length + 1 + nameBytes.length + 1 + 1 + 2 + columnsSize
99
105
  const buf = new Uint8Array(total)
100
106
  let pos = 0
101
107
 
@@ -126,7 +132,10 @@ export function encodeRelation(
126
132
  return buf
127
133
  }
128
134
 
129
- function encodeTupleData(row: Record<string, unknown>, columns: ColumnInfo[]): Uint8Array {
135
+ function encodeTupleData(
136
+ row: Record<string, unknown>,
137
+ columns: ColumnInfo[]
138
+ ): Uint8Array {
130
139
  const parts: Uint8Array[] = []
131
140
  let totalSize = 2 // ncolumns (int16)
132
141
 
@@ -165,7 +174,11 @@ function encodeTupleData(row: Record<string, unknown>, columns: ColumnInfo[]): U
165
174
  }
166
175
 
167
176
  // encode an INSERT message
168
- export function encodeInsert(tableOid: number, row: Record<string, unknown>, columns: ColumnInfo[]): Uint8Array {
177
+ export function encodeInsert(
178
+ tableOid: number,
179
+ row: Record<string, unknown>,
180
+ columns: ColumnInfo[]
181
+ ): Uint8Array {
169
182
  const tuple = encodeTupleData(row, columns)
170
183
  const buf = new Uint8Array(1 + 4 + 1 + tuple.length)
171
184
  buf[0] = 0x49 // 'I'
@@ -220,7 +233,12 @@ export function encodeDelete(
220
233
  }
221
234
 
222
235
  // wrap a pgoutput message in XLogData format
223
- export function wrapXLogData(walStart: bigint, walEnd: bigint, timestamp: bigint, data: Uint8Array): Uint8Array {
236
+ export function wrapXLogData(
237
+ walStart: bigint,
238
+ walEnd: bigint,
239
+ timestamp: bigint,
240
+ data: Uint8Array
241
+ ): Uint8Array {
224
242
  const buf = new Uint8Array(1 + 8 + 8 + 8 + data.length)
225
243
  buf[0] = 0x77 // 'w' XLogData
226
244
  writeInt64(buf, 1, walStart)
@@ -240,7 +258,11 @@ export function wrapCopyData(data: Uint8Array): Uint8Array {
240
258
  }
241
259
 
242
260
  // encode a primary keepalive message
243
- export function encodeKeepalive(walEnd: bigint, timestamp: bigint, replyRequested: boolean): Uint8Array {
261
+ export function encodeKeepalive(
262
+ walEnd: bigint,
263
+ timestamp: bigint,
264
+ replyRequested: boolean
265
+ ): Uint8Array {
244
266
  const inner = new Uint8Array(1 + 8 + 8 + 1)
245
267
  inner[0] = 0x6b // 'k' keepalive
246
268
  writeInt64(inner, 1, walEnd)
@@ -9,13 +9,16 @@
9
9
  * query routing) that unit tests on individual components miss.
10
10
  */
11
11
 
12
- import { describe, it, expect, beforeEach, afterEach } from 'vitest'
13
12
  import { createConnection, type Socket } from 'node:net'
13
+
14
14
  import { PGlite } from '@electric-sql/pglite'
15
- import type { Server, AddressInfo } from 'node:net'
15
+ import { describe, it, expect, beforeEach, afterEach } from 'vitest'
16
+
17
+ import { getConfig } from '../config'
16
18
  import { startPgProxy } from '../pg-proxy'
17
19
  import { installChangeTracking } from './change-tracker'
18
- import { getConfig } from '../config'
20
+
21
+ import type { Server, AddressInfo } from 'node:net'
19
22
 
20
23
  // --- pgoutput decoder (validates against pg protocol spec) ---
21
24
 
@@ -179,13 +182,22 @@ function decodePgOutput(data: Uint8Array): PgOutputMessage {
179
182
  pos += 4
180
183
  columns.push({ flags, name, typeOid, typeMod })
181
184
  }
182
- return { type: 'Relation', raw: data, tableOid, schema, tableName, replicaIdentity, columns }
185
+ return {
186
+ type: 'Relation',
187
+ raw: data,
188
+ tableOid,
189
+ schema,
190
+ tableName,
191
+ replicaIdentity,
192
+ columns,
193
+ }
183
194
  }
184
195
  case 0x49: {
185
196
  // Insert
186
197
  const tableOid = r32(data, 1)
187
198
  const marker = data[5] // should be 'N'
188
- if (marker !== 0x4e) throw new Error(`insert: expected 'N' marker, got 0x${marker.toString(16)}`)
199
+ if (marker !== 0x4e)
200
+ throw new Error(`insert: expected 'N' marker, got 0x${marker.toString(16)}`)
189
201
  const [tupleData] = decodeTupleData(data, 6)
190
202
  return { type: 'Insert', raw: data, tableOid, tupleData }
191
203
  }
@@ -211,7 +223,9 @@ function decodePgOutput(data: Uint8Array): PgOutputMessage {
211
223
  const tableOid = r32(data, 1)
212
224
  const marker = data[5]
213
225
  if (marker !== 0x4b && marker !== 0x4f)
214
- throw new Error(`delete: expected 'K' or 'O' marker, got 0x${marker.toString(16)}`)
226
+ throw new Error(
227
+ `delete: expected 'K' or 'O' marker, got 0x${marker.toString(16)}`
228
+ )
215
229
  const [keyTupleData] = decodeTupleData(data, 6)
216
230
  return { type: 'Delete', raw: data, tableOid, keyTupleData }
217
231
  }
@@ -542,7 +556,7 @@ describe('tcp replication', () => {
542
556
  )
543
557
 
544
558
  await replClient.startReplication(
545
- 'START_REPLICATION SLOT "stream_test" LOGICAL 0/0 (proto_version \'1\', publication_names \'zero_takeout\')'
559
+ "START_REPLICATION SLOT \"stream_test\" LOGICAL 0/0 (proto_version '1', publication_names 'zero_takeout')"
546
560
  )
547
561
 
548
562
  // insert data right away - the poll loop will pick it up once it starts
@@ -606,7 +620,7 @@ describe('tcp replication', () => {
606
620
  'CREATE_REPLICATION_SLOT "upd_test" TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT'
607
621
  )
608
622
  await replClient.startReplication(
609
- 'START_REPLICATION SLOT "upd_test" LOGICAL 0/0 (proto_version \'1\', publication_names \'zero_takeout\')'
623
+ "START_REPLICATION SLOT \"upd_test\" LOGICAL 0/0 (proto_version '1', publication_names 'zero_takeout')"
610
624
  )
611
625
 
612
626
  await replClient.collectStream(200) // skip CopyBothResponse
@@ -650,7 +664,7 @@ describe('tcp replication', () => {
650
664
  'CREATE_REPLICATION_SLOT "del_test" TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT'
651
665
  )
652
666
  await replClient.startReplication(
653
- 'START_REPLICATION SLOT "del_test" LOGICAL 0/0 (proto_version \'1\', publication_names \'zero_takeout\')'
667
+ "START_REPLICATION SLOT \"del_test\" LOGICAL 0/0 (proto_version '1', publication_names 'zero_takeout')"
654
668
  )
655
669
 
656
670
  await replClient.collectStream(200)
@@ -693,7 +707,7 @@ describe('tcp replication', () => {
693
707
  'CREATE_REPLICATION_SLOT "multi_test" TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT'
694
708
  )
695
709
  await replClient.startReplication(
696
- 'START_REPLICATION SLOT "multi_test" LOGICAL 0/0 (proto_version \'1\', publication_names \'zero_multi\')'
710
+ "START_REPLICATION SLOT \"multi_test\" LOGICAL 0/0 (proto_version '1', publication_names 'zero_multi')"
697
711
  )
698
712
 
699
713
  await replClient.collectStream(200)
@@ -701,7 +715,7 @@ describe('tcp replication', () => {
701
715
  await db.exec(`INSERT INTO public.items (name, value) VALUES ('t1', 1)`)
702
716
  await db.exec(`INSERT INTO public.other (label) VALUES ('t2')`)
703
717
 
704
- const stream = await replClient.collectStream(1200)
718
+ const stream = await replClient.collectStream(2500)
705
719
 
706
720
  const decoded: PgOutputMessage[] = []
707
721
  for (const msg of stream) {
@@ -731,7 +745,7 @@ describe('tcp replication', () => {
731
745
  'CREATE_REPLICATION_SLOT "rapid_test" TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT'
732
746
  )
733
747
  await replClient.startReplication(
734
- 'START_REPLICATION SLOT "rapid_test" LOGICAL 0/0 (proto_version \'1\', publication_names \'zero_takeout\')'
748
+ "START_REPLICATION SLOT \"rapid_test\" LOGICAL 0/0 (proto_version '1', publication_names 'zero_takeout')"
735
749
  )
736
750
 
737
751
  await replClient.collectStream(200)
@@ -770,7 +784,9 @@ describe('tcp replication', () => {
770
784
  await client.query(`INSERT INTO public.items (name, value) VALUES ('tcp_direct', 77)`)
771
785
 
772
786
  // select back
773
- const response = await client.query(`SELECT name, value FROM public.items WHERE name = 'tcp_direct'`)
787
+ const response = await client.query(
788
+ `SELECT name, value FROM public.items WHERE name = 'tcp_direct'`
789
+ )
774
790
  const dataRow = response.find((m) => m.type === 0x44) // DataRow
775
791
  expect(dataRow).toBeDefined()
776
792
 
@@ -790,7 +806,7 @@ describe('tcp replication', () => {
790
806
  'CREATE_REPLICATION_SLOT "concurrent_test" TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT'
791
807
  )
792
808
  await replClient.startReplication(
793
- 'START_REPLICATION SLOT "concurrent_test" LOGICAL 0/0 (proto_version \'1\', publication_names \'zero_takeout\')'
809
+ "START_REPLICATION SLOT \"concurrent_test\" LOGICAL 0/0 (proto_version '1', publication_names 'zero_takeout')"
794
810
  )
795
811
  await replClient.collectStream(200)
796
812
 
@@ -801,7 +817,9 @@ describe('tcp replication', () => {
801
817
  password: 'password',
802
818
  database: 'postgres',
803
819
  })
804
- await dataClient.query(`INSERT INTO public.items (name, value) VALUES ('concurrent', 123)`)
820
+ await dataClient.query(
821
+ `INSERT INTO public.items (name, value) VALUES ('concurrent', 123)`
822
+ )
805
823
 
806
824
  // replication stream should pick up the change
807
825
  const stream = await replClient.collectStream(1200)