@elench/testkit 0.1.115 → 0.1.117

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/README.md +42 -8
  2. package/lib/cli/args.mjs +3 -3
  3. package/lib/cli/command-flags.mjs +4 -0
  4. package/lib/cli/commands/db/schema/refresh.mjs +21 -0
  5. package/lib/cli/commands/db/schema/verify.mjs +27 -0
  6. package/lib/cli/entrypoint.mjs +1 -0
  7. package/lib/cli/operations/db/schema/refresh/operation.mjs +58 -0
  8. package/lib/cli/operations/db/{snapshot/capture → schema/verify}/operation.mjs +6 -27
  9. package/lib/cli/operations/run/operation.mjs +1 -0
  10. package/lib/cli/renderers/db-schema/text.mjs +14 -0
  11. package/lib/config/database.mjs +64 -0
  12. package/lib/config-api/index.d.ts +16 -1
  13. package/lib/config-api/index.mjs +31 -16
  14. package/lib/database/fingerprint.mjs +2 -0
  15. package/lib/database/index.mjs +142 -104
  16. package/lib/database/schema-source.mjs +349 -0
  17. package/lib/database/source-refresh-lock.mjs +69 -0
  18. package/lib/database/source-url.mjs +110 -0
  19. package/lib/database/template-steps.mjs +166 -38
  20. package/lib/runner/orchestrator.mjs +4 -3
  21. package/lib/runner/template-steps.mjs +12 -1
  22. package/lib/runner/template.mjs +16 -1
  23. package/node_modules/@elench/next-analysis/package.json +1 -1
  24. package/node_modules/@elench/testkit-bridge/package.json +2 -2
  25. package/node_modules/@elench/testkit-protocol/package.json +1 -1
  26. package/node_modules/@elench/ts-analysis/package.json +1 -1
  27. package/package.json +8 -5
  28. package/packages/testkit-bridge/node_modules/@elench/testkit-protocol/package.json +1 -1
  29. package/lib/cli/commands/db/snapshot/capture.mjs +0 -26
  30. package/lib/cli/renderers/db-snapshot-capture/text.mjs +0 -3
import fs from "fs";
import path from "path";

// Lock tuning defaults: a lock file older than DEFAULT_STALE_MS is assumed
// abandoned; waiters give up after DEFAULT_TIMEOUT_MS, polling every
// DEFAULT_POLL_MS milliseconds.
const DEFAULT_STALE_MS = 10 * 60 * 1000;
const DEFAULT_TIMEOUT_MS = 120 * 1000;
const DEFAULT_POLL_MS = 100;

/**
 * Run `task` while holding an exclusive file-system lock at `lockPath`.
 *
 * The lock is acquired by creating the file with the "wx" flag, which fails
 * with EEXIST when another process already holds it. While waiting, locks
 * whose mtime is older than `staleMs` are removed so a crashed holder cannot
 * block waiters forever.
 *
 * @param {string} lockPath - Path of the lock file to create.
 * @param {(info: { waited: boolean, requestedAt: number }) => Promise<*>} task
 *   Invoked once the lock is held; `waited` is true when at least one
 *   acquisition attempt found the lock already taken.
 * @param {{ staleMs?: number, timeoutMs?: number, pollMs?: number }} [options]
 * @returns {Promise<*>} Whatever `task` resolves to.
 * @throws {Error} When the lock cannot be acquired within `timeoutMs`.
 */
export async function withSourceSchemaRefreshLock(lockPath, task, options = {}) {
  fs.mkdirSync(path.dirname(lockPath), { recursive: true });
  // Use ?? (not ||) so an explicitly passed 0 is honored instead of being
  // silently replaced by the default.
  const staleMs = options.staleMs ?? DEFAULT_STALE_MS;
  const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const pollMs = options.pollMs ?? DEFAULT_POLL_MS;
  const startedAt = Date.now();

  let waited = false;
  while (true) {
    try {
      // "wx" = create exclusively; throws EEXIST if the lock is already held.
      const fd = fs.openSync(lockPath, "wx");
      try {
        fs.writeFileSync(
          fd,
          `${JSON.stringify(
            {
              pid: process.pid,
              createdAt: new Date().toISOString(),
            },
            null,
            2
          )}\n`
        );
      } catch (writeError) {
        // If recording holder metadata fails, release the lock immediately
        // rather than leaving it behind until the stale sweep reclaims it.
        fs.closeSync(fd);
        fs.rmSync(lockPath, { force: true });
        throw writeError;
      }
      fs.closeSync(fd);

      try {
        return await task({ waited, requestedAt: startedAt });
      } finally {
        fs.rmSync(lockPath, { force: true });
      }
    } catch (error) {
      if (error?.code !== "EEXIST") throw error;
      waited = true;
      removeStaleLock(lockPath, staleMs);
      if (Date.now() - startedAt > timeoutMs) {
        throw new Error(`Timed out waiting for source schema refresh lock: ${lockPath}`);
      }
      await sleep(pollMs);
    }
  }
}

/**
 * Derive the lock-file path guarding refreshes of the cache at `cachePath`.
 * The lock lives beside the cache file so both share a directory lifecycle.
 * @param {string} cachePath
 * @returns {string}
 */
export function getSourceSchemaRefreshLockPath(cachePath) {
  return path.join(path.dirname(cachePath), "source-schema.refresh.lock");
}

// Delete the lock file when its mtime is older than `staleMs`. A vanished
// file (ENOENT) is fine — another waiter may have removed it first.
function removeStaleLock(lockPath, staleMs) {
  try {
    const stat = fs.statSync(lockPath);
    if (Date.now() - stat.mtimeMs > staleMs) {
      fs.rmSync(lockPath, { force: true });
    }
  } catch (error) {
    if (error?.code !== "ENOENT") throw error;
  }
}

// Promise-based delay used while polling for the lock.
function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
// Neon serverless Postgres hosts end in ".neon.tech"; pooled endpoints insert
// a "-pooler." segment into the hostname.
const NEON_HOST_SUFFIX = ".neon.tech";
const NEON_POOLER_MARKER = "-pooler.";
// Generic connection-pooler naming ("pgbouncer" or "pooler" as a host label)
// that we cannot rewrite to a direct endpoint automatically.
const UNSUPPORTED_POOLER_PATTERN = /(^|[-.])(pgbouncer|pooler)([-.]|$)/i;

/**
 * Resolve the database URL that pg_dump should use to read the source schema.
 *
 * Neon pooler endpoints are rewritten to their direct counterpart; other
 * recognizable pooler hosts are rejected outright since there is no safe
 * automatic rewrite for them.
 *
 * @param {string} databaseUrl - The configured source database URL.
 * @returns {{ originalUrl: string, dumpUrl: string, rewritten: boolean,
 *   notice: string | null, metadata: object }}
 * @throws {Error} When the URL is invalid or points at a non-Neon pooler.
 */
export function resolveSourceSchemaDumpUrl(databaseUrl) {
  const original = parseDatabaseUrl(databaseUrl);
  const originalClassification = classifyDatabaseHost(original.hostname);

  if (originalClassification.kind === "neon-pooler") {
    // Swap the first "-pooler." marker for "." to get the direct endpoint.
    const resolved = new URL(original.url.href);
    resolved.hostname = original.hostname.replace(NEON_POOLER_MARKER, ".");
    return buildResolution({
      original,
      resolved,
      originalClassification,
      resolvedClassification: classifyDatabaseHost(resolved.hostname),
      rewritten: true,
      notice: "Source schema URL uses Neon pooler; Testkit is using the direct endpoint for pg_dump.",
    });
  }

  if (originalClassification.kind === "unsupported-pooler") {
    throw new Error(
      [
        "Refusing to run pg_dump through a pooled source database URL.",
        `Host: ${original.hostname}`,
        "Configure a direct source database URL.",
      ].join("\n")
    );
  }

  // Direct (or unrecognized) host: use the URL as-is.
  return buildResolution({
    original,
    resolved: original.url,
    originalClassification,
    resolvedClassification: originalClassification,
    rewritten: false,
    notice: null,
  });
}

/**
 * Classify a database hostname for pooler detection.
 *
 * Neon hosts take precedence over the generic pooler pattern, so a Neon
 * direct endpoint is never misclassified as "unsupported-pooler".
 *
 * @param {string} hostname
 * @returns {{ kind: "neon-pooler" | "neon-direct" | "unsupported-pooler" | "unknown-direct", host: string }}
 */
export function classifyDatabaseHost(hostname) {
  const host = String(hostname || "").toLowerCase();
  if (host.endsWith(NEON_HOST_SUFFIX) && host.includes(NEON_POOLER_MARKER)) {
    return { kind: "neon-pooler", host };
  }
  if (host.endsWith(NEON_HOST_SUFFIX)) {
    return { kind: "neon-direct", host };
  }
  if (UNSUPPORTED_POOLER_PATTERN.test(host)) {
    return { kind: "unsupported-pooler", host };
  }
  return { kind: "unknown-direct", host };
}

/**
 * Return `databaseUrl` with its credentials replaced by "REDACTED", suitable
 * for logging. Unparseable input yields a fixed placeholder instead of
 * leaking the raw string.
 * @param {string} databaseUrl
 * @returns {string}
 */
export function redactDatabaseUrl(databaseUrl) {
  try {
    const parsed = new URL(databaseUrl);
    if (parsed.username) parsed.username = "REDACTED";
    if (parsed.password) parsed.password = "REDACTED";
    return parsed.toString();
  } catch {
    return "[invalid database URL]";
  }
}

/**
 * Reject pg_dump argument lists that request parallel jobs; source schema
 * dumps must never parallelize.
 * @param {Array<string>} args - Arguments destined for pg_dump.
 * @throws {Error} When a -j/--jobs flag (in any spelling) is present.
 */
export function assertPgDumpArgsAreSourceSafe(args) {
  for (const rawArg of args) {
    const arg = String(rawArg);
    if (arg === "-j" || arg === "--jobs" || arg.startsWith("--jobs=")) {
      throw new Error("Source schema pg_dump must not use parallel jobs.");
    }
  }
}

// Parse the database URL, lowercasing the hostname for classification.
// Invalid input is wrapped in a descriptive error that preserves the
// original failure via `cause`.
function parseDatabaseUrl(databaseUrl) {
  try {
    const url = new URL(databaseUrl);
    if (!url.hostname) throw new Error("missing host");
    return {
      url,
      hostname: url.hostname.toLowerCase(),
    };
  } catch (error) {
    throw new Error(`Invalid source database URL: ${error?.message || error}`, { cause: error });
  }
}

// Assemble the resolution result returned by resolveSourceSchemaDumpUrl,
// including diagnostic metadata about the original and resolved hosts.
function buildResolution({
  original,
  resolved,
  originalClassification,
  resolvedClassification,
  rewritten,
  notice,
}) {
  return {
    originalUrl: original.url.href,
    dumpUrl: resolved.href,
    rewritten,
    notice,
    metadata: {
      originalHost: original.hostname,
      originalClassification: originalClassification.kind,
      resolvedHost: resolved.hostname.toLowerCase(),
      resolvedClassification: resolvedClassification.kind,
      rewritten,
    },
  };
}
@@ -1,4 +1,5 @@
1
1
  import fs from "fs";
2
+ import os from "os";
2
3
  import path from "path";
3
4
  import { execa } from "execa";
4
5
  import { buildTemplateExecutionEnv } from "../runner/template.mjs";
@@ -7,6 +8,7 @@ import {
7
8
  runConfiguredSteps,
8
9
  } from "../runner/template-steps.mjs";
9
10
  import { captureOutput } from "../runner/processes.mjs";
11
+ import { assertPgDumpArgsAreSourceSafe } from "./source-url.mjs";
10
12
 
11
13
  export async function runTemplateStage(config, stageName, databaseUrl, options = {}) {
12
14
  const steps = config.testkit.database?.template?.[stageName] || [];
@@ -25,6 +27,25 @@ export async function runTemplateStage(config, stageName, databaseUrl, options =
25
27
  reporter: options.reporter || null,
26
28
  setupRegistry: options.setupRegistry || null,
27
29
  parentOperation: options.parentOperation || null,
30
+ afterStep: options.afterStep || null,
31
+ });
32
+ }
33
+
34
+ export async function runTemplateStep(config, stageName, step, stepIndex, databaseUrl, options = {}) {
35
+ const env = {
36
+ ...buildTemplateExecutionEnv(config, {}, process.env),
37
+ DATABASE_URL: databaseUrl,
38
+ };
39
+
40
+ await runConfiguredSteps({
41
+ config,
42
+ steps: [step],
43
+ env,
44
+ labelPrefix: `template:${stageName}`,
45
+ reporter: options.reporter || null,
46
+ setupRegistry: options.setupRegistry || null,
47
+ parentOperation: options.parentOperation || null,
48
+ startIndex: stepIndex,
28
49
  });
29
50
  }
30
51
 
@@ -36,45 +57,101 @@ export function collectTemplateInputs(productDir, template = {}) {
36
57
  });
37
58
  }
38
59
 
39
- export async function captureTemplateSnapshot(config, outputPath, databaseUrl, options = {}) {
40
- const templateDbUrl = databaseUrl;
41
- const absoluteOutputPath = path.resolve(config.productDir, outputPath);
42
- fs.mkdirSync(path.dirname(absoluteOutputPath), { recursive: true });
60
+ export async function captureTemplateSnapshotText(config, databaseUrl, options = {}) {
61
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "testkit-schema-snapshot-"));
62
+ const tempPath = path.join(tempDir, "schema.sql");
63
+ try {
64
+ await dumpPostgresSchemaToFile(config, tempPath, databaseUrl, options);
65
+ return fs.readFileSync(tempPath, "utf8");
66
+ } finally {
67
+ fs.rmSync(tempDir, { recursive: true, force: true });
68
+ }
69
+ }
43
70
 
44
- const child = execa(
45
- "pg_dump",
71
+ export async function dumpPostgresSchemaToFile(config, outputPath, databaseUrl, options = {}) {
72
+ const env = {
73
+ ...buildTemplateExecutionEnv(config, {}, options.env || process.env),
74
+ ...buildPostgresConnectionEnv(databaseUrl, {
75
+ applicationName: options.pgApplicationName,
76
+ }),
77
+ };
78
+ const args = pgDumpArgs();
79
+ assertPgDumpArgsAreSourceSafe(args);
80
+ const result = await runPgDumpCommand(config, "pg_dump", args, env, options);
81
+ if (result.exitCode !== 0 && isPgDumpServerVersionMismatch(result)) {
82
+ const serverMajor = parsePgDumpServerMajor(result);
83
+ if (serverMajor) {
84
+ const fallback = await runDockerizedPgDump(config, serverMajor, env, args, options);
85
+ if (fallback.exitCode === 0) {
86
+ fs.writeFileSync(outputPath, fallback.stdout);
87
+ sanitizeSnapshotFile(outputPath);
88
+ return;
89
+ }
90
+ throw new Error(fallback.shortMessage || fallback.stderr || fallback.stdout || "dockerized pg_dump failed");
91
+ }
92
+ }
93
+ if (result.exitCode !== 0) {
94
+ throw new Error(result.shortMessage || result.stderr || result.stdout || "pg_dump failed");
95
+ }
96
+ fs.writeFileSync(outputPath, result.stdout);
97
+ sanitizeSnapshotFile(outputPath);
98
+ }
99
+
100
+ export function pgDumpArgs() {
101
+ return [
102
+ "--schema-only",
103
+ "--no-owner",
104
+ "--no-privileges",
105
+ ];
106
+ }
107
+
108
+ async function runDockerizedPgDump(config, serverMajor, env, pgDumpCommandArgs, options) {
109
+ const image = `${process.env.TESTKIT_PG_DUMP_IMAGE_PREFIX || "postgres"}:${serverMajor}`;
110
+ return runPgDumpCommand(
111
+ config,
112
+ "docker",
46
113
  [
47
- "--schema-only",
48
- "--no-owner",
49
- "--no-privileges",
50
- "--file",
51
- absoluteOutputPath,
52
- templateDbUrl,
114
+ "run",
115
+ "--rm",
116
+ "--network",
117
+ "host",
118
+ "-e",
119
+ "PGHOST",
120
+ "-e",
121
+ "PGPORT",
122
+ "-e",
123
+ "PGDATABASE",
124
+ "-e",
125
+ "PGUSER",
126
+ "-e",
127
+ "PGPASSWORD",
128
+ "-e",
129
+ "PGSSLMODE",
130
+ "-e",
131
+ "PGAPPNAME",
132
+ image,
133
+ "pg_dump",
134
+ ...pgDumpCommandArgs,
53
135
  ],
54
- {
55
- cwd: config.productDir,
56
- env: {
57
- ...buildTemplateExecutionEnv(config, {}, process.env),
58
- DATABASE_URL: templateDbUrl,
59
- },
60
- stdout: "pipe",
61
- stderr: "pipe",
62
- reject: false,
63
- }
136
+ env,
137
+ options
64
138
  );
139
+ }
140
+
141
+ async function runPgDumpCommand(config, command, args, env, options = {}) {
142
+ const child = execa(command, args, {
143
+ cwd: config.productDir,
144
+ env,
145
+ stdout: "pipe",
146
+ stderr: "pipe",
147
+ reject: false,
148
+ });
65
149
  const liveWriter =
66
150
  options.reporter?.outputMode === "debug"
67
151
  ? (line) => options.reporter.writeDebugLine?.(line)
68
152
  : null;
69
153
  const logRecord = options.logRecord || null;
70
154
  const drains = [
71
- captureOutput(child.stdout, {
72
- livePrefix: `[${config.runtimeLabel || config.name}:${config.name}]`,
73
- liveWriter,
74
- onLine(line) {
75
- if (logRecord) logRecord.stream.write(`${new Date().toISOString()} [stdout] ${line}\n`);
76
- },
77
- }),
78
155
  captureOutput(child.stderr, {
79
156
  livePrefix: `[${config.runtimeLabel || config.name}:${config.name}]`,
80
157
  liveWriter,
@@ -85,29 +162,80 @@ export async function captureTemplateSnapshot(config, outputPath, databaseUrl, o
85
162
  ];
86
163
  const result = await child;
87
164
  await Promise.all(drains);
88
- if (result.exitCode !== 0) {
89
- throw new Error(result.shortMessage || result.stderr || result.stdout || "pg_dump failed");
90
- }
165
+ return result;
166
+ }
167
+
168
+ function isPgDumpServerVersionMismatch(result) {
169
+ const text = `${result.stderr || ""}\n${result.stdout || ""}`;
170
+ return text.includes("server version mismatch");
171
+ }
91
172
 
92
- sanitizeSnapshotFile(absoluteOutputPath);
93
- return absoluteOutputPath;
173
+ function parsePgDumpServerMajor(result) {
174
+ const text = `${result.stderr || ""}\n${result.stdout || ""}`;
175
+ const match = text.match(/server version:\s*([0-9]+)/i);
176
+ return match ? Number(match[1]) : null;
177
+ }
178
+
179
+ function buildPostgresConnectionEnv(databaseUrl, options = {}) {
180
+ const parsed = new URL(databaseUrl);
181
+ return compactObject({
182
+ PGHOST: parsed.hostname,
183
+ PGPORT: parsed.port || "5432",
184
+ PGDATABASE: decodeURIComponent(parsed.pathname.replace(/^\//, "")),
185
+ PGUSER: decodeURIComponent(parsed.username || ""),
186
+ PGPASSWORD: decodeURIComponent(parsed.password || ""),
187
+ PGSSLMODE: parsed.searchParams.get("sslmode") || undefined,
188
+ PGAPPNAME: options.applicationName || undefined,
189
+ });
190
+ }
191
+
192
+ function compactObject(value) {
193
+ return Object.fromEntries(
194
+ Object.entries(value).filter(([_key, entry]) => entry !== undefined && entry !== null && entry !== "")
195
+ );
94
196
  }
95
197
 
96
198
  export function sanitizeSnapshotFile(filePath) {
97
199
  const dump = fs.readFileSync(filePath, "utf8");
98
- const sanitized = dump
99
- .split("\n")
200
+ const sanitized = sanitizeSnapshotText(dump);
201
+
202
+ if (sanitized !== dump) {
203
+ fs.writeFileSync(filePath, sanitized);
204
+ }
205
+ }
206
+
207
+ export function sanitizeSnapshotText(dump) {
208
+ return removePublicSchemaInitdbBlock(String(dump).split("\n"))
100
209
  .filter((line) => {
101
210
  const trimmed = line.trim();
102
211
  return (
212
+ !trimmed.startsWith("-- Dumped from database version ") &&
213
+ !trimmed.startsWith("-- Dumped by pg_dump version ") &&
103
214
  trimmed !== "SET transaction_timeout = 0;" &&
104
215
  !trimmed.startsWith("\\restrict ") &&
105
216
  !trimmed.startsWith("\\unrestrict ")
106
217
  );
107
218
  })
108
219
  .join("\n");
220
+ }
109
221
 
110
- if (sanitized !== dump) {
111
- fs.writeFileSync(filePath, sanitized);
222
+ function removePublicSchemaInitdbBlock(lines) {
223
+ const normalized = [];
224
+ for (let index = 0; index < lines.length; index += 1) {
225
+ if (lines[index]?.trim() === "--" && lines[index + 1]?.startsWith("-- Name: public; Type: SCHEMA;")) {
226
+ let cursor = index + 1;
227
+ while (cursor < lines.length && !lines[cursor]?.includes("*not* creating schema")) {
228
+ cursor += 1;
229
+ }
230
+ if (cursor < lines.length) {
231
+ while (cursor + 1 < lines.length && lines[cursor + 1]?.trim() === "") {
232
+ cursor += 1;
233
+ }
234
+ index = cursor;
235
+ continue;
236
+ }
237
+ }
238
+ normalized.push(lines[index]);
112
239
  }
240
+ return normalized;
113
241
  }
@@ -146,9 +146,10 @@ export async function runAll(configs, typeValues, suiteSelectors, opts, allConfi
146
146
  runtimeOptions: {
147
147
  reporter,
148
148
  logRegistry,
149
- setupRegistry,
150
- },
151
- });
149
+ setupRegistry,
150
+ skipSchemaSourceVerify: opts.skipSchemaSourceVerify,
151
+ },
152
+ });
152
153
  const timingUpdates = [];
153
154
 
154
155
  try {
@@ -51,13 +51,16 @@ export async function runConfiguredSteps({
51
51
  reporter = null,
52
52
  setupRegistry = null,
53
53
  parentOperation = null,
54
+ startIndex = 0,
55
+ afterStep = null,
54
56
  }) {
55
57
  if (steps.length === 0) return;
56
58
  const resolvedToolchain = await resolveConfiguredToolchain(config);
57
59
  await announceResolvedToolchain(config, resolvedToolchain, reporter);
58
60
 
59
61
  for (const [index, step] of steps.entries()) {
60
- const label = `${labelPrefix}:${config.name}:${index + 1}`;
62
+ const stepNumber = startIndex + index + 1;
63
+ const label = `${labelPrefix}:${config.name}:${stepNumber}`;
61
64
  const stepOperation = setupRegistry?.start({
62
65
  config,
63
66
  stage: label,
@@ -89,6 +92,14 @@ export async function runConfiguredSteps({
89
92
  if (finished) reporter?.setupOperationFinished?.(finished);
90
93
  throw error;
91
94
  }
95
+ if (afterStep) {
96
+ await afterStep({
97
+ step,
98
+ index: startIndex + index,
99
+ stepNumber,
100
+ label,
101
+ });
102
+ }
92
103
  }
93
104
  }
94
105
 
@@ -1,5 +1,8 @@
1
1
  import path from "path";
2
- import { finalizeConfiguredInputs, finalizeConfiguredSteps } from "../shared/configured-steps.mjs";
2
+ import {
3
+ finalizeConfiguredInputs,
4
+ finalizeConfiguredSteps,
5
+ } from "../shared/configured-steps.mjs";
3
6
  import { readDatabaseInfo } from "./state-io.mjs";
4
7
 
5
8
  const PORT_STRIDE = 100;
@@ -135,6 +138,7 @@ export function resolveRuntimeConfig(
135
138
  const database = config.testkit.database
136
139
  ? {
137
140
  ...config.testkit.database,
141
+ sourceSchema: finalizeSourceSchema(config.testkit.database.sourceSchema, context),
138
142
  template: finalizeDatabaseTemplate(config.testkit.database.template, context),
139
143
  }
140
144
  : undefined;
@@ -193,6 +197,17 @@ function finalizeDatabaseTemplate(template, context) {
193
197
  };
194
198
  }
195
199
 
200
+ function finalizeSourceSchema(sourceSchema, context) {
201
+ if (!sourceSchema) return null;
202
+ return {
203
+ ...sourceSchema,
204
+ ...(typeof sourceSchema.env === "string" ? { env: finalizeString(sourceSchema.env, context) } : {}),
205
+ ...(typeof sourceSchema.cachePath === "string"
206
+ ? { cachePath: finalizeString(sourceSchema.cachePath, context) }
207
+ : {}),
208
+ };
209
+ }
210
+
196
211
  export function resolveServiceStateDir(runtimeDir, config) {
197
212
  return path.join(runtimeDir, "services", config.name);
198
213
  }
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/next-analysis",
3
- "version": "0.1.115",
3
+ "version": "0.1.117",
4
4
  "description": "SWC-backed Next.js source analysis primitives for Erench tools",
5
5
  "type": "module",
6
6
  "exports": {
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/testkit-bridge",
3
- "version": "0.1.115",
3
+ "version": "0.1.117",
4
4
  "description": "Browser bridge helpers for testkit",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -22,7 +22,7 @@
22
22
  "typecheck": "tsc -p tsconfig.json --noEmit"
23
23
  },
24
24
  "dependencies": {
25
- "@elench/testkit-protocol": "0.1.115"
25
+ "@elench/testkit-protocol": "0.1.117"
26
26
  },
27
27
  "private": false
28
28
  }
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/testkit-protocol",
3
- "version": "0.1.115",
3
+ "version": "0.1.117",
4
4
  "description": "Shared browser protocol for testkit bridge and extension consumers",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/ts-analysis",
3
- "version": "0.1.115",
3
+ "version": "0.1.117",
4
4
  "description": "TypeScript compiler-backed source analysis primitives for Erench tools",
5
5
  "type": "module",
6
6
  "exports": {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/testkit",
3
- "version": "0.1.115",
3
+ "version": "0.1.117",
4
4
  "description": "Assistant-first CLI for running, inspecting, and debugging local testkit suites",
5
5
  "type": "module",
6
6
  "workspaces": [
@@ -62,7 +62,10 @@
62
62
  "test": "npm run build:assistant && npm run build:packages && vitest run && npm run test:live",
63
63
  "test:audit": "node scripts/test-boundary-audit.mjs",
64
64
  "test:compat:node20": "volta run --node 20.19.5 --npm 10.8.2 npm test",
65
+ "test:database-version:compat": "node scripts/test-database-version-compat.mjs",
66
+ "test:engine-version:compat": "node scripts/test-engine-version-compat.mjs",
65
67
  "test:live": "node scripts/live-sandbox/harness.mjs",
68
+ "test:live:neon": "node scripts/test-database-version-compat.mjs --neon-only",
66
69
  "test:unit": "npm run build:assistant && npm run build:packages && npm run test:audit && vitest run --config vitest.unit.config.mjs",
67
70
  "test:integration": "npm run build:assistant && npm run build:packages && vitest run test/integration",
68
71
  "test:system": "npm run build:assistant && npm run build:packages && vitest run test/system"
@@ -91,10 +94,10 @@
91
94
  },
92
95
  "dependencies": {
93
96
  "@babel/code-frame": "^7.29.0",
94
- "@elench/next-analysis": "0.1.115",
95
- "@elench/testkit-bridge": "0.1.115",
96
- "@elench/testkit-protocol": "0.1.115",
97
- "@elench/ts-analysis": "0.1.115",
97
+ "@elench/next-analysis": "0.1.117",
98
+ "@elench/testkit-bridge": "0.1.117",
99
+ "@elench/testkit-protocol": "0.1.117",
100
+ "@elench/ts-analysis": "0.1.117",
98
101
  "@oclif/core": "^4.10.6",
99
102
  "@playwright/test": "^1.52.0",
100
103
  "esbuild": "^0.25.11",
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/testkit-protocol",
3
- "version": "0.1.114",
3
+ "version": "0.1.116",
4
4
  "description": "Shared browser protocol for testkit bridge and extension consumers",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -1,26 +0,0 @@
1
- import { Command, Flags } from "@oclif/core";
2
- import { sharedFlags } from "../../../command-flags.mjs";
3
- import { executeDatabaseSnapshotCaptureOperation } from "../../../operations/db/snapshot/capture/operation.mjs";
4
- import { renderDatabaseSnapshotCaptureResult } from "../../../renderers/db-snapshot-capture/text.mjs";
5
-
6
- export default class DbSnapshotCaptureCommand extends Command {
7
- static summary = "Capture a database schema snapshot";
8
-
9
- static enableJsonFlag = true;
10
-
11
- static flags = {
12
- ...sharedFlags,
13
- output: Flags.string({
14
- description: "Output path for the snapshot",
15
- }),
16
- };
17
-
18
- async run() {
19
- const { flags } = await this.parse(DbSnapshotCaptureCommand);
20
- const result = await executeDatabaseSnapshotCaptureOperation(flags);
21
- if (!this.jsonEnabled()) {
22
- for (const line of renderDatabaseSnapshotCaptureResult(result)) this.log(line);
23
- }
24
- return result;
25
- }
26
- }
@@ -1,3 +0,0 @@
1
- export function renderDatabaseSnapshotCaptureResult(result) {
2
- return [`Wrote ${result.outputLabel}`];
3
- }