@elench/testkit 0.1.116 → 0.1.118

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/README.md +31 -7
  2. package/lib/cli/assistant/context-pack.mjs +31 -11
  3. package/lib/cli/operations/db/schema/refresh/operation.mjs +6 -2
  4. package/lib/cli/renderers/db-schema/text.mjs +11 -1
  5. package/lib/config/database.mjs +9 -13
  6. package/lib/config-api/index.d.ts +1 -2
  7. package/lib/config-api/index.mjs +5 -0
  8. package/lib/database/fingerprint.mjs +2 -2
  9. package/lib/database/index.mjs +4 -4
  10. package/lib/database/schema-source.mjs +174 -27
  11. package/lib/database/source-refresh-lock.mjs +69 -0
  12. package/lib/database/source-url.mjs +110 -0
  13. package/lib/database/template-steps.mjs +16 -8
  14. package/lib/repo/state.mjs +164 -0
  15. package/lib/runner/metadata.mjs +11 -24
  16. package/lib/runner/template.mjs +0 -3
  17. package/node_modules/@elench/next-analysis/package.json +1 -1
  18. package/node_modules/@elench/testkit-bridge/package.json +2 -2
  19. package/node_modules/@elench/testkit-protocol/package.json +1 -1
  20. package/node_modules/@elench/ts-analysis/package.json +1 -1
  21. package/package.json +6 -5
  22. package/node_modules/es-toolkit/CHANGELOG.md +0 -801
  23. package/node_modules/es-toolkit/src/compat/_internal/Equals.d.ts +0 -1
  24. package/node_modules/es-toolkit/src/compat/_internal/IsWritable.d.ts +0 -3
  25. package/node_modules/es-toolkit/src/compat/_internal/MutableList.d.ts +0 -4
  26. package/node_modules/es-toolkit/src/compat/_internal/RejectReadonly.d.ts +0 -4
  27. package/node_modules/esprima/ChangeLog +0 -235
  28. package/packages/testkit-bridge/node_modules/@elench/testkit-protocol/dist/index.d.ts +0 -188
  29. package/packages/testkit-bridge/node_modules/@elench/testkit-protocol/dist/index.d.ts.map +0 -1
  30. package/packages/testkit-bridge/node_modules/@elench/testkit-protocol/dist/index.js +0 -293
  31. package/packages/testkit-bridge/node_modules/@elench/testkit-protocol/dist/index.js.map +0 -1
  32. package/packages/testkit-bridge/node_modules/@elench/testkit-protocol/package.json +0 -25
@@ -0,0 +1,69 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+
4
+ const DEFAULT_STALE_MS = 10 * 60 * 1000;
5
+ const DEFAULT_TIMEOUT_MS = 120 * 1000;
6
+ const DEFAULT_POLL_MS = 100;
7
+
8
/**
 * Runs `task` while holding an exclusive on-disk lock file, so concurrent
 * processes refresh the source schema one at a time.
 *
 * Acquisition uses open(..., "wx") (O_CREAT | O_EXCL): exactly one process
 * can create the lock file. Losers poll every `pollMs`, reap locks older
 * than `staleMs` (left behind by crashed holders), and give up after
 * `timeoutMs`.
 *
 * @param {string} lockPath - Lock file location; parent dirs are created.
 * @param {(info: {waited: boolean, requestedAt: number}) => Promise<any>} task
 * @param {{staleMs?: number, timeoutMs?: number, pollMs?: number}} [options]
 * @returns {Promise<any>} Whatever `task` resolves to.
 */
export async function withSourceSchemaRefreshLock(lockPath, task, options = {}) {
  fs.mkdirSync(path.dirname(lockPath), { recursive: true });
  // ?? instead of ||: an explicit 0 (e.g. timeoutMs: 0 = fail immediately)
  // must be honored rather than silently replaced by the default.
  const staleMs = options.staleMs ?? DEFAULT_STALE_MS;
  const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const pollMs = options.pollMs ?? DEFAULT_POLL_MS;
  const startedAt = Date.now();

  let waited = false;
  while (true) {
    let fd;
    try {
      fd = fs.openSync(lockPath, "wx");
    } catch (error) {
      // Only contention on the lock file itself is retried. Keeping this
      // catch around openSync alone fixes a bug in the previous version,
      // where an EEXIST-coded error thrown by `task` (or by the info write)
      // would release the lock and silently re-run the task.
      if (error?.code !== "EEXIST") throw error;
      waited = true;
      removeStaleLock(lockPath, staleMs);
      if (Date.now() - startedAt > timeoutMs) {
        throw new Error(`Timed out waiting for source schema refresh lock: ${lockPath}`);
      }
      await sleep(pollMs);
      continue;
    }

    // Lock acquired: record holder info for debugging, run the task, and
    // always release the lock — even when the info write or the task fails
    // (the previous version leaked the lock file on a failed write).
    try {
      try {
        fs.writeFileSync(
          fd,
          `${JSON.stringify(
            {
              pid: process.pid,
              createdAt: new Date().toISOString(),
            },
            null,
            2
          )}\n`
        );
      } finally {
        fs.closeSync(fd);
      }
      return await task({ waited, requestedAt: startedAt });
    } finally {
      fs.rmSync(lockPath, { force: true });
    }
  }
}
51
+
52
/**
 * Derives the refresh-lock path that guards a given schema cache file:
 * a fixed file name placed next to the cache file.
 */
export function getSourceSchemaRefreshLockPath(cachePath) {
  const cacheDir = path.dirname(cachePath);
  return path.join(cacheDir, "source-schema.refresh.lock");
}
55
+
56
// Deletes the lock file when its mtime is older than `staleMs`, so a lock
// left behind by a crashed process does not block waiters forever.
// A missing lock file is a no-op; any other stat failure propagates.
function removeStaleLock(lockPath, staleMs) {
  let mtimeMs;
  try {
    mtimeMs = fs.statSync(lockPath).mtimeMs;
  } catch (error) {
    if (error?.code === "ENOENT") return;
    throw error;
  }
  const ageMs = Date.now() - mtimeMs;
  if (ageMs > staleMs) {
    fs.rmSync(lockPath, { force: true });
  }
}
66
+
67
// Promise-based delay used while polling for the lock.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
@@ -0,0 +1,110 @@
1
+ const NEON_HOST_SUFFIX = ".neon.tech";
2
+ const NEON_POOLER_MARKER = "-pooler.";
3
+ const UNSUPPORTED_POOLER_PATTERN = /(^|[-.])(pgbouncer|pooler)([-.]|$)/i;
4
+
5
/**
 * Resolves the URL pg_dump should actually connect to. Neon pooler hosts are
 * rewritten to their direct endpoint (pg_dump should not run through a
 * pooler); other known pooler hosts are rejected outright; everything else
 * is passed through unchanged.
 *
 * @throws {Error} when the URL points at an unsupported connection pooler.
 */
export function resolveSourceSchemaDumpUrl(databaseUrl) {
  const source = parseDatabaseUrl(databaseUrl);
  const sourceKind = classifyDatabaseHost(source.hostname);

  if (sourceKind.kind === "unsupported-pooler") {
    throw new Error(
      [
        "Refusing to run pg_dump through a pooled source database URL.",
        `Host: ${source.hostname}`,
        "Configure a direct source database URL.",
      ].join("\n")
    );
  }

  if (sourceKind.kind === "neon-pooler") {
    // Clone the URL so the caller's original is never mutated, then strip
    // the "-pooler" marker to reach Neon's direct endpoint.
    const directUrl = new URL(source.url.href);
    directUrl.hostname = source.hostname.replace(NEON_POOLER_MARKER, ".");
    return buildResolution({
      original: source,
      resolved: directUrl,
      originalClassification: sourceKind,
      resolvedClassification: classifyDatabaseHost(directUrl.hostname),
      rewritten: true,
      notice: "Source schema URL uses Neon pooler; Testkit is using the direct endpoint for pg_dump.",
    });
  }

  return buildResolution({
    original: source,
    resolved: source.url,
    originalClassification: sourceKind,
    resolvedClassification: sourceKind,
    rewritten: false,
    notice: null,
  });
}
41
+
42
/**
 * Buckets a database hostname for pg_dump routing decisions:
 * "neon-pooler" / "neon-direct" for *.neon.tech hosts, "unsupported-pooler"
 * for other hosts that look like a pooler, "unknown-direct" otherwise.
 */
export function classifyDatabaseHost(hostname) {
  const host = String(hostname || "").toLowerCase();
  if (host.endsWith(NEON_HOST_SUFFIX)) {
    // Neon hosts embed "-pooler." in the pooled endpoint's name.
    return host.includes(NEON_POOLER_MARKER)
      ? { kind: "neon-pooler", host }
      : { kind: "neon-direct", host };
  }
  return UNSUPPORTED_POOLER_PATTERN.test(host)
    ? { kind: "unsupported-pooler", host }
    : { kind: "unknown-direct", host };
}
55
+
56
/**
 * Returns a loggable form of a database URL with any credentials replaced
 * by "REDACTED". Unparseable input yields a fixed placeholder instead of
 * throwing, so this is always safe to call from logging paths.
 */
export function redactDatabaseUrl(databaseUrl) {
  try {
    const parsed = new URL(databaseUrl);
    for (const field of ["username", "password"]) {
      if (parsed[field]) parsed[field] = "REDACTED";
    }
    return parsed.toString();
  } catch {
    return "[invalid database URL]";
  }
}
66
+
67
/**
 * Guards the pg_dump argument list used against the *source* database:
 * parallel dumps (-j / --jobs) open multiple connections and are refused.
 *
 * Also rejects the combined short form ("-j8"), which getopt accepts but
 * the previous check (exact "-j" only) let slip through.
 *
 * @param {Array<string>} args - pg_dump CLI arguments.
 * @throws {Error} when any argument requests parallel jobs.
 */
export function assertPgDumpArgsAreSourceSafe(args) {
  for (const rawArg of args) {
    const arg = String(rawArg);
    if (
      arg === "-j" ||
      /^-j\d+$/.test(arg) ||
      arg === "--jobs" ||
      arg.startsWith("--jobs=")
    ) {
      throw new Error("Source schema pg_dump must not use parallel jobs.");
    }
  }
}
75
+
76
// Parses a database URL and normalizes its hostname to lowercase (WHATWG
// URL treats non-special schemes like postgres: as opaque hosts and keeps
// their case). Any parse failure — including a missing host — is surfaced
// as a single uniform "Invalid source database URL" error.
function parseDatabaseUrl(databaseUrl) {
  let url;
  try {
    url = new URL(databaseUrl);
    if (!url.hostname) throw new Error("missing host");
  } catch (error) {
    throw new Error(`Invalid source database URL: ${error?.message || error}`);
  }
  return {
    url,
    hostname: url.hostname.toLowerCase(),
  };
}
88
+
89
// Assembles the uniform result object returned by resolveSourceSchemaDumpUrl:
// the URL pg_dump should use plus metadata describing how (and whether) the
// original URL was rewritten.
function buildResolution({
  original,
  resolved,
  originalClassification,
  resolvedClassification,
  rewritten,
  notice,
}) {
  const metadata = {
    originalHost: original.hostname,
    originalClassification: originalClassification.kind,
    resolvedHost: resolved.hostname.toLowerCase(),
    resolvedClassification: resolvedClassification.kind,
    rewritten,
  };
  return {
    originalUrl: original.url.href,
    dumpUrl: resolved.href,
    rewritten,
    notice,
    metadata,
  };
}
@@ -8,6 +8,7 @@ import {
8
8
  runConfiguredSteps,
9
9
  } from "../runner/template-steps.mjs";
10
10
  import { captureOutput } from "../runner/processes.mjs";
11
+ import { assertPgDumpArgsAreSourceSafe } from "./source-url.mjs";
11
12
 
12
13
  export async function runTemplateStage(config, stageName, databaseUrl, options = {}) {
13
14
  const steps = config.testkit.database?.template?.[stageName] || [];
@@ -69,14 +70,18 @@ export async function captureTemplateSnapshotText(config, databaseUrl, options =
69
70
 
70
71
  export async function dumpPostgresSchemaToFile(config, outputPath, databaseUrl, options = {}) {
71
72
  const env = {
72
- ...buildTemplateExecutionEnv(config, {}, process.env),
73
- ...buildPostgresConnectionEnv(databaseUrl),
73
+ ...buildTemplateExecutionEnv(config, {}, options.env || process.env),
74
+ ...buildPostgresConnectionEnv(databaseUrl, {
75
+ applicationName: options.pgApplicationName,
76
+ }),
74
77
  };
75
- const result = await runPgDumpCommand(config, "pg_dump", pgDumpArgs(), env, options);
78
+ const args = pgDumpArgs();
79
+ assertPgDumpArgsAreSourceSafe(args);
80
+ const result = await runPgDumpCommand(config, "pg_dump", args, env, options);
76
81
  if (result.exitCode !== 0 && isPgDumpServerVersionMismatch(result)) {
77
82
  const serverMajor = parsePgDumpServerMajor(result);
78
83
  if (serverMajor) {
79
- const fallback = await runDockerizedPgDump(config, serverMajor, env, options);
84
+ const fallback = await runDockerizedPgDump(config, serverMajor, env, args, options);
80
85
  if (fallback.exitCode === 0) {
81
86
  fs.writeFileSync(outputPath, fallback.stdout);
82
87
  sanitizeSnapshotFile(outputPath);
@@ -92,7 +97,7 @@ export async function dumpPostgresSchemaToFile(config, outputPath, databaseUrl,
92
97
  sanitizeSnapshotFile(outputPath);
93
98
  }
94
99
 
95
- function pgDumpArgs() {
100
+ export function pgDumpArgs() {
96
101
  return [
97
102
  "--schema-only",
98
103
  "--no-owner",
@@ -100,7 +105,7 @@ function pgDumpArgs() {
100
105
  ];
101
106
  }
102
107
 
103
- async function runDockerizedPgDump(config, serverMajor, env, options) {
108
+ async function runDockerizedPgDump(config, serverMajor, env, pgDumpCommandArgs, options) {
104
109
  const image = `${process.env.TESTKIT_PG_DUMP_IMAGE_PREFIX || "postgres"}:${serverMajor}`;
105
110
  return runPgDumpCommand(
106
111
  config,
@@ -122,9 +127,11 @@ async function runDockerizedPgDump(config, serverMajor, env, options) {
122
127
  "PGPASSWORD",
123
128
  "-e",
124
129
  "PGSSLMODE",
130
+ "-e",
131
+ "PGAPPNAME",
125
132
  image,
126
133
  "pg_dump",
127
- ...pgDumpArgs(),
134
+ ...pgDumpCommandArgs,
128
135
  ],
129
136
  env,
130
137
  options
@@ -169,7 +176,7 @@ function parsePgDumpServerMajor(result) {
169
176
  return match ? Number(match[1]) : null;
170
177
  }
171
178
 
172
- function buildPostgresConnectionEnv(databaseUrl) {
179
+ function buildPostgresConnectionEnv(databaseUrl, options = {}) {
173
180
  const parsed = new URL(databaseUrl);
174
181
  return compactObject({
175
182
  PGHOST: parsed.hostname,
@@ -178,6 +185,7 @@ function buildPostgresConnectionEnv(databaseUrl) {
178
185
  PGUSER: decodeURIComponent(parsed.username || ""),
179
186
  PGPASSWORD: decodeURIComponent(parsed.password || ""),
180
187
  PGSSLMODE: parsed.searchParams.get("sslmode") || undefined,
188
+ PGAPPNAME: options.applicationName || undefined,
181
189
  });
182
190
  }
183
191
 
@@ -0,0 +1,164 @@
1
+ import crypto from "crypto";
2
+ import fs from "fs";
3
+ import path from "path";
4
+ import { execFileSync } from "child_process";
5
+ import { parseGitHubRepoSlug } from "../regressions/github.mjs";
6
+
7
+ const IGNORED_DIRS = new Set([".git", ".testkit", "node_modules"]);
8
+
9
+ export function collectRepoState(productDir) {
10
+ const repoRoot = readGit(productDir, ["rev-parse", "--show-toplevel"]);
11
+ if (!repoRoot) {
12
+ const fingerprint = fingerprintDirectory(productDir);
13
+ return {
14
+ kind: "nogit",
15
+ repoRoot: null,
16
+ worktreePath: path.resolve(productDir),
17
+ branch: null,
18
+ detached: false,
19
+ commitSha: null,
20
+ remoteUrl: null,
21
+ repoSlug: null,
22
+ dirty: false,
23
+ dirtyFingerprint: null,
24
+ contentFingerprint: fingerprint,
25
+ cacheKey: `nogit/${fingerprint}`,
26
+ };
27
+ }
28
+
29
+ const commitSha = readGit(productDir, ["rev-parse", "--verify", "HEAD"]);
30
+ const branchName = readGit(productDir, ["rev-parse", "--abbrev-ref", "HEAD"]);
31
+ const remoteUrl = readGit(productDir, ["remote", "get-url", "origin"]);
32
+ const detached = branchName === "HEAD";
33
+ const dirtyFingerprint = fingerprintGitDirtyState(productDir);
34
+ const dirty = Boolean(dirtyFingerprint);
35
+ const baseCommit = commitSha || "unborn";
36
+
37
+ return {
38
+ kind: dirty ? "dirty" : "commit",
39
+ repoRoot,
40
+ worktreePath: path.resolve(repoRoot),
41
+ branch: detached ? null : branchName,
42
+ detached,
43
+ commitSha,
44
+ remoteUrl,
45
+ repoSlug: parseGitHubRepoSlug(remoteUrl),
46
+ dirty,
47
+ dirtyFingerprint,
48
+ contentFingerprint: null,
49
+ cacheKey: dirty
50
+ ? `dirty/${baseCommit}-${dirtyFingerprint}`
51
+ : `commits/${baseCommit}`,
52
+ };
53
+ }
54
+
55
/**
 * Projects a repo state onto the fixed field set persisted in run metadata,
 * dropping anything else the state object may carry. Returns null for a
 * missing state.
 */
export function summarizeRepoStateForMetadata(repoState) {
  if (!repoState) return null;
  const {
    kind,
    cacheKey,
    branch,
    detached,
    commitSha,
    dirty,
    dirtyFingerprint,
    contentFingerprint,
    repoRoot,
    worktreePath,
    remoteUrl,
    repoSlug,
  } = repoState;
  return {
    kind,
    cacheKey,
    branch,
    detached,
    commitSha,
    dirty,
    dirtyFingerprint,
    contentFingerprint,
    repoRoot,
    worktreePath,
    remoteUrl,
    repoSlug,
  };
}
+
73
+ function fingerprintGitDirtyState(productDir) {
74
+ const hash = crypto.createHash("sha256");
75
+ let hasChanges = false;
76
+
77
+ const trackedStatus = readGit(productDir, ["status", "--porcelain=v1", "-uno"]) || "";
78
+ if (trackedStatus.trim()) {
79
+ hasChanges = true;
80
+ hash.update("tracked-status\0");
81
+ hash.update(trackedStatus);
82
+ appendGitOutput(hash, productDir, ["diff", "--binary", "--no-ext-diff"]);
83
+ appendGitOutput(hash, productDir, ["diff", "--binary", "--cached", "--no-ext-diff"]);
84
+ }
85
+
86
+ const untracked = readGit(productDir, ["ls-files", "--others", "--exclude-standard", "-z"]) || "";
87
+ const untrackedFiles = untracked
88
+ .split("\0")
89
+ .filter(Boolean)
90
+ .filter((entry) => !hasIgnoredPathSegment(entry))
91
+ .sort();
92
+ if (untrackedFiles.length > 0) {
93
+ hasChanges = true;
94
+ hash.update("untracked\0");
95
+ for (const relativePath of untrackedFiles) {
96
+ const absPath = path.join(productDir, relativePath);
97
+ if (!fs.existsSync(absPath) || !fs.statSync(absPath).isFile()) continue;
98
+ hash.update(`file:${normalizePath(relativePath)}\0`);
99
+ const stat = fs.statSync(absPath);
100
+ hash.update(`${stat.size}\0`);
101
+ hash.update(fs.readFileSync(absPath));
102
+ }
103
+ }
104
+
105
+ return hasChanges ? hash.digest("hex").slice(0, 24) : null;
106
+ }
107
+
108
// Feeds one git command's output into the hash, NUL-delimited alongside the
// command itself so adjacent sections can never collide. A failed git call
// contributes an empty output segment rather than aborting the fingerprint.
function appendGitOutput(hash, cwd, args) {
  const output = readGit(cwd, args) || "";
  const segments = [args.join(" "), "\0", output, "\0"];
  for (const segment of segments) {
    hash.update(segment);
  }
}
115
+
116
// Hashes an entire directory tree into a short content fingerprint; used as
// the cache key when the product directory is not a git worktree.
function fingerprintDirectory(rootDir) {
  const digest = crypto.createHash("sha256");
  appendDirectoryToHash(digest, rootDir, rootDir);
  return digest.digest("hex").slice(0, 24);
}
121
+
122
// Recursively folds a directory tree into `hash`: directory names, file
// paths, sizes, mtimes, and file contents, skipping IGNORED_DIRS segments.
// Entries are visited in sorted order so the digest is deterministic; the
// exact update sequence is the contract — reordering changes fingerprints.
function appendDirectoryToHash(hash, rootDir, absPath) {
  if (!fs.existsSync(absPath)) {
    // Record the absence itself so a vanished path still changes the digest.
    hash.update(`missing:${normalizePath(path.relative(rootDir, absPath))}`);
    return;
  }
  const stat = fs.statSync(absPath);
  if (stat.isDirectory()) {
    const relative = path.relative(rootDir, absPath);
    // relative === "" for the root itself, which must never be skipped.
    if (relative && hasIgnoredPathSegment(relative)) return;
    hash.update(`dir:${normalizePath(relative)}`);
    for (const entry of fs.readdirSync(absPath).sort()) {
      if (IGNORED_DIRS.has(entry)) continue;
      appendDirectoryToHash(hash, rootDir, path.join(absPath, entry));
    }
    return;
  }
  // Non-regular entries (statSync follows symlinks first) are ignored.
  if (!stat.isFile()) return;
  const relative = normalizePath(path.relative(rootDir, absPath));
  // NOTE(review): including mtimeMs means a mere touch (content unchanged)
  // yields a new fingerprint — confirm that is intended for cache keys.
  hash.update(`file:${relative}:${stat.size}:${stat.mtimeMs}`);
  hash.update(fs.readFileSync(absPath));
}
143
+
144
// True when any segment of the (normalized, "/"-separated) relative path is
// one of the ignored directory names.
function hasIgnoredPathSegment(relativePath) {
  const segments = normalizePath(relativePath).split("/");
  return segments.some((segment) => IGNORED_DIRS.has(segment));
}
149
+
150
// Converts a platform path to forward-slash form so fingerprints and keys
// are identical across operating systems.
function normalizePath(value) {
  const segments = String(value).split(path.sep);
  return segments.join("/");
}
153
+
154
// Runs a git command and returns its trimmed stdout, or null when the
// command fails, git is unavailable, or the output is empty. stderr is
// discarded so callers can probe freely (e.g. outside a repo).
function readGit(cwd, args) {
  try {
    const stdout = execFileSync("git", args, {
      cwd,
      encoding: "utf8",
      stdio: ["ignore", "pipe", "ignore"],
    });
    const trimmed = stdout.trim();
    return trimmed === "" ? null : trimmed;
  } catch {
    return null;
  }
}
@@ -1,27 +1,22 @@
1
1
  import fs from "fs";
2
2
  import os from "os";
3
3
  import path from "path";
4
- import { execFileSync } from "child_process";
5
4
  import { fileURLToPath } from "url";
6
- import { parseGitHubRepoSlug } from "../regressions/github.mjs";
5
+ import { collectRepoState } from "../repo/state.mjs";
7
6
 
8
7
  export function collectGitMetadata(productDir) {
9
- const read = (args) => {
10
- try {
11
- return execaSyncCompat("git", args, { cwd: productDir }).trim() || null;
12
- } catch {
13
- return null;
14
- }
15
- };
16
-
17
- const remoteUrl = read(["remote", "get-url", "origin"]);
8
+ const state = collectRepoState(productDir);
18
9
 
19
10
  return {
20
- branch: read(["rev-parse", "--abbrev-ref", "HEAD"]),
21
- commitSha: read(["rev-parse", "HEAD"]),
22
- repoRoot: read(["rev-parse", "--show-toplevel"]),
23
- remoteUrl,
24
- repoSlug: parseGitHubRepoSlug(remoteUrl),
11
+ branch: state.branch || (state.detached ? "HEAD" : null),
12
+ commitSha: state.commitSha,
13
+ repoRoot: state.repoRoot,
14
+ remoteUrl: state.remoteUrl,
15
+ repoSlug: state.repoSlug,
16
+ detached: state.detached,
17
+ dirty: state.dirty,
18
+ dirtyFingerprint: state.dirtyFingerprint,
19
+ worktreePath: state.worktreePath,
25
20
  };
26
21
  }
27
22
 
@@ -50,11 +45,3 @@ export function safeUsername() {
50
45
  return process.env.USER || process.env.USERNAME || null;
51
46
  }
52
47
  }
53
-
54
- function execaSyncCompat(command, args, options) {
55
- return execFileSync(command, args, {
56
- cwd: options?.cwd,
57
- encoding: "utf8",
58
- stdio: ["ignore", "pipe", "pipe"],
59
- });
60
- }
@@ -202,9 +202,6 @@ function finalizeSourceSchema(sourceSchema, context) {
202
202
  return {
203
203
  ...sourceSchema,
204
204
  ...(typeof sourceSchema.env === "string" ? { env: finalizeString(sourceSchema.env, context) } : {}),
205
- ...(typeof sourceSchema.cachePath === "string"
206
- ? { cachePath: finalizeString(sourceSchema.cachePath, context) }
207
- : {}),
208
205
  };
209
206
  }
210
207
 
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/next-analysis",
3
- "version": "0.1.116",
3
+ "version": "0.1.118",
4
4
  "description": "SWC-backed Next.js source analysis primitives for Erench tools",
5
5
  "type": "module",
6
6
  "exports": {
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/testkit-bridge",
3
- "version": "0.1.116",
3
+ "version": "0.1.118",
4
4
  "description": "Browser bridge helpers for testkit",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -22,7 +22,7 @@
22
22
  "typecheck": "tsc -p tsconfig.json --noEmit"
23
23
  },
24
24
  "dependencies": {
25
- "@elench/testkit-protocol": "0.1.116"
25
+ "@elench/testkit-protocol": "0.1.118"
26
26
  },
27
27
  "private": false
28
28
  }
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/testkit-protocol",
3
- "version": "0.1.116",
3
+ "version": "0.1.118",
4
4
  "description": "Shared browser protocol for testkit bridge and extension consumers",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/ts-analysis",
3
- "version": "0.1.116",
3
+ "version": "0.1.118",
4
4
  "description": "TypeScript compiler-backed source analysis primitives for Erench tools",
5
5
  "type": "module",
6
6
  "exports": {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@elench/testkit",
3
- "version": "0.1.116",
3
+ "version": "0.1.118",
4
4
  "description": "Assistant-first CLI for running, inspecting, and debugging local testkit suites",
5
5
  "type": "module",
6
6
  "workspaces": [
@@ -65,6 +65,7 @@
65
65
  "test:database-version:compat": "node scripts/test-database-version-compat.mjs",
66
66
  "test:engine-version:compat": "node scripts/test-engine-version-compat.mjs",
67
67
  "test:live": "node scripts/live-sandbox/harness.mjs",
68
+ "test:live:github": "node scripts/test-live-github-fixture.mjs",
68
69
  "test:live:neon": "node scripts/test-database-version-compat.mjs --neon-only",
69
70
  "test:unit": "npm run build:assistant && npm run build:packages && npm run test:audit && vitest run --config vitest.unit.config.mjs",
70
71
  "test:integration": "npm run build:assistant && npm run build:packages && vitest run test/integration",
@@ -94,10 +95,10 @@
94
95
  },
95
96
  "dependencies": {
96
97
  "@babel/code-frame": "^7.29.0",
97
- "@elench/next-analysis": "0.1.116",
98
- "@elench/testkit-bridge": "0.1.116",
99
- "@elench/testkit-protocol": "0.1.116",
100
- "@elench/ts-analysis": "0.1.116",
98
+ "@elench/next-analysis": "0.1.118",
99
+ "@elench/testkit-bridge": "0.1.118",
100
+ "@elench/testkit-protocol": "0.1.118",
101
+ "@elench/ts-analysis": "0.1.118",
101
102
  "@oclif/core": "^4.10.6",
102
103
  "@playwright/test": "^1.52.0",
103
104
  "esbuild": "^0.25.11",